diff --git a/.Jenkinsfile b/.Jenkinsfile
deleted file mode 100644
index da1157e9..00000000
--- a/.Jenkinsfile
+++ /dev/null
@@ -1,47 +0,0 @@
-pipeline {
-    agent {
-        docker {
-            image 'ubuntu_tester'
-            args '-u root:root -v ${JENKINS_HOME}/html/docs:/docs -v ${JENKINS_HOME}/html/_ci:/ci'
-        }
-    }
-    environment {
-        TRAVIS = 1
-        PJ_NAME = 'fastNLP'
-        POST_URL = 'https://open.feishu.cn/open-apis/bot/v2/hook/14719364-818d-4f88-9057-7c9f0eaaf6ae'
-    }
-    stages {
-        stage('Package Installation') {
-            steps {
-                sh 'python setup.py install'
-            }
-        }
-        stage('Parallel Stages') {
-            parallel {
-                stage('Document Building') {
-                    steps {
-                        sh 'cd docs && make prod'
-                        sh 'rm -rf /docs/${PJ_NAME}'
-                        sh 'mv docs/build/html /docs/${PJ_NAME}'
-                    }
-                }
-                stage('Package Testing') {
-                    steps {
-                        sh 'pip install fitlog'
-                        sh 'pytest ./tests --html=test_results.html --self-contained-html'
-                    }
-                }
-            }
-        }
-    }
-    post {
-        failure {
-            sh 'post 1'
-        }
-        success {
-            sh 'post 0'
-            sh 'post github'
-        }
-    }
-
-}
\ No newline at end of file
diff --git a/.coverage b/.coverage
deleted file mode 100644
index a6d89bc8..00000000
--- a/.coverage
+++ /dev/null
@@ -1 +0,0 @@
-!coverage.py: This is a private format, don't read it directly!
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index 17f7654f..00000000
--- a/.gitignore
+++ /dev/null
@@ -1,18 +0,0 @@
-.gitignore
-
-.DS_Store
-.ipynb_checkpoints
-*.pyc
-__pycache__
-*.swp
-.vscode/
-.idea/**
-
-caches
-
-# fitlog
-.fitlog
-logs/
-.fitconfig
-
-docs/build
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 4fc99810..00000000
--- a/.travis.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-language: python
-python:
-  - "3.6"
-
-env:
-  - TRAVIS=1
-
-# command to install dependencies
-install:
-  - pip install --quiet -r requirements.txt
-  - pip install --quiet fitlog
-  - pip install pytest>=3.6
-  - pip install pytest-cov
-# command to run tests
-script:
-#  - python -m spacy download en
-  - pytest --cov=fastNLP tests/
-
-after_success:
-  - bash <(curl -s https://codecov.io/bash)
-
-notifications:
-  webhooks:
-    urls:
-      - https://open.feishu.cn/officialapp/notify/55ba4b15d04608e875c122f11484a4e2fa807c42b9ca074509bea654d1b99ca6
-    on_success: always  # default: always
-    on_failure: always  # default: always
-    on_start: never     # default: never
-    on_cancel: always   # default: always
-    on_error: always    # default: always
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index 61279be1..00000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,7 +0,0 @@
-include requirements.txt
-include LICENSE
-include README.md
-prune tests/
-prune reproduction/
-prune fastNLP/api
-prune fastNLP/automl
\ No newline at end of file
diff --git a/README.md b/README.md
index a4b0a484..2fd27048 100644
--- a/README.md
+++ b/README.md
@@ -6,133 +6,4 @@
 ![Hex.pm](https://img.shields.io/hexpm/l/plug.svg)
 [![Documentation Status](https://readthedocs.org/projects/fastnlp/badge/?version=latest)](http://fastnlp.readthedocs.io/?badge=latest)
 
-fastNLP is a lightweight natural language processing (NLP) toolkit. Its goal is to make it fast to implement NLP tasks and to build complex models.
-
-fastNLP has the following features:
-
-- a unified, tabular data container that simplifies data preprocessing;
-- built-in Loaders and Pipes for many datasets, eliminating hand-written preprocessing code;
-- assorted convenient NLP utilities, e.g. embedding loading (including ELMo and BERT) and caching of intermediate data;
-- automatic download of some [datasets and pretrained models](https://docs.qq.com/sheet/DVnpkTnF6VW9UeXdh?c=A1A0A0);
-- a variety of neural network components and reproduced models (covering Chinese word segmentation, named entity recognition, syntactic parsing, text classification, text matching, coreference resolution, summarization, and more);
-- a Trainer with many built-in Callbacks for conveniences such as experiment logging and exception capture.
-
-## Installation
-
-fastNLP depends on the following packages:
-
-+ numpy>=1.14.2
-+ torch>=1.0.0
-+ tqdm>=4.28.1
-+ nltk>=3.4.1
-+ requests
-+ spacy
-+ prettytable>=0.7.2
-
-How torch is installed may depend on your operating system and CUDA version; see the [PyTorch website](https://pytorch.org/).
-Once the dependencies are installed, you can finish the installation from the command line:
-
-```shell
-pip install fastNLP
-python -m spacy download en
-```
-
-
-## fastNLP Tutorials
-Chinese [documentation](http://www.fastnlp.top/docs/fastNLP/) and [tutorials](http://www.fastnlp.top/docs/fastNLP/user/quickstart.html)
-
-### Quick Start
-
-- [Quick-1. Text Classification](http://www.fastnlp.top/docs/fastNLP/tutorials/%E6%96%87%E6%9C%AC%E5%88%86%E7%B1%BB.html)
-- [Quick-2. Sequence Labeling](http://www.fastnlp.top/docs/fastNLP/tutorials/%E5%BA%8F%E5%88%97%E6%A0%87%E6%B3%A8.html)
-
-### Detailed Tutorials
-
-- [1. Preprocessing text with DataSet](http://www.fastnlp.top/docs/fastNLP/tutorials/tutorial_1_data_preprocess.html)
-- [2. Converting between text and indices with Vocabulary](http://www.fastnlp.top/docs/fastNLP/tutorials/tutorial_2_vocabulary.html)
-- [3. Turning text into vectors with the Embedding module](http://www.fastnlp.top/docs/fastNLP/tutorials/tutorial_3_embedding.html)
-- [4. Loading and processing datasets with Loader and Pipe](http://www.fastnlp.top/docs/fastNLP/tutorials/tutorial_4_load_dataset.html)
-- [5. Building a text classifier I: fast training and testing with Trainer and Tester](http://www.fastnlp.top/docs/fastNLP/tutorials/tutorial_5_loss_optimizer.html)
-- [6. Building a text classifier II: a custom training loop with DataSetIter](http://www.fastnlp.top/docs/fastNLP/tutorials/tutorial_6_datasetiter.html)
-- [7. Evaluating your model quickly with Metric](http://www.fastnlp.top/docs/fastNLP/tutorials/tutorial_7_metrics.html)
-- [8. Assembling custom models quickly with Modules and Models](http://www.fastnlp.top/docs/fastNLP/tutorials/tutorial_8_modules_models.html)
-- [9. Customizing the training process with Callback](http://www.fastnlp.top/docs/fastNLP/tutorials/tutorial_9_callback.html)
-
-### Extended Tutorials
-
-- [Extend-1. The many uses of BertEmbedding](http://www.fastnlp.top/docs/fastNLP/tutorials/extend_1_bert_embedding.html)
-- [Extend-2. An introduction to distributed training](http://www.fastnlp.top/docs/fastNLP/tutorials/extend_2_dist.html)
-- [Extend-3. Using fitlog with fastNLP for research](http://www.fastnlp.top/docs/fastNLP/tutorials/extend_3_fitlog.html)
-
-
-## Built-in Components
-
-Most neural networks for NLP tasks can be viewed as a composition of word embeddings and two kinds of modules: encoders and decoders.
-
-Taking text classification as an example, the figure below shows the model flow of a text classifier built from a BiLSTM plus attention:
-
-
-![](./docs/source/figures/text_classification.png)
-
-The embeddings module ships several kinds of embeddings: static embeddings (GloVe, word2vec), contextual embeddings
-(ELMo, BERT), and character embeddings (CNN- or LSTM-based CharEmbedding).
-
-At the same time, the modules package provides many components of both module kinds, helping users quickly assemble the networks they need. The roles and common components of the two kinds are:
-
-| Type | Role | Examples |
-| :--- | :--- | :--- |
-| encoder | encode the input into a vector with representational power | Embedding, RNN, CNN, Transformer, ... |
-| decoder | decode a vector carrying some representation into the desired output form | MLP, CRF, ... |
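The encoder/decoder split in the table is easiest to see in code. Below is a minimal sketch that wires a randomly initialized static embedding, an LSTM encoder, and an MLP decoder into a toy classifier. It assumes the 0.x-era fastNLP APIs (`StaticEmbedding`, and the `LSTM`/`MLP` wrappers re-exported from `fastNLP.modules`); the exact signatures and return values are assumptions, not documented guarantees.

```python
import torch
from fastNLP import Vocabulary
from fastNLP.embeddings import StaticEmbedding
from fastNLP.modules import LSTM, MLP  # assumed re-exports of the encoder/decoder wrappers

vocab = Vocabulary()
vocab.add_word_lst("the movie was very good".split())

# model_dir_or_name=None is assumed to yield randomly initialized 50-dim vectors
embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=50)
encoder = LSTM(input_size=50, hidden_size=64, batch_first=True)  # encoder: sequence -> hidden states
decoder = MLP([64, 32, 2])                                       # decoder: hidden state -> 2 classes

words = torch.LongTensor([[vocab.to_index(w) for w in "the movie was very good".split()]])
x = embed(words)              # (1, 5, 50)
out, _ = encoder(x)           # (1, 5, 64); the wrapper is assumed to also return the (h, c) state
logits = decoder(out[:, -1])  # classify from the last hidden state: (1, 2)
```

The same three-layer pattern (embedding, encoder, decoder) underlies most of the bundled models, with a CRF decoder replacing the MLP for sequence labeling.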
-
-## Project Structure
-
-*(figure: the overall fastNLP workflow)*
-
-The rough workflow of fastNLP is shown in the figure above, and the project is organized as follows:
-
-| Package | Description |
-| :--- | :--- |
-| fastNLP | the open-source NLP library |
-| fastNLP.core | core functionality: data handling components, the Trainer, the Tester, etc. |
-| fastNLP.models | complete neural network models |
-| fastNLP.modules | the many components used to build neural network models |
-| fastNLP.embeddings | turning sequences of indices into sequences of vectors, including loading pretrained embeddings |
-| fastNLP.io | reading and writing: dataset loading and preprocessing, model I/O, automatic download of data and models |
-
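As a quick orientation, the layout in the table maps onto imports roughly as follows. This is an illustrative sketch assuming the 0.x public names (all of the listed modules appear in the deleted sources above), not an exhaustive inventory.

```python
from fastNLP import DataSet, Vocabulary, Trainer, Tester       # fastNLP.core, re-exported at top level
from fastNLP.models import CNNText                             # complete, ready-to-train models
from fastNLP.modules import LSTM, MLP, ConditionalRandomField  # building blocks for custom models
from fastNLP.embeddings import StaticEmbedding, BertEmbedding  # index sequences -> vector sequences
from fastNLP.io import CSVLoader, JsonLoader                   # data reading and preprocessing
```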
-
-*In memory of @FengZiYjun. May his soul rest in peace. We will miss you very very much!*
+dev0.8.0 is under development
\ No newline at end of file
diff --git a/codecov.yml b/codecov.yml
deleted file mode 100644
index f91e0445..00000000
--- a/codecov.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-ignore:
-- "reproduction"  # ignore these folders and all their contents
-- "setup.py"
-- "docs"
-- "tutorials"
\ No newline at end of file
diff --git a/docs/Makefile b/docs/Makefile
deleted file mode 100644
index 35306867..00000000
--- a/docs/Makefile
+++ /dev/null
@@ -1,33 +0,0 @@
-# Minimal makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS    =
-SPHINXAPIDOC  = sphinx-apidoc
-SPHINXBUILD   = sphinx-build
-SPHINXPROJ    = fastNLP
-SOURCEDIR     = source
-BUILDDIR      = build
-
-# Put it first so that "make" without argument is like "make help".
-help:
-	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
-
-apidoc:
-	$(SPHINXAPIDOC) -efM -o source ../$(SPHINXPROJ)
-
-server:
-	cd build/html && python -m http.server
-
-dev:
-	rm -f source/$(SPHINXPROJ).* source/modules.rst && rm -rf build && make apidoc && make html && make server
-
-prod:
-	make apidoc && make html
-
-.PHONY: help Makefile
-
-# Catch-all target: route all unknown targets to Sphinx using the new
-# "make mode" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).
-%: Makefile
-	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
diff --git a/docs/README.md b/docs/README.md
deleted file mode 100644
index 2bb6953c..00000000
--- a/docs/README.md
+++ /dev/null
@@ -1,40 +0,0 @@
-# Quick Start: Writing fastNLP Documentation
-
-This guide is for fastNLP documentation writers, who fall into two groups: collaborating developers and documentation maintainers.
-You most likely belong to the former and only need to understand part of the framework.
-
-## Collaborating Developers
-
-The fastNLP documentation is generated by [Sphinx](http://sphinx.pocoo.org/) from sources written in
-[reStructuredText](http://docutils.sourceforge.net/rst.html), and is built and hosted automatically by [Read the Docs](https://readthedocs.org/).
-A developer only needs to write documentation following the reStructuredText syntax and submit it as a
-[PR](https://help.github.com/en/articles/about-pull-requests) to contribute to the fastNLP docs.
-
-To build the docs locally and write longer passages, install Sphinx and the sphinx-rtd-theme theme:
-```bash
-fastNLP/docs> pip install sphinx
-fastNLP/docs> pip install sphinx-rtd-theme
-```
-Then run `make dev` in this directory. The command only supports Linux and macOS; expect output like the following:
-```bash
-fastNLP/docs> make dev
-rm -rf build/html && make html && make server
-Running Sphinx v1.5.6
-making output directory...
-......
-Build finished. The HTML pages are in build/html.
-cd build/html && python -m http.server
-Serving HTTP on 0.0.0.0 port 8000 (http://0.0.0.0:8000/) ...
-```
-Now open http://localhost:8000/ in your browser to view the docs. If you work on a remote server, the address is http://{server ip}:8000/,
-and port 8000 on the server must be open. If port 8000 is occupied on your machine or server, the build falls back to 8001, 8002, and so on.
-When you are done, stop the process with Ctrl+C.
-
-The reStructuredText syntax most often used in the fastNLP docs is listed [here](./source/user/example.rst) (view the page in Raw mode);
-reading it is a quick way to get started. Most of the fastNLP documentation lives in the code itself and is extracted by Sphinx.
-
-## Documentation Maintainers
-
-Documentation maintainers need to understand every command in the Makefile, and to know that the current documentation structure
-is the result of manual edits on top of what sphinx-apidoc extracts automatically.
-Maintainers should keep raising the project's level of automation and make sure collaborating developers do not break the overall structure of the documentation project.
\ No newline at end of file
diff --git a/docs/check_tools.py b/docs/check_tools.py
deleted file mode 100644
index 59d942fc..00000000
--- a/docs/check_tools.py
+++ /dev/null
@@ -1,191 +0,0 @@
-import inspect
-import os
-import sys
-
-
-def _colored_string(string: str, color: str or int) -> str:
-    """Return a string wrapped in ANSI codes so it prints in color in a terminal.
-    :param string: the text to display
-    :param color: the text color, as a name or an ANSI code
-    :return:
-    """
-    if isinstance(color, str):
-        color = {
-            "black": 30, "Black": 30, "BLACK": 30,
-            "red": 31, "Red": 31, "RED": 31,
-            "green": 32, "Green": 32, "GREEN": 32,
-            "yellow": 33, "Yellow": 33, "YELLOW": 33,
-            "blue": 34, "Blue": 34, "BLUE": 34,
-            "purple": 35, "Purple": 35, "PURPLE": 35,
-            "cyan": 36, "Cyan": 36, "CYAN": 36,
-            "white": 37, "White": 37, "WHITE": 37
-        }[color]
-    return "\033[%dm%s\033[0m" % (color, string)
-
-
-def gr(string, flag):
-    # green if flag is truthy, red otherwise
-    if flag:
-        return _colored_string(string, "green")
-    else:
-        return _colored_string(string, "red")
-
-
-def find_all_modules():
-    # walk the package, import every module, and collect the ones to document
-    modules = {}
-    children = {}
-    to_doc = set()
-    root = '../fastNLP'
-    for path, dirs, files in os.walk(root):
-        for file in files:
-            if file.endswith('.py'):
-                name = ".".join(path.split('/')[1:])
-                if file.split('.')[0] != "__init__":
-                    name = name + '.' + file.split('.')[0]
-                __import__(name)
-                m = sys.modules[name]
-                modules[name] = m
-                try:
-                    m.__all__
-                except:
-                    print(name, "__all__ missing")
-                    continue
-                if m.__doc__ is None:
-                    print(name, "__doc__ missing")
-                    continue
-                if "undocumented" not in m.__doc__:
-                    to_doc.add(name)
-    for module in to_doc:
-        t = ".".join(module.split('.')[:-1])
-        if t in to_doc:
-            if t not in children:
-                children[t] = set()
-            children[t].add(module)
-    for m in children:
-        children[m] = sorted(children[m])
-    return modules, to_doc, children
-
-
-def create_rst_file(modules, name, children):
-    m = modules[name]
-    with open("./source/" + name + ".rst", "w") as fout:
-        t = "=" * len(name)
-        fout.write(name + "\n")
-        fout.write(t + "\n")
-        fout.write("\n")
-        fout.write(".. automodule:: " + name + "\n")
-        if name != "fastNLP.core" and len(m.__all__) > 0:
-            fout.write("   :members: " + ", ".join(m.__all__) + "\n")
-            short = name[len("fastNLP."):]
-            if not (short.startswith('models') or short.startswith('modules') or short.startswith('embeddings')):
-                fout.write("   :inherited-members:\n")
-        fout.write("\n")
-        if name in children:
-            fout.write("Submodules\n----------\n\n.. toctree::\n   :maxdepth: 1\n\n")
-            for module in children[name]:
-                fout.write("   " + module + "\n")
-
-
-def check_file(m, name):
-    # report, for each public function/class, whether it has docs and tests
-    names = name.split('.')
-    test_name = "test." + ".".join(names[1:-1]) + ".test_" + names[-1]
-    try:
-        __import__(test_name)
-        tm = sys.modules[test_name]
-    except ModuleNotFoundError:
-        tm = None
-    tested = tm is not None
-    funcs = {}
-    classes = {}
-    for item, obj in inspect.getmembers(m):
-        if inspect.isclass(obj) and obj.__module__ == name and not obj.__name__.startswith('_'):
-            this = (obj.__doc__ is not None, tested and obj.__name__ in dir(tm), {})
-            for i in dir(obj):
-                func = getattr(obj, i)
-                if inspect.isfunction(func) and not i.startswith('_'):
-                    this[2][i] = (func.__doc__ is not None, False)
-            classes[obj.__name__] = this
-        if inspect.isfunction(obj) and obj.__module__ == name and not obj.__name__.startswith('_'):
-            this = (obj.__doc__ is not None, tested and obj.__name__ in dir(tm))  # docs
-            funcs[obj.__name__] = this
-    return funcs, classes
-
-
-def check_files(modules, out=None):
-    for name in sorted(modules.keys()):
-        print(name, file=out)
-        funcs, classes = check_file(modules[name], name)
-        if out is None:
-            for f in funcs:
-                print("%-30s \t %s \t %s" % (f, gr("docs", funcs[f][0]), gr("tests", funcs[f][1])))
-            for c in classes:
-                print("%-30s \t %s \t %s" % (c, gr("docs", classes[c][0]), gr("tests", classes[c][1])))
-                methods = classes[c][2]
-                for f in methods:
-                    print("  %-28s \t %s" % (f, gr("docs", methods[f][0])))
-        else:
-            for f in funcs:
-                if not funcs[f][0]:
-                    print("missing docs %s" % (f), file=out)
-                if not funcs[f][1]:
-                    print("missing tests %s" % (f), file=out)
-            for c in classes:
-                if not classes[c][0]:
-                    print("missing docs %s" % (c), file=out)
-                if not classes[c][1]:
-                    print("missing tests %s" % (c), file=out)
-                methods = classes[c][2]
-                for f in methods:
-                    if not methods[f][0]:
-                        print("missing docs %s" % (c + "." + f), file=out)
-        print(file=out)
-
-
-def main_check():
-    sys.path.append("..")
-    print(_colored_string('Getting modules...', "Blue"))
-    modules, to_doc, children = find_all_modules()
-    print(_colored_string('Done!', "Green"))
-    print(_colored_string('Creating rst files...', "Blue"))
-    for name in to_doc:
-        create_rst_file(modules, name, children)
-    print(_colored_string('Done!', "Green"))
-    print(_colored_string('Checking all files...', "Blue"))
-    check_files(modules, out=open("results.txt", "w"))
-    print(_colored_string('Done!', "Green"))
-
-
-def check_file_r(file_path):
-    # rewrite a file so every triple-quoted docstring becomes a raw string
-    with open(file_path) as fin:
-        content = fin.read()
-    index = -3
-    cuts = []
-    while index != -1:
-        index = content.find('"""', index + 3)
-        cuts.append(index)
-    cuts = cuts[:-1]
-    assert len(cuts) % 2 == 0
-    write_content = ""
-    last = 0
-    for i in range(len(cuts) // 2):
-        start, end = cuts[i + i], cuts[i + i + 1]
-        if content[start - 1] == "r":
-            write_content += content[last:end + 3]
-        else:
-            write_content += content[last:start] + "r"
-            write_content += content[start:end + 3]
-        last = end + 3
-    write_content += content[last:]
-    with open(file_path, "w") as fout:
-        fout.write(write_content)
-
-
-def add_r(base_path='../fastNLP'):
-    for path, _, files in os.walk(base_path):
-        for f in files:
-            if f.endswith(".py"):
-                check_file_r(os.path.abspath(os.path.join(path, f)))
-                # sys.exit(0)
-
-
-if __name__ == "__main__":
-    add_r()
diff --git a/docs/requirements.txt b/docs/requirements.txt
deleted file mode 100644
index cfa9c93a..00000000
--- a/docs/requirements.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-sphinx==3.2.1
-docutils==0.16
-sphinx-rtd-theme==0.5.0
-readthedocs-sphinx-search==0.1.0rc3
\ No newline at end of file
diff --git a/docs/source/_static/notebooks/extend_1_bert_embedding.ipynb b/docs/source/_static/notebooks/extend_1_bert_embedding.ipynb
deleted file mode 100644
index 2169c8b5..00000000
--- a/docs/source/_static/notebooks/extend_1_bert_embedding.ipynb
+++ /dev/null
@@ -1,260 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# The many uses of BertEmbedding\n",
-    "Since BERT was proposed in *BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding*, its outstanding performance has attracted enormous attention. Here we show how to use BERT for various tasks in fastNLP. For Chinese BERT, the weights of the model we use come from the Chinese BERT pretraining project.\n",
-    "\n",
-    "For convenience, fastNLP provides automatic download of pretrained embedding weights and datasets; see the Datasets page for what supports automatic download. You can also learn more from the tutorials on turning text into vectors with the Embedding module and on loading and processing datasets with Loader and Pipe.\n",
-    "\n",
-    "\n",
-    "Below we use BERT for text classification, Chinese named entity recognition, text matching, and Chinese question answering.\n",
-    "\n",
-    "## 1. Text classification with BERT\n",
-    "\n",
-    "Text classification means assigning a piece of text to a category, as in the sentiment classification example below:\n",
-    "\n",
-    "    *1, 商务大床房,房间很大,床有2M宽,整体感觉经济实惠不错!*\n",
-    "\n",
-    "Here we test on the Weibo sentiment classification dataset that fastNLP downloads automatically."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from fastNLP.io import WeiboSenti100kPipe\n",
-    "from fastNLP.embeddings import BertEmbedding\n",
-    "from fastNLP.models import BertForSequenceClassification\n",
-    "from fastNLP import Trainer, CrossEntropyLoss, AccuracyMetric, Adam\n",
-    "import torch\n",
-    "\n",
-    "data_bundle = WeiboSenti100kPipe().process_from_file()\n",
-    "data_bundle.rename_field('chars', 'words')\n",
-    "\n",
-    "# load the BertEmbedding\n",
-    "embed = BertEmbedding(data_bundle.get_vocab('words'), model_dir_or_name='cn-wwm', include_cls_sep=True)\n",
-    "\n",
-    "# build the model\n",
-    "model = BertForSequenceClassification(embed, len(data_bundle.get_vocab('target')))\n",
-    "\n",
-    "# train the model\n",
-    "device = 0 if torch.cuda.is_available() else 'cpu'\n",
-    "trainer = Trainer(data_bundle.get_dataset('train'), model,\n",
-    "                  optimizer=Adam(model_params=model.parameters(), lr=2e-5),\n",
-    "                  loss=CrossEntropyLoss(), device=device,\n",
-    "                  batch_size=8, dev_data=data_bundle.get_dataset('dev'),\n",
-    "                  metrics=AccuracyMetric(), n_epochs=2, print_every=1)\n",
-    "trainer.train()\n",
-    "\n",
-    "# evaluate on the test set\n",
-    "from fastNLP import Tester\n",
-    "\n",
-    "tester = Tester(data_bundle.get_dataset('test'), model, batch_size=128, metrics=AccuracyMetric())\n",
-    "tester.test()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## 2. Named entity recognition with BERT\n",
-    "\n",
-    "Named entity recognition labels the entities in a given sentence. Sequence labeling tasks generally use the CoNLL format, which separates the fields of a line with tabs and separates sentences with blank lines, as in the example below:\n",
-    "\n",
-    "```\n",
-    "    中 B-ORG\n",
-    "    共 I-ORG\n",
-    "    中 I-ORG\n",
-    "    央 I-ORG\n",
-    "    致 O\n",
-    "    中 B-ORG\n",
-    "    国 I-ORG\n",
-    "    致 I-ORG\n",
-    "    公 I-ORG\n",
-    "    党 I-ORG\n",
-    "    十 I-ORG\n",
-    "    一 I-ORG\n",
-    "    大 I-ORG\n",
-    "    的 O\n",
-    "    贺 O\n",
-    "    词 O\n",
-    "```\n",
-    "\n",
-    "For this part, see the tutorial on quickly implementing a sequence labeling model; a minimal module-level sketch is also given right after this notebook's diff.\n",
-    "\n",
-    "## 3. Text matching with BERT\n",
-    "\n",
-    "Text matching takes two sentences and predicts how they relate: for example, whether the first causes or contradicts the second, or whether the two mean the same thing. Here we use the Chinese XNLI data:"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from fastNLP.io import CNXNLIBertPipe\n",
-    "from fastNLP.embeddings import BertEmbedding\n",
-    "from fastNLP.models import BertForSentenceMatching\n",
-    "from fastNLP import Trainer, CrossEntropyLoss, AccuracyMetric, Adam\n",
-    "from fastNLP.core.optimizer import AdamW\n",
-    "from fastNLP.core.callback import WarmupCallback\n",
-    "from fastNLP import Tester\n",
-    "import torch\n",
-    "\n",
-    "data_bundle = CNXNLIBertPipe().process_from_file()\n",
-    "data_bundle.rename_field('chars', 'words')\n",
-    "print(data_bundle)\n",
-    "\n",
-    "# load the BertEmbedding\n",
-    "embed = BertEmbedding(data_bundle.get_vocab('words'), model_dir_or_name='cn-wwm', include_cls_sep=True)\n",
-    "\n",
-    "# build the model\n",
-    "model = BertForSentenceMatching(embed, len(data_bundle.get_vocab('target')))\n",
-    "\n",
-    "# train the model\n",
-    "callbacks = [WarmupCallback(warmup=0.1, schedule='linear'), ]\n",
-    "device = 0 if torch.cuda.is_available() else 'cpu'\n",
-    "trainer = Trainer(data_bundle.get_dataset('train'), model,\n",
-    "                  optimizer=AdamW(params=model.parameters(), lr=4e-5),\n",
-    "                  loss=CrossEntropyLoss(), device=device,\n",
-    "                  batch_size=8, dev_data=data_bundle.get_dataset('dev'),\n",
-    "                  metrics=AccuracyMetric(), n_epochs=5, print_every=1,\n",
-    "                  update_every=8, callbacks=callbacks)\n",
-    "trainer.train()\n",
-    "\n",
-    "tester = Tester(data_bundle.get_dataset('test'), model, batch_size=8, metrics=AccuracyMetric())\n",
-    "tester.test()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## 4. Chinese question answering with BERT\n",
-    "\n",
-    "Question answering takes a passage and a question, and the answer must be found within the passage. For example:\n",
-    "\n",
-    "```\n",
-    "\"context\": \"锣鼓经是大陆传统器乐及戏曲里面常用的打击乐记谱方法,以中文字的声音模拟敲击乐的声音,纪录打击乐的各种不同的演奏方法。常\n",
-    "用的节奏型称为「锣鼓点」。而锣鼓是戏曲节奏的支柱,除了加强演员身段动作的节奏感,也作为音乐的引子和尾声,提示音乐的板式和速度,以及\n",
-    "作为唱腔和念白的伴奏,令诗句的韵律更加抑扬顿锉,段落分明。锣鼓的运用有约定俗成的程式,依照角色行当的身份、性格、情绪以及环境,配合\n",
-    "相应的锣鼓点。锣鼓亦可以模仿大自然的音响效果,如雷电、波浪等等。戏曲锣鼓所运用的敲击乐器主要分为鼓、锣、钹和板四类型:鼓类包括有单\n",
-    "皮鼓(板鼓)、大鼓、大堂鼓(唐鼓)、小堂鼓、怀鼓、花盆鼓等;锣类有大锣、小锣(手锣)、钲锣、筛锣、马锣、镗锣、云锣;钹类有铙钹、大\n",
-    "钹、小钹、水钹、齐钹、镲钹、铰子、碰钟等;打拍子用的檀板、木鱼、梆子等。因为京剧的锣鼓通常由四位乐师负责,又称为四大件,领奏的师\n",
-    "傅称为:「鼓佬」,其职责有如西方乐队的指挥,负责控制速度以及利用各种手势提示乐师演奏不同的锣鼓点。粤剧吸收了部份京剧的锣鼓,但以木鱼\n",
-    "和沙的代替了京剧的板和鼓,作为打拍子的主要乐器。以下是京剧、昆剧和粤剧锣鼓中乐器对应的口诀用字:\",\n",
-    "\"question\": \"锣鼓经是什么?\",\n",
-    "\"answers\": [\n",
-    "    {\n",
-    "        \"text\": \"大陆传统器乐及戏曲里面常用的打击乐记谱方法\",\n",
-    "        \"answer_start\": 4\n",
-    "    },\n",
-    "    {\n",
-    "        \"text\": \"大陆传统器乐及戏曲里面常用的打击乐记谱方法\",\n",
-    "        \"answer_start\": 4\n",
-    "    },\n",
-    "    {\n",
-    "        \"text\": \"大陆传统器乐及戏曲里面常用的打击乐记谱方法\",\n",
-    "        \"answer_start\": 4\n",
-    "    }\n",
-    "]\n",
-    "```"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "You can train it with the code below (original code: [CMRC2018](https://github.com/ymcui/cmrc2018)):"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from fastNLP.embeddings import BertEmbedding\n",
-    "from fastNLP.models import BertForQuestionAnswering\n",
-    "from fastNLP.core.losses import CMRC2018Loss\n",
-    "from fastNLP.core.metrics import CMRC2018Metric\n",
-    "from fastNLP.io.pipe.qa import CMRC2018BertPipe\n",
-    "from fastNLP import Trainer, BucketSampler\n",
-    "from fastNLP import WarmupCallback, GradientClipCallback\n",
-    "from fastNLP.core.optimizer import AdamW\n",
-    "import torch\n",
-    "\n",
-    "data_bundle = CMRC2018BertPipe().process_from_file()\n",
-    "data_bundle.rename_field('chars', 'words')\n",
-    "\n",
-    "print(data_bundle)\n",
-    "\n",
-    "embed = BertEmbedding(data_bundle.get_vocab('words'), model_dir_or_name='cn', requires_grad=True, include_cls_sep=False, auto_truncate=True,\n",
-    "                      dropout=0.5, word_dropout=0.01)\n",
-    "model = BertForQuestionAnswering(embed)\n",
-    "loss = CMRC2018Loss()\n",
-    "metric = CMRC2018Metric()\n",
-    "\n",
-    "wm_callback = WarmupCallback(schedule='linear')\n",
-    "gc_callback = GradientClipCallback(clip_value=1, clip_type='norm')\n",
-    "callbacks = [wm_callback, gc_callback]\n",
-    "\n",
-    "optimizer = AdamW(model.parameters(), lr=5e-5)\n",
-    "\n",
-    "device = 0 if torch.cuda.is_available() else 'cpu'\n",
-    "trainer = Trainer(data_bundle.get_dataset('train'), model, loss=loss, optimizer=optimizer,\n",
-    "                  sampler=BucketSampler(seq_len_field_name='context_len'),\n",
-    "                  dev_data=data_bundle.get_dataset('dev'), metrics=metric,\n",
-    "                  callbacks=callbacks, device=device, batch_size=6, num_workers=2, n_epochs=2, print_every=1,\n",
-    "                  test_use_tqdm=False, update_every=10)\n",
-    "trainer.train(load_best_model=False)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Training results (essentially in line with what the paper reports):\n",
-    "\n",
-    "```\n",
-    "In Epoch:2/Step:1692, got best dev performance:\n",
-    "CMRC2018Metric: f1=85.61, em=66.08\n",
-    "```"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python Now",
-   "language": "python",
-   "name": "now"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.8.0"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
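The notebook's NER section defers to the sequence-labeling tutorial without showing code. As a stand-in, here is a minimal BiLSTM-CRF tagger sketched from the building blocks named in the deleted sources above (`fastNLP.modules.LSTM`, `ConditionalRandomField`); the constructor arguments, the `viterbi_decode` return value, and the padding convention are assumptions about the 0.x API, not verified behavior.

```python
import torch
from torch import nn
from fastNLP.modules import LSTM, ConditionalRandomField

class BiLSTMCRF(nn.Module):
    """Embedding -> BiLSTM encoder -> linear projection -> CRF decoder."""

    def __init__(self, embed: nn.Module, embed_dim: int, num_tags: int, hidden_size: int = 128):
        super().__init__()
        self.embed = embed  # any embedding taking (batch, seq_len) index tensors
        self.lstm = LSTM(embed_dim, hidden_size // 2, bidirectional=True, batch_first=True)
        self.fc = nn.Linear(hidden_size, num_tags)
        self.crf = ConditionalRandomField(num_tags)

    def forward(self, words, seq_len, target=None):
        mask = words.ne(0)  # assumes padding index 0
        feats, _ = self.lstm(self.embed(words), seq_len=seq_len)
        feats = self.fc(feats)
        if target is not None:
            # training: CRF negative log-likelihood, averaged over the batch
            return {'loss': self.crf(feats, target, mask).mean()}
        # inference: Viterbi decoding of the best tag paths (assumed (paths, scores) return)
        paths, _ = self.crf.viterbi_decode(feats, mask)
        return {'pred': paths}
```

The dict-style returns (`{'loss': ...}` in training, `{'pred': ...}` at inference) follow the convention that the Trainer and Tester in these tutorials rely on.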
"data_bundle.rename_field('chars', 'words')\n", - "\n", - "print(data_bundle)\n", - "\n", - "embed = BertEmbedding(data_bundle.get_vocab('words'), model_dir_or_name='cn', requires_grad=True, include_cls_sep=False, auto_truncate=True,\n", - " dropout=0.5, word_dropout=0.01)\n", - "model = BertForQuestionAnswering(embed)\n", - "loss = CMRC2018Loss()\n", - "metric = CMRC2018Metric()\n", - "\n", - "wm_callback = WarmupCallback(schedule='linear')\n", - "gc_callback = GradientClipCallback(clip_value=1, clip_type='norm')\n", - "callbacks = [wm_callback, gc_callback]\n", - "\n", - "optimizer = AdamW(model.parameters(), lr=5e-5)\n", - "\n", - "device = 0 if torch.cuda.is_available() else 'cpu' \n", - "trainer = Trainer(data_bundle.get_dataset('train'), model, loss=loss, optimizer=optimizer,\n", - " sampler=BucketSampler(seq_len_field_name='context_len'),\n", - " dev_data=data_bundle.get_dataset('dev'), metrics=metric,\n", - " callbacks=callbacks, device=device, batch_size=6, num_workers=2, n_epochs=2, print_every=1,\n", - " test_use_tqdm=False, update_every=10)\n", - "trainer.train(load_best_model=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "训练结果(和论文中报道的基本一致):\n", - "\n", - "```\n", - " In Epoch:2/Step:1692, got best dev performance:\n", - " CMRC2018Metric: f1=85.61, em=66.08\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/docs/source/_static/notebooks/tutorial_1_data_preprocess.ipynb b/docs/source/_static/notebooks/tutorial_1_data_preprocess.ipynb deleted file mode 100644 index a987e7f2..00000000 --- a/docs/source/_static/notebooks/tutorial_1_data_preprocess.ipynb +++ /dev/null @@ -1,292 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# fastNLP中的DataSet" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+------------------------------+---------------------------------------------+---------+\n", - "| raw_words | words | seq_len |\n", - "+------------------------------+---------------------------------------------+---------+\n", - "| This is the first instance . | ['this', 'is', 'the', 'first', 'instance... | 6 |\n", - "| Second instance . | ['Second', 'instance', '.'] | 3 |\n", - "| Third instance . 
| ['Third', 'instance', '.'] | 3 |\n", - "+------------------------------+---------------------------------------------+---------+\n" - ] - } - ], - "source": [ - "from fastNLP import DataSet\n", - "data = {'raw_words':[\"This is the first instance .\", \"Second instance .\", \"Third instance .\"],\n", - " 'words': [['this', 'is', 'the', 'first', 'instance', '.'], ['Second', 'instance', '.'], ['Third', 'instance', '.']],\n", - " 'seq_len': [6, 3, 3]}\n", - "dataset = DataSet(data)\n", - "# 传入的dict的每个key的value应该为具有相同长度的list\n", - "print(dataset)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## DataSet的构建" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "+----------------------------+---------------------------------------------+---------+\n", - "| raw_words | words | seq_len |\n", - "+----------------------------+---------------------------------------------+---------+\n", - "| This is the first instance | ['this', 'is', 'the', 'first', 'instance... | 6 |\n", - "+----------------------------+---------------------------------------------+---------+" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import DataSet\n", - "from fastNLP import Instance\n", - "dataset = DataSet()\n", - "instance = Instance(raw_words=\"This is the first instance\",\n", - " words=['this', 'is', 'the', 'first', 'instance', '.'],\n", - " seq_len=6)\n", - "dataset.append(instance)\n", - "dataset" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "+----------------------------+---------------------------------------------+---------+\n", - "| raw_words | words | seq_len |\n", - "+----------------------------+---------------------------------------------+---------+\n", - "| This is the first instance | ['this', 'is', 'the', 'first', 'instance... | 6 |\n", - "| Second instance . 
| ['Second', 'instance', '.'] | 3 |\n", - "+----------------------------+---------------------------------------------+---------+" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import DataSet\n", - "from fastNLP import Instance\n", - "dataset = DataSet([\n", - " Instance(raw_words=\"This is the first instance\",\n", - " words=['this', 'is', 'the', 'first', 'instance', '.'],\n", - " seq_len=6),\n", - " Instance(raw_words=\"Second instance .\",\n", - " words=['Second', 'instance', '.'],\n", - " seq_len=3)\n", - " ])\n", - "dataset" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## DataSet的删除" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "+----+---+\n", - "| a | c |\n", - "+----+---+\n", - "| -5 | 0 |\n", - "| -4 | 0 |\n", - "| -3 | 0 |\n", - "| -2 | 0 |\n", - "| -1 | 0 |\n", - "| 0 | 0 |\n", - "| 1 | 0 |\n", - "| 2 | 0 |\n", - "| 3 | 0 |\n", - "| 4 | 0 |\n", - "+----+---+" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import DataSet\n", - "dataset = DataSet({'a': range(-5, 5), 'c': [0]*10})\n", - "dataset" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "+---+\n", - "| c |\n", - "+---+\n", - "| 0 |\n", - "| 0 |\n", - "| 0 |\n", - "| 0 |\n", - "+---+" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 不改变dataset,生成一个删除了满足条件的instance的新 DataSet\n", - "dropped_dataset = dataset.drop(lambda ins:ins['a']<0, inplace=False)\n", - "# 在dataset中删除满足条件的instance\n", - "dataset.drop(lambda ins:ins['a']<0)\n", - "# 删除第3个instance\n", - "dataset.delete_instance(2)\n", - "# 删除名为'a'的field\n", - "dataset.delete_field('a')\n", - "dataset" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 简单的数据预处理" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "False\n" - ] - }, - { - "data": { - "text/plain": [ - "4" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 检查是否存在名为'a'的field\n", - "print(dataset.has_field('a')) # 或 ('a' in dataset)\n", - "# 将名为'a'的field改名为'b'\n", - "dataset.rename_field('c', 'b')\n", - "# DataSet的长度\n", - "len(dataset)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "+------------------------------+-------------------------------------------------+\n", - "| raw_words | words |\n", - "+------------------------------+-------------------------------------------------+\n", - "| This is the first instance . | ['This', 'is', 'the', 'first', 'instance', '.'] |\n", - "| Second instance . | ['Second', 'instance', '.'] |\n", - "| Third instance . 
diff --git a/docs/source/_static/notebooks/tutorial_2_vocabulary.ipynb b/docs/source/_static/notebooks/tutorial_2_vocabulary.ipynb
deleted file mode 100644
index 50862293..00000000
--- a/docs/source/_static/notebooks/tutorial_2_vocabulary.ipynb
+++ /dev/null
@@ -1,343 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Vocabulary in fastNLP\n",
-    "## Building a Vocabulary"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from fastNLP import Vocabulary\n",
-    "\n",
-    "vocab = Vocabulary()\n",
-    "vocab.add_word_lst(['复', '旦', '大', '学'])  # add new characters\n",
-    "vocab.add_word('上海')  # `上海` is treated as a single token\n",
-    "vocab.to_index('复')  # should be 3\n",
-    "vocab.to_index('我')  # prints 1; by default pad has index 0 and unk (unseen words) has index 1\n",
-    "\n",
-    "# a Vocabulary for targets usually needs neither pad nor unk; initialize it like this\n",
-    "vocab = Vocabulary(unknown=None, padding=None)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "Vocabulary(['positive', 'negative']...)"
-      ]
-     },
-     "execution_count": 2,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "vocab.add_word_lst(['positive', 'negative'])"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "0"
-      ]
-     },
-     "execution_count": 3,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "vocab.to_index('positive')"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Without unk"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [
-    {
-     "ename": "ValueError",
-     "evalue": "word `neutral` not in vocabulary",
-     "output_type": "error",
-     "traceback": [
-      "ValueError                                Traceback (most recent call last)",
-      "----> 1 vocab.to_index('neutral')  # raises because there is no unk fallback",
-      "fastNLP/core/vocabulary.py in to_index(self, w)",
-      "fastNLP/core/vocabulary.py in __getitem__(self, w)",
-      "ValueError: word `neutral` not in vocabulary"
-     ]
-    }
-   ],
-   "source": [
-    "vocab.to_index('neutral')  # raises, because there is no unk entry"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### With unk"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 25,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "(0, '<unk>')"
-      ]
-     },
-     "execution_count": 25,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "from fastNLP import Vocabulary\n",
-    "\n",
-    "vocab = Vocabulary(unknown='<unk>', padding=None)\n",
-    "vocab.add_word_lst(['positive', 'negative'])\n",
-    "vocab.to_index('neutral'), vocab.to_word(vocab.to_index('neutral'))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "Vocabulary(['positive', 'negative']...)"
-      ]
-     },
-     "execution_count": 8,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "vocab"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "+---------------------------------------------------+--------+\n",
-      "|                       chars                       | target |\n",
-      "+---------------------------------------------------+--------+\n",
-      "| [4, 2, 2, 5, 6, 7, 3]                             |   0    |\n",
-      "| [8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 3] |   1    |\n",
-      "+---------------------------------------------------+--------+\n"
-     ]
-    }
-   ],
-   "source": [
-    "from fastNLP import Vocabulary\n",
-    "from fastNLP import DataSet\n",
-    "\n",
-    "dataset = DataSet({'chars': [\n",
-    "                       ['今', '天', '天', '气', '很', '好', '。'],\n",
-    "                       ['被', '这', '部', '电', '影', '浪', '费', '了', '两', '个', '小', '时', '。']\n",
-    "                   ],\n",
-    "                   'target': ['neutral', 'negative']\n",
-    "})\n",
-    "\n",
-    "vocab = Vocabulary()\n",
-    "vocab.from_dataset(dataset, field_name='chars')\n",
-    "vocab.index_dataset(dataset, field_name='chars')\n",
-    "\n",
-    "target_vocab = Vocabulary(padding=None, unknown=None)\n",
-    "target_vocab.from_dataset(dataset, field_name='target')\n",
-    "target_vocab.index_dataset(dataset, field_name='target')\n",
-    "print(dataset)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "Vocabulary(['今', '天', '心', '情', '很']...)"
-      ]
-     },
-     "execution_count": 8,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "from fastNLP import Vocabulary\n",
-    "from fastNLP import DataSet\n",
-    "\n",
-    "tr_data = DataSet({'chars': [\n",
-    "                       ['今', '天', '心', '情', '很', '好', '。'],\n",
-    "                       ['被', '这', '部', '电', '影', '浪', '费', '了', '两', '个', '小', '时', '。']\n",
-    "                   ],\n",
-    "                   'target': ['positive', 'negative']\n",
-    "})\n",
-    "dev_data = DataSet({'chars': [\n",
-    "                        ['住', '宿', '条', '件', '还', '不', '错'],\n",
-    "                        ['糟', '糕', '的', '天', '气', ',', '无', '法', '出', '行', '。']\n",
-    "                    ],\n",
-    "                    'target': ['positive', 'negative']\n",
-    "})\n",
-    "\n",
-    "vocab = Vocabulary()\n",
-    "# pass validation/test sets via the no_create_entry_dataset argument when building the vocabulary\n",
-    "vocab.from_dataset(tr_data, field_name='chars', no_create_entry_dataset=[dev_data])\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "  4%|▎         | 2.31M/63.5M [00:00<00:02, 22.9MB/s]"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
"http://212.129.155.247/embedding/glove.6B.50d.zip not found in cache, downloading to /tmp/tmpvziobj_e\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "100%|██████████| 63.5M/63.5M [00:01<00:00, 41.3MB/s]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Finish download from http://212.129.155.247/embedding/glove.6B.50d.zip\n", - "Copy file to /remote-home/ynzheng/.fastNLP/embedding/glove.6B.50d\n", - "Found 2 out of 6 words in the pre-training embedding.\n", - "tensor([[ 0.9497, 0.3433, 0.8450, -0.8852, -0.7208, -0.2931, -0.7468, 0.6512,\n", - " 0.4730, -0.7401, 0.1877, -0.3828, -0.5590, 0.4295, -0.2698, -0.4238,\n", - " -0.3124, 1.3423, -0.7857, -0.6302, 0.9182, 0.2113, -0.5744, 1.4549,\n", - " 0.7546, -1.6165, -0.0085, 0.0029, 0.5130, -0.4745, 2.5306, 0.8594,\n", - " -0.3067, 0.0578, 0.6623, 0.2080, 0.6424, -0.5246, -0.0534, 1.1404,\n", - " -0.1370, -0.1836, 0.4546, -0.5096, -0.0255, -0.0286, 0.1805, -0.4483,\n", - " 0.4053, -0.3682]], grad_fn=)\n", - "tensor([[ 0.1320, -0.2392, 0.1732, -0.2390, -0.0463, 0.0494, 0.0488, -0.0886,\n", - " 0.0224, -0.1300, 0.0369, 0.1800, 0.0750, -0.0183, 0.2264, 0.1628,\n", - " 0.1261, -0.1259, 0.1663, -0.1230, -0.1904, -0.0532, 0.1397, -0.0259,\n", - " -0.1799, 0.0226, 0.1858, 0.1981, 0.1338, 0.2394, 0.0248, 0.0203,\n", - " -0.1722, -0.1683, -0.1892, 0.0874, 0.0562, -0.0394, 0.0306, -0.1761,\n", - " 0.1015, -0.0171, 0.1172, 0.1357, 0.1519, -0.0011, 0.1572, 0.1265,\n", - " -0.2391, -0.0258]], grad_fn=)\n", - "tensor([[ 0.1318, -0.2552, -0.0679, 0.2619, -0.2616, 0.2357, 0.1308, -0.0118,\n", - " 1.7659, 0.2078, 0.2620, -0.1643, -0.8464, 0.0201, 0.0702, 0.3978,\n", - " 0.1528, -0.2021, -1.6184, -0.5433, -0.1786, 0.5389, 0.4987, -0.1017,\n", - " 0.6626, -1.7051, 0.0572, -0.3241, -0.6683, 0.2665, 2.8420, 0.2684,\n", - " -0.5954, -0.5004, 1.5199, 0.0396, 1.6659, 0.9976, -0.5597, -0.7049,\n", - " -0.0309, -0.2830, -0.1356, 0.6429, 0.4149, 1.2362, 0.7659, 0.9780,\n", - " 0.5851, -0.3018]], grad_fn=)\n", - "tensor([[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0.]], grad_fn=)\n", - "tensor([[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0.]], grad_fn=)\n" - ] - } - ], - "source": [ - "import torch\n", - "from fastNLP.embeddings import StaticEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word('train')\n", - "vocab.add_word('only_in_train') # 仅在train出现,但肯定在预训练词表中不存在\n", - "vocab.add_word('test', no_create_entry=True) # 该词只在dev或test中出现\n", - "vocab.add_word('only_in_test', no_create_entry=True) # 这个词在预训练的词表中找不到\n", - "\n", - "embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-6b-50d')\n", - "print(embed(torch.LongTensor([vocab.to_index('train')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('only_in_train')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('test')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('only_in_test')])))\n", - "print(embed(torch.LongTensor([vocab.unknown_idx])))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - 
"name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/docs/source/_static/notebooks/tutorial_3_embedding.ipynb b/docs/source/_static/notebooks/tutorial_3_embedding.ipynb deleted file mode 100644 index 154a0756..00000000 --- a/docs/source/_static/notebooks/tutorial_3_embedding.ipynb +++ /dev/null @@ -1,524 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Found 5 out of 7 words in the pre-training embedding.\n", - "torch.Size([1, 5, 50])\n" - ] - } - ], - "source": [ - "import torch\n", - "from fastNLP.embeddings import StaticEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-6b-50d')\n", - "\n", - "words = torch.LongTensor([[vocab.to_index(word) for word in \"this is a demo .\".split()]]) # 将文本转为index\n", - "print(embed(words).size()) # StaticEmbedding的使用和pytorch的nn.Embedding是类似的" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "torch.Size([1, 5, 30])\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import StaticEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=30)\n", - "\n", - "words = torch.LongTensor([[vocab.to_index(word) for word in \"this is a demo .\".split()]])\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "22 out of 22 characters were found in pretrained elmo embedding.\n", - "torch.Size([1, 5, 256])\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import ElmoEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "embed = ElmoEmbedding(vocab, model_dir_or_name='en-small', requires_grad=False)\n", - "words = torch.LongTensor([[vocab.to_index(word) for word in \"this is a demo .\".split()]])\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "22 out of 22 characters were found in pretrained elmo embedding.\n", - "torch.Size([1, 5, 512])\n" - ] - } - ], - "source": [ - "embed = ElmoEmbedding(vocab, model_dir_or_name='en-small', requires_grad=False, layers='1,2')\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "22 out of 22 characters were found in pretrained elmo embedding.\n", - "torch.Size([1, 5, 256])\n" - ] - } - ], - "source": [ - "embed = ElmoEmbedding(vocab, model_dir_or_name='en-small', requires_grad=True, layers='mix')\n", - "print(embed(words).size()) # 三层输出按照权重element-wise的加起来" - ] - }, - { - 
"cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 7 words out of 7.\n", - "torch.Size([1, 5, 768])\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import BertEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased')\n", - "words = torch.LongTensor([[vocab.to_index(word) for word in \"this is a demo .\".split()]])\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 7 words out of 7.\n", - "torch.Size([1, 5, 1536])\n" - ] - } - ], - "source": [ - "# 使用后面两层的输出\n", - "embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased', layers='10,11')\n", - "print(embed(words).size()) # 结果将是在最后一维做拼接" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 7 words out of 7.\n", - "torch.Size([1, 7, 768])\n" - ] - } - ], - "source": [ - "embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased', layers='-1', include_cls_sep=True)\n", - "print(embed(words).size()) # 结果将在序列维度上增加2\n", - "# 取出句子的cls表示\n", - "cls_reps = embed(words)[:, 0] # shape: [batch_size, 768]" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 7 words out of 7.\n", - "torch.Size([1, 5, 768])\n" - ] - } - ], - "source": [ - "embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased', layers='-1', pool_method='max')\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 10 words out of 10.\n", 
- "torch.Size([1, 9, 768])\n" - ] - } - ], - "source": [ - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo . [SEP] another sentence .\".split())\n", - "\n", - "embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased', layers='-1', pool_method='max')\n", - "words = torch.LongTensor([[vocab.to_index(word) for word in \"this is a demo . [SEP] another sentence .\".split()]])\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Start constructing character vocabulary.\n", - "In total, there are 8 distinct characters.\n", - "torch.Size([1, 5, 64])\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import CNNCharEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "# character的embedding维度大小为50,返回的embedding结果维度大小为64。\n", - "embed = CNNCharEmbedding(vocab, embed_size=64, char_emb_size=50)\n", - "words = torch.LongTensor([[vocab.to_index(word) for word in \"this is a demo .\".split()]])\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Start constructing character vocabulary.\n", - "In total, there are 8 distinct characters.\n", - "torch.Size([1, 5, 64])\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import LSTMCharEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "# character的embedding维度大小为50,返回的embedding结果维度大小为64。\n", - "embed = LSTMCharEmbedding(vocab, embed_size=64, char_emb_size=50)\n", - "words = torch.LongTensor([[vocab.to_index(word) for word in \"this is a demo .\".split()]])\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Found 5 out of 7 words in the pre-training embedding.\n", - "50\n", - "Start constructing character vocabulary.\n", - "In total, there are 8 distinct characters.\n", - "30\n", - "22 out of 22 characters were found in pretrained elmo embedding.\n", - "256\n", - "22 out of 22 characters were found in pretrained elmo embedding.\n", - "512\n", - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 7 words out of 7.\n", - "768\n", - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 7 words out of 7.\n", - "1536\n", - "80\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import *\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "static_embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-6b-50d')\n", - "print(static_embed.embedding_dim) # 50\n", - "char_embed = CNNCharEmbedding(vocab, embed_size=30)\n", - "print(char_embed.embedding_dim) # 30\n", 
- "elmo_embed_1 = ElmoEmbedding(vocab, model_dir_or_name='en-small', layers='2')\n", - "print(elmo_embed_1.embedding_dim) # 256\n", - "elmo_embed_2 = ElmoEmbedding(vocab, model_dir_or_name='en-small', layers='1,2')\n", - "print(elmo_embed_2.embedding_dim) # 512\n", - "bert_embed_1 = BertEmbedding(vocab, layers='-1', model_dir_or_name='en-base-cased')\n", - "print(bert_embed_1.embedding_dim) # 768\n", - "bert_embed_2 = BertEmbedding(vocab, layers='2,-1', model_dir_or_name='en-base-cased')\n", - "print(bert_embed_2.embedding_dim) # 1536\n", - "stack_embed = StackEmbedding([static_embed, char_embed])\n", - "print(stack_embed.embedding_dim) # 80" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 7 words out of 7.\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import *\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased', requires_grad=True) # 初始化时设定为需要更新\n", - "embed.requires_grad = False # 修改BertEmbedding的权重为不更新" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor([[ 0.3633, -0.2091, -0.0353, -0.3771, -0.5193]],\n", - " grad_fn=)\n", - "tensor([[ 0.0926, -0.4812, -0.7744, 0.4836, -0.5475]],\n", - " grad_fn=)\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import StaticEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary().add_word_lst(\"The the a A\".split())\n", - "# 下面用随机的StaticEmbedding演示,但与使用预训练词向量时效果是一致的\n", - "embed = StaticEmbedding(vocab, model_name_or_dir=None, embedding_dim=5)\n", - "print(embed(torch.LongTensor([vocab.to_index('The')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('the')])))" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "All word in the vocab have been lowered. 
There are 6 words, 4 unique lowered words.\n", - "tensor([[ 0.4530, -0.1558, -0.1941, 0.3203, 0.0355]],\n", - " grad_fn=)\n", - "tensor([[ 0.4530, -0.1558, -0.1941, 0.3203, 0.0355]],\n", - " grad_fn=)\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import StaticEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary().add_word_lst(\"The the a A\".split())\n", - "# 下面用随机的StaticEmbedding演示,但与使用预训练时效果是一致的\n", - "embed = StaticEmbedding(vocab, model_name_or_dir=None, embedding_dim=5, lower=True)\n", - "print(embed(torch.LongTensor([vocab.to_index('The')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('the')])))" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1 out of 4 words have frequency less than 2.\n", - "tensor([[ 0.4724, -0.7277, -0.6350, -0.5258, -0.6063]],\n", - " grad_fn=)\n", - "tensor([[ 0.7638, -0.0552, 0.1625, -0.2210, 0.4993]],\n", - " grad_fn=)\n", - "tensor([[ 0.7638, -0.0552, 0.1625, -0.2210, 0.4993]],\n", - " grad_fn=)\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import StaticEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary().add_word_lst(\"the the the a\".split())\n", - "# 下面用随机的StaticEmbedding演示,但与使用预训练时效果是一致的\n", - "embed = StaticEmbedding(vocab, model_name_or_dir=None, embedding_dim=5, min_freq=2)\n", - "print(embed(torch.LongTensor([vocab.to_index('the')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('a')])))\n", - "print(embed(torch.LongTensor([vocab.unknown_idx])))" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0 out of 5 words have frequency less than 2.\n", - "All word in the vocab have been lowered. 
There are 5 words, 4 unique lowered words.\n", - "tensor([[ 0.1943, 0.3739, 0.2769, -0.4746, -0.3181]],\n", - " grad_fn=)\n", - "tensor([[ 0.5892, -0.6916, 0.7319, -0.3803, 0.4979]],\n", - " grad_fn=)\n", - "tensor([[ 0.5892, -0.6916, 0.7319, -0.3803, 0.4979]],\n", - " grad_fn=)\n", - "tensor([[-0.1348, -0.2172, -0.0071, 0.5704, -0.2607]],\n", - " grad_fn=)\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import StaticEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary().add_word_lst(\"the the the a A\".split())\n", - "# 下面用随机的StaticEmbedding演示,但与使用预训练时效果是一致的\n", - "embed = StaticEmbedding(vocab, model_name_or_dir=None, embedding_dim=5, min_freq=2, lower=True)\n", - "print(embed(torch.LongTensor([vocab.to_index('the')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('a')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('A')])))\n", - "print(embed(torch.LongTensor([vocab.unknown_idx])))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/docs/source/_static/notebooks/tutorial_4_load_dataset.ipynb b/docs/source/_static/notebooks/tutorial_4_load_dataset.ipynb deleted file mode 100644 index f6de83bc..00000000 --- a/docs/source/_static/notebooks/tutorial_4_load_dataset.ipynb +++ /dev/null @@ -1,309 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用Loader和Pipe加载并处理数据集\n", - "\n", - "这一部分是关于如何加载数据集的教程\n", - "\n", - "## Part I: 数据集容器DataBundle\n", - "\n", - "而由于对于同一个任务,训练集,验证集和测试集会共用同一个词表以及具有相同的目标值,所以在fastNLP中我们使用了 DataBundle 来承载同一个任务的多个数据集 DataSet 以及它们的词表 Vocabulary 。下面会有例子介绍 DataBundle 的相关使用。\n", - "\n", - "DataBundle 在fastNLP中主要在各个 Loader 和 Pipe 中被使用。 下面我们先介绍一下 Loader 和 Pipe 。\n", - "\n", - "## Part II: 加载的各种数据集的Loader\n", - "\n", - "在fastNLP中,所有的 Loader 都可以通过其文档判断其支持读取的数据格式,以及读取之后返回的 DataSet 的格式, 例如 ChnSentiCorpLoader \n", - "\n", - "- download() 函数:自动将该数据集下载到缓存地址,默认缓存地址为~/.fastNLP/datasets/。由于版权等原因,不是所有的Loader都实现了该方法。该方法会返回下载后文件所处的缓存地址。\n", - "\n", - "- _load() 函数:从一个数据文件中读取数据,返回一个 DataSet 。返回的DataSet的格式可从Loader文档判断。\n", - "\n", - "- load() 函数:从文件或者文件夹中读取数据为 DataSet 并将它们组装成 DataBundle。支持接受的参数类型有以下的几种\n", - "\n", - " - None, 将尝试读取自动缓存的数据,仅支持提供了自动下载数据的Loader\n", - " - 文件夹路径, 默认将尝试在该文件夹下匹配文件名中含有 train , test , dev 的文件,如果有多个文件含有相同的关键字,将无法通过该方式读取\n", - " - dict, 例如{'train':\"/path/to/tr.conll\", 'dev':\"/to/validate.conll\", \"test\":\"/to/te.conll\"}。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "In total 3 datasets:\n", - "\ttest has 1944 instances.\n", - "\ttrain has 17196 instances.\n", - "\tdev has 1858 instances.\n", - "\n" - ] - } - ], - "source": [ - "from fastNLP.io import CWSLoader\n", - "\n", - "loader = CWSLoader(dataset_name='pku')\n", - "data_bundle = loader.load()\n", - "print(data_bundle)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "这里表示一共有3个数据集。其中:\n", - "\n", - " 3个数据集的名称分别为train、dev、test,分别有17223、1831、1944个instance\n", - "\n", - "也可以取出DataSet,并打印DataSet中的具体内容" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - 
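To make the three accepted argument types of `load()` concrete, here is a sketch using the same CWSLoader; the explicit file paths are hypothetical placeholders, and the folder/dict variants assume files in the format the Loader's documentation describes:

```python
from fastNLP.io import CWSLoader

loader = CWSLoader(dataset_name='pku')

data_bundle = loader.load()                      # None: read the auto-downloaded cache
# data_bundle = loader.load('/path/to/folder')   # folder: match train/dev/test by file name
# data_bundle = loader.load({'train': '/path/to/tr.conll',
#                            'dev': '/path/to/dev.conll',
#                            'test': '/path/to/te.conll'})
```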
"name": "stdout", - "output_type": "stream", - "text": [ - "+----------------------------------------------------------------+\n", - "| raw_words |\n", - "+----------------------------------------------------------------+\n", - "| 迈向 充满 希望 的 新 世纪 —— 一九九八年 新年 讲话 ... |\n", - "| 中共中央 总书记 、 国家 主席 江 泽民 |\n", - "+----------------------------------------------------------------+\n" - ] - } - ], - "source": [ - "tr_data = data_bundle.get_dataset('train')\n", - "print(tr_data[:2])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Part III: 使用Pipe对数据集进行预处理\n", - "\n", - "通过 Loader 可以将文本数据读入,但并不能直接被神经网络使用,还需要进行一定的预处理。\n", - "\n", - "在fastNLP中,我们使用 Pipe 的子类作为数据预处理的类, Loader 和 Pipe 一般具备一一对应的关系,该关系可以从其名称判断, 例如 CWSLoader 与 CWSPipe 是一一对应的。一般情况下Pipe处理包含以下的几个过程,\n", - "1. 将raw_words或 raw_chars进行tokenize以切分成不同的词或字; \n", - "2. 再建立词或字的 Vocabulary , 并将词或字转换为index; \n", - "3. 将target 列建立词表并将target列转为index;\n", - "\n", - "所有的Pipe都可通过其文档查看该Pipe支持处理的 DataSet 以及返回的 DataBundle 中的Vocabulary的情况; 如 OntoNotesNERPipe\n", - "\n", - "各种数据集的Pipe当中,都包含了以下的两个函数:\n", - "\n", - "- process() 函数:对输入的 DataBundle 进行处理, 然后返回处理之后的 DataBundle 。process函数的文档中包含了该Pipe支持处理的DataSet的格式。\n", - "- process_from_file() 函数:输入数据集所在文件夹,使用对应的Loader读取数据(所以该函数支持的参数类型是由于其对应的Loader的load函数决定的),然后调用相对应的process函数对数据进行预处理。相当于是把Load和process放在一个函数中执行。\n", - "\n", - "接着上面 CWSLoader 的例子,我们展示一下 CWSPipe 的功能:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "In total 3 datasets:\n", - "\ttest has 1944 instances.\n", - "\ttrain has 17196 instances.\n", - "\tdev has 1858 instances.\n", - "In total 2 vocabs:\n", - "\tchars has 4777 entries.\n", - "\ttarget has 4 entries.\n", - "\n" - ] - } - ], - "source": [ - "from fastNLP.io import CWSPipe\n", - "\n", - "data_bundle = CWSPipe().process(data_bundle)\n", - "print(data_bundle)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "表示一共有3个数据集和2个词表。其中:\n", - "\n", - "- 3个数据集的名称分别为train、dev、test,分别有17223、1831、1944个instance\n", - "- 2个词表分别为chars词表与target词表。其中chars词表为句子文本所构建的词表,一共有4777个不同的字;target词表为目标标签所构建的词表,一共有4种标签。\n", - "\n", - "相较于之前CWSLoader读取的DataBundle,新增了两个Vocabulary。 我们可以打印一下处理之后的DataSet" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+---------------------+---------------------+---------------------+---------+\n", - "| raw_words | chars | target | seq_len |\n", - "+---------------------+---------------------+---------------------+---------+\n", - "| 迈向 充满 希望... | [1224, 178, 674,... | [0, 1, 0, 1, 0, ... | 29 |\n", - "| 中共中央 总书记... | [11, 212, 11, 33... | [0, 3, 3, 1, 0, ... 
| 15 |\n", - "+---------------------+---------------------+---------------------+---------+\n" - ] - } - ], - "source": [ - "tr_data = data_bundle.get_dataset('train')\n", - "print(tr_data[:2])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "可以看到有两列为int的field: chars和target。这两列的名称同时也是DataBundle中的Vocabulary的名称。可以通过下列的代码获取并查看Vocabulary的 信息" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Vocabulary(['B', 'E', 'S', 'M']...)\n" - ] - } - ], - "source": [ - "vocab = data_bundle.get_vocab('target')\n", - "print(vocab)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Part IV: fastNLP封装好的Loader和Pipe\n", - "\n", - "fastNLP封装了多种任务/数据集的 Loader 和 Pipe 并提供自动下载功能,具体参见文档 [数据集](https://docs.qq.com/sheet/DVnpkTnF6VW9UeXdh?c=A1A0A0)\n", - "\n", - "## Part V: 不同格式类型的基础Loader\n", - "\n", - "除了上面提到的针对具体任务的Loader,我们还提供了CSV格式和JSON格式的Loader\n", - "\n", - "**CSVLoader** 读取CSV类型的数据集文件。例子如下:\n", - "\n", - "```python\n", - "from fastNLP.io.loader import CSVLoader\n", - "data_set_loader = CSVLoader(\n", - " headers=('raw_words', 'target'), sep='\\t'\n", - ")\n", - "```\n", - "\n", - "表示将CSV文件中每一行的第一项将填入'raw_words' field,第二项填入'target' field。其中项之间由'\\t'分割开来\n", - "\n", - "```python\n", - "data_set = data_set_loader._load('path/to/your/file')\n", - "```\n", - "\n", - "文件内容样例如下\n", - "\n", - "```csv\n", - "But it does not leave you with much . 1\n", - "You could hate it for the same reason . 1\n", - "The performances are an absolute joy . 4\n", - "```\n", - "\n", - "读取之后的DataSet具有以下的field\n", - "\n", - "| raw_words | target |\n", - "| --------------------------------------- | ------ |\n", - "| But it does not leave you with much . | 1 |\n", - "| You could hate it for the same reason . | 1 |\n", - "| The performances are an absolute joy . | 4 |\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**JsonLoader** 读取Json类型的数据集文件,数据必须按行存储,每行是一个包含各类属性的Json对象。例子如下\n", - "\n", - "```python\n", - "from fastNLP.io.loader import JsonLoader\n", - "loader = JsonLoader(\n", - " fields={'sentence1': 'raw_words1', 'sentence2': 'raw_words2', 'gold_label': 'target'}\n", - ")\n", - "```\n", - "\n", - "表示将Json对象中'sentence1'、'sentence2'和'gold_label'对应的值赋给'raw_words1'、'raw_words2'、'target'这三个fields\n", - "\n", - "```python\n", - "data_set = loader._load('path/to/your/file')\n", - "```\n", - "\n", - "数据集内容样例如下\n", - "```\n", - "{\"annotator_labels\": [\"neutral\"], \"captionID\": \"3416050480.jpg#4\", \"gold_label\": \"neutral\", ... }\n", - "{\"annotator_labels\": [\"contradiction\"], \"captionID\": \"3416050480.jpg#4\", \"gold_label\": \"contradiction\", ... }\n", - "{\"annotator_labels\": [\"entailment\"], \"captionID\": \"3416050480.jpg#4\", \"gold_label\": \"entailment\", ... }\n", - "```\n", - "\n", - "读取之后的DataSet具有以下的field\n", - "\n", - "| raw_words0 | raw_words1 | target |\n", - "| ------------------------------------------------------ | ------------------------------------------------- | ------------- |\n", - "| A person on a horse jumps over a broken down airplane. | A person is training his horse for a competition. | neutral |\n", - "| A person on a horse jumps over a broken down airplane. | A person is at a diner, ordering an omelette. | contradiction |\n", - "| A person on a horse jumps over a broken down airplane. | A person is outdoors, on a horse. 
| entailment |" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/docs/source/_static/notebooks/tutorial_5_loss_optimizer.ipynb b/docs/source/_static/notebooks/tutorial_5_loss_optimizer.ipynb deleted file mode 100644 index cba78175..00000000 --- a/docs/source/_static/notebooks/tutorial_5_loss_optimizer.ipynb +++ /dev/null @@ -1,603 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用Trainer和Tester快速训练和测试" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据读入和处理" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/remote-home/ynzheng/anaconda3/envs/now/lib/python3.8/site-packages/FastNLP-0.5.0-py3.8.egg/fastNLP/io/loader/classification.py:340: UserWarning: SST2's test file has no target.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "In total 3 datasets:\n", - "\ttest has 1821 instances.\n", - "\ttrain has 67349 instances.\n", - "\tdev has 872 instances.\n", - "In total 2 vocabs:\n", - "\twords has 16292 entries.\n", - "\ttarget has 2 entries.\n", - "\n", - "+-----------------------------------+--------+-----------------------------------+---------+\n", - "| raw_words | target | words | seq_len |\n", - "+-----------------------------------+--------+-----------------------------------+---------+\n", - "| hide new secretions from the p... | 1 | [4110, 97, 12009, 39, 2, 6843,... 
| 7 |\n", - "+-----------------------------------+--------+-----------------------------------+---------+\n", - "Vocabulary(['hide', 'new', 'secretions', 'from', 'the']...)\n" - ] - } - ], - "source": [ - "from fastNLP.io import SST2Pipe\n", - "\n", - "pipe = SST2Pipe()\n", - "databundle = pipe.process_from_file()\n", - "vocab = databundle.get_vocab('words')\n", - "print(databundle)\n", - "print(databundle.get_dataset('train')[0])\n", - "print(databundle.get_vocab('words'))" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "4925 872 75\n" - ] - } - ], - "source": [ - "train_data = databundle.get_dataset('train')[:5000]\n", - "train_data, test_data = train_data.split(0.015)\n", - "dev_data = databundle.get_dataset('dev')\n", - "print(len(train_data),len(dev_data),len(test_data))" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+-------------+-----------+--------+-------+---------+\n", - "| field_names | raw_words | target | words | seq_len |\n", - "+-------------+-----------+--------+-------+---------+\n", - "| is_input | False | False | True | True |\n", - "| is_target | False | True | False | False |\n", - "| ignore_type | | False | False | False |\n", - "| pad_value | | 0 | 0 | 0 |\n", - "+-------------+-----------+--------+-------+---------+\n" - ] - }, - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "train_data.print_field_meta()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用内置模型训练" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.models import CNNText\n", - "\n", - "#词嵌入的维度\n", - "EMBED_DIM = 100\n", - "\n", - "#使用CNNText的时候第一个参数输入一个tuple,作为模型定义embedding的参数\n", - "#还可以传入 kernel_nums, kernel_sizes, padding, dropout的自定义值\n", - "model_cnn = CNNText((len(vocab),EMBED_DIM), num_classes=2, dropout=0.1)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP import AccuracyMetric\n", - "from fastNLP import Const\n", - "\n", - "# metrics=AccuracyMetric() 在本例中与下面这行代码等价\n", - "metrics=AccuracyMetric(pred=Const.OUTPUT, target=Const.TARGET)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP import CrossEntropyLoss\n", - "\n", - "# loss = CrossEntropyLoss() 在本例中与下面这行代码等价\n", - "loss = CrossEntropyLoss(pred=Const.OUTPUT, target=Const.TARGET)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "# 这表示构建了一个损失函数类,由func计算损失函数,其中将从模型返回值或者DataSet的target=True的field\n", - "# 当中找到一个参数名为`pred`的参数传入func一个参数名为`input`的参数;找到一个参数名为`label`的参数\n", - "# 传入func作为一个名为`target`的参数\n", - "#下面自己构建了一个交叉熵函数,和之后直接使用fastNLP中的交叉熵函数是一个效果\n", - "import torch\n", - "from fastNLP import LossFunc\n", - "func = torch.nn.functional.cross_entropy\n", - "loss_func = LossFunc(func, input=Const.OUTPUT, target=Const.TARGET)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "import torch.optim as optim\n", - "\n", - "#使用 torch.optim 定义优化器\n", - "optimizer=optim.RMSprop(model_cnn.parameters(), lr=0.01, alpha=0.99, eps=1e-08, 
weight_decay=0, momentum=0, centered=False)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 4]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-27-11-31-25\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=3080.0), HTML(value='')), layout=Layout(d…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.75 seconds!\n", - "\r", - "Evaluation on dev at Epoch 1/10. Step:308/3080: \n", - "\r", - "AccuracyMetric: acc=0.751147\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.83 seconds!\n", - "\r", - "Evaluation on dev at Epoch 2/10. Step:616/3080: \n", - "\r", - "AccuracyMetric: acc=0.755734\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 1.32 seconds!\n", - "\r", - "Evaluation on dev at Epoch 3/10. Step:924/3080: \n", - "\r", - "AccuracyMetric: acc=0.758028\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.88 seconds!\n", - "\r", - "Evaluation on dev at Epoch 4/10. 
Step:1232/3080: \n", - "\r", - "AccuracyMetric: acc=0.741972\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.96 seconds!\n", - "\r", - "Evaluation on dev at Epoch 5/10. Step:1540/3080: \n", - "\r", - "AccuracyMetric: acc=0.728211\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.87 seconds!\n", - "\r", - "Evaluation on dev at Epoch 6/10. Step:1848/3080: \n", - "\r", - "AccuracyMetric: acc=0.755734\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 1.04 seconds!\n", - "\r", - "Evaluation on dev at Epoch 7/10. Step:2156/3080: \n", - "\r", - "AccuracyMetric: acc=0.732798\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.57 seconds!\n", - "\r", - "Evaluation on dev at Epoch 8/10. Step:2464/3080: \n", - "\r", - "AccuracyMetric: acc=0.747706\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.48 seconds!\n", - "\r", - "Evaluation on dev at Epoch 9/10. Step:2772/3080: \n", - "\r", - "AccuracyMetric: acc=0.732798\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.48 seconds!\n", - "\r", - "Evaluation on dev at Epoch 10/10. 
Step:3080/3080: \n", - "\r", - "AccuracyMetric: acc=0.740826\n", - "\n", - "\r\n", - "In Epoch:3/Step:924, got best dev performance:\n", - "AccuracyMetric: acc=0.758028\n", - "Reloaded the best model.\n" - ] - }, - { - "data": { - "text/plain": [ - "{'best_eval': {'AccuracyMetric': {'acc': 0.758028}},\n", - " 'best_epoch': 3,\n", - " 'best_step': 924,\n", - " 'seconds': 160.58}" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import Trainer\n", - "\n", - "#训练的轮数和batch size\n", - "N_EPOCHS = 10\n", - "BATCH_SIZE = 16\n", - "\n", - "#如果在定义trainer的时候没有传入optimizer参数,模型默认的优化器为torch.optim.Adam且learning rate为lr=4e-3\n", - "#这里只使用了loss作为损失函数输入,感兴趣可以尝试其他损失函数(如之前自定义的loss_func)作为输入\n", - "trainer = Trainer(model=model_cnn, train_data=train_data, dev_data=dev_data, loss=loss, metrics=metrics,\n", - "optimizer=optimizer,n_epochs=N_EPOCHS, batch_size=BATCH_SIZE)\n", - "trainer.train()" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=5.0), HTML(value='')), layout=Layout(disp…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.43 seconds!\n", - "[tester] \n", - "AccuracyMetric: acc=0.773333\n" - ] - }, - { - "data": { - "text/plain": [ - "{'AccuracyMetric': {'acc': 0.773333}}" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import Tester\n", - "\n", - "tester = Tester(test_data, model_cnn, metrics=AccuracyMetric())\n", - "tester.test()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/docs/source/_static/notebooks/tutorial_6_datasetiter.ipynb b/docs/source/_static/notebooks/tutorial_6_datasetiter.ipynb deleted file mode 100644 index 2caa4cc2..00000000 --- a/docs/source/_static/notebooks/tutorial_6_datasetiter.ipynb +++ /dev/null @@ -1,681 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用Trainer和Tester快速训练和测试" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据读入和处理" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/remote-home/ynzheng/anaconda3/envs/now/lib/python3.8/site-packages/FastNLP-0.5.0-py3.8.egg/fastNLP/io/loader/classification.py:340: UserWarning: SST2's test file has no target.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "In total 3 datasets:\n", - "\ttest has 1821 instances.\n", - "\ttrain has 67349 instances.\n", - "\tdev has 872 instances.\n", - "In total 2 vocabs:\n", - "\twords has 16292 entries.\n", - "\ttarget has 2 entries.\n", - "\n", - 
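Before the DataSetIter walkthrough below, it is worth noting that the AccuracyMetric used by Trainer and Tester boils down to an argmax over the prediction scores plus a mean over the batch, if we read its behavior correctly. A sketch with plain tensors, where random logits stand in for a model's output:

```python
import torch

logits = torch.randn(4, 2)            # stand-in for a batch of model outputs ('pred')
target = torch.tensor([0, 1, 1, 0])   # stand-in for the 'target' field

pred = logits.argmax(dim=-1)          # collapse class scores to predicted labels
acc = (pred == target).float().mean().item()
print(acc)
```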
"+-----------------------------------+--------+-----------------------------------+---------+\n", - "| raw_words | target | words | seq_len |\n", - "+-----------------------------------+--------+-----------------------------------+---------+\n", - "| hide new secretions from the p... | 1 | [4110, 97, 12009, 39, 2, 6843,... | 7 |\n", - "+-----------------------------------+--------+-----------------------------------+---------+\n", - "Vocabulary(['hide', 'new', 'secretions', 'from', 'the']...)\n" - ] - } - ], - "source": [ - "from fastNLP.io import SST2Pipe\n", - "\n", - "pipe = SST2Pipe()\n", - "databundle = pipe.process_from_file()\n", - "vocab = databundle.get_vocab('words')\n", - "print(databundle)\n", - "print(databundle.get_dataset('train')[0])\n", - "print(databundle.get_vocab('words'))" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "4925 872 75\n" - ] - } - ], - "source": [ - "train_data = databundle.get_dataset('train')[:5000]\n", - "train_data, test_data = train_data.split(0.015)\n", - "dev_data = databundle.get_dataset('dev')\n", - "print(len(train_data),len(dev_data),len(test_data))" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+-------------+-----------+--------+-------+---------+\n", - "| field_names | raw_words | target | words | seq_len |\n", - "+-------------+-----------+--------+-------+---------+\n", - "| is_input | False | False | True | True |\n", - "| is_target | False | True | False | False |\n", - "| ignore_type | | False | False | False |\n", - "| pad_value | | 0 | 0 | 0 |\n", - "+-------------+-----------+--------+-------+---------+\n" - ] - }, - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "train_data.print_field_meta()" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP import AccuracyMetric\n", - "from fastNLP import Const\n", - "\n", - "# metrics=AccuracyMetric() 在本例中与下面这行代码等价\n", - "metrics=AccuracyMetric(pred=Const.OUTPUT, target=Const.TARGET)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## DataSetIter初探" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "batch_x: {'words': tensor([[ 13, 830, 7746, 174, 3, 47, 6, 83, 5752, 15,\n", - " 2177, 15, 63, 57, 406, 84, 1009, 4973, 27, 17,\n", - " 13785, 3, 533, 3687, 15623, 39, 375, 8, 15624, 8,\n", - " 1323, 4398, 7],\n", - " [ 1045, 11113, 16, 104, 5, 4, 176, 1824, 1704, 3,\n", - " 2, 18, 11, 4, 1018, 432, 143, 33, 245, 308,\n", - " 7, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0]]), 'seq_len': tensor([33, 21])}\n", - "batch_y: {'target': tensor([1, 0])}\n", - "batch_x: {'words': tensor([[ 14, 10, 4, 311, 5, 154, 1418, 609, 7],\n", - " [ 14, 10, 437, 32, 78, 3, 78, 437, 7]]), 'seq_len': tensor([9, 9])}\n", - "batch_y: {'target': tensor([0, 1])}\n", - "batch_x: {'words': tensor([[ 4, 277, 685, 18, 7],\n", - " [15618, 3204, 5, 1675, 0]]), 'seq_len': tensor([5, 4])}\n", - "batch_y: {'target': tensor([1, 1])}\n", - "batch_x: {'words': tensor([[ 2, 155, 3, 4426, 3, 239, 3, 739, 5, 1136,\n", - " 41, 43, 2427, 736, 2, 648, 10, 15620, 2285, 7],\n", - " [ 24, 95, 28, 46, 
8, 336, 38, 239, 8, 2133,\n", - " 2, 18, 10, 15622, 1421, 6, 61, 5, 387, 7]]), 'seq_len': tensor([20, 20])}\n", - "batch_y: {'target': tensor([0, 0])}\n", - "batch_x: {'words': tensor([[ 879, 96, 8, 1026, 12, 8067, 11, 13623, 8, 15619,\n", - " 4, 673, 662, 15, 4, 1154, 240, 639, 417, 7],\n", - " [ 45, 752, 327, 180, 10, 15621, 16, 72, 8904, 9,\n", - " 1217, 7, 0, 0, 0, 0, 0, 0, 0, 0]]), 'seq_len': tensor([20, 12])}\n", - "batch_y: {'target': tensor([0, 1])}\n" - ] - } - ], - "source": [ - "from fastNLP import BucketSampler\n", - "from fastNLP import DataSetIter\n", - "\n", - "tmp_data = dev_data[:10]\n", - "# 定义一个Batch,传入DataSet,规定batch_size和去batch的规则。\n", - "# 顺序(Sequential),随机(Random),相似长度组成一个batch(Bucket)\n", - "sampler = BucketSampler(batch_size=2, seq_len_field_name='seq_len')\n", - "batch = DataSetIter(batch_size=2, dataset=tmp_data, sampler=sampler)\n", - "for batch_x, batch_y in batch:\n", - " print(\"batch_x: \",batch_x)\n", - " print(\"batch_y: \", batch_y)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "batch_x: {'words': tensor([[ 13, 830, 7746, 174, 3, 47, 6, 83, 5752, 15,\n", - " 2177, 15, 63, 57, 406, 84, 1009, 4973, 27, 17,\n", - " 13785, 3, 533, 3687, 15623, 39, 375, 8, 15624, 8,\n", - " 1323, 4398, 7],\n", - " [ 1045, 11113, 16, 104, 5, 4, 176, 1824, 1704, 3,\n", - " 2, 18, 11, 4, 1018, 432, 143, 33, 245, 308,\n", - " 7, -1, -1, -1, -1, -1, -1, -1, -1, -1,\n", - " -1, -1, -1]]), 'seq_len': tensor([33, 21])}\n", - "batch_y: {'target': tensor([1, 0])}\n", - "batch_x: {'words': tensor([[ 14, 10, 4, 311, 5, 154, 1418, 609, 7],\n", - " [ 14, 10, 437, 32, 78, 3, 78, 437, 7]]), 'seq_len': tensor([9, 9])}\n", - "batch_y: {'target': tensor([0, 1])}\n", - "batch_x: {'words': tensor([[ 2, 155, 3, 4426, 3, 239, 3, 739, 5, 1136,\n", - " 41, 43, 2427, 736, 2, 648, 10, 15620, 2285, 7],\n", - " [ 24, 95, 28, 46, 8, 336, 38, 239, 8, 2133,\n", - " 2, 18, 10, 15622, 1421, 6, 61, 5, 387, 7]]), 'seq_len': tensor([20, 20])}\n", - "batch_y: {'target': tensor([0, 0])}\n", - "batch_x: {'words': tensor([[ 4, 277, 685, 18, 7],\n", - " [15618, 3204, 5, 1675, -1]]), 'seq_len': tensor([5, 4])}\n", - "batch_y: {'target': tensor([1, 1])}\n", - "batch_x: {'words': tensor([[ 879, 96, 8, 1026, 12, 8067, 11, 13623, 8, 15619,\n", - " 4, 673, 662, 15, 4, 1154, 240, 639, 417, 7],\n", - " [ 45, 752, 327, 180, 10, 15621, 16, 72, 8904, 9,\n", - " 1217, 7, -1, -1, -1, -1, -1, -1, -1, -1]]), 'seq_len': tensor([20, 12])}\n", - "batch_y: {'target': tensor([0, 1])}\n" - ] - } - ], - "source": [ - "tmp_data.set_pad_val('words',-1)\n", - "batch = DataSetIter(batch_size=2, dataset=tmp_data, sampler=sampler)\n", - "for batch_x, batch_y in batch:\n", - " print(\"batch_x: \",batch_x)\n", - " print(\"batch_y: \", batch_y)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "batch_x: {'words': tensor([[ 45, 752, 327, 180, 10, 15621, 16, 72, 8904, 9,\n", - " 1217, 7, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n", - " [ 879, 96, 8, 1026, 12, 8067, 11, 13623, 8, 15619,\n", - " 4, 673, 662, 15, 4, 1154, 240, 639, 417, 7,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]), 'seq_len': tensor([12, 20])}\n", - "batch_y: {'target': tensor([1, 0])}\n", - "batch_x: {'words': tensor([[ 13, 830, 7746, 174, 3, 47, 6, 83, 5752, 15,\n", - " 
2177, 15, 63, 57, 406, 84, 1009, 4973, 27, 17,\n", - " 13785, 3, 533, 3687, 15623, 39, 375, 8, 15624, 8,\n", - " 1323, 4398, 7, 0, 0, 0, 0, 0, 0, 0],\n", - " [ 1045, 11113, 16, 104, 5, 4, 176, 1824, 1704, 3,\n", - " 2, 18, 11, 4, 1018, 432, 143, 33, 245, 308,\n", - " 7, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]), 'seq_len': tensor([33, 21])}\n", - "batch_y: {'target': tensor([1, 0])}\n", - "batch_x: {'words': tensor([[ 14, 10, 4, 311, 5, 154, 1418, 609, 7, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0],\n", - " [ 14, 10, 437, 32, 78, 3, 78, 437, 7, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0]]), 'seq_len': tensor([9, 9])}\n", - "batch_y: {'target': tensor([0, 1])}\n", - "batch_x: {'words': tensor([[ 2, 155, 3, 4426, 3, 239, 3, 739, 5, 1136,\n", - " 41, 43, 2427, 736, 2, 648, 10, 15620, 2285, 7,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n", - " [ 24, 95, 28, 46, 8, 336, 38, 239, 8, 2133,\n", - " 2, 18, 10, 15622, 1421, 6, 61, 5, 387, 7,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]), 'seq_len': tensor([20, 20])}\n", - "batch_y: {'target': tensor([0, 0])}\n", - "batch_x: {'words': tensor([[ 4, 277, 685, 18, 7, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n", - " [15618, 3204, 5, 1675, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]), 'seq_len': tensor([5, 4])}\n", - "batch_y: {'target': tensor([1, 1])}\n" - ] - } - ], - "source": [ - "from fastNLP.core.field import Padder\n", - "import numpy as np\n", - "class FixLengthPadder(Padder):\n", - " def __init__(self, pad_val=0, length=None):\n", - " super().__init__(pad_val=pad_val)\n", - " self.length = length\n", - " assert self.length is not None, \"Creating FixLengthPadder with no specific length!\"\n", - "\n", - " def __call__(self, contents, field_name, field_ele_dtype, dim):\n", - " #计算当前contents中的最大长度\n", - " max_len = max(map(len, contents))\n", - " #如果当前contents中的最大长度大于指定的padder length的话就报错\n", - " assert max_len <= self.length, \"Fixed padder length smaller than actual length! 
with length {}\".format(max_len)\n", - " array = np.full((len(contents), self.length), self.pad_val, dtype=field_ele_dtype)\n", - " for i, content_i in enumerate(contents):\n", - " array[i, :len(content_i)] = content_i\n", - " return array\n", - "\n", - "#设定FixLengthPadder的固定长度为40\n", - "tmp_padder = FixLengthPadder(pad_val=0,length=40)\n", - "#利用dataset的set_padder函数设定words field的padder\n", - "tmp_data.set_padder('words',tmp_padder)\n", - "batch = DataSetIter(batch_size=2, dataset=tmp_data, sampler=sampler)\n", - "for batch_x, batch_y in batch:\n", - " print(\"batch_x: \",batch_x)\n", - " print(\"batch_y: \", batch_y)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用DataSetIter自己编写训练过程\n" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "-----start training-----\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 2.68 seconds!\n", - "Epoch 0 Avg Loss: 0.66 AccuracyMetric: acc=0.708716 29307ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.38 seconds!\n", - "Epoch 1 Avg Loss: 0.41 AccuracyMetric: acc=0.770642 52200ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.51 seconds!\n", - "Epoch 2 Avg Loss: 0.16 AccuracyMetric: acc=0.747706 70268ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.96 seconds!\n", - "Epoch 3 Avg Loss: 0.06 AccuracyMetric: acc=0.741972 90349ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 1.04 seconds!\n", - "Epoch 4 Avg Loss: 0.03 AccuracyMetric: acc=0.740826 114250ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - 
"model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.8 seconds!\n", - "Epoch 5 Avg Loss: 0.02 AccuracyMetric: acc=0.738532 134742ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.65 seconds!\n", - "Epoch 6 Avg Loss: 0.01 AccuracyMetric: acc=0.731651 154503ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.8 seconds!\n", - "Epoch 7 Avg Loss: 0.01 AccuracyMetric: acc=0.738532 175397ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.36 seconds!\n", - "Epoch 8 Avg Loss: 0.01 AccuracyMetric: acc=0.733945 192384ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.84 seconds!\n", - "Epoch 9 Avg Loss: 0.01 AccuracyMetric: acc=0.744266 214417ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=5.0), HTML(value='')), layout=Layout(disp…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.04 seconds!\n", - "[tester] \n", - "AccuracyMetric: acc=0.786667\n" - ] - }, - { - "data": { - "text/plain": [ - "{'AccuracyMetric': {'acc': 0.786667}}" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import BucketSampler\n", - "from fastNLP import DataSetIter\n", - "from fastNLP.models import CNNText\n", - "from fastNLP import Tester\n", - "import torch\n", - "import time\n", - "\n", - "embed_dim = 100\n", - "model = CNNText((len(vocab),embed_dim), num_classes=2, dropout=0.1)\n", - "\n", - "def train(epoch, data, devdata):\n", - " optimizer = 
torch.optim.Adam(model.parameters(), lr=0.001)\n", - " lossfunc = torch.nn.CrossEntropyLoss()\n", - " batch_size = 32\n", - "\n", - " # 定义一个Batch,传入DataSet,规定batch_size和去batch的规则。\n", - " # 顺序(Sequential),随机(Random),相似长度组成一个batch(Bucket)\n", - " train_sampler = BucketSampler(batch_size=batch_size, seq_len_field_name='seq_len')\n", - " train_batch = DataSetIter(batch_size=batch_size, dataset=data, sampler=train_sampler)\n", - "\n", - " start_time = time.time()\n", - " print(\"-\"*5+\"start training\"+\"-\"*5)\n", - " for i in range(epoch):\n", - " loss_list = []\n", - " for batch_x, batch_y in train_batch:\n", - " optimizer.zero_grad()\n", - " output = model(batch_x['words'])\n", - " loss = lossfunc(output['pred'], batch_y['target'])\n", - " loss.backward()\n", - " optimizer.step()\n", - " loss_list.append(loss.item())\n", - "\n", - " #这里verbose如果为0,在调用Tester对象的test()函数时不输出任何信息,返回评估信息; 如果为1,打印出验证结果,返回评估信息\n", - " #在调用过Tester对象的test()函数后,调用其_format_eval_results(res)函数,结构化输出验证结果\n", - " tester_tmp = Tester(devdata, model, metrics=AccuracyMetric(), verbose=0)\n", - " res=tester_tmp.test()\n", - "\n", - " print('Epoch {:d} Avg Loss: {:.2f}'.format(i, sum(loss_list) / len(loss_list)),end=\" \")\n", - " print(tester_tmp._format_eval_results(res),end=\" \")\n", - " print('{:d}ms'.format(round((time.time()-start_time)*1000)))\n", - " loss_list.clear()\n", - "\n", - "train(10, train_data, dev_data)\n", - "#使用tester进行快速测试\n", - "tester = Tester(test_data, model, metrics=AccuracyMetric())\n", - "tester.test()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/docs/source/_static/notebooks/tutorial_7_metrics.ipynb b/docs/source/_static/notebooks/tutorial_7_metrics.ipynb deleted file mode 100644 index ef791683..00000000 --- a/docs/source/_static/notebooks/tutorial_7_metrics.ipynb +++ /dev/null @@ -1,1206 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用Metric快速评测你的模型\n", - "\n", - "和上一篇教程一样的实验准备代码" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.io import SST2Pipe\n", - "from fastNLP import Trainer, CrossEntropyLoss, AccuracyMetric\n", - "from fastNLP.models import CNNText\n", - "import torch\n", - "\n", - "databundle = SST2Pipe().process_from_file()\n", - "vocab = databundle.get_vocab('words')\n", - "train_data = databundle.get_dataset('train')[:5000]\n", - "train_data, test_data = train_data.split(0.015)\n", - "dev_data = databundle.get_dataset('dev')\n", - "\n", - "model = CNNText((len(vocab),100), num_classes=2, dropout=0.1)\n", - "loss = CrossEntropyLoss()\n", - "metric = AccuracyMetric()\n", - "device = 0 if torch.cuda.is_available() else 'cpu'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "进行训练时,fastNLP提供了各种各样的 metrics 。 如前面的教程中所介绍,AccuracyMetric 类的对象被直接传到 Trainer 中用于训练" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields 
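The hand-written loop above re-evaluates with a fresh `Tester` each epoch but never saves weights. A sketch of keeping the best checkpoint, where the file name and the keep-best policy are illustrative additions rather than part of the tutorial:

```python
import torch

best_acc = 0.0
# ...inside the epoch loop, right after `res = tester_tmp.test()`:
acc = res['AccuracyMetric']['acc']   # result layout shown in the output above
if acc > best_acc:
    best_acc = acc
    torch.save(model.state_dict(), 'best_cnntext.pt')  # hypothetical path
```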
after batch(if batch size is 2):\n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 4]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-28-00-37-08\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=1540.0), HTML(value='')), layout=Layout(d…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.28 seconds!\n", - "\r", - "Evaluation on dev at Epoch 1/10. Step:154/1540: \n", - "\r", - "AccuracyMetric: acc=0.747706\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.17 seconds!\n", - "\r", - "Evaluation on dev at Epoch 2/10. Step:308/1540: \n", - "\r", - "AccuracyMetric: acc=0.745413\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.19 seconds!\n", - "\r", - "Evaluation on dev at Epoch 3/10. Step:462/1540: \n", - "\r", - "AccuracyMetric: acc=0.74656\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.15 seconds!\n", - "\r", - "Evaluation on dev at Epoch 4/10. Step:616/1540: \n", - "\r", - "AccuracyMetric: acc=0.762615\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.42 seconds!\n", - "\r", - "Evaluation on dev at Epoch 5/10. 
Step:770/1540: \n", - "\r", - "AccuracyMetric: acc=0.736239\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.16 seconds!\n", - "\r", - "Evaluation on dev at Epoch 6/10. Step:924/1540: \n", - "\r", - "AccuracyMetric: acc=0.761468\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.42 seconds!\n", - "\r", - "Evaluation on dev at Epoch 7/10. Step:1078/1540: \n", - "\r", - "AccuracyMetric: acc=0.727064\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.21 seconds!\n", - "\r", - "Evaluation on dev at Epoch 8/10. Step:1232/1540: \n", - "\r", - "AccuracyMetric: acc=0.731651\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.52 seconds!\n", - "\r", - "Evaluation on dev at Epoch 9/10. Step:1386/1540: \n", - "\r", - "AccuracyMetric: acc=0.752294\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.44 seconds!\n", - "\r", - "Evaluation on dev at Epoch 10/10. 
Step:1540/1540: \n", - "\r", - "AccuracyMetric: acc=0.760321\n", - "\n", - "\r\n", - "In Epoch:4/Step:616, got best dev performance:\n", - "AccuracyMetric: acc=0.762615\n", - "Reloaded the best model.\n" - ] - }, - { - "data": { - "text/plain": [ - "{'best_eval': {'AccuracyMetric': {'acc': 0.762615}},\n", - " 'best_epoch': 4,\n", - " 'best_step': 616,\n", - " 'seconds': 32.63}" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "trainer = Trainer(train_data=train_data, dev_data=dev_data, model=model,\n", - " loss=loss, device=device, metrics=metric)\n", - "trainer.train()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "除了 AccuracyMetric 之外,SpanFPreRecMetric 也是一种非常见的评价指标, 例如在序列标注问题中,常以span的方式计算 F-measure, precision, recall。\n", - "\n", - "另外,fastNLP 还实现了用于抽取式QA(如SQuAD)的metric ExtractiveQAMetric。 用户可以参考下面这个表格。\n", - "\n", - "| 名称 | 介绍 |\n", - "| -------------------- | ------------------------------------------------- |\n", - "| `MetricBase` | 自定义metrics需继承的基类 |\n", - "| `AccuracyMetric` | 简单的正确率metric |\n", - "| `SpanFPreRecMetric` | 同时计算 F-measure, precision, recall 值的 metric |\n", - "| `ExtractiveQAMetric` | 用于抽取式QA任务 的metric |\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 定义自己的metrics\n", - "\n", - "在定义自己的metrics类时需继承 fastNLP 的 MetricBase, 并覆盖写入 evaluate 和 get_metric 方法。\n", - "\n", - "- evaluate(xxx) 中传入一个批次的数据,将针对一个批次的预测结果做评价指标的累计\n", - "\n", - "- get_metric(xxx) 当所有数据处理完毕时调用该方法,它将根据 evaluate函数累计的评价指标统计量来计算最终的评价结果\n", - "\n", - "以分类问题中,Accuracy计算为例,假设model的forward返回dict中包含 pred 这个key, 并且该key需要用于Accuracy:\n", - "\n", - "```python\n", - "class Model(nn.Module):\n", - " def __init__(xxx):\n", - " # do something\n", - " def forward(self, xxx):\n", - " # do something\n", - " return {'pred': pred, 'other_keys':xxx} # pred's shape: batch_size x num_classes\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Version 1\n", - "\n", - "假设dataset中 `target` 这个 field 是需要预测的值,并且该 field 被设置为了 target 对应的 `AccMetric` 可以按如下的定义" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP import MetricBase\n", - "\n", - "class AccMetric(MetricBase):\n", - "\n", - " def __init__(self):\n", - " super().__init__()\n", - " # 根据你的情况自定义指标\n", - " self.total = 0\n", - " self.acc_count = 0\n", - "\n", - " # evaluate的参数需要和DataSet 中 field 名以及模型输出的结果 field 名一致,不然找不到对应的value\n", - " # pred, target 的参数是 fastNLP 的默认配置\n", - " def evaluate(self, pred, target):\n", - " # dev或test时,每个batch结束会调用一次该方法,需要实现如何根据每个batch累加metric\n", - " self.total += target.size(0)\n", - " self.acc_count += target.eq(pred).sum().item()\n", - "\n", - " def get_metric(self, reset=True): # 在这里定义如何计算metric\n", - " acc = self.acc_count/self.total\n", - " if reset: # 是否清零以便重新计算\n", - " self.acc_count = 0\n", - " self.total = 0\n", - " return {'acc': acc}\n", - " # 需要返回一个dict,key为该metric的名称,该名称会显示到Trainer的progress bar中" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 4]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor 
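One caveat about the `AccMetric` just defined: the `Model` sketch earlier documents `pred` as `batch_size x num_classes` logits, and `target.eq(pred)` only counts correctly when `pred` already holds label indices. A hedged fix for `evaluate`, mirroring the argmax convention of the built-in `AccuracyMetric`:

```python
def evaluate(self, pred, target):
    # Assumption: pred is either (batch,) label indices or
    # (batch, num_classes) logits; reduce logits to labels first.
    if pred.dim() == target.dim() + 1:
        pred = pred.argmax(dim=-1)
    self.total += target.size(0)
    self.acc_count += target.eq(pred).sum().item()
```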
(2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-28-00-37-41\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=1540.0), HTML(value='')), layout=Layout(d…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.27 seconds!\n", - "\r", - "Evaluation on dev at Epoch 1/10. Step:154/1540: \n", - "\r", - "AccMetric: acc=0.7431192660550459\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.42 seconds!\n", - "\r", - "Evaluation on dev at Epoch 2/10. Step:308/1540: \n", - "\r", - "AccMetric: acc=0.7522935779816514\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.51 seconds!\n", - "\r", - "Evaluation on dev at Epoch 3/10. Step:462/1540: \n", - "\r", - "AccMetric: acc=0.7477064220183486\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.48 seconds!\n", - "\r", - "Evaluation on dev at Epoch 4/10. Step:616/1540: \n", - "\r", - "AccMetric: acc=0.7442660550458715\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.5 seconds!\n", - "\r", - "Evaluation on dev at Epoch 5/10. 
Step:770/1540: \n", - "\r", - "AccMetric: acc=0.7362385321100917\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.45 seconds!\n", - "\r", - "Evaluation on dev at Epoch 6/10. Step:924/1540: \n", - "\r", - "AccMetric: acc=0.7293577981651376\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.33 seconds!\n", - "\r", - "Evaluation on dev at Epoch 7/10. Step:1078/1540: \n", - "\r", - "AccMetric: acc=0.7190366972477065\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.29 seconds!\n", - "\r", - "Evaluation on dev at Epoch 8/10. Step:1232/1540: \n", - "\r", - "AccMetric: acc=0.7419724770642202\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.34 seconds!\n", - "\r", - "Evaluation on dev at Epoch 9/10. Step:1386/1540: \n", - "\r", - "AccMetric: acc=0.7350917431192661\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.18 seconds!\n", - "\r", - "Evaluation on dev at Epoch 10/10. 
Step:1540/1540: \n", - "\r", - "AccMetric: acc=0.6846330275229358\n", - "\n", - "\r\n", - "In Epoch:2/Step:308, got best dev performance:\n", - "AccMetric: acc=0.7522935779816514\n", - "Reloaded the best model.\n" - ] - }, - { - "data": { - "text/plain": [ - "{'best_eval': {'AccMetric': {'acc': 0.7522935779816514}},\n", - " 'best_epoch': 2,\n", - " 'best_step': 308,\n", - " 'seconds': 42.7}" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "trainer = Trainer(train_data=train_data, dev_data=dev_data, model=model,\n", - " loss=loss, device=device, metrics=AccMetric())\n", - "trainer.train()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Version 2\n", - "\n", - "如果需要复用 metric,比如下一次使用 `AccMetric` 时,dataset中目标field不叫 `target` 而叫 `y` ,或者model的输出不是 `pred`\n" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "class AccMetric(MetricBase):\n", - " def __init__(self, pred=None, target=None):\n", - " \"\"\"\n", - " 假设在另一场景使用时,目标field叫y,model给出的key为pred_y。则只需要在初始化AccMetric时,\n", - " acc_metric = AccMetric(pred='pred_y', target='y')即可。\n", - " 当初始化为acc_metric = AccMetric() 时,fastNLP会直接使用 'pred', 'target' 作为key去索取对应的的值\n", - " \"\"\"\n", - "\n", - " super().__init__()\n", - "\n", - " # 如果没有注册该则效果与 Version 1 就是一样的\n", - " self._init_param_map(pred=pred, target=target) # 该方法会注册label和pred. 仅需要注册evaluate()方法会用到的参数名即可\n", - "\n", - " # 根据你的情况自定义指标\n", - " self.total = 0\n", - " self.acc_count = 0\n", - "\n", - " # evaluate的参数需要和DataSet 中 field 名以及模型输出的结果 field 名一致,不然找不到对应的value\n", - " # pred, target 的参数是 fastNLP 的默认配置\n", - " def evaluate(self, pred, target):\n", - " # dev或test时,每个batch结束会调用一次该方法,需要实现如何根据每个batch累加metric\n", - " self.total += target.size(0)\n", - " self.acc_count += target.eq(pred).sum().item()\n", - "\n", - " def get_metric(self, reset=True): # 在这里定义如何计算metric\n", - " acc = self.acc_count/self.total\n", - " if reset: # 是否清零以便重新计算\n", - " self.acc_count = 0\n", - " self.total = 0\n", - " return {'acc': acc}\n", - " # 需要返回一个dict,key为该metric的名称,该名称会显示到Trainer的progress bar中" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 4]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-28-00-38-24\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=1540.0), HTML(value='')), layout=Layout(d…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.32 seconds!\n", - "\r", - 
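The remapping constructor above is what makes Version 2 reusable. A hypothetical usage, assuming a model whose `forward()` returns `{'pred_y': ...}` and a DataSet whose target field is named `y`:

```python
# evaluate(pred, target) then receives the values found under the
# remapped keys 'pred_y' and 'y'.
acc_metric = AccMetric(pred='pred_y', target='y')
# trainer = Trainer(train_data=..., model=..., metrics=acc_metric)
```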
"Evaluation on dev at Epoch 1/10. Step:154/1540: \n", - "\r", - "AccMetric: acc=0.7511467889908257\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.29 seconds!\n", - "\r", - "Evaluation on dev at Epoch 2/10. Step:308/1540: \n", - "\r", - "AccMetric: acc=0.7454128440366973\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.42 seconds!\n", - "\r", - "Evaluation on dev at Epoch 3/10. Step:462/1540: \n", - "\r", - "AccMetric: acc=0.7224770642201835\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.4 seconds!\n", - "\r", - "Evaluation on dev at Epoch 4/10. Step:616/1540: \n", - "\r", - "AccMetric: acc=0.7534403669724771\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.41 seconds!\n", - "\r", - "Evaluation on dev at Epoch 5/10. Step:770/1540: \n", - "\r", - "AccMetric: acc=0.7396788990825688\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.22 seconds!\n", - "\r", - "Evaluation on dev at Epoch 6/10. Step:924/1540: \n", - "\r", - "AccMetric: acc=0.7442660550458715\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.45 seconds!\n", - "\r", - "Evaluation on dev at Epoch 7/10. 
Step:1078/1540: \n", - "\r", - "AccMetric: acc=0.6903669724770642\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.25 seconds!\n", - "\r", - "Evaluation on dev at Epoch 8/10. Step:1232/1540: \n", - "\r", - "AccMetric: acc=0.7293577981651376\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.4 seconds!\n", - "\r", - "Evaluation on dev at Epoch 9/10. Step:1386/1540: \n", - "\r", - "AccMetric: acc=0.7006880733944955\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.48 seconds!\n", - "\r", - "Evaluation on dev at Epoch 10/10. Step:1540/1540: \n", - "\r", - "AccMetric: acc=0.7339449541284404\n", - "\n", - "\r\n", - "In Epoch:4/Step:616, got best dev performance:\n", - "AccMetric: acc=0.7534403669724771\n", - "Reloaded the best model.\n" - ] - }, - { - "data": { - "text/plain": [ - "{'best_eval': {'AccMetric': {'acc': 0.7534403669724771}},\n", - " 'best_epoch': 4,\n", - " 'best_step': 616,\n", - " 'seconds': 34.74}" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "trainer = Trainer(train_data=train_data, dev_data=dev_data, model=model,\n", - " loss=loss, device=device, metrics=AccMetric())\n", - "trainer.train()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "``MetricBase`` 将会在输入的字典 ``pred_dict`` 和 ``target_dict`` 中进行检查.\n", - "``pred_dict`` 是模型当中 ``forward()`` 函数或者 ``predict()`` 函数的返回值.\n", - "``target_dict`` 是DataSet当中的ground truth, 判定ground truth的条件是field的 ``is_target`` 被设置为True.\n", - "\n", - "``MetricBase`` 会进行以下的类型检测:\n", - "\n", - "1. self.evaluate当中是否有 varargs, 这是不支持的.\n", - "2. self.evaluate当中所需要的参数是否既不在 ``pred_dict`` 也不在 ``target_dict`` .\n", - "3. 
self.evaluate当中所需要的参数是否既在 ``pred_dict`` 也在 ``target_dict`` .\n", - "\n", - "除此以外,在参数被传入self.evaluate以前,这个函数会检测 ``pred_dict`` 和 ``target_dict`` 当中没有被用到的参数\n", - "如果kwargs是self.evaluate的参数,则不会检测\n", - "\n", - "self.evaluate将计算一个批次(batch)的评价指标,并累计。 没有返回值\n", - "self.get_metric将统计当前的评价指标并返回评价结果, 返回值需要是一个dict, key是指标名称,value是指标的值\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/docs/source/_static/notebooks/tutorial_8_modules_models.ipynb b/docs/source/_static/notebooks/tutorial_8_modules_models.ipynb deleted file mode 100644 index 2784cca1..00000000 --- a/docs/source/_static/notebooks/tutorial_8_modules_models.ipynb +++ /dev/null @@ -1,1014 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用Modules和Models快速搭建自定义模型\n", - "\n", - "modules 和 models 用于构建 fastNLP 所需的神经网络模型,它可以和 torch.nn 中的模型一起使用。 下面我们会分三节介绍编写构建模型的具体方法。\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "我们首先准备好和上篇教程一样的基础实验代码" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.io import SST2Pipe\n", - "from fastNLP import Trainer, CrossEntropyLoss, AccuracyMetric\n", - "import torch\n", - "\n", - "databundle = SST2Pipe().process_from_file()\n", - "vocab = databundle.get_vocab('words')\n", - "train_data = databundle.get_dataset('train')[:5000]\n", - "train_data, test_data = train_data.split(0.015)\n", - "dev_data = databundle.get_dataset('dev')\n", - "\n", - "loss = CrossEntropyLoss()\n", - "metric = AccuracyMetric()\n", - "device = 0 if torch.cuda.is_available() else 'cpu'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用 models 中的模型\n", - "\n", - "fastNLP 在 models 模块中内置了如 CNNText 、 SeqLabeling 等完整的模型,以供用户直接使用。 以文本分类的任务为例,我们从 models 中导入 CNNText 模型,用它进行训练。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 41]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-28-00-56-04\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=1540.0), HTML(value='')), layout=Layout(d…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": 
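The type checks quoted at the end of the metrics tutorial note that unused keys in `pred_dict`/`target_dict` are not flagged when `evaluate` accepts `**kwargs`. A sketch of that escape hatch (the class itself is illustrative):

```python
from fastNLP import MetricBase

class LooseAccMetric(MetricBase):
    def __init__(self):
        super().__init__()
        self.total, self.acc_count = 0, 0

    def evaluate(self, pred, target, **kwargs):  # extra fields are ignored
        self.total += target.size(0)
        self.acc_count += target.eq(pred).sum().item()

    def get_metric(self, reset=True):
        acc = self.acc_count / max(self.total, 1)
        if reset:
            self.total, self.acc_count = 0, 0
        return {'acc': acc}
```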
"display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.22 seconds!\n", - "\r", - "Evaluation on dev at Epoch 1/10. Step:154/1540: \n", - "\r", - "AccuracyMetric: acc=0.760321\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.29 seconds!\n", - "\r", - "Evaluation on dev at Epoch 2/10. Step:308/1540: \n", - "\r", - "AccuracyMetric: acc=0.727064\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.48 seconds!\n", - "\r", - "Evaluation on dev at Epoch 3/10. Step:462/1540: \n", - "\r", - "AccuracyMetric: acc=0.758028\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.24 seconds!\n", - "\r", - "Evaluation on dev at Epoch 4/10. Step:616/1540: \n", - "\r", - "AccuracyMetric: acc=0.759174\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.47 seconds!\n", - "\r", - "Evaluation on dev at Epoch 5/10. Step:770/1540: \n", - "\r", - "AccuracyMetric: acc=0.743119\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.22 seconds!\n", - "\r", - "Evaluation on dev at Epoch 6/10. Step:924/1540: \n", - "\r", - "AccuracyMetric: acc=0.756881\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.21 seconds!\n", - "\r", - "Evaluation on dev at Epoch 7/10. 
Step:1078/1540: \n", - "\r", - "AccuracyMetric: acc=0.752294\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.21 seconds!\n", - "\r", - "Evaluation on dev at Epoch 8/10. Step:1232/1540: \n", - "\r", - "AccuracyMetric: acc=0.756881\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.15 seconds!\n", - "\r", - "Evaluation on dev at Epoch 9/10. Step:1386/1540: \n", - "\r", - "AccuracyMetric: acc=0.75344\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.12 seconds!\n", - "\r", - "Evaluation on dev at Epoch 10/10. Step:1540/1540: \n", - "\r", - "AccuracyMetric: acc=0.752294\n", - "\n", - "\r\n", - "In Epoch:1/Step:154, got best dev performance:\n", - "AccuracyMetric: acc=0.760321\n", - "Reloaded the best model.\n" - ] - }, - { - "data": { - "text/plain": [ - "{'best_eval': {'AccuracyMetric': {'acc': 0.760321}},\n", - " 'best_epoch': 1,\n", - " 'best_step': 154,\n", - " 'seconds': 29.3}" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP.models import CNNText\n", - "\n", - "model_cnn = CNNText((len(vocab),100), num_classes=2, dropout=0.1)\n", - "\n", - "trainer = Trainer(train_data=train_data, dev_data=dev_data, metrics=metric,\n", - " loss=loss, device=device, model=model_cnn)\n", - "trainer.train()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在 iPython 环境输入 model_cnn ,我们可以看到 model_cnn 的网络结构" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "CNNText(\n", - " (embed): Embedding(\n", - " (embed): Embedding(16292, 100)\n", - " (dropout): Dropout(p=0.0, inplace=False)\n", - " )\n", - " (conv_pool): ConvMaxpool(\n", - " (convs): ModuleList(\n", - " (0): Conv1d(100, 30, kernel_size=(1,), stride=(1,), bias=False)\n", - " (1): Conv1d(100, 40, kernel_size=(3,), stride=(1,), padding=(1,), bias=False)\n", - " (2): Conv1d(100, 50, kernel_size=(5,), stride=(1,), padding=(2,), bias=False)\n", - " )\n", - " )\n", - " (dropout): Dropout(p=0.1, inplace=False)\n", - " (fc): Linear(in_features=120, out_features=2, bias=True)\n", - ")" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model_cnn" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用 torch.nn 编写模型\n", - "\n", - "FastNLP 完全支持使用 pyTorch 
编写的模型,但与 pyTorch 中编写模型的常见方法不同, 用于 fastNLP 的模型中 forward 函数需要返回一个字典,字典中至少需要包含 pred 这个字段。\n", - "\n", - "下面是使用 pyTorch 中的 torch.nn 模块编写的文本分类,注意观察代码中标注的向量维度。 由于 pyTorch 使用了约定俗成的维度设置,使得 forward 中需要多次处理维度顺序" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "import torch\n", - "import torch.nn as nn\n", - "\n", - "class LSTMText(nn.Module):\n", - " def __init__(self, vocab_size, embedding_dim, output_dim, hidden_dim=64, num_layers=2, dropout=0.5):\n", - " super().__init__()\n", - "\n", - " self.embedding = nn.Embedding(vocab_size, embedding_dim)\n", - " self.lstm = nn.LSTM(embedding_dim, hidden_dim, num_layers=num_layers, bidirectional=True, dropout=dropout)\n", - " self.fc = nn.Linear(hidden_dim * 2, output_dim)\n", - " self.dropout = nn.Dropout(dropout)\n", - "\n", - " def forward(self, words):\n", - " # (input) words : (batch_size, seq_len)\n", - " words = words.permute(1,0)\n", - " # words : (seq_len, batch_size)\n", - "\n", - " embedded = self.dropout(self.embedding(words))\n", - " # embedded : (seq_len, batch_size, embedding_dim)\n", - " output, (hidden, cell) = self.lstm(embedded)\n", - " # output: (seq_len, batch_size, hidden_dim * 2)\n", - " # hidden: (num_layers * 2, batch_size, hidden_dim)\n", - " # cell: (num_layers * 2, batch_size, hidden_dim)\n", - "\n", - " hidden = torch.cat((hidden[-2, :, :], hidden[-1, :, :]), dim=1)\n", - " hidden = self.dropout(hidden)\n", - " # hidden: (batch_size, hidden_dim * 2)\n", - "\n", - " pred = self.fc(hidden.squeeze(0))\n", - " # result: (batch_size, output_dim)\n", - " return {\"pred\":pred}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "我们同样可以在 iPython 环境中查看这个模型的网络结构" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "LSTMText(\n", - " (embedding): Embedding(16292, 100)\n", - " (lstm): LSTM(100, 64, num_layers=2, dropout=0.5, bidirectional=True)\n", - " (fc): Linear(in_features=128, out_features=2, bias=True)\n", - " (dropout): Dropout(p=0.5, inplace=False)\n", - ")" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model_lstm = LSTMText(len(vocab), 100, 2)\n", - "model_lstm " - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 41]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-28-00-56-34\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=1540.0), HTML(value='')), layout=Layout(d…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - 
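Since the permute calls in `LSTMText` exist only because `nn.LSTM` defaults to sequence-first tensors, here is an alternative sketch (not from the notebook) with `batch_first=True`:

```python
import torch
import torch.nn as nn

class LSTMTextBatchFirst(nn.Module):
    def __init__(self, vocab_size, embedding_dim, output_dim,
                 hidden_dim=64, num_layers=2, dropout=0.5):
        super().__init__()
        self.embedding = nn.Embedding(vocab_size, embedding_dim)
        self.lstm = nn.LSTM(embedding_dim, hidden_dim, num_layers=num_layers,
                            bidirectional=True, dropout=dropout,
                            batch_first=True)
        self.fc = nn.Linear(hidden_dim * 2, output_dim)
        self.dropout = nn.Dropout(dropout)

    def forward(self, words):
        # words: (batch_size, seq_len) -- no permute needed
        embedded = self.dropout(self.embedding(words))
        _, (hidden, _) = self.lstm(embedded)
        # hidden keeps shape (num_layers * 2, batch_size, hidden_dim)
        hidden = self.dropout(torch.cat((hidden[-2], hidden[-1]), dim=1))
        return {'pred': self.fc(hidden)}
```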
}, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.36 seconds!\n", - "\r", - "Evaluation on dev at Epoch 1/10. Step:154/1540: \n", - "\r", - "AccuracyMetric: acc=0.59289\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.35 seconds!\n", - "\r", - "Evaluation on dev at Epoch 2/10. Step:308/1540: \n", - "\r", - "AccuracyMetric: acc=0.674312\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.21 seconds!\n", - "\r", - "Evaluation on dev at Epoch 3/10. Step:462/1540: \n", - "\r", - "AccuracyMetric: acc=0.724771\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.4 seconds!\n", - "\r", - "Evaluation on dev at Epoch 4/10. Step:616/1540: \n", - "\r", - "AccuracyMetric: acc=0.748853\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.24 seconds!\n", - "\r", - "Evaluation on dev at Epoch 5/10. Step:770/1540: \n", - "\r", - "AccuracyMetric: acc=0.756881\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.29 seconds!\n", - "\r", - "Evaluation on dev at Epoch 6/10. Step:924/1540: \n", - "\r", - "AccuracyMetric: acc=0.741972\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.32 seconds!\n", - "\r", - "Evaluation on dev at Epoch 7/10. 
Step:1078/1540: \n", - "\r", - "AccuracyMetric: acc=0.754587\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.24 seconds!\n", - "\r", - "Evaluation on dev at Epoch 8/10. Step:1232/1540: \n", - "\r", - "AccuracyMetric: acc=0.756881\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.28 seconds!\n", - "\r", - "Evaluation on dev at Epoch 9/10. Step:1386/1540: \n", - "\r", - "AccuracyMetric: acc=0.740826\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.23 seconds!\n", - "\r", - "Evaluation on dev at Epoch 10/10. Step:1540/1540: \n", - "\r", - "AccuracyMetric: acc=0.751147\n", - "\n", - "\r\n", - "In Epoch:5/Step:770, got best dev performance:\n", - "AccuracyMetric: acc=0.756881\n", - "Reloaded the best model.\n" - ] - }, - { - "data": { - "text/plain": [ - "{'best_eval': {'AccuracyMetric': {'acc': 0.756881}},\n", - " 'best_epoch': 5,\n", - " 'best_step': 770,\n", - " 'seconds': 45.69}" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "trainer = Trainer(train_data=train_data, dev_data=dev_data, metrics=metric,\n", - " loss=loss, device=device, model=model_lstm)\n", - "trainer.train()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用 modules 编写模型\n", - "\n", - "下面我们使用 fastNLP.modules 中的组件来构建同样的网络。由于 fastNLP 统一把 batch_size 放在第一维, 在编写代码的过程中会有一定的便利。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "MyText(\n", - " (embedding): Embedding(\n", - " (embed): Embedding(16292, 100)\n", - " (dropout): Dropout(p=0.0, inplace=False)\n", - " )\n", - " (lstm): LSTM(\n", - " (lstm): LSTM(100, 64, num_layers=2, batch_first=True, bidirectional=True)\n", - " )\n", - " (mlp): MLP(\n", - " (hiddens): ModuleList()\n", - " (output): Linear(in_features=128, out_features=2, bias=True)\n", - " (dropout): Dropout(p=0.5, inplace=False)\n", - " )\n", - ")" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP.modules import LSTM, MLP\n", - "from fastNLP.embeddings import Embedding\n", - "\n", - "\n", - "class MyText(nn.Module):\n", - " def __init__(self, vocab_size, embedding_dim, output_dim, hidden_dim=64, num_layers=2, dropout=0.5):\n", - " super().__init__()\n", - "\n", - " self.embedding = Embedding((vocab_size, 
embedding_dim))\n", - " self.lstm = LSTM(embedding_dim, hidden_dim, num_layers=num_layers, bidirectional=True)\n", - " self.mlp = MLP([hidden_dim*2,output_dim], dropout=dropout)\n", - "\n", - " def forward(self, words):\n", - " embedded = self.embedding(words)\n", - " _,(hidden,_) = self.lstm(embedded)\n", - " pred = self.mlp(torch.cat((hidden[-1],hidden[-2]),dim=1))\n", - " return {\"pred\":pred}\n", - " \n", - "model_text = MyText(len(vocab), 100, 2)\n", - "model_text" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 41]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-28-00-57-19\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "16a35f2b0ef0457dae15c5f240a19a3a", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=1540.0), HTML(value='')), layout=Layout(d…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.38 seconds!\n", - "\r", - "Evaluation on dev at Epoch 1/10. Step:154/1540: \n", - "\r", - "AccuracyMetric: acc=0.767202\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.22 seconds!\n", - "\r", - "Evaluation on dev at Epoch 2/10. 
Step:308/1540: \n", - "\r", - "AccuracyMetric: acc=0.743119\n", - "\n" - ] - } - ], - "source": [ - "trainer = Trainer(train_data=train_data, dev_data=dev_data, metrics=metric,\n", - " loss=loss, device=device, model=model_lstm)\n", - "trainer.train()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/docs/source/_static/notebooks/tutorial_9_callback.ipynb b/docs/source/_static/notebooks/tutorial_9_callback.ipynb deleted file mode 100644 index ed71a9b0..00000000 --- a/docs/source/_static/notebooks/tutorial_9_callback.ipynb +++ /dev/null @@ -1,622 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用 Callback 自定义你的训练过程" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- 什么是 Callback\n", - "- 使用 Callback \n", - "- 一些常用的 Callback\n", - "- 自定义实现 Callback" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "什么是Callback\n", - "------\n", - "\n", - "Callback 是与 Trainer 紧密结合的模块,利用 Callback 可以在 Trainer 训练时,加入自定义的操作,比如梯度裁剪,学习率调节,测试模型的性能等。定义的 Callback 会在训练的特定阶段被调用。\n", - "\n", - "fastNLP 中提供了很多常用的 Callback ,开箱即用。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "使用 Callback\n", - " ------\n", - "\n", - "使用 Callback 很简单,将需要的 callback 按 list 存储,以对应参数 ``callbacks`` 传入对应的 Trainer。Trainer 在训练时就会自动执行这些 Callback 指定的操作了。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "ExecuteTime": { - "end_time": "2019-09-17T07:34:46.465871Z", - "start_time": "2019-09-17T07:34:30.648758Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "In total 3 datasets:\n", - "\ttest has 1200 instances.\n", - "\ttrain has 9600 instances.\n", - "\tdev has 1200 instances.\n", - "In total 2 vocabs:\n", - "\tchars has 4409 entries.\n", - "\ttarget has 2 entries.\n", - "\n", - "training epochs started 2019-09-17-03-34-34\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=900), HTML(value='')), layout=Layout(display=…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.1 seconds!\n", - "Evaluation on dev at Epoch 1/3. 
Step:300/900: \n", - "AccuracyMetric: acc=0.863333\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.11 seconds!\n", - "Evaluation on dev at Epoch 2/3. Step:600/900: \n", - "AccuracyMetric: acc=0.886667\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.1 seconds!\n", - "Evaluation on dev at Epoch 3/3. Step:900/900: \n", - "AccuracyMetric: acc=0.890833\n", - "\n", - "\r\n", - "In Epoch:3/Step:900, got best dev performance:\n", - "AccuracyMetric: acc=0.890833\n", - "Reloaded the best model.\n" - ] - } - ], - "source": [ - "from fastNLP import (Callback, EarlyStopCallback,\n", - " Trainer, CrossEntropyLoss, AccuracyMetric)\n", - "from fastNLP.models import CNNText\n", - "import torch.cuda\n", - "\n", - "# prepare data\n", - "def get_data():\n", - " from fastNLP.io import ChnSentiCorpPipe as pipe\n", - " data = pipe().process_from_file()\n", - " print(data)\n", - " data.rename_field('chars', 'words')\n", - " train_data = data.datasets['train']\n", - " dev_data = data.datasets['dev']\n", - " test_data = data.datasets['test']\n", - " vocab = data.vocabs['words']\n", - " tgt_vocab = data.vocabs['target']\n", - " return train_data, dev_data, test_data, vocab, tgt_vocab\n", - "\n", - "# prepare model\n", - "train_data, dev_data, _, vocab, tgt_vocab = get_data()\n", - "device = 'cuda:0' if torch.cuda.is_available() else 'cpu'\n", - "model = CNNText((len(vocab),50), num_classes=len(tgt_vocab))\n", - "\n", - "# define callback\n", - "callbacks=[EarlyStopCallback(5)]\n", - "\n", - "# pass callbacks to Trainer\n", - "def train_with_callback(cb_list):\n", - " trainer = Trainer(\n", - " device=device,\n", - " n_epochs=3,\n", - " model=model, \n", - " train_data=train_data, \n", - " dev_data=dev_data, \n", - " loss=CrossEntropyLoss(), \n", - " metrics=AccuracyMetric(), \n", - " callbacks=cb_list, \n", - " check_code_level=-1\n", - " )\n", - " trainer.train()\n", - "\n", - "train_with_callback(callbacks)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "fastNLP 中的 Callback\n", - "-------\n", - "fastNLP 中提供了很多常用的 Callback,如梯度裁剪,训练时早停和测试验证集,fitlog 等等。具体 Callback 请参考 fastNLP.core.callbacks" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "ExecuteTime": { - "end_time": "2019-09-17T07:35:02.182727Z", - "start_time": "2019-09-17T07:34:49.443863Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "training epochs started 2019-09-17-03-34-49\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=900), HTML(value='')), layout=Layout(display=…" - ] - }, - "metadata": {}, - "output_type": "display_data" 
- }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.13 seconds!\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.12 seconds!\n", - "Evaluation on data-test:\n", - "AccuracyMetric: acc=0.890833\n", - "Evaluation on dev at Epoch 1/3. Step:300/900: \n", - "AccuracyMetric: acc=0.890833\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.09 seconds!\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.09 seconds!\n", - "Evaluation on data-test:\n", - "AccuracyMetric: acc=0.8875\n", - "Evaluation on dev at Epoch 2/3. Step:600/900: \n", - "AccuracyMetric: acc=0.8875\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.11 seconds!\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.1 seconds!\n", - "Evaluation on data-test:\n", - "AccuracyMetric: acc=0.885\n", - "Evaluation on dev at Epoch 3/3. 
Step:900/900: \n", - "AccuracyMetric: acc=0.885\n", - "\n", - "\r\n", - "In Epoch:1/Step:300, got best dev performance:\n", - "AccuracyMetric: acc=0.890833\n", - "Reloaded the best model.\n" - ] - } - ], - "source": [ - "from fastNLP import EarlyStopCallback, GradientClipCallback, EvaluateCallback\n", - "callbacks = [\n", - " EarlyStopCallback(5),\n", - " GradientClipCallback(clip_value=5, clip_type='value'),\n", - " EvaluateCallback(dev_data)\n", - "]\n", - "\n", - "train_with_callback(callbacks)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "自定义 Callback\n", - "------\n", - "\n", - "这里我们以一个简单的 Callback作为例子,它的作用是打印每一个 Epoch 平均训练 loss。\n", - "\n", - "#### 创建 Callback\n", - " \n", - "要自定义 Callback,我们要实现一个类,继承 fastNLP.Callback。\n", - "\n", - "这里我们定义 MyCallBack ,继承 fastNLP.Callback 。\n", - "\n", - "#### 指定 Callback 调用的阶段\n", - " \n", - "Callback 中所有以 on_ 开头的类方法会在 Trainer 的训练中在特定阶段调用。 如 on_train_begin() 会在训练开始时被调用,on_epoch_end() 会在每个 epoch 结束时调用。 具体有哪些类方法,参见 Callback 文档。\n", - "\n", - "这里, MyCallBack 在求得loss时调用 on_backward_begin() 记录当前 loss ,在每一个 epoch 结束时调用 on_epoch_end() ,求当前 epoch 平均loss并输出。\n", - "\n", - "#### 使用 Callback 的属性访问 Trainer 的内部信息\n", - " \n", - "为了方便使用,可以使用 Callback 的属性,访问 Trainer 中的对应信息,如 optimizer, epoch, n_epochs,分别对应训练时的优化器,当前 epoch 数,和总 epoch 数。 具体可访问的属性,参见文档 Callback 。\n", - "\n", - "这里, MyCallBack 为了求平均 loss ,需要知道当前 epoch 的总步数,可以通过 self.step 属性得到当前训练了多少步。\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": { - "ExecuteTime": { - "end_time": "2019-09-17T07:43:10.907139Z", - "start_time": "2019-09-17T07:42:58.488177Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "training epochs started 2019-09-17-03-42-58\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=900), HTML(value='')), layout=Layout(display=…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.11 seconds!\n", - "Evaluation on dev at Epoch 1/3. Step:300/900: \n", - "AccuracyMetric: acc=0.883333\n", - "\n", - "Avg loss at epoch 1, 0.100254\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.1 seconds!\n", - "Evaluation on dev at Epoch 2/3. 
Step:600/900: \n", - "AccuracyMetric: acc=0.8775\n", - "\n", - "Avg loss at epoch 2, 0.183511\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.13 seconds!\n", - "Evaluation on dev at Epoch 3/3. Step:900/900: \n", - "AccuracyMetric: acc=0.875833\n", - "\n", - "Avg loss at epoch 3, 0.257103\n", - "\r\n", - "In Epoch:1/Step:300, got best dev performance:\n", - "AccuracyMetric: acc=0.883333\n", - "Reloaded the best model.\n" - ] - } - ], - "source": [ - "from fastNLP import Callback\n", - "from fastNLP import logger\n", - "\n", - "class MyCallBack(Callback):\n", - " \"\"\"Print average loss in each epoch\"\"\"\n", - " def __init__(self):\n", - " super().__init__()\n", - " self.total_loss = 0\n", - " self.start_step = 0\n", - " \n", - " def on_backward_begin(self, loss):\n", - " self.total_loss += loss.item()\n", - " \n", - " def on_epoch_end(self):\n", - " n_steps = self.step - self.start_step\n", - " avg_loss = self.total_loss / n_steps\n", - " logger.info('Avg loss at epoch %d, %.6f', self.epoch, avg_loss)\n", - " self.start_step = self.step\n", - "\n", - "callbacks = [MyCallBack()]\n", - "train_with_callback(callbacks)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.3" - }, - "varInspector": { - "cols": { - "lenName": 16, - "lenType": 16, - "lenVar": 40 - }, - "kernels_config": { - "python": { - "delete_cmd_postfix": "", - "delete_cmd_prefix": "del ", - "library": "var_list.py", - "varRefreshCmd": "print(var_dic_list())" - }, - "r": { - "delete_cmd_postfix": ") ", - "delete_cmd_prefix": "rm(", - "library": "var_list.r", - "varRefreshCmd": "cat(var_dic_list()) " - } - }, - "types_to_exclude": [ - "module", - "function", - "builtin_function_or_method", - "instance", - "_Feature" - ], - "window_display": false - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/source/_static/notebooks/序列标注.ipynb b/docs/source/_static/notebooks/序列标注.ipynb deleted file mode 100644 index 15118708..00000000 --- a/docs/source/_static/notebooks/序列标注.ipynb +++ /dev/null @@ -1,912 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 序列标注\n", - "\n", - "这一部分的内容主要展示如何使用fastNLP实现序列标注(Sequence labeling)任务。您可以使用fastNLP的各个组件快捷,方便地完成序列标注任务,达到出色的效果。 在阅读这篇教程前,希望您已经熟悉了fastNLP的基础使用,尤其是数据的载入以及模型的构建,通过这个小任务的能让您进一步熟悉fastNLP的使用。\n", - "\n", - "## 命名实体识别(name entity recognition, NER)\n", - "\n", - "命名实体识别任务是从文本中抽取出具有特殊意义或者指代性非常强的实体,通常包括人名、地名、机构名和时间等。 如下面的例子中\n", - "\n", - "*我来自复旦大学*\n", - "\n", - "其中“复旦大学”就是一个机构名,命名实体识别就是要从中识别出“复旦大学”这四个字是一个整体,且属于机构名这个类别。这个问题在实际做的时候会被 转换为序列标注问题\n", - "\n", - "针对\"我来自复旦大学\"这句话,我们的预测目标将是[O, O, O, B-ORG, I-ORG, I-ORG, I-ORG],其中O表示out,即不是一个实体,B-ORG是ORG( organization的缩写)这个类别的开头(Begin),I-ORG是ORG类别的中间(Inside)。\n", - "\n", - 
"在本tutorial中我们将通过fastNLP尝试写出一个能够执行以上任务的模型。\n", - "\n", - "## 载入数据\n", - "\n", - "fastNLP的数据载入主要是由Loader与Pipe两个基类衔接完成的,您可以通过《使用Loader和Pipe处理数据》了解如何使用fastNLP提供的数据加载函数。下面我们以微博命名实体任务来演示一下在fastNLP进行序列标注任务。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+-----------------------------------+-----------------------------------+-----------------------------------+---------+\n", - "| raw_chars | target | chars | seq_len |\n", - "+-----------------------------------+-----------------------------------+-----------------------------------+---------+\n", - "| ['科', '技', '全', '方', '位',... | [0, 0, 0, 0, 0, 0, 0, 0, 0, 0,... | [792, 1015, 156, 198, 291, 714... | 26 |\n", - "| ['对', ',', '输', '给', '一',... | [0, 0, 0, 0, 0, 0, 3, 1, 0, 0,... | [123, 2, 1205, 115, 8, 24, 101... | 15 |\n", - "+-----------------------------------+-----------------------------------+-----------------------------------+---------+\n" - ] - } - ], - "source": [ - "from fastNLP.io import WeiboNERPipe\n", - "data_bundle = WeiboNERPipe().process_from_file()\n", - "print(data_bundle.get_dataset('train')[:2])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 模型构建\n", - "\n", - "首先选择需要使用的Embedding类型。关于Embedding的相关说明可以参见《使用Embedding模块将文本转成向量》。 在这里我们使用通过word2vec预训练的中文汉字embedding。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Found 3321 out of 3471 words in the pre-training embedding.\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import StaticEmbedding\n", - "\n", - "embed = StaticEmbedding(vocab=data_bundle.get_vocab('chars'), model_dir_or_name='cn-char-fastnlp-100d')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "选择好Embedding之后,我们可以使用fastNLP中自带的 fastNLP.models.BiLSTMCRF 作为模型。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.models import BiLSTMCRF\n", - "\n", - "data_bundle.rename_field('chars', 'words') # 这是由于BiLSTMCRF模型的forward函数接受的words,而不是chars,所以需要把这一列重新命名\n", - "model = BiLSTMCRF(embed=embed, num_classes=len(data_bundle.get_vocab('target')), num_layers=1, hidden_size=200, dropout=0.5,\n", - " target_vocab=data_bundle.get_vocab('target'))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 进行训练\n", - "下面我们选择用来评估模型的metric,以及优化用到的优化函数。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP import SpanFPreRecMetric\n", - "from torch.optim import Adam\n", - "from fastNLP import LossInForward\n", - "\n", - "metric = SpanFPreRecMetric(tag_vocab=data_bundle.get_vocab('target'))\n", - "optimizer = Adam(model.parameters(), lr=1e-2)\n", - "loss = LossInForward()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "使用Trainer进行训练, 您可以通过修改 device 的值来选择显卡。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26]) \n", - "target fields after batch(if batch 
size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-27-13-53-24\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=430.0), HTML(value='')), layout=Layout(di…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=9.0), HTML(value='')), layout=Layout(disp…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.89 seconds!\n", - "\r", - "Evaluation on dev at Epoch 1/10. Step:43/430: \n", - "\r", - "SpanFPreRecMetric: f=0.067797, pre=0.192771, rec=0.041131\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=9.0), HTML(value='')), layout=Layout(disp…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.9 seconds!\n", - "\r", - "Evaluation on dev at Epoch 2/10. Step:86/430: \n", - "\r", - "SpanFPreRecMetric: f=0.344086, pre=0.568047, rec=0.246787\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=9.0), HTML(value='')), layout=Layout(disp…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.88 seconds!\n", - "\r", - "Evaluation on dev at Epoch 3/10. Step:129/430: \n", - "\r", - "SpanFPreRecMetric: f=0.446701, pre=0.653465, rec=0.339332\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=9.0), HTML(value='')), layout=Layout(disp…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.81 seconds!\n", - "\r", - "Evaluation on dev at Epoch 4/10. Step:172/430: \n", - "\r", - "SpanFPreRecMetric: f=0.479871, pre=0.642241, rec=0.383033\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=9.0), HTML(value='')), layout=Layout(disp…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.91 seconds!\n", - "\r", - "Evaluation on dev at Epoch 5/10. 
Step:215/430: \n", - "\r", - "SpanFPreRecMetric: f=0.486312, pre=0.650862, rec=0.388175\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=9.0), HTML(value='')), layout=Layout(disp…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.87 seconds!\n", - "\r", - "Evaluation on dev at Epoch 6/10. Step:258/430: \n", - "\r", - "SpanFPreRecMetric: f=0.541401, pre=0.711297, rec=0.437018\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=9.0), HTML(value='')), layout=Layout(disp…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.86 seconds!\n", - "\r", - "Evaluation on dev at Epoch 7/10. Step:301/430: \n", - "\r", - "SpanFPreRecMetric: f=0.430335, pre=0.685393, rec=0.313625\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=9.0), HTML(value='')), layout=Layout(disp…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.82 seconds!\n", - "\r", - "Evaluation on dev at Epoch 8/10. Step:344/430: \n", - "\r", - "SpanFPreRecMetric: f=0.477759, pre=0.665138, rec=0.372751\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=9.0), HTML(value='')), layout=Layout(disp…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.81 seconds!\n", - "\r", - "Evaluation on dev at Epoch 9/10. Step:387/430: \n", - "\r", - "SpanFPreRecMetric: f=0.500759, pre=0.611111, rec=0.424165\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=9.0), HTML(value='')), layout=Layout(disp…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.8 seconds!\n", - "\r", - "Evaluation on dev at Epoch 10/10. 
Step:430/430: \n", - "\r", - "SpanFPreRecMetric: f=0.496025, pre=0.65, rec=0.401028\n", - "\n", - "\r\n", - "In Epoch:6/Step:258, got best dev performance:\n", - "SpanFPreRecMetric: f=0.541401, pre=0.711297, rec=0.437018\n", - "Reloaded the best model.\n" - ] - }, - { - "data": { - "text/plain": [ - "{'best_eval': {'SpanFPreRecMetric': {'f': 0.541401,\n", - " 'pre': 0.711297,\n", - " 'rec': 0.437018}},\n", - " 'best_epoch': 6,\n", - " 'best_step': 258,\n", - " 'seconds': 121.39}" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import Trainer\n", - "import torch\n", - "\n", - "device= 0 if torch.cuda.is_available() else 'cpu'\n", - "trainer = Trainer(data_bundle.get_dataset('train'), model, loss=loss, optimizer=optimizer,\n", - " dev_data=data_bundle.get_dataset('dev'), metrics=metric, device=device)\n", - "trainer.train()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 进行测试\n", - "训练结束之后过,可以通过 Tester 测试其在测试集上的性能" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=17.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 1.54 seconds!\n", - "[tester] \n", - "SpanFPreRecMetric: f=0.439024, pre=0.685279, rec=0.322967\n" - ] - }, - { - "data": { - "text/plain": [ - "{'SpanFPreRecMetric': {'f': 0.439024, 'pre': 0.685279, 'rec': 0.322967}}" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import Tester\n", - "tester = Tester(data_bundle.get_dataset('test'), model, metrics=metric)\n", - "tester.test()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用更强的Bert做序列标注\n", - "\n", - "在fastNLP使用Bert进行任务,您只需要把fastNLP.embeddings.StaticEmbedding 切换为 fastNLP.embeddings.BertEmbedding(可修改 device 选择显卡)。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-chinese-wwm/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-chinese-wwm/chinese_wwm_pytorch.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 3384 words out of 3471.\n", - "input fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-27-13-58-51\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, 
-from fastNLP.io import WeiboNERPipe
-data_bundle = WeiboNERPipe().process_from_file()
-data_bundle.rename_field('chars', 'words')
-
-from fastNLP.embeddings import BertEmbedding
-embed = BertEmbedding(vocab=data_bundle.get_vocab('words'), model_dir_or_name='cn')
-model = BiLSTMCRF(embed=embed, num_classes=len(data_bundle.get_vocab('target')), num_layers=1, hidden_size=200, dropout=0.5,
-                  target_vocab=data_bundle.get_vocab('target'))
-
-from fastNLP import SpanFPreRecMetric
-from torch.optim import Adam
-from fastNLP import LossInForward
-metric = SpanFPreRecMetric(tag_vocab=data_bundle.get_vocab('target'))
-optimizer = Adam(model.parameters(), lr=2e-5)
-loss = LossInForward()
-
-from fastNLP import Trainer
-import torch
-device = 5 if torch.cuda.is_available() else 'cpu'
-trainer = Trainer(data_bundle.get_dataset('train'), model, loss=loss, optimizer=optimizer, batch_size=12,
-                  dev_data=data_bundle.get_dataset('dev'), metrics=metric, device=device)
-trainer.train()
-
-from fastNLP import Tester
-tester = Tester(data_bundle.get_dataset('test'), model, metrics=metric)
-tester.test()
-
-    loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-chinese-wwm/vocab.txt
-    Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-chinese-wwm/chinese_wwm_pytorch.bin.
-    Start to generate word pieces for word.
-    Found(Or segment into word pieces) 3384 words out of 3471.
-    (field summary identical to the run above)
-
-    training epochs started 2020-02-27-13-58-51
-    (progress-bar widget outputs and evaluation timings elided; per-epoch dev results:)
-    Evaluation on dev at Epoch 1/10. Step:113/1130: SpanFPreRecMetric: f=0.008114, pre=0.019231, rec=0.005141
-    Evaluation on dev at Epoch 2/10. Step:226/1130: SpanFPreRecMetric: f=0.467866, pre=0.467866, rec=0.467866
-    Evaluation on dev at Epoch 3/10. Step:339/1130: SpanFPreRecMetric: f=0.566879, pre=0.482821, rec=0.686375
-    Evaluation on dev at Epoch 4/10. Step:452/1130: SpanFPreRecMetric: f=0.651972, pre=0.59408, rec=0.722365
-    Evaluation on dev at Epoch 5/10. Step:565/1130: SpanFPreRecMetric: f=0.640909, pre=0.574338, rec=0.724936
-    Evaluation on dev at Epoch 6/10. Step:678/1130: SpanFPreRecMetric: f=0.661836, pre=0.624146, rec=0.70437
-    Evaluation on dev at Epoch 7/10. Step:791/1130: SpanFPreRecMetric: f=0.683429, pre=0.615226, rec=0.768638
-    Evaluation on dev at Epoch 8/10. Step:904/1130: SpanFPreRecMetric: f=0.674699, pre=0.634921, rec=0.719794
-    Evaluation on dev at Epoch 9/10. Step:1017/1130: SpanFPreRecMetric: f=0.693878, pre=0.650901, rec=0.742931
Step:1130/1130: \n", - "\r", - "SpanFPreRecMetric: f=0.686845, pre=0.62766, rec=0.758355\n", - "\n", - "\r\n", - "In Epoch:9/Step:1017, got best dev performance:\n", - "SpanFPreRecMetric: f=0.693878, pre=0.650901, rec=0.742931\n", - "Reloaded the best model.\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=17.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 1.96 seconds!\n", - "[tester] \n", - "SpanFPreRecMetric: f=0.626561, pre=0.596112, rec=0.660287\n" - ] - }, - { - "data": { - "text/plain": [ - "{'SpanFPreRecMetric': {'f': 0.626561, 'pre': 0.596112, 'rec': 0.660287}}" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "\n", - "from fastNLP.io import WeiboNERPipe\n", - "data_bundle = WeiboNERPipe().process_from_file()\n", - "data_bundle.rename_field('chars', 'words')\n", - "\n", - "from fastNLP.embeddings import BertEmbedding\n", - "embed = BertEmbedding(vocab=data_bundle.get_vocab('words'), model_dir_or_name='cn')\n", - "model = BiLSTMCRF(embed=embed, num_classes=len(data_bundle.get_vocab('target')), num_layers=1, hidden_size=200, dropout=0.5,\n", - " target_vocab=data_bundle.get_vocab('target'))\n", - "\n", - "from fastNLP import SpanFPreRecMetric\n", - "from torch.optim import Adam\n", - "from fastNLP import LossInForward\n", - "metric = SpanFPreRecMetric(tag_vocab=data_bundle.get_vocab('target'))\n", - "optimizer = Adam(model.parameters(), lr=2e-5)\n", - "loss = LossInForward()\n", - "\n", - "from fastNLP import Trainer\n", - "import torch\n", - "device= 5 if torch.cuda.is_available() else 'cpu'\n", - "trainer = Trainer(data_bundle.get_dataset('train'), model, loss=loss, optimizer=optimizer, batch_size=12,\n", - " dev_data=data_bundle.get_dataset('dev'), metrics=metric, device=device)\n", - "trainer.train()\n", - "\n", - "from fastNLP import Tester\n", - "tester = Tester(data_bundle.get_dataset('test'), model, metrics=metric)\n", - "tester.test()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/docs/source/_static/notebooks/文本分类.ipynb b/docs/source/_static/notebooks/文本分类.ipynb deleted file mode 100644 index 66439a76..00000000 --- a/docs/source/_static/notebooks/文本分类.ipynb +++ /dev/null @@ -1,564 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 文本分类(Text classification)\n", - "文本分类任务是将一句话或一段话划分到某个具体的类别。比如垃圾邮件识别,文本情绪分类等。\n", - "\n", - "Example:: \n", - "1,商务大床房,房间很大,床有2M宽,整体感觉经济实惠不错!\n", - "\n", - "\n", - "其中开头的1是只这条评论的标签,表示是正面的情绪。我们将使用到的数据可以通过http://dbcloud.irocn.cn:8989/api/public/dl/dataset/chn_senti_corp.zip 下载并解压,当然也可以通过fastNLP自动下载该数据。\n", - "\n", - "数据中的内容如下图所示。接下来,我们将用fastNLP在这个数据上训练一个分类网络。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - 
"![jupyter](./cn_cls_example.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 步骤\n", - "一共有以下的几个步骤 \n", - "(1) 读取数据 \n", - "(2) 预处理数据 \n", - "(3) 选择预训练词向量 \n", - "(4) 创建模型 \n", - "(5) 训练模型 " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### (1) 读取数据\n", - "fastNLP提供多种数据的自动下载与自动加载功能,对于这里我们要用到的数据,我们可以用\\ref{Loader}自动下载并加载该数据。更多有关Loader的使用可以参考\\ref{Loader}" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.io import ChnSentiCorpLoader\n", - "\n", - "loader = ChnSentiCorpLoader() # 初始化一个中文情感分类的loader\n", - "data_dir = loader.download() # 这一行代码将自动下载数据到默认的缓存地址, 并将该地址返回\n", - "data_bundle = loader.load(data_dir) # 这一行代码将从{data_dir}处读取数据至DataBundle" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "DataBundle的相关介绍,可以参考\\ref{}。我们可以打印该data_bundle的基本信息。" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "print(data_bundle)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "可以看出,该data_bundle中一个含有三个\\ref{DataSet}。通过下面的代码,我们可以查看DataSet的基本情况" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "print(data_bundle.get_dataset('train')[:2]) # 查看Train集前两个sample" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### (2) 预处理数据\n", - "在NLP任务中,预处理一般包括: (a)将一整句话切分成汉字或者词; (b)将文本转换为index \n", - "\n", - "fastNLP中也提供了多种数据集的处理类,这里我们直接使用fastNLP的ChnSentiCorpPipe。更多关于Pipe的说明可以参考\\ref{Pipe}。" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.io import ChnSentiCorpPipe\n", - "\n", - "pipe = ChnSentiCorpPipe()\n", - "data_bundle = pipe.process(data_bundle) # 所有的Pipe都实现了process()方法,且输入输出都为DataBundle类型" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "print(data_bundle) # 打印data_bundle,查看其变化" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "可以看到除了之前已经包含的3个\\ref{DataSet}, 还新增了两个\\ref{Vocabulary}。我们可以打印DataSet中的内容" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "print(data_bundle.get_dataset('train')[:2])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "新增了一列为数字列表的chars,以及变为数字的target列。可以看出这两列的名称和刚好与data_bundle中两个Vocabulary的名称是一致的,我们可以打印一下Vocabulary看一下里面的内容。" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "char_vocab = data_bundle.get_vocab('chars')\n", - "print(char_vocab)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Vocabulary是一个记录着词语与index之间映射关系的类,比如" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "index = char_vocab.to_index('选')\n", - "print(\"'选'的index是{}\".format(index)) # 这个值与上面打印出来的第一个instance的chars的第一个index是一致的\n", - "print(\"index:{}对应的汉字是{}\".format(index, char_vocab.to_word(index))) " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### (3) 选择预训练词向量 \n", - "由于Word2vec, Glove, Elmo, 
Bert等预训练模型可以增强模型的性能,所以在训练具体任务前,选择合适的预训练词向量非常重要。在fastNLP中我们提供了多种Embedding使得加载这些预训练模型的过程变得更加便捷。更多关于Embedding的说明可以参考\\ref{Embedding}。这里我们先给出一个使用word2vec的中文汉字预训练的示例,之后再给出一个使用Bert的文本分类。这里使用的预训练词向量为'cn-fastnlp-100d',fastNLP将自动下载该embedding至本地缓存,fastNLP支持使用名字指定的Embedding以及相关说明可以参见\\ref{Embedding}" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.embeddings import StaticEmbedding\n", - "\n", - "word2vec_embed = StaticEmbedding(char_vocab, model_dir_or_name='cn-char-fastnlp-100d')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### (4) 创建模型\n", - "这里我们使用到的模型结构如下所示,补图" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from torch import nn\n", - "from fastNLP.modules import LSTM\n", - "import torch\n", - "\n", - "# 定义模型\n", - "class BiLSTMMaxPoolCls(nn.Module):\n", - " def __init__(self, embed, num_classes, hidden_size=400, num_layers=1, dropout=0.3):\n", - " super().__init__()\n", - " self.embed = embed\n", - " \n", - " self.lstm = LSTM(self.embed.embedding_dim, hidden_size=hidden_size//2, num_layers=num_layers, \n", - " batch_first=True, bidirectional=True)\n", - " self.dropout_layer = nn.Dropout(dropout)\n", - " self.fc = nn.Linear(hidden_size, num_classes)\n", - " \n", - " def forward(self, chars, seq_len): # 这里的名称必须和DataSet中相应的field对应,比如之前我们DataSet中有chars,这里就必须为chars\n", - " # chars:[batch_size, max_len]\n", - " # seq_len: [batch_size, ]\n", - " chars = self.embed(chars)\n", - " outputs, _ = self.lstm(chars, seq_len)\n", - " outputs = self.dropout_layer(outputs)\n", - " outputs, _ = torch.max(outputs, dim=1)\n", - " outputs = self.fc(outputs)\n", - " \n", - " return {'pred':outputs} # [batch_size,], 返回值必须是dict类型,且预测值的key建议设为pred\n", - "\n", - "# 初始化模型\n", - "model = BiLSTMMaxPoolCls(word2vec_embed, len(data_bundle.get_vocab('target')))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### (5) 训练模型\n", - "fastNLP提供了Trainer对象来组织训练过程,包括完成loss计算(所以在初始化Trainer的时候需要指定loss类型),梯度更新(所以在初始化Trainer的时候需要提供优化器optimizer)以及在验证集上的性能验证(所以在初始化时需要提供一个Metric)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP import Trainer\n", - "from fastNLP import CrossEntropyLoss\n", - "from torch.optim import Adam\n", - "from fastNLP import AccuracyMetric\n", - "\n", - "loss = CrossEntropyLoss()\n", - "optimizer = Adam(model.parameters(), lr=0.001)\n", - "metric = AccuracyMetric()\n", - "device = 0 if torch.cuda.is_available() else 'cpu' # 如果有gpu的话在gpu上运行,训练速度会更快\n", - "\n", - "trainer = Trainer(train_data=data_bundle.get_dataset('train'), model=model, loss=loss, \n", - " optimizer=optimizer, batch_size=32, dev_data=data_bundle.get_dataset('dev'),\n", - " metrics=metric, device=device)\n", - "trainer.train() # 开始训练,训练完成之后默认会加载在dev上表现最好的模型\n", - "\n", - "# 在测试集上测试一下模型的性能\n", - "from fastNLP import Tester\n", - "print(\"Performance on test is:\")\n", - "tester = Tester(data=data_bundle.get_dataset('test'), model=model, metrics=metric, batch_size=64, device=device)\n", - "tester.test()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 使用Bert进行文本分类" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# 只需要切换一下Embedding即可\n", - "from fastNLP.embeddings import BertEmbedding\n", - "\n", - "# 这里为了演示一下效果,所以默认Bert不更新权重\n", - "bert_embed = BertEmbedding(char_vocab, 
-### (5) Training the model
-fastNLP provides the Trainer to organize the training loop: it computes the loss (so a loss type
-is passed at initialization), updates the gradients (so an optimizer is passed), and evaluates on
-the dev set (so a Metric is passed as well).
-
-from fastNLP import Trainer
-from fastNLP import CrossEntropyLoss
-from torch.optim import Adam
-from fastNLP import AccuracyMetric
-
-loss = CrossEntropyLoss()
-optimizer = Adam(model.parameters(), lr=0.001)
-metric = AccuracyMetric()
-device = 0 if torch.cuda.is_available() else 'cpu'  # run on a GPU if one is available; training is much faster
-
-trainer = Trainer(train_data=data_bundle.get_dataset('train'), model=model, loss=loss,
-                  optimizer=optimizer, batch_size=32, dev_data=data_bundle.get_dataset('dev'),
-                  metrics=metric, device=device)
-trainer.train()  # train; afterwards the model that performed best on dev is loaded by default
-
-# measure the model's performance on the test set
-from fastNLP import Tester
-print("Performance on test is:")
-tester = Tester(data=data_bundle.get_dataset('test'), model=model, metrics=metric, batch_size=64, device=device)
-tester.test()
-
-### Text classification with BERT
-
-# only the Embedding needs to be switched
-from fastNLP.embeddings import BertEmbedding
-
-# to keep the demonstration light, BERT's weights are frozen here
-bert_embed = BertEmbedding(char_vocab, model_dir_or_name='cn', auto_truncate=True, requires_grad=False)
-model = BiLSTMMaxPoolCls(bert_embed, len(data_bundle.get_vocab('target')))
-
-
-import torch
-from fastNLP import Trainer
-from fastNLP import CrossEntropyLoss
-from torch.optim import Adam
-from fastNLP import AccuracyMetric
-
-loss = CrossEntropyLoss()
-optimizer = Adam(model.parameters(), lr=2e-5)
-metric = AccuracyMetric()
-device = 0 if torch.cuda.is_available() else 'cpu'  # run on a GPU if one is available; training is much faster
-
-trainer = Trainer(train_data=data_bundle.get_dataset('train'), model=model, loss=loss,
-                  optimizer=optimizer, batch_size=16, dev_data=data_bundle.get_dataset('test'),
-                  metrics=metric, device=device, n_epochs=3)
-trainer.train()  # train; afterwards the model that performed best on dev is loaded by default
-
-# measure the model's performance on the test set
-from fastNLP import Tester
-print("Performance on test is:")
-tester = Tester(data=data_bundle.get_dataset('test'), model=model, metrics=metric, batch_size=64, device=device)
-tester.test()
-
-### Word-based text classification
-
-Chinese text has no explicit word boundaries, so a tokenizer is generally needed to segment
-sentences into words first. The example below demonstrates text classification without relying on
-fastNLP's built-in data reading and preprocessing code.
-
-### (1) Reading the data
-
-We keep using the same data, but this time without fastNLP's built-in reader.
-
-from fastNLP.io import ChnSentiCorpLoader
-
-loader = ChnSentiCorpLoader()  # initialize a loader for Chinese sentiment classification
-data_dir = loader.download()   # downloads the data to the default cache directory and returns that path
-
-We first define a function read_file_to_dataset: given a file path, it reads the contents and
-returns a DataSet. We then put all the DataSets into a DataBundle to simplify the preprocessing
-that follows.
-
-import os
-from fastNLP import DataSet, Instance
-from fastNLP.io import DataBundle
-
-
-def read_file_to_dataset(fp):
-    ds = DataSet()
-    with open(fp, 'r') as f:
-        f.readline()  # the first line is the header; skip it
-        for line in f:
-            line = line.strip()
-            target, chars = line.split('\t')
-            ins = Instance(target=target, raw_chars=chars)
-            ds.append(ins)
-    return ds
-
-data_bundle = DataBundle()
-for name in ['train.tsv', 'dev.tsv', 'test.tsv']:
-    fp = os.path.join(data_dir, name)
-    ds = read_file_to_dataset(fp)
-    data_bundle.set_dataset(name=name.split('.')[0], dataset=ds)
-
-print(data_bundle)  # inspect the datasets
-# In total 3 datasets:
-#     train has 9600 instances.
-#     dev has 1200 instances.
-#     test has 1200 instances.
-
-### (2) Preprocessing the data
-
-Here we first segment the sentences with [fastHan](http://gitee.com/fastnlp/fastHan), then build
-the vocabularies and convert the words to indices.
-
-from fastHan import FastHan
-from fastNLP import Vocabulary
-
-model = FastHan()
-# model.set_device('cuda')
-
-# word-segmentation step
-def word_seg(ins):
-    raw_chars = ins['raw_chars']
-    # some sentences are rather long, so only the first 128 characters are kept
-    raw_words = model(raw_chars[:128], target='CWS')[0]
-    return raw_words
-
-for name, ds in data_bundle.iter_datasets():
-    # apply() runs word_seg on every instance and stores its return value in the raw_words field
-    ds.apply(word_seg, new_field_name='raw_words')
-    # besides apply(), fastNLP also offers apply_field, apply_more (create several fields at once), etc.
-    # we also add a seq_len field
-    ds.add_seq_len('raw_words')
-
-vocab = Vocabulary()
-
-# build the vocabulary from the raw_words column; non-training datasets should be passed via
-# no_create_entry_dataset. Vocabularies can also be built with add_word(), add_word_lst(), etc.;
-# see http://www.fastnlp.top/docs/fastNLP/tutorials/tutorial_2_vocabulary.html
-vocab.from_dataset(data_bundle.get_dataset('train'), field_name='raw_words',
-                   no_create_entry_dataset=[data_bundle.get_dataset('dev'),
-                                            data_bundle.get_dataset('test')])
-
-# use the built Vocabulary to index the raw_words column, writing the indices to the words column
-vocab.index_dataset(data_bundle.get_dataset('train'), data_bundle.get_dataset('dev'),
-                    data_bundle.get_dataset('test'), field_name='raw_words', new_field_name='words')
-
-# build the target vocabulary; it generally needs neither padding nor unknown
-target_vocab = Vocabulary(padding=None, unknown=None)
-# the target vocabulary can usually be built from the training set alone
-target_vocab.from_dataset(data_bundle.get_dataset('train'), field_name='target')
-# when new_field_name is not given, the original column is overwritten
-target_vocab.index_dataset(data_bundle.get_dataset('train'), data_bundle.get_dataset('dev'),
-                           data_bundle.get_dataset('test'), field_name='target')
-
-# store the vocabularies in the data_bundle for later use
-data_bundle.set_vocab(field_name='words', vocab=vocab)
-data_bundle.set_vocab(field_name='target', vocab=target_vocab)
-
-# mark words and target as input and target so they are fetched and auto-padded in the training
-# loop; see http://www.fastnlp.top/docs/fastNLP/tutorials/tutorial_6_datasetiter.html
-data_bundle.set_target('target')
-data_bundle.set_input('words', 'seq_len')  # DataSet exposes these two interfaces as well
-# if a field should be input/target but must not be auto-padded, or needs a custom padding scheme,
-# see http://www.fastnlp.top/docs/fastNLP/fastNLP.core.dataset.html
-
-print(data_bundle.get_dataset('train')[:2])  # inspect the current contents of the dataset
-
-# the BiLSTMMaxPoolCls model defined earlier takes a chars argument in forward, so rename words to chars
-data_bundle.rename_field('words', 'chars')
-### (3) Choosing pre-trained word embeddings
-
-Here we use Tencent's pre-trained Chinese word embeddings, which can be downloaded and unpacked
-from [腾讯词向量](https://ai.tencent.com/ailab/nlp/en/embedding.html). We cannot use BERT directly
-here, because this BERT was pre-trained on Chinese characters rather than words.
-
-from fastNLP.embeddings import StaticEmbedding
-
-word2vec_embed = StaticEmbedding(data_bundle.get_vocab('words'),
-                                 model_dir_or_name='/path/to/Tencent_AILab_ChineseEmbedding.txt')
-
-from fastNLP import Trainer
-from fastNLP import CrossEntropyLoss
-from torch.optim import Adam
-from fastNLP import AccuracyMetric
-
-# initialize the model
-model = BiLSTMMaxPoolCls(word2vec_embed, len(data_bundle.get_vocab('target')))
-
-# start training
-loss = CrossEntropyLoss()
-optimizer = Adam(model.parameters(), lr=0.001)
-metric = AccuracyMetric()
-device = 0 if torch.cuda.is_available() else 'cpu'  # run on a GPU if one is available; training is much faster
-
-trainer = Trainer(train_data=data_bundle.get_dataset('train'), model=model, loss=loss,
-                  optimizer=optimizer, batch_size=32, dev_data=data_bundle.get_dataset('dev'),
-                  metrics=metric, device=device)
-trainer.train()  # train; afterwards the model that performed best on dev is loaded by default
-
-# measure the model's performance on the test set
-from fastNLP import Tester
-print("Performance on test is:")
-tester = Tester(data=data_bundle.get_dataset('test'), model=model, metrics=metric, batch_size=64, device=device)
-tester.test()
-
-    (empty trailing cell and notebook metadata elided)
diff --git a/docs/source/conf.py b/docs/source/conf.py
deleted file mode 100644
index d1db2330..00000000
--- a/docs/source/conf.py
+++ /dev/null
@@ -1,181 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Configuration file for the Sphinx documentation builder.
-# For the full list of options see http://www.sphinx-doc.org/en/master/config
-
-# -- Path setup --------------------------------------------------------------
-
-# Make the package importable for autodoc.
-import os
-import sys
-
-sys.path.insert(0, os.path.abspath('../../'))
-
-# -- Project information -----------------------------------------------------
-
-project = 'fastNLP'
-copyright = '2020, xpqiu'
-author = 'xpqiu'
-
-# The short X.Y version
-version = '0.6.0'
-# The full version, including alpha/beta/rc tags
-release = '0.6.0'
-
-# -- General configuration ---------------------------------------------------
-
-# Sphinx extension module names, either built in ('sphinx.ext.*') or custom.
-extensions = [
-    'sphinx.ext.autodoc',
-    'sphinx.ext.viewcode',
-    'sphinx.ext.autosummary',
-    'sphinx.ext.mathjax',
-    'sphinx.ext.todo'
-]
-
-autodoc_default_options = {
-    'member-order': 'bysource',
-    'special-members': '__init__',
-    'undoc-members': False,
-}
-
-autoclass_content = "class"
-
-# Paths that contain templates, relative to this directory.
-templates_path = ['_templates'] -# template_bridge -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# -# source_suffix = ['.rst', '.md'] -source_suffix = '.rst' - -# The master toctree document. -master_doc = 'index' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = "zh_CN" - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path . -exclude_patterns = ['modules.rst'] - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# -html_theme_options = { - 'collapse_navigation': False, - 'titles_only': True -} - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Custom sidebar templates, must be a dictionary that maps document names -# to template names. -# -# The default sidebars (for documents that don't match any pattern) are -# defined by theme itself. Builtin themes are using these templates by -# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', -# 'searchbox.html']``. -# -# html_sidebars = {} - - -# -- Options for HTMLHelp output --------------------------------------------- - -# Output file base name for HTML help builder. -htmlhelp_basename = 'fastNLP doc' - -# -- Options for LaTeX output ------------------------------------------------ - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # - # 'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). - # - # 'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - # - # 'preamble': '', - - # Latex figure (float) alignment - # - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'fastNLP.tex', 'fastNLP Documentation', - 'xpqiu', 'manual'), -] - -# -- Options for manual page output ------------------------------------------ - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'fastnlp', 'fastNLP Documentation', - [author], 1) -] - -# -- Options for Texinfo output ---------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'fastNLP', 'fastNLP Documentation', - author, 'fastNLP', 'One line description of project.', - 'Miscellaneous'), -] - - -# -- Extension configuration ------------------------------------------------- -def maybe_skip_member(app, what, name, obj, skip, options): - if obj.__doc__ is None: - return True - if name == "__init__": - return False - if name.startswith("_"): - return True - return False - - -def setup(app): - app.connect('autodoc-skip-member', maybe_skip_member) diff --git a/docs/source/fastNLP.core.batch.rst b/docs/source/fastNLP.core.batch.rst deleted file mode 100644 index 50ad6fed..00000000 --- a/docs/source/fastNLP.core.batch.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.core.batch -================== - -.. automodule:: fastNLP.core.batch - :members: BatchIter, DataSetIter, TorchLoaderIter - :inherited-members: - diff --git a/docs/source/fastNLP.core.callback.rst b/docs/source/fastNLP.core.callback.rst deleted file mode 100644 index 5a508e03..00000000 --- a/docs/source/fastNLP.core.callback.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.core.callback -===================== - -.. automodule:: fastNLP.core.callback - :members: Callback, GradientClipCallback, EarlyStopCallback, FitlogCallback, EvaluateCallback, LRScheduler, ControlC, LRFinder, TensorboardCallback, WarmupCallback, SaveModelCallback, CallbackException, EarlyStopError - :inherited-members: - diff --git a/docs/source/fastNLP.core.const.rst b/docs/source/fastNLP.core.const.rst deleted file mode 100644 index 82a1992e..00000000 --- a/docs/source/fastNLP.core.const.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.core.const -================== - -.. automodule:: fastNLP.core.const - :members: Const - :inherited-members: - diff --git a/docs/source/fastNLP.core.dataset.rst b/docs/source/fastNLP.core.dataset.rst deleted file mode 100644 index e13d7f1c..00000000 --- a/docs/source/fastNLP.core.dataset.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.core.dataset -==================== - -.. automodule:: fastNLP.core.dataset - :members: DataSet - :inherited-members: - diff --git a/docs/source/fastNLP.core.field.rst b/docs/source/fastNLP.core.field.rst deleted file mode 100644 index 73dad8af..00000000 --- a/docs/source/fastNLP.core.field.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.core.field -================== - -.. automodule:: fastNLP.core.field - :members: Padder, AutoPadder, EngChar2DPadder - :inherited-members: - diff --git a/docs/source/fastNLP.core.instance.rst b/docs/source/fastNLP.core.instance.rst deleted file mode 100644 index 010567b9..00000000 --- a/docs/source/fastNLP.core.instance.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.core.instance -===================== - -.. automodule:: fastNLP.core.instance - :members: Instance - :inherited-members: - diff --git a/docs/source/fastNLP.core.losses.rst b/docs/source/fastNLP.core.losses.rst deleted file mode 100644 index daf246f8..00000000 --- a/docs/source/fastNLP.core.losses.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.core.losses -=================== - -.. 
automodule:: fastNLP.core.losses - :members: LossBase, LossFunc, LossInForward, CrossEntropyLoss, BCELoss, L1Loss, NLLLoss - :inherited-members: - diff --git a/docs/source/fastNLP.core.metrics.rst b/docs/source/fastNLP.core.metrics.rst deleted file mode 100644 index fe304e78..00000000 --- a/docs/source/fastNLP.core.metrics.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.core.metrics -==================== - -.. automodule:: fastNLP.core.metrics - :members: MetricBase, AccuracyMetric, SpanFPreRecMetric, CMRC2018Metric, ClassifyFPreRecMetric - :inherited-members: - diff --git a/docs/source/fastNLP.core.optimizer.rst b/docs/source/fastNLP.core.optimizer.rst deleted file mode 100644 index 44e45c4f..00000000 --- a/docs/source/fastNLP.core.optimizer.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.core.optimizer -====================== - -.. automodule:: fastNLP.core.optimizer - :members: Optimizer, SGD, Adam, AdamW - :inherited-members: - diff --git a/docs/source/fastNLP.core.rst b/docs/source/fastNLP.core.rst deleted file mode 100644 index 15fe29d5..00000000 --- a/docs/source/fastNLP.core.rst +++ /dev/null @@ -1,25 +0,0 @@ -fastNLP.core -============ - -.. automodule:: fastNLP.core - -子模块 ------- - -.. toctree:: - :maxdepth: 1 - - fastNLP.core.batch - fastNLP.core.callback - fastNLP.core.const - fastNLP.core.dataset - fastNLP.core.field - fastNLP.core.instance - fastNLP.core.losses - fastNLP.core.metrics - fastNLP.core.optimizer - fastNLP.core.sampler - fastNLP.core.tester - fastNLP.core.trainer - fastNLP.core.utils - fastNLP.core.vocabulary diff --git a/docs/source/fastNLP.core.sampler.rst b/docs/source/fastNLP.core.sampler.rst deleted file mode 100644 index 56291894..00000000 --- a/docs/source/fastNLP.core.sampler.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.core.sampler -==================== - -.. automodule:: fastNLP.core.sampler - :members: Sampler, BucketSampler, SequentialSampler, RandomSampler - :inherited-members: - diff --git a/docs/source/fastNLP.core.tester.rst b/docs/source/fastNLP.core.tester.rst deleted file mode 100644 index 90ec2a88..00000000 --- a/docs/source/fastNLP.core.tester.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.core.tester -=================== - -.. automodule:: fastNLP.core.tester - :members: Tester - :inherited-members: - diff --git a/docs/source/fastNLP.core.trainer.rst b/docs/source/fastNLP.core.trainer.rst deleted file mode 100644 index 92c08718..00000000 --- a/docs/source/fastNLP.core.trainer.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.core.trainer -==================== - -.. automodule:: fastNLP.core.trainer - :members: Trainer - :inherited-members: - diff --git a/docs/source/fastNLP.core.utils.rst b/docs/source/fastNLP.core.utils.rst deleted file mode 100644 index 027a43e9..00000000 --- a/docs/source/fastNLP.core.utils.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.core.utils -================== - -.. automodule:: fastNLP.core.utils - :members: cache_results, seq_len_to_mask, get_seq_len - :inherited-members: - diff --git a/docs/source/fastNLP.core.vocabulary.rst b/docs/source/fastNLP.core.vocabulary.rst deleted file mode 100644 index ac07a8c6..00000000 --- a/docs/source/fastNLP.core.vocabulary.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.core.vocabulary -======================= - -.. 
automodule:: fastNLP.core.vocabulary - :members: Vocabulary, VocabularyOption - :inherited-members: - diff --git a/docs/source/fastNLP.embeddings.bert_embedding.rst b/docs/source/fastNLP.embeddings.bert_embedding.rst deleted file mode 100644 index 1b59dc35..00000000 --- a/docs/source/fastNLP.embeddings.bert_embedding.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.embeddings.bert_embedding -================================= - -.. automodule:: fastNLP.embeddings.bert_embedding - :members: BertEmbedding, BertWordPieceEncoder - diff --git a/docs/source/fastNLP.embeddings.char_embedding.rst b/docs/source/fastNLP.embeddings.char_embedding.rst deleted file mode 100644 index bc8d64f9..00000000 --- a/docs/source/fastNLP.embeddings.char_embedding.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.embeddings.char_embedding -================================= - -.. automodule:: fastNLP.embeddings.char_embedding - :members: CNNCharEmbedding, LSTMCharEmbedding - diff --git a/docs/source/fastNLP.embeddings.contextual_embedding.rst b/docs/source/fastNLP.embeddings.contextual_embedding.rst deleted file mode 100644 index 74e5f5be..00000000 --- a/docs/source/fastNLP.embeddings.contextual_embedding.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.embeddings.contextual_embedding -======================================= - -.. automodule:: fastNLP.embeddings.contextual_embedding - :members: ContextualEmbedding - diff --git a/docs/source/fastNLP.embeddings.elmo_embedding.rst b/docs/source/fastNLP.embeddings.elmo_embedding.rst deleted file mode 100644 index b8c6d41c..00000000 --- a/docs/source/fastNLP.embeddings.elmo_embedding.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.embeddings.elmo_embedding -================================= - -.. automodule:: fastNLP.embeddings.elmo_embedding - :members: ElmoEmbedding - diff --git a/docs/source/fastNLP.embeddings.embedding.rst b/docs/source/fastNLP.embeddings.embedding.rst deleted file mode 100644 index 6793446b..00000000 --- a/docs/source/fastNLP.embeddings.embedding.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.embeddings.embedding -============================ - -.. automodule:: fastNLP.embeddings.embedding - :members: Embedding, TokenEmbedding - diff --git a/docs/source/fastNLP.embeddings.rst b/docs/source/fastNLP.embeddings.rst deleted file mode 100644 index f4f4a3e0..00000000 --- a/docs/source/fastNLP.embeddings.rst +++ /dev/null @@ -1,20 +0,0 @@ -fastNLP.embeddings -================== - -.. automodule:: fastNLP.embeddings - :members: Embedding, TokenEmbedding, StaticEmbedding, ElmoEmbedding, BertEmbedding, BertWordPieceEncoder, StackEmbedding, LSTMCharEmbedding, CNNCharEmbedding, get_embeddings - -子模块 ------- - -.. toctree:: - :maxdepth: 1 - - fastNLP.embeddings.bert_embedding - fastNLP.embeddings.char_embedding - fastNLP.embeddings.contextual_embedding - fastNLP.embeddings.elmo_embedding - fastNLP.embeddings.embedding - fastNLP.embeddings.stack_embedding - fastNLP.embeddings.static_embedding - fastNLP.embeddings.utils diff --git a/docs/source/fastNLP.embeddings.stack_embedding.rst b/docs/source/fastNLP.embeddings.stack_embedding.rst deleted file mode 100644 index a07d1ef5..00000000 --- a/docs/source/fastNLP.embeddings.stack_embedding.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.embeddings.stack_embedding -================================== - -.. 
automodule:: fastNLP.embeddings.stack_embedding - :members: StackEmbedding - diff --git a/docs/source/fastNLP.embeddings.static_embedding.rst b/docs/source/fastNLP.embeddings.static_embedding.rst deleted file mode 100644 index 219ce0e5..00000000 --- a/docs/source/fastNLP.embeddings.static_embedding.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.embeddings.static_embedding -=================================== - -.. automodule:: fastNLP.embeddings.static_embedding - :members: StaticEmbedding - diff --git a/docs/source/fastNLP.embeddings.utils.rst b/docs/source/fastNLP.embeddings.utils.rst deleted file mode 100644 index 077487c1..00000000 --- a/docs/source/fastNLP.embeddings.utils.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.embeddings.utils -======================== - -.. automodule:: fastNLP.embeddings.utils - :members: get_embeddings - diff --git a/docs/source/fastNLP.io.data_bundle.rst b/docs/source/fastNLP.io.data_bundle.rst deleted file mode 100644 index 71a921f1..00000000 --- a/docs/source/fastNLP.io.data_bundle.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.io.data_bundle -====================== - -.. automodule:: fastNLP.io.data_bundle - :members: DataBundle - :inherited-members: - diff --git a/docs/source/fastNLP.io.embed_loader.rst b/docs/source/fastNLP.io.embed_loader.rst deleted file mode 100644 index 581f5c1b..00000000 --- a/docs/source/fastNLP.io.embed_loader.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.io.embed_loader -======================= - -.. automodule:: fastNLP.io.embed_loader - :members: EmbedLoader, EmbeddingOption - :inherited-members: - diff --git a/docs/source/fastNLP.io.file_utils.rst b/docs/source/fastNLP.io.file_utils.rst deleted file mode 100644 index 0815e068..00000000 --- a/docs/source/fastNLP.io.file_utils.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.io.file_utils -===================== - -.. automodule:: fastNLP.io.file_utils - :members: cached_path, get_filepath, get_cache_path, split_filename_suffix, get_from_cache - :inherited-members: - diff --git a/docs/source/fastNLP.io.loader.rst b/docs/source/fastNLP.io.loader.rst deleted file mode 100644 index c6d0dc55..00000000 --- a/docs/source/fastNLP.io.loader.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.io.loader -================= - -.. automodule:: fastNLP.io.loader - :members: Loader, YelpLoader, YelpFullLoader, YelpPolarityLoader, IMDBLoader, SSTLoader, SST2Loader, ChnSentiCorpLoader, THUCNewsLoader, WeiboSenti100kLoader, ConllLoader, Conll2003Loader, Conll2003NERLoader, OntoNotesNERLoader, CTBLoader, MsraNERLoader, PeopleDailyNERLoader, WeiboNERLoader, CSVLoader, JsonLoader, CWSLoader, MNLILoader, QuoraLoader, SNLILoader, QNLILoader, RTELoader, CNXNLILoader, BQCorpusLoader, LCQMCLoader, CoReferenceLoader - :inherited-members: - diff --git a/docs/source/fastNLP.io.model_io.rst b/docs/source/fastNLP.io.model_io.rst deleted file mode 100644 index 183122b1..00000000 --- a/docs/source/fastNLP.io.model_io.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.io.model_io -=================== - -.. automodule:: fastNLP.io.model_io - :members: ModelLoader, ModelSaver - :inherited-members: - diff --git a/docs/source/fastNLP.io.pipe.rst b/docs/source/fastNLP.io.pipe.rst deleted file mode 100644 index 178d35a9..00000000 --- a/docs/source/fastNLP.io.pipe.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.io.pipe -=============== - -.. 
automodule:: fastNLP.io.pipe - :members: Pipe, CWSPipe, YelpFullPipe, YelpPolarityPipe, SSTPipe, SST2Pipe, IMDBPipe, ChnSentiCorpPipe, THUCNewsPipe, WeiboSenti100kPipe, Conll2003NERPipe, OntoNotesNERPipe, MsraNERPipe, WeiboNERPipe, PeopleDailyPipe, Conll2003Pipe, MatchingBertPipe, RTEBertPipe, SNLIBertPipe, QuoraBertPipe, QNLIBertPipe, MNLIBertPipe, CNXNLIBertPipe, BQCorpusBertPipe, LCQMCBertPipe, MatchingPipe, RTEPipe, SNLIPipe, QuoraPipe, QNLIPipe, MNLIPipe, LCQMCPipe, CNXNLIPipe, BQCorpusPipe, RenamePipe, GranularizePipe, MachingTruncatePipe, CoReferencePipe - :inherited-members: - diff --git a/docs/source/fastNLP.io.rst b/docs/source/fastNLP.io.rst deleted file mode 100644 index 54373df4..00000000 --- a/docs/source/fastNLP.io.rst +++ /dev/null @@ -1,20 +0,0 @@ -fastNLP.io -========== - -.. automodule:: fastNLP.io - :members: DataBundle, EmbedLoader, Loader, YelpLoader, YelpFullLoader, YelpPolarityLoader, IMDBLoader, SSTLoader, SST2Loader, ChnSentiCorpLoader, THUCNewsLoader, WeiboSenti100kLoader, ConllLoader, Conll2003Loader, Conll2003NERLoader, OntoNotesNERLoader, CTBLoader, MsraNERLoader, WeiboNERLoader, PeopleDailyNERLoader, CSVLoader, JsonLoader, CWSLoader, MNLILoader, QuoraLoader, SNLILoader, QNLILoader, RTELoader, CNXNLILoader, BQCorpusLoader, LCQMCLoader, Pipe, YelpFullPipe, YelpPolarityPipe, SSTPipe, SST2Pipe, IMDBPipe, ChnSentiCorpPipe, THUCNewsPipe, WeiboSenti100kPipe, Conll2003Pipe, Conll2003NERPipe, OntoNotesNERPipe, MsraNERPipe, PeopleDailyPipe, WeiboNERPipe, CWSPipe, MatchingBertPipe, RTEBertPipe, SNLIBertPipe, QuoraBertPipe, QNLIBertPipe, MNLIBertPipe, MatchingPipe, RTEPipe, SNLIPipe, QuoraPipe, QNLIPipe, MNLIPipe, ModelLoader, ModelSaver - :inherited-members: - -子模块 ------- - -.. toctree:: - :maxdepth: 1 - - fastNLP.io.data_bundle - fastNLP.io.embed_loader - fastNLP.io.file_utils - fastNLP.io.loader - fastNLP.io.model_io - fastNLP.io.pipe - fastNLP.io.utils diff --git a/docs/source/fastNLP.io.utils.rst b/docs/source/fastNLP.io.utils.rst deleted file mode 100644 index 3bff3c45..00000000 --- a/docs/source/fastNLP.io.utils.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.io.utils -================ - -.. automodule:: fastNLP.io.utils - :members: check_loader_paths - :inherited-members: - diff --git a/docs/source/fastNLP.models.bert.rst b/docs/source/fastNLP.models.bert.rst deleted file mode 100644 index b0c813f9..00000000 --- a/docs/source/fastNLP.models.bert.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.models.bert -=================== - -.. automodule:: fastNLP.models.bert - :members: BertForSequenceClassification, BertForSentenceMatching, BertForMultipleChoice, BertForTokenClassification, BertForQuestionAnswering - diff --git a/docs/source/fastNLP.models.biaffine_parser.rst b/docs/source/fastNLP.models.biaffine_parser.rst deleted file mode 100644 index 395638fe..00000000 --- a/docs/source/fastNLP.models.biaffine_parser.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.models.biaffine_parser -============================== - -.. automodule:: fastNLP.models.biaffine_parser - :members: BiaffineParser, GraphParser - diff --git a/docs/source/fastNLP.models.cnn_text_classification.rst b/docs/source/fastNLP.models.cnn_text_classification.rst deleted file mode 100644 index e9ed7ee1..00000000 --- a/docs/source/fastNLP.models.cnn_text_classification.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.models.cnn_text_classification -====================================== - -.. 
automodule:: fastNLP.models.cnn_text_classification - :members: CNNText - diff --git a/docs/source/fastNLP.models.rst b/docs/source/fastNLP.models.rst deleted file mode 100644 index 21cf41a7..00000000 --- a/docs/source/fastNLP.models.rst +++ /dev/null @@ -1,18 +0,0 @@ -fastNLP.models -============== - -.. automodule:: fastNLP.models - :members: CNNText, SeqLabeling, AdvSeqLabel, ESIM, StarTransEnc, STSeqLabel, STNLICls, STSeqCls, BiaffineParser, GraphParser, BertForSequenceClassification, BertForSentenceMatching, BertForMultipleChoice, BertForTokenClassification, BertForQuestionAnswering - -子模块 ------- - -.. toctree:: - :maxdepth: 1 - - fastNLP.models.bert - fastNLP.models.biaffine_parser - fastNLP.models.cnn_text_classification - fastNLP.models.sequence_labeling - fastNLP.models.snli - fastNLP.models.star_transformer diff --git a/docs/source/fastNLP.models.sequence_labeling.rst b/docs/source/fastNLP.models.sequence_labeling.rst deleted file mode 100644 index dcd1300e..00000000 --- a/docs/source/fastNLP.models.sequence_labeling.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.models.sequence_labeling -================================ - -.. automodule:: fastNLP.models.sequence_labeling - :members: SeqLabeling, AdvSeqLabel, BiLSTMCRF - diff --git a/docs/source/fastNLP.models.snli.rst b/docs/source/fastNLP.models.snli.rst deleted file mode 100644 index eed02139..00000000 --- a/docs/source/fastNLP.models.snli.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.models.snli -=================== - -.. automodule:: fastNLP.models.snli - :members: ESIM - diff --git a/docs/source/fastNLP.models.star_transformer.rst b/docs/source/fastNLP.models.star_transformer.rst deleted file mode 100644 index 80ab5b33..00000000 --- a/docs/source/fastNLP.models.star_transformer.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.models.star_transformer -=============================== - -.. automodule:: fastNLP.models.star_transformer - :members: StarTransEnc, STNLICls, STSeqCls, STSeqLabel - diff --git a/docs/source/fastNLP.modules.decoder.rst b/docs/source/fastNLP.modules.decoder.rst deleted file mode 100644 index de6e0d9d..00000000 --- a/docs/source/fastNLP.modules.decoder.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.modules.decoder -======================= - -.. automodule:: fastNLP.modules.decoder - :members: MLP, ConditionalRandomField, viterbi_decode, allowed_transitions - diff --git a/docs/source/fastNLP.modules.encoder.rst b/docs/source/fastNLP.modules.encoder.rst deleted file mode 100644 index a402cb67..00000000 --- a/docs/source/fastNLP.modules.encoder.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.modules.encoder -======================= - -.. automodule:: fastNLP.modules.encoder - :members: ConvolutionCharEncoder, LSTMCharEncoder, ConvMaxpool, LSTM, StarTransformer, TransformerEncoder, VarRNN, VarLSTM, VarGRU, MaxPool, MaxPoolWithMask, KMaxPool, AvgPool, AvgPoolWithMask, MultiHeadAttention, BiAttention, SelfAttention - diff --git a/docs/source/fastNLP.modules.rst b/docs/source/fastNLP.modules.rst deleted file mode 100644 index 9c44e461..00000000 --- a/docs/source/fastNLP.modules.rst +++ /dev/null @@ -1,15 +0,0 @@ -fastNLP.modules -=============== - -.. automodule:: fastNLP.modules - :members: ConvolutionCharEncoder, LSTMCharEncoder, ConvMaxpool, LSTM, StarTransformer, TransformerEncoder, VarRNN, VarLSTM, VarGRU, MaxPool, MaxPoolWithMask, KMaxPool, AvgPool, AvgPoolWithMask, MultiHeadAttention, MLP, ConditionalRandomField, viterbi_decode, allowed_transitions, TimestepDropout - -子模块 ------- - -.. 
toctree:: - :maxdepth: 1 - - fastNLP.modules.decoder - fastNLP.modules.encoder - fastNLP.modules.utils diff --git a/docs/source/fastNLP.modules.utils.rst b/docs/source/fastNLP.modules.utils.rst deleted file mode 100644 index 101a0f45..00000000 --- a/docs/source/fastNLP.modules.utils.rst +++ /dev/null @@ -1,6 +0,0 @@ -fastNLP.modules.utils -===================== - -.. automodule:: fastNLP.modules.utils - :members: initial_parameter, summary - diff --git a/docs/source/fastNLP.rst b/docs/source/fastNLP.rst deleted file mode 100644 index 097ad0b2..00000000 --- a/docs/source/fastNLP.rst +++ /dev/null @@ -1,18 +0,0 @@ -fastNLP -======= - -.. automodule:: fastNLP - :members: Instance, FieldArray, DataSetIter, BatchIter, TorchLoaderIter, Vocabulary, DataSet, Const, Trainer, Tester, Callback, GradientClipCallback, EarlyStopCallback, FitlogCallback, EvaluateCallback, LRScheduler, ControlC, LRFinder, TensorboardCallback, WarmupCallback, SaveModelCallback, CallbackException, EarlyStopError, Padder, AutoPadder, EngChar2DPadder, AccuracyMetric, SpanFPreRecMetric, ExtractiveQAMetric, Optimizer, SGD, Adam, AdamW, Sampler, SequentialSampler, BucketSampler, RandomSampler, LossFunc, CrossEntropyLoss, L1Loss, BCELoss, NLLLoss, LossInForward, cache_results, logger - :inherited-members: - -子模块 ------- - -.. toctree:: - :maxdepth: 1 - - fastNLP.core - fastNLP.embeddings - fastNLP.io - fastNLP.models - fastNLP.modules diff --git a/docs/source/figures/fitlogChart.png b/docs/source/figures/fitlogChart.png deleted file mode 100644 index 57ae1683..00000000 Binary files a/docs/source/figures/fitlogChart.png and /dev/null differ diff --git a/docs/source/figures/fitlogTable.png b/docs/source/figures/fitlogTable.png deleted file mode 100644 index 37551634..00000000 Binary files a/docs/source/figures/fitlogTable.png and /dev/null differ diff --git a/docs/source/figures/procedures.PNG b/docs/source/figures/procedures.PNG deleted file mode 100644 index 982249e8..00000000 Binary files a/docs/source/figures/procedures.PNG and /dev/null differ diff --git a/docs/source/figures/sequence_labeling.PNG b/docs/source/figures/sequence_labeling.PNG deleted file mode 100644 index 397f0a24..00000000 Binary files a/docs/source/figures/sequence_labeling.PNG and /dev/null differ diff --git a/docs/source/figures/text_classification.png b/docs/source/figures/text_classification.png deleted file mode 100644 index 21502708..00000000 Binary files a/docs/source/figures/text_classification.png and /dev/null differ diff --git a/docs/source/figures/workflow.png b/docs/source/figures/workflow.png deleted file mode 100644 index 3cf4e70e..00000000 Binary files a/docs/source/figures/workflow.png and /dev/null differ diff --git a/docs/source/index.rst b/docs/source/index.rst deleted file mode 100644 index ff77a6fc..00000000 --- a/docs/source/index.rst +++ /dev/null @@ -1,56 +0,0 @@ -fastNLP 中文文档 -===================== - -`fastNLP `_ 是一款轻量级的自然语言处理(NLP)工具包。你既可以用它来快速地完成一个NLP任务, -也可以用它在研究中快速构建更复杂的模型。 - -.. hint:: - - 如果你是从 readthedocs 访问的该文档,请跳转到我们的 `最新网站 `_ - -fastNLP具有如下的特性: - -- 统一的Tabular式数据容器,简化数据预处理过程; -- 内置多种数据集的 :class:`~fastNLP.io.Loader` 和 :class:`~fastNLP.io.Pipe` ,省去预处理代码; -- 各种方便的NLP工具,例如Embedding加载(包括 :class:`~fastNLP.embeddings.ElmoEmbedding` 和 :class:`~fastNLP.embeddings.BertEmbedding` )、中间数据cache等; -- 部分 `数据集与预训练模型 `_ 的自动下载; -- 提供多种神经网络组件以及复现模型(涵盖中文分词、命名实体识别、句法分析、文本分类、文本匹配、指代消解、摘要等任务); -- :class:`~fastNLP.Trainer` 提供多种内置 :mod:`~fastNLP.core.callback` 函数,方便实验记录、异常捕获等. - - -用户手册 ----------------- - -.. 
toctree:: - :maxdepth: 2 - - 安装指南 - 快速入门 - 详细教程 - -API 文档 -------------- - -除了用户手册之外,你还可以通过查阅 API 文档来找到你所需要的工具。 - -.. toctree:: - :titlesonly: - :maxdepth: 2 - - fastNLP - - -:doc:`API变动列表 ` - -fitlog文档 ----------- - -您可以 `点此 `_ 查看fitlog的文档。 -fitlog 是由我们团队开发的日志记录+代码管理的工具。 - -索引与搜索 -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/docs/source/modules.rst b/docs/source/modules.rst deleted file mode 100644 index e9a92cb7..00000000 --- a/docs/source/modules.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP -======= - -.. toctree:: - :maxdepth: 4 - - fastNLP diff --git a/docs/source/tutorials/cn_cls_example.png b/docs/source/tutorials/cn_cls_example.png deleted file mode 100644 index 5055bb02..00000000 Binary files a/docs/source/tutorials/cn_cls_example.png and /dev/null differ diff --git a/docs/source/tutorials/extend_1_bert_embedding.rst b/docs/source/tutorials/extend_1_bert_embedding.rst deleted file mode 100644 index b902b8ec..00000000 --- a/docs/source/tutorials/extend_1_bert_embedding.rst +++ /dev/null @@ -1,231 +0,0 @@ -============================== -BertEmbedding的各种用法 -============================== - -Bert自从在 `BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding `_ -中被提出后,因其性能卓越受到了极大的关注,在这里我们展示一下在fastNLP中如何使用Bert进行各类任务。其中中文Bert我们使用的模型的权重来自于 -`中文Bert预训练 `_ 。 - -为了方便大家的使用,fastNLP提供了预训练的Embedding权重及数据集的自动下载,支持自动下载的Embedding和数据集见 -`数据集 `_ 。或您可从 :doc:`/tutorials/tutorial_3_embedding` 与 -:doc:`/tutorials/tutorial_4_load_dataset` 了解更多相关信息。 - ----------------------------------- -中文任务 ----------------------------------- -下面我们将介绍通过使用Bert来进行文本分类, 中文命名实体识别, 文本匹配, 中文问答。 - -.. note:: - - 本教程必须使用 GPU 进行实验,并且会花费大量的时间 - -1. 使用Bert进行文本分类 ----------------------------------- -文本分类是指给定一段文字,判定其所属的类别。例如下面的文本情感分类 - -.. code-block:: text - - 1, 商务大床房,房间很大,床有2M宽,整体感觉经济实惠不错! - -这里我们使用fastNLP提供自动下载的微博分类数据集进行测试 - -.. code-block:: python - - from fastNLP.io import WeiboSenti100kPipe - from fastNLP.embeddings import BertEmbedding - from fastNLP.models import BertForSequenceClassification - from fastNLP import Trainer, CrossEntropyLoss, AccuracyMetric, Adam - import torch - - data_bundle = WeiboSenti100kPipe().process_from_file() - data_bundle.rename_field('chars', 'words') - - # 载入BertEmbedding - embed = BertEmbedding(data_bundle.get_vocab('words'), model_dir_or_name='cn-wwm', include_cls_sep=True) - - # 载入模型 - model = BertForSequenceClassification(embed, len(data_bundle.get_vocab('target'))) - - # 训练模型 - device = 0 if torch.cuda.is_available() else 'cpu' - trainer = Trainer(data_bundle.get_dataset('train'), model, - optimizer=Adam(model_params=model.parameters(), lr=2e-5), - loss=CrossEntropyLoss(), device=device, - batch_size=8, dev_data=data_bundle.get_dataset('dev'), - metrics=AccuracyMetric(), n_epochs=2, print_every=1) - trainer.train() - - # 测试结果 - from fastNLP import Tester - - tester = Tester(data_bundle.get_dataset('test'), model, batch_size=128, metrics=AccuracyMetric()) - tester.test() - -输出结果:: - - In Epoch:1/Step:12499, got best dev performance: - AccuracyMetric: acc=0.9838 - Reloaded the best model. - Evaluate data in 63.84 seconds! - [tester] - AccuracyMetric: acc=0.9815 - - -2. 使用Bert进行命名实体识别 ----------------------------------- -命名实体识别是给定一句话,标记出其中的实体。一般序列标注的任务都使用conll格式,conll格式是指一行中通过制表符分隔不同的内容,使用空行分隔 -两句话,例如下面的例子 - -.. code-block:: text - - 中 B-ORG - 共 I-ORG - 中 I-ORG - 央 I-ORG - 致 O - 中 B-ORG - 国 I-ORG - 致 I-ORG - 公 I-ORG - 党 I-ORG - 十 I-ORG - 一 I-ORG - 大 I-ORG - 的 O - 贺 O - 词 O - -这部分内容请参考 :doc:`/tutorials/序列标注` - - -3. 
使用Bert进行文本匹配 ----------------------------------- -文本匹配任务是指给定两句话判断他们的关系。比如,给定两句话判断前一句是否和后一句具有因果关系或是否是矛盾关系;或者给定两句话判断两句话是否 -具有相同的意思。这里我们使用 - -.. code-block:: python - - from fastNLP.io import CNXNLIBertPipe - from fastNLP.embeddings import BertEmbedding - from fastNLP.models import BertForSentenceMatching - from fastNLP import Trainer, CrossEntropyLoss, AccuracyMetric, Adam - from fastNLP.core.optimizer import AdamW - from fastNLP.core.callback import WarmupCallback - from fastNLP import Tester - import torch - - data_bundle = CNXNLIBertPipe().process_from_file() - data_bundle.rename_field('chars', 'words') - print(data_bundle) - - # 载入BertEmbedding - embed = BertEmbedding(data_bundle.get_vocab('words'), model_dir_or_name='cn-wwm', include_cls_sep=True) - - # 载入模型 - model = BertForSentenceMatching(embed, len(data_bundle.get_vocab('target'))) - - # 训练模型 - callbacks = [WarmupCallback(warmup=0.1, schedule='linear'), ] - device = 0 if torch.cuda.is_available() else 'cpu' - trainer = Trainer(data_bundle.get_dataset('train'), model, - optimizer=AdamW(params=model.parameters(), lr=4e-5), - loss=CrossEntropyLoss(), device=device, - batch_size=8, dev_data=data_bundle.get_dataset('dev'), - metrics=AccuracyMetric(), n_epochs=5, print_every=1, - update_every=8, callbacks=callbacks) - trainer.train() - - tester = Tester(data_bundle.get_dataset('test'), model, batch_size=8, metrics=AccuracyMetric()) - tester.test() - -运行结果:: - - In Epoch:3/Step:73632, got best dev performance: - AccuracyMetric: acc=0.781928 - Reloaded the best model. - Evaluate data in 18.54 seconds! - [tester] - AccuracyMetric: acc=0.783633 - - -4. 使用Bert进行中文问答 ----------------------------------- -问答任务是给定一段内容,以及一个问题,需要从这段内容中找到答案。 -例如:: - - "context": "锣鼓经是大陆传统器乐及戏曲里面常用的打击乐记谱方法,以中文字的声音模拟敲击乐的声音,纪录打击乐的各种不同的演奏方法。常 - 用的节奏型称为「锣鼓点」。而锣鼓是戏曲节奏的支柱,除了加强演员身段动作的节奏感,也作为音乐的引子和尾声,提示音乐的板式和速度,以及 - 作为唱腔和念白的伴奏,令诗句的韵律更加抑扬顿锉,段落分明。锣鼓的运用有约定俗成的程式,依照角色行当的身份、性格、情绪以及环境,配合 - 相应的锣鼓点。锣鼓亦可以模仿大自然的音响效果,如雷电、波浪等等。戏曲锣鼓所运用的敲击乐器主要分为鼓、锣、钹和板四类型:鼓类包括有单 - 皮鼓(板鼓)、大鼓、大堂鼓(唐鼓)、小堂鼓、怀鼓、花盆鼓等;锣类有大锣、小锣(手锣)、钲锣、筛锣、马锣、镗锣、云锣;钹类有铙钹、大 - 钹、小钹、水钹、齐钹、镲钹、铰子、碰钟等;打拍子用的檀板、木鱼、梆子等。因为京剧的锣鼓通常由四位乐师负责,又称为四大件,领奏的师 - 傅称为:「鼓佬」,其职责有如西方乐队的指挥,负责控制速度以及利用各种手势提示乐师演奏不同的锣鼓点。粤剧吸收了部份京剧的锣鼓,但以木鱼 - 和沙的代替了京剧的板和鼓,作为打拍子的主要乐器。以下是京剧、昆剧和粤剧锣鼓中乐器对应的口诀用字:", - "question": "锣鼓经是什么?", - "answers": [ - { - "text": "大陆传统器乐及戏曲里面常用的打击乐记谱方法", - "answer_start": 4 - }, - { - "text": "大陆传统器乐及戏曲里面常用的打击乐记谱方法", - "answer_start": 4 - }, - { - "text": "大陆传统器乐及戏曲里面常用的打击乐记谱方法", - "answer_start": 4 - } - ] - -您可以通过以下的代码训练 (原文代码:`CMRC2018 `_) - -.. 
code-block:: python - - from fastNLP.embeddings import BertEmbedding - from fastNLP.models import BertForQuestionAnswering - from fastNLP.core.losses import CMRC2018Loss - from fastNLP.core.metrics import CMRC2018Metric - from fastNLP.io.pipe.qa import CMRC2018BertPipe - from fastNLP import Trainer, BucketSampler - from fastNLP import WarmupCallback, GradientClipCallback - from fastNLP.core.optimizer import AdamW - import torch - - data_bundle = CMRC2018BertPipe().process_from_file() - data_bundle.rename_field('chars', 'words') - - print(data_bundle) - - embed = BertEmbedding(data_bundle.get_vocab('words'), model_dir_or_name='cn', requires_grad=True, include_cls_sep=False, auto_truncate=True, - dropout=0.5, word_dropout=0.01) - model = BertForQuestionAnswering(embed) - loss = CMRC2018Loss() - metric = CMRC2018Metric() - - wm_callback = WarmupCallback(schedule='linear') - gc_callback = GradientClipCallback(clip_value=1, clip_type='norm') - callbacks = [wm_callback, gc_callback] - - optimizer = AdamW(model.parameters(), lr=5e-5) - - device = 0 if torch.cuda.is_available() else 'cpu' - trainer = Trainer(data_bundle.get_dataset('train'), model, loss=loss, optimizer=optimizer, - sampler=BucketSampler(seq_len_field_name='context_len'), - dev_data=data_bundle.get_dataset('dev'), metrics=metric, - callbacks=callbacks, device=device, batch_size=6, num_workers=2, n_epochs=2, print_every=1, - test_use_tqdm=False, update_every=10) - trainer.train(load_best_model=False) - -训练结果(和原论文中报道的基本一致):: - - In Epoch:2/Step:1692, got best dev performance: - CMRC2018Metric: f1=85.61, em=66.08 - - ----------------------------------- -代码下载 ----------------------------------- - -.. raw:: html - - 点击下载 IPython Notebook 文件
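-
-----------------------------------
-补充:用训练好的模型做预测
-----------------------------------
-
-上面的几个任务都以 Trainer/Tester 结束。如果想在训练完成后对单条文本做预测,可以手动构造输入再调用模型。下面给出一个最小示意(假设第1节中的 ``model`` 与 ``data_bundle`` 仍在内存中,字段名与前文一致;这段代码并非 fastNLP 提供的现成接口,仅供参考):
-
-.. code-block:: python
-
-    import torch
-
-    def predict(model, data_bundle, raw_chars):
-        # 用训练时的词表把原始文本转为 index;中文Bert以字为单位
-        vocab = data_bundle.get_vocab('words')
-        device = next(model.parameters()).device
-        words = torch.LongTensor([[vocab.to_index(c) for c in raw_chars]]).to(device)
-        model.eval()
-        with torch.no_grad():
-            # fastNLP 模型的 forward 返回 dict,按惯例预测值在 'pred' 这个 key 下
-            idx = model(words)['pred'].argmax(dim=-1).item()
-        # 用 target 词表把类别 index 还原为标签
-        return data_bundle.get_vocab('target').to_word(idx)
-
-    print(predict(model, data_bundle, '房间很大,整体感觉不错!'))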
diff --git a/docs/source/tutorials/extend_2_dist.rst b/docs/source/tutorials/extend_2_dist.rst deleted file mode 100644 index b8175306..00000000 --- a/docs/source/tutorials/extend_2_dist.rst +++ /dev/null @@ -1,223 +0,0 @@ -Distributed Parallel Training -============================= - -原理 ----- - -随着深度学习模型越来越复杂,单个GPU可能已经无法满足正常的训练。比如BERT等预训练模型,更是在多个GPU上训练得到的。为了使用多GPU训练,Pytorch框架已经提供了 -`nn.DataParallel `_ 以及 -`nn.DistributedDataParallel `_ 两种方式的支持。 -`nn.DataParallel `_ -很容易使用,但是却有着GPU负载不均衡,单进程速度慢等缺点,无法发挥出多GPU的全部性能。因此,分布式的多GPU训练方式 -`nn.DistributedDataParallel `_ -是更好的选择。然而,因为分布式训练的特点, -`nn.DistributedDataParallel `_ -常常难以理解和使用,也很难debug。所以,在使用分布式训练之前,需要理解它的原理。 - -在使用 -`nn.DistributedDataParallel `_ -时,模型会被复制到所有使用的GPU,通常每个GPU上存有一个模型,并被一个单独的进程控制。这样有N块GPU,就会产生N个进程。当训练一个batch时,这一batch会被分为N份,每个进程会使用batch的一部分进行训练,然后在必要时进行同步,并通过网络传输需要同步的数据。这时,只有模型的梯度会被同步,而模型的参数不会,所以能缓解大部分的网络传输压力,网络传输不再是训练速度的瓶颈之一。你可能会好奇,不同步模型的参数,怎么保证不同进程所训练的模型相同?只要每个进程初始的模型是同一个,具有相同的参数,而之后每次更新,都使用相同的梯度,就能保证梯度更新后的模型也具有相同的参数了。 - -为了让每个进程的模型初始化完全相同,通常这N个进程都是由单个进程复制而来的,这时需要对分布式的进程进行初始化,建立相互通信的机制。在 -Pytorch 中,我们用 -`distributed.init_process_group `_ -函数来完成,需要在程序开头就加入这一步骤。初始化完成后,每一个进程用唯一的编号 -``rank`` 进行区分,从 0 到 N-1递增,一般地,我们将 ``rank`` 为 0 -的进程当作主进程,而其他 ``rank`` 的进程为子进程。每个进程还要知道 -``world_size`` ,即分布式训练的总进程数 -N。训练时,每个进程使用batch的一部分,互相不能重复,这里通过 -`torch.utils.data.distributed.DistributedSampler `_ -来实现。 - -使用方式 --------- - -Pytorch的分布式训练使用起来非常麻烦,难以理解,可以从给出的\ `官方教程 `_ \ 中看到。而\ ``fastNLP`` -提供了 -``DistTrainer``\ ,将大部分的分布式训练的细节进行了封装,只需简单的改动训练代码,就能直接用上分布式训练。那么,具体怎么将普通的训练代码改成支持分布式训练的代码呢?下面我们来讲一讲分布式训练的完整流程。通常,分布式程序的多个进程是单个进程的复制。假设我们用N个GPU进行分布式训练,我们需要启动N个进程,这时,在命令行使用: - -.. code:: shell - - python -m torch.distributed.launch --nproc_per_node=N train_script.py --args - -其中\ ``N``\ 是需要启动的进程数,\ ``train_script.py``\ 为训练代码,\ ``--args``\ 是自定义的命令行参数。在启动了N个进程之后,如果我们在\ ``train_script.py``\ 的训练代码中正常配置,分布式训练就能正常进行。 - -此外,还可以使用环境变量\ ``CUDA_VISIBLE_DEVICES``\ 设置指定的GPU,比如在8卡机器上使用编号为4,5,6,7的4块GPU: - -.. code:: shell - - CUDA_VISIBLE_DEVICES=4,5,6,7 python -m torch.distributed.launch --nproc_per_node=N train_script.py --args - -在 ``train_script.py`` -训练代码中,有一些必须的配置。为了清晰的叙述,这里放一个简单的分布式训练代码,省去多余细节: - -.. 
code:: python - - import torch.distributed as dist - from fastNLP import DistTrainer, get_local_rank - import fastNLP as fnlp - - def main(options): - # options为训练所需的参数,batch_size等 - - set_seed(options.seed) - - # 初始化分布式进程 - dist.init_process_group('nccl') - - ######## 读取数据 - if get_local_rank() != 0: - dist.barrier() # 先让主进程(rank==0)先执行,进行数据处理,预训练模型参数下载等操作,然后保存cache - data = get_processed_data() - model = get_model(data.get_vocab("words"), data.get_vocab("target")) - if get_local_rank() == 0: - dist.barrier() # 主进程执行完后,其余进程开始读取cache - ######## - - # 初始化Trainer,训练等,与普通训练差别不大 - def get_trainer(model, data): - # 注意设置的callback有两种,一种只在主进程执行,一种在所有进程都执行 - callbacks_master = [fnlp.FitlogCallback()] - callbacks_all = [fnlp.WarmupCallback(warmup=options.warmup)] - trainer = DistTrainer( - save_path='save', - train_data=data.get_dataset("train"), - dev_data=data.get_dataset("dev"), - model=model, - loss=fnlp.CrossEntropyLoss(), - metrics=fnlp.AccuracyMetric(), - metric_key="acc", - optimizer=fnlp.AdamW(model.parameters(), lr=options.lr), - callbacks_master=callbacks_master, # 仅在主进程执行(如模型保存,日志记录) - callbacks_all=callbacks_all, # 在所有进程都执行(如梯度裁剪,学习率衰减) - batch_size_per_gpu=options.batch_size, # 指定每个GPU的batch大小 - update_every=options.update, - n_epochs=options.epochs, - use_tqdm=True, - ) - return trainer - - trainer = get_trainer(model, data) - trainer.train() - -指定进程编号 -^^^^^^^^^^^^ - -首先,为了区分不同的进程,初始时需要对每个进程传入\ ``rank``\ 。这里一般分为\ ``node_rank``\ 和\ ``local_rank``\ ,分别表示进程处于哪一机器以及同机器上处于第几进程。如果在单一机器上,\ ``node_rank``\ 可以省略。\ ``local_rank``\ 一般通过命令行参数\ ``--local_rank``\ 传入,为\ ``int``\ 类型。也可以通过环境变量传入\ ``local_rank``\ ,只需在\ ``torch.distributed.launch``\ 时,使用\ ``--use_env``\ 参数。无论哪种方式,在训练脚本中,都要获取到\ ``local_rank``\ ,用于初始化分布式通信,以及区分进程。如果你使用\ ``fastNLP``\ ,可以通过\ ``fastNLP.get_local_rank``\ 来得到\ ``local_rank``\ 。 - -初始化进程 -^^^^^^^^^^ - -在获取了\ ``local_rank``\ 等重要参数后,在开始训练前,我们需要建立不同进程的通信和同步机制。这时我们使用\ `torch.distributed.init_process_group `_ -来完成。通常,我们只需要 ``torch.distributed.init_process_group('nccl')`` -来指定使用\ ``nccl``\ 后端来进行同步即可。其他参数程序将读取环境变量自动设置。如果想手动设置这些参数,比如,使用TCP进行通信,可以设置: - -.. code:: python - - init_process_group('nccl', init_method='tcp://localhost:55678', - rank=args.rank, world_size=N) - -或者使用文件进行通信: - -.. code:: python - - init_process_group('nccl', init_method='file:///mnt/nfs/sharedfile', - world_size=N, rank=args.rank) - -注意,此时必须显式指定\ ``world_size``\ 和\ ``rank``\ ,具体可以参考 -`torch.distributed.init_process_group `_ -的使用文档。 - -在初始化分布式通信后,再初始化\ ``DistTrainer``\ ,传入数据和模型,就完成了分布式训练的代码。代码修改完成后,使用上面给出的命令行启动脚本,就能成功运行分布式训练。但是,如果数据处理,训练中的自定义操作比较复杂,则可能需要额外的代码修改。下面列出一些需要特别注意的地方,在使用分布式训练前,请仔细检查这些事项。 - -注意事项 --------- - -在执行完 -`torch.distributed.init_process_group `_ -后,我们就可以在不同进程间完成传输数据,进行同步等操作。这些操作都可以在\ `torch.distributed `_ -中找到。其中,最重要的是 -`barrier `_ -以及 -`get_rank `_ -操作。对于训练而言,我们关心的是读入数据,记录日志,模型初始化,模型参数更新,模型保存等操作。这些操作大多是读写操作,在多进程状态下,这些操作都必须小心进行,否则可能出现难以预料的bug。而在\ ``fastNLP``\ 中,大部分操作都封装在 -``DistTrainer`` 中,只需保证数据读入和模型初始化正确即可完成训练。 - -写操作 -^^^^^^ - -一般而言,读入操作需要在每一个进程都执行,因为每个进程都要使用读入的数据和模型参数进行训练。而写出操作只需在其中一个进程(通常为主进程)执行,因为每一个进程保存的模型都相同,都处于同一训练状态。所以,通常单进程的训练脚本中,只需要修改写出操作的部分,通过加入对进程\ ``rank``\ 的判断,仅让其中一个进程执行写操作: - -.. code:: python - - import torch.distributed as dist - - # 仅在主进程才执行 - if dist.get_rank() == 0: - do_write_op() # 一些写操作 - dist.barrier() # 确保写完成后,所有进程再执行(若进程无需读入写出的数据,可以省去) - -若使用\ ``fastNLP``\ 中的\ ``DistTrainer``\ ,也可以这样写: - - .. 
code:: python - - # 判断是否是主进程的trainer - if trainer.is_master: - do_write_op() - dist.barrier() - -读操作 -^^^^^^ - -然而有些时候,我们需要其中一个进程先执行某些操作,等这一进程执行完后,其它进程再执行这一操作。比如,在读入数据时,我们有时需要从网上下载,再处理,将处理好的数据保存,供反复使用。这时,我们不需要所有进程都去下载和处理数据,只需要主进程进行这些操作,其它进程等待。直到处理好的数据被保存后,其他进程再从保存位置直接读入数据。这里可以参考范例代码中的读取数据: - -.. code:: python - - if dist.get_rank() != 0: - dist.barrier() # 先让主进程(rank==0)先执行,进行数据处理,预训练模型参数下载等操作,然后保存cache - - # 这里会自动处理数据,或直接读取保存的cache - data = get_processed_data() - model = get_model(data.get_vocab("words"), data.get_vocab("target")) - - if dist.get_rank() == 0: - dist.barrier() # 主进程执行完后,其余进程开始读取cache - -也可以显式地将主进程和其它进程的操作分开: - -.. code:: python - - if dist.get_rank() == 0: - data = do_data_processing() # 数据处理 - dist.barrier() - else: - dist.barrier() - data = load_processed_data() # 读取cache - -日志操作 -^^^^^^^^ - -通常,我们需要知道训练的状态,如当前在第几个epoch,模型当前的loss等等。单进程训练时,我们可以直接使用\ ``print``\ 将这些信息输出到命令行或日志文件。然而,在多进程时,\ ``print``\ 会导致同样的信息在每一进程都输出,造成问题。这一问题和写操作类似,也可以通过判断进程的编号之后再输出。问题是,日志通常在训练的很多地方都有输出,逐一加上判断代码是非常繁琐的。这里,建议统一修改为: - -.. code:: python - - from fastNLP import logger - logger.info('....') # 替换print - -在\ ``DistTrainer``\ 中,主进程的\ ``logger``\ 级别为\ ``INFO``\ ,而其它进程为\ ``WARNING``\ 。这样级别为\ ``INFO``\ 的信息只会在主进程输出,不会造成日志重复问题。若需要其它进程中的信息,可以使用\ ``logger.warning``\ 。 - -注意,\ ``logger``\ 的级别设置只有初始化了\ ``DistTrainer``\ 后才能生效。如果想要在初始化进程后就生效,需要在分布式通信初始化后,执行\ ``init_logger_dist``\ 。 - -Callback -^^^^^^^^ - -``fastNLP``\ 的一个特色是可以使用\ ``Callback``\ 在训练时完成各种自定义操作。而这一特色在\ ``DistTrainer``\ 中得以保留。但是,这时需要特别注意\ ``Callback``\ 是否只需要在主进程执行。一些\ ``Callback``\ ,比如调整学习率,梯度裁剪等,会改变模型的状态,因此需要在所有进程上都执行,将它们通过\ ``callbacks_all``\ 参数传入\ ``DistTrainer``\ 。而另一些\ ``Callback``\ ,比如\ ``fitlog``\ ,保存模型,不会改变模型的状态,而是进行数据写操作,因此仅在主进程上执行,将它们通过\ ``callbacks_master``\ 传入。 - -在自定义\ ``Callback``\ 时,请遵循一个原则,改变训练或模型状态的操作在所有进程中执行,而数据写到硬盘请在主进程单独进行。这样就能避免进程间失去同步,或者磁盘写操作的冲突。 - -Debug -^^^^^ - -多进程的程序很难进行debug,如果出现问题,可以先参考报错信息进行处理。也可以在程序中多输出日志,定位问题。具体情况,具体分析。在debug时,要多考虑进程同步和异步的操作,判断问题是程序本身导致的,还是由进程间没有同步而产生。 - -其中,有一个常见问题是程序卡住不动。具体表现为训练暂停,程序没有输出,但是GPU利用率保持100%。这一问题是由进程失去同步导致的。这时只能手动\ ``kill``\ GPU上残留的进程,再检查代码。需要检查进程同步的位置,比如模型\ ``backward()``\ 时,\ ``barrier()``\ 时等。同时,也要检查主进程与其它进程操作不同的位置,比如存储模型,evaluate模型时等。注意,失去同步的位置可能并不是程序卡住的位置,所以需要细致的检查。
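-
-上文提到,分布式程序容易因进程失去同步而卡住。作为补充,下面给出一个最小的连通性自检脚本(仅为示意,并非 fastNLP 自带的工具):正式训练前,可以先用它确认各进程能否正常建立通信并完成一次 all_reduce;若该脚本也卡住,则说明问题出在环境或启动命令本身,而不是训练代码。
-
-.. code:: python
-
-    # 保存为 check_dist.py,并用以下命令启动:
-    # python -m torch.distributed.launch --nproc_per_node=N check_dist.py
-    import argparse
-    import torch
-    import torch.distributed as dist
-
-    parser = argparse.ArgumentParser()
-    parser.add_argument('--local_rank', type=int, default=0)  # 由launch自动传入
-    args = parser.parse_args()
-
-    dist.init_process_group('nccl')     # 其余参数由launch设置的环境变量提供
-    torch.cuda.set_device(args.local_rank)
-
-    # 每个进程贡献自己的rank,all_reduce(默认求和)之后应为 0+1+...+(N-1)
-    t = torch.tensor([float(dist.get_rank())], device='cuda')
-    dist.all_reduce(t)
-    print(f'rank {dist.get_rank()}/{dist.get_world_size()}: sum of ranks = {t.item()}')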
- """ - - fitlog.finish() # finish the logging - -我们推荐你保留除注释外的四行代码,它们有助于你的实验, -他们的具体用处参见文档 `用户 API `_ - -我们假定你要进行前两个教程中的实验,并已经把数据复制到了项目根目录下的 tutorial_sample_dataset.csv 文件中。 -现在我们编写如下的训练代码,使用 :class:`~fastNLP.core.callback.FitlogCallback` 进行实验记录保存:: - - import fitlog - from fastNLP import Vocabulary, Trainer, CrossEntropyLoss, AccuracyMetric - from fastNLP.io import CSVLoader - from fastNLP.models import CNNText - from fastNLP.core.callback import FitlogCallback - - fitlog.commit(__file__) # auto commit your codes - fitlog.add_hyper_in_file (__file__) # record your hyperparameters - - ############hyper - word_embed = 50 - dropout = 0.1 - ############hyper - - loader = CSVLoader(headers=('raw_sentence', 'label'), sep='\t') - dataset = loader.load("tutorial_sample_dataset.csv") - - dataset.apply(lambda x: x['raw_sentence'].lower(), new_field_name='sentence') - dataset.apply(lambda x: x['sentence'].split(), new_field_name='words', is_input=True) - dataset.apply(lambda x: int(x['label']), new_field_name='target', is_target=True) - vocab = Vocabulary(min_freq=2).from_dataset(dataset, field_name='words') - vocab.index_dataset(dataset, field_name='words',new_field_name='words') - - model = CNNText((len(vocab),word_embed), num_classes=5, padding=2, dropout=dropout) - - train_dev_data, test_data = dataset.split(0.1) - train_data, dev_data = train_dev_data.split(0.1) - - trainer = Trainer(model=model, train_data=train_data, dev_data=dev_data, - loss=CrossEntropyLoss(), metrics=AccuracyMetric(), - callbacks=[FitlogCallback(test_data)]) - trainer.train() - - fitlog.finish() # finish the logging - -用命令行在项目目录下执行 `python main.py` 之后,输出结果如下:: - - Auto commit by fitlog - input fields after batch(if batch size is 2): - words: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 11]) - target fields after batch(if batch size is 2): - target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) - - training epochs started 2019-05-23-21-11-51 - Evaluation at Epoch 1/10. Step:2/20. AccuracyMetric: acc=0.285714 - - Evaluation at Epoch 2/10. Step:4/20. AccuracyMetric: acc=0.285714 - - Evaluation at Epoch 3/10. Step:6/20. AccuracyMetric: acc=0.285714 - - Evaluation at Epoch 4/10. Step:8/20. AccuracyMetric: acc=0.428571 - - Evaluation at Epoch 5/10. Step:10/20. AccuracyMetric: acc=0.571429 - - Evaluation at Epoch 6/10. Step:12/20. AccuracyMetric: acc=0.571429 - - Evaluation at Epoch 7/10. Step:14/20. AccuracyMetric: acc=0.285714 - - Evaluation at Epoch 8/10. Step:16/20. AccuracyMetric: acc=0.142857 - - Evaluation at Epoch 9/10. Step:18/20. AccuracyMetric: acc=0.285714 - - Evaluation at Epoch 10/10. Step:20/20. AccuracyMetric: acc=0.571429 - - - In Epoch:5/Step:10, got best dev performance:AccuracyMetric: acc=0.571429 - Reloaded the best model. - -现在,我们在项目目录下输入 `fitlog log logs` ,命令行会启动一个网页,默认 url 为 ``0.0.0.0:5000`` 。 -我们在浏览器中打开网页,可以看到如下的统计表格: - -.. image:: ../figures/fitlogTable.png - -如果我们点击action中的最后一个键钮,可以看到详细的 loss 图: - -.. 
image:: ../figures/fitlogChart.png - -更多的教程还在编写中,敬请期待~ \ No newline at end of file diff --git a/docs/source/tutorials/tutorial_1_data_preprocess.rst b/docs/source/tutorials/tutorial_1_data_preprocess.rst deleted file mode 100644 index d9132546..00000000 --- a/docs/source/tutorials/tutorial_1_data_preprocess.rst +++ /dev/null @@ -1,172 +0,0 @@ -============================== -fastNLP中的DataSet -============================== - -:class:`~fastNLP.DataSet` 是fastNLP用于承载数据的类,一般训练集、验证集和测试集会被加载为三个单独的 :class:`~fastNLP.DataSet` 对象。 - -:class:`~fastNLP.DataSet` 中的数据组织形式类似一个表格,比如下面 :class:`~fastNLP.DataSet` 一共有3列,列在fastNLP中被称为field。 - -.. csv-table:: - :header: "raw_chars", "chars", "seq_len" - - "历任公司副总经理、总工程师,", "[历 任 公 司 副 总 经 理 、 总 工 程 师 ,]", 6 - "Third instance .", "[Third, instance, .]", 3 - "...", "[...]", "..." - -每一行是一个instance (在fastNLP中被称为 :mod:`~fastNLP.core.Instance` ), -每一列是一个field (在fastNLP中称为 :mod:`~fastNLP.core.FieldArray` )。 - -DataSet的构建 ------------------------------ - -我们使用传入字典的方式初始化一个DataSet,这是 :class:`~fastNLP.DataSet` 初始化的最基础的方式 - -.. code-block:: python - - from fastNLP import DataSet - data = {'raw_words':["This is the first instance .", "Second instance .", "Third instance ."], - 'words': [['this', 'is', 'the', 'first', 'instance', '.'], ['Second', 'instance', '.'], ['Third', 'instance', '.']], - 'seq_len': [6, 3, 3]} - dataset = DataSet(data) - # 传入的dict的每个key的value应该为具有相同长度的list - print(dataset) - -输出为:: - - +------------------------------+------------------------------------------------+---------+ - | raw_words | words | seq_len | - +------------------------------+------------------------------------------------+---------+ - | This is the first instance . | ['this', 'is', 'the', 'first', 'instance', ... | 6 | - | Second instance . | ['Second', 'instance', '.'] | 3 | - | Third instance . | ['Third', 'instance', '.'] | 3 | - +------------------------------+------------------------------------------------+---------+ - - -我们还可以使用 :func:`~fastNLP.DataSet.append` 方法向DataSet增加数据 - -.. code-block:: python - - from fastNLP import DataSet - from fastNLP import Instance - dataset = DataSet() - instance = Instance(raw_words="This is the first instance", - words=['this', 'is', 'the', 'first', 'instance', '.'], - seq_len=6) - dataset.append(instance) - # 可以继续append更多内容,但是append的instance应该和前面的instance拥有完全相同的field - -另外,我们还可以用 :class:`~fastNLP.Instance` 数组的方式构建DataSet - -.. code-block:: python - - from fastNLP import DataSet - from fastNLP import Instance - dataset = DataSet([ - Instance(raw_words="This is the first instance", - words=['this', 'is', 'the', 'first', 'instance', '.'], - seq_len=6), - Instance(raw_words="Second instance .", - words=['Second', 'instance', '.'], - seq_len=3) - ]) - -在初步构建完DataSet之后,我们可以通过 `for` 循环遍历 :class:`~fastNLP.DataSet` 中的内容。 - -.. code-block:: python - - for instance in dataset: - # do something - -DataSet的删除 ------------------------------ - -FastNLP 同样提供了多种删除数据的方法 :func:`~fastNLP.DataSet.drop` 、 :func:`~fastNLP.DataSet.delete_instance` 和 :func:`~fastNLP.DataSet.delete_field` -我们先用下面的代码生成一个只有两列的样例DataSet,第一列的值分别为 -5 ~ 4,第二列的值均为 0. - -.. code-block:: python - - from fastNLP import DataSet - dataset = DataSet({'a': range(-5, 5), 'c': [0]*10}) - -然后我们使用三种方法进行删除,删除后的DataSet仅包含名为 c 的一列,包含4个值为0 的数据。 - -.. 
code-block:: python - - # 不改变dataset,生成一个删除了满足条件的instance的新 DataSet - dropped_dataset = dataset.drop(lambda ins:ins['a']<0, inplace=False) - # 在dataset中删除满足条件的instance - dataset.drop(lambda ins:ins['a']<0) - # 删除第3个instance - dataset.delete_instance(2) - # 删除名为'a'的field - dataset.delete_field('a') - - -简单的数据预处理 ------------------------------ - -因为 fastNLP 中的数据是按列存储的,所以大部分的数据预处理操作是以列( :mod:`~fastNLP.core.field` )为操作对象的。 -首先,我们可以检查特定名称的 :mod:`~fastNLP.core.field` 是否存在,并对其进行改名。 - -.. code-block:: python - - # 检查是否存在名为'a'的field - dataset.has_field('a') # 或 ('a' in dataset) - # 将名为'c'的field改名为'b' - dataset.rename_field('c', 'b') - # DataSet的长度 - len(dataset) - -其次,我们可以使用 :func:`~fastNLP.DataSet.apply` 或 :func:`~fastNLP.DataSet.apply_field` 进行数据预处理操作。 -使用以上的两个方法需要传入一个函数,函数可以是 lambda 匿名函数,也可以是完整定义的函数,fastNLP将对DataSet遍历地应用该函数。 -同时,你还可以用 ``new_field_name`` 参数指定函数返回值组成的新 :mod:`~fastNLP.core.field` 的名称。 - -.. code-block:: python - - from fastNLP import DataSet - data = {'raw_words':["This is the first instance .", "Second instance .", "Third instance ."]} - dataset = DataSet(data) - - # 将句子分成单词形式, 详见DataSet.apply()方法 - dataset.apply(lambda ins: ins['raw_words'].split(), new_field_name='words') - - # 或使用DataSet.apply_field() - dataset.apply_field(lambda sent:sent.split(), field_name='raw_words', new_field_name='words') - - # 除了匿名函数,也可以定义函数传递进去 - def get_words(instance): - sentence = instance['raw_words'] - words = sentence.split() - return words - dataset.apply(get_words, new_field_name='words') - -除了手动处理数据集之外,你还可以使用 fastNLP 提供的各种 :class:`~fastNLP.io.Loader` 和 :class:`~fastNLP.io.Pipe` 来进行数据处理。 -详细请参考这篇教程 :doc:`使用Loader和Pipe处理数据 ` 。 - - -fastNLP中field的命名习惯 ------------------------------ - -在英文任务中,fastNLP常用的field名称有: - - - **raw_words**: 表示的是原始的str。例如"This is a demo sentence ."。存在多个raw_words的情况,例如matching任务,它们会被定义为raw_words0, raw_words1。但在conll格式下,raw_words列也可能为["This", "is", "a", "demo", "sentence", "."]的形式。 - - **words**: 表示的是已经tokenize后的词语。例如["This", "is", "a", "demo", "sentence"], 但由于str并不能直接被神经网络所使用,所以words中的内容往往被转换为int,如[3, 10, 4, 2, 7, ...]等。多列words的情况,会被命名为words0, words1 - - **target**: 表示目标值。分类场景下,只有一个值;序列标注场景下是一个序列。 - - **seq_len**: 一般用于表示words列的长度 - -在中文任务中,fastNLP常用的field名称有: - - - **raw_words**: 如果原始汉字序列中已经包含了词语的边界,则该列称为raw_words。如"上海 浦东 开发 与 法制 建设 同步"。 - - **words**: 表示切分后的词语序列。例如["上海", "浦东", "开发", "与", "法制", "建设", ...]或[2, 3, 4, ...] - - **raw_chars**: 表示的是原始的连续汉字序列。例如"这是一个示例。" - - **chars**: 表示已经切分为单独的汉字的序列。例如["这", "是", "一", "个", "示", "例", "。"]。但由于神经网络不能识别汉字,所以一般该列会被转为int形式,如[3, 4, 5, 6, ...]。 - - **target**: 表示目标值。分类场景下,只有一个值;序列标注场景下是一个序列 - - **seq_len**: 表示输入序列的长度 - ----------------------------------- -代码下载 ----------------------------------- - -.. raw:: html - - 点击下载 IPython Notebook 文件
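-
-最后,把本教程的内容串起来:下面这个小例子从 raw_words 出发,依次构造出符合上述命名习惯的 words 与 seq_len 两个field(仅为示意,数据是随手构造的):
-
-.. code-block:: python
-
-    from fastNLP import DataSet
-
-    dataset = DataSet({'raw_words': ["This is the first instance .", "Second instance ."]})
-    # 切分得到 words 列
-    dataset.apply_field(lambda sent: sent.split(), field_name='raw_words', new_field_name='words')
-    # 基于 words 列的长度生成 seq_len 列
-    dataset.add_seq_len('words')
-    print(dataset)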
diff --git a/docs/source/tutorials/tutorial_2_vocabulary.rst b/docs/source/tutorials/tutorial_2_vocabulary.rst deleted file mode 100644 index e8855d99..00000000 --- a/docs/source/tutorials/tutorial_2_vocabulary.rst +++ /dev/null @@ -1,140 +0,0 @@ -============================== -fastNLP中的Vocabulary -============================== - -:class:`~fastNLP.Vocabulary` 是包含字或词与index关系的类,用于将文本转换为index。 - - -构建Vocabulary ------------------------------ - -.. code-block:: python - - from fastNLP import Vocabulary - - vocab = Vocabulary() - vocab.add_word_lst(['复', '旦', '大', '学']) # 加入新的字 - vocab.add_word('上海') # `上海`会作为一个整体 - vocab.to_index('复') # 应该会为2,因为pad占了0、unk占了1 - vocab.to_index('我') # 会输出1,Vocabulary中默认pad的index为0, unk(没有找到的词)的index为1 - - # 在构建target的Vocabulary时,词表中应该用不上pad和unk,可以通过以下的初始化 - vocab = Vocabulary(unknown=None, padding=None) - vocab.add_word_lst(['positive', 'negative']) - vocab.to_index('positive') # 输出0 - vocab.to_index('neutral') # 会报错,因为没有unk这种情况 - -除了通过以上的方式建立词表,Vocabulary还可以通过使用下面的函数直接从 :class:`~fastNLP.DataSet` 中的某一列建立词表以及将该列转换为index - -.. code-block:: python - - from fastNLP import Vocabulary - from fastNLP import DataSet - - dataset = DataSet({'chars': [ - ['今', '天', '天', '气', '很', '好', '。'], - ['被', '这', '部', '电', '影', '浪', '费', '了', '两', '个', '小', '时', '。'] - ], - 'target': ['neutral', 'negative'] - }) - - vocab = Vocabulary() - # 从该dataset中的chars列建立词表 - vocab.from_dataset(dataset, field_name='chars') - # 使用vocabulary将chars列转换为index - vocab.index_dataset(dataset, field_name='chars') - - target_vocab = Vocabulary(padding=None, unknown=None) - target_vocab.from_dataset(dataset, field_name='target') - target_vocab.index_dataset(dataset, field_name='target') - print(dataset) - -输出内容为:: - - +---------------------------------------------------+--------+ - | chars | target | - +---------------------------------------------------+--------+ - | [4, 2, 2, 5, 6, 7, 3] | 0 | - | [8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 3] | 1 | - +---------------------------------------------------+--------+ - - -一些使用tips ------------------------------ - -在使用from_dataset()函数建立词表时,将测试集和验证集放入参数no_create_entry_dataset中,如下所示 - -.. 
code-block:: python - - from fastNLP import Vocabulary - from fastNLP import DataSet - - tr_data = DataSet({'chars': [ - ['今', '天', '心', '情', '很', '好', '。'], - ['被', '这', '部', '电', '影', '浪', '费', '了', '两', '个', '小', '时', '。'] - ], - 'target': ['positive', 'negative'] - }) - dev_data = DataSet({'chars': [ - ['住', '宿', '条', '件', '还', '不', '错'], - ['糟', '糕', '的', '天', '气', ',', '无', '法', '出', '行', '。'] - ], - 'target': ['positive', 'negative'] - }) - - vocab = Vocabulary() - # 将验证集或者测试集在建立词表时放入no_create_entry_dataset这个参数中。 - vocab.from_dataset(tr_data, field_name='chars', no_create_entry_dataset=[dev_data]) - -:class:`~fastNLP.Vocabulary` 中的 `no_create_entry` ,如果您并不关心具体的原理,您可以直接采取以下的建议:在添加来自于非训练集的词的时候将该参数置为True, 或将非训练集数据 -传入 `no_create_entry_dataset` 参数。它们的意义是在接下来的模型会使用pretrain的embedding(包括glove, word2vec, elmo与bert)且会finetune的 -情况下,如果仅使用来自于train的数据建立vocabulary,会导致只出现在test与dev中的词语无法充分利用到来自于预训练embedding的信息(因为它们 -会被认为是unk),所以在建立词表的时候将test与dev考虑进来会使得最终的结果更好。 - -通过与fastNLP中的各种Embedding配合使用,会有如下的效果, -如果一个词出现在了train中,但是没在预训练模型中,embedding会被随机初始化,且为它单独分配一个vector,如果finetune embedding的话, -这个词在更新之后可能会有更好的表示; 而如果这个词仅出现在了dev或test中,那么就不能为它们单独建立vector,而应该让它指向unk这个vector的 -值(当unk的值更新时,这个词也使用的是更新之后的vector)。所以被认为是no_create_entry的token,将首先从预训练的词表中寻找它的表示,如 -果找到了,就使用该表示; 如果没有找到,则认为该词的表示应该为unk的表示。 - -下面我们结合部分 :class:`~fastNLP.embeddings.StaticEmbedding` 的例子来说明下该值造成的影响,如果您对 :class:`~fastNLP.embeddings.StaticEmbedding` 不太了解,您可以先参考 :doc:`使用Embedding模块将文本转成向量 ` 部分再来阅读该部分 - -.. code-block:: python - - import torch - from fastNLP.embeddings import StaticEmbedding - from fastNLP import Vocabulary - - vocab = Vocabulary() - vocab.add_word('train') - vocab.add_word('only_in_train') # 仅在train出现,但肯定在预训练词表中不存在 - vocab.add_word('test', no_create_entry=True) # 该词只在dev或test中出现 - vocab.add_word('only_in_test', no_create_entry=True) # 这个词在预训练的词表中找不到 - - embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-6b-50d') - print(embed(torch.LongTensor([vocab.to_index('train')]))) - print(embed(torch.LongTensor([vocab.to_index('only_in_train')]))) - print(embed(torch.LongTensor([vocab.to_index('test')]))) - print(embed(torch.LongTensor([vocab.to_index('only_in_test')]))) - print(embed(torch.LongTensor([vocab.unknown_idx]))) - -输出结果(只截取了部分vector):: - - tensor([[ 0.9497, 0.3433, 0.8450, -0.8852, ...]], grad_fn=) # train,en-glove-6b-50d,找到了该词 - tensor([[ 0.0540, -0.0557, -0.0514, -0.1688, ...]], grad_fn=) # only_in_train,en-glove-6b-50d,使用了随机初始化 - tensor([[ 0.1318, -0.2552, -0.0679, 0.2619, ...]], grad_fn=) # test,在en-glove-6b-50d中找到了这个词 - tensor([[0., 0., 0., 0., 0., ...]], grad_fn=) # only_in_test, en-glove-6b-50d中找不到这个词,使用unk的vector - tensor([[0., 0., 0., 0., 0., ...]], grad_fn=) # unk,使用zero初始化 - -首先train和test都能够从预训练中找到对应的vector,所以它们是各自的vector表示; only_in_train在预训练中找不到,StaticEmbedding为它 -新建了一个entry,所以它有一个单独的vector; 而only_in_test在预训练中找不到该词,因此被指向了unk的值(fastNLP用零向量初始化unk),与最后一行unk的 -表示相同。 - - ----------------------------------- -代码下载 ----------------------------------- - -.. raw:: html - - 点击下载 IPython Notebook 文件
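-
-最后补充一点:Vocabulary 支持词语与 index 之间的双向转换,调试时可以用它检查转换是否符合预期(仅为示意):
-
-.. code-block:: python
-
-    from fastNLP import Vocabulary
-
-    vocab = Vocabulary()
-    vocab.add_word_lst(['复', '旦', '大', '学'])
-
-    idx = vocab.to_index('复')
-    print(idx, vocab.to_word(idx))               # index 与词可以互相还原
-    print(len(vocab))                            # 词表大小,包含默认的 pad 与 unk
-    print(vocab.padding_idx, vocab.unknown_idx)  # 默认分别为 0 和 1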
diff --git a/docs/source/tutorials/tutorial_3_embedding.rst b/docs/source/tutorials/tutorial_3_embedding.rst deleted file mode 100644 index 147fbb8c..00000000 --- a/docs/source/tutorials/tutorial_3_embedding.rst +++ /dev/null @@ -1,462 +0,0 @@ -========================================= -使用Embedding模块将文本转成向量 -========================================= - -这一部分是一个关于在fastNLP当中使用embedding的教程。 - -教程目录: - - - `Part I: embedding介绍`_ - - `Part II: 使用预训练的静态embedding`_ - - `Part III: 使用随机初始化的embedding`_ - - `Part IV: ELMo Embedding`_ - - `Part V: Bert Embedding`_ - - `Part VI: 使用character-level的embedding`_ - - `Part VII: 叠加使用多个embedding`_ - - `Part VIII: Embedding的其它说明`_ - - `Part IX: StaticEmbedding的使用建议`_ - - - -Part I: embedding介绍 ---------------------------------------- - -Embedding是一种词嵌入技术,可以将字或者词转换为实向量。目前使用较多的预训练词嵌入有word2vec, fasttext, glove, character embedding, -elmo以及bert。 -但使用这些词嵌入方式的时候都需要做一些加载上的处理,比如预训练的word2vec, fasttext以及glove都有着超过几十万个词语的表示,但一般任务大概 -只会用到其中的几万个词,如果直接加载所有的词汇,会导致内存占用变大以及训练速度变慢,需要从预训练文件中抽取本次实验的用到的词汇;而对于英文的 -elmo和character embedding, 需要将word拆分成character才能使用;Bert的使用更是涉及到了Byte pair encoding(BPE)相关的内容。为了方便 -大家的使用,fastNLP通过 :class:`~fastNLP.Vocabulary` 统一了不同embedding的使用。下面我们将讲述一些例子来说明一下 - - - -Part II: 使用预训练的静态embedding ---------------------------------------- - -在fastNLP中,加载预训练的word2vec, glove以及fasttext都使用的是 :class:`~fastNLP.embeddings.StaticEmbedding` 。另外,为了方便大家的 -使用,fastNLP提供了多种静态词向量的自动下载并缓存(默认缓存到~/.fastNLP/embeddings文件夹下)的功能,支持自动下载的预训练向量可以在 -`下载文档 `_ 查看。 - -.. code-block:: python - - import torch - from fastNLP.embeddings import StaticEmbedding - from fastNLP import Vocabulary - - vocab = Vocabulary() - vocab.add_word_lst("this is a demo .".split()) - - embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-6b-50d') - - words = torch.LongTensor([[vocab.to_index(word) for word in "this is a demo .".split()]]) # 将文本转为index - print(embed(words).size()) # StaticEmbedding的使用和pytorch的nn.Embedding是类似的 - -输出为:: - - torch.Size([1, 5, 50]) - -fastNLP的StaticEmbedding在初始化之后,就和pytorch中的Embedding是类似的了。 :class:`~fastNLP.embeddings.StaticEmbedding` 的初始化 -主要是从model_dir_or_name提供的词向量中抽取出 :class:`~fastNLP.Vocabulary` 中词语的vector。 - -除了可以通过使用预先提供的Embedding, :class:`~fastNLP.embeddings.StaticEmbedding` 也支持加载本地的预训练词向量,glove, word2vec以及 -fasttext格式的。通过将model_dir_or_name修改为本地的embedding文件路径,即可使用本地的embedding。 - - -Part III: 使用随机初始化的embedding ---------------------------------------- - -有时候需要使用随机初始化的Embedding,也可以通过使用 :class:`~fastNLP.embeddings.StaticEmbedding` 获得。只需要将model_dir_or_name -置为None,且传入embedding_dim,如下例所示 - -.. code-block:: python - - from fastNLP.embeddings import StaticEmbedding - from fastNLP import Vocabulary - - vocab = Vocabulary() - vocab.add_word_lst("this is a demo .".split()) - - embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=30) - - words = torch.LongTensor([[vocab.to_index(word) for word in "this is a demo .".split()]]) - print(embed(words).size()) - -输出为:: - - torch.Size([1, 5, 30]) - - - -Part IV: ELMo Embedding ------------------------------------------------------------ - -在fastNLP中,我们提供了ELMo和BERT的embedding: :class:`~fastNLP.embeddings.ElmoEmbedding` -和 :class:`~fastNLP.embeddings.BertEmbedding` 。可自动下载的ElmoEmbedding可以 -从 `下载文档 `_ 找到。 - -与静态embedding类似,ELMo的使用方法如下: - -.. 
code-block:: python - - from fastNLP.embeddings import ElmoEmbedding - from fastNLP import Vocabulary - - vocab = Vocabulary() - vocab.add_word_lst("this is a demo .".split()) - - embed = ElmoEmbedding(vocab, model_dir_or_name='en-small', requires_grad=False) - words = torch.LongTensor([[vocab.to_index(word) for word in "this is a demo .".split()]]) - print(embed(words).size()) - -输出为:: - - torch.Size([1, 5, 256]) - -也可以输出多层的ELMo结果,fastNLP将在不同层的结果在最后一维上拼接,下面的代码需要在上面的代码执行结束之后执行 - -.. code-block:: python - - embed = ElmoEmbedding(vocab, model_dir_or_name='en-small', requires_grad=False, layers='1,2') - print(embed(words).size()) - -输出为:: - - torch.Size([1, 5, 512]) - -另外,根据 `Deep contextualized word representations `_ ,不同层之间使用可学习的权重可以使得ELMo的效果更好,在fastNLP中可以通过以下的初始化 -实现3层输出的结果通过可学习的权重进行加法融合。 - -.. code-block:: python - - embed = ElmoEmbedding(vocab, model_dir_or_name='en-small', requires_grad=True, layers='mix') - print(embed(words).size()) # 三层输出按照权重element-wise的加起来 - -输出为:: - - torch.Size([1, 5, 256]) - - - -Part V: Bert Embedding ------------------------------------------------------------ - -虽然Bert并不算严格意义上的Embedding,但通过将Bert封装成Embedding的形式将极大减轻使用的复杂程度。可自动下载的Bert Embedding可以 -从 `下载文档 `_ 找到。我们将使用下面的例子讲述一下 -BertEmbedding的使用 - -.. code-block:: python - - from fastNLP.embeddings import BertEmbedding - from fastNLP import Vocabulary - - vocab = Vocabulary() - vocab.add_word_lst("this is a demo .".split()) - - embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased') - words = torch.LongTensor([[vocab.to_index(word) for word in "this is a demo .".split()]]) - print(embed(words).size()) - -输出为:: - - torch.Size([1, 5, 768]) - -可以通过申明使用指定层数的output也可以使用多层的output,下面的代码需要在上面的代码执行结束之后执行 - -.. code-block:: python - - # 使用后面两层的输出 - embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased', layers='10,11') - print(embed(words).size()) # 结果将是在最后一维做拼接 - -输出为:: - - torch.Size([1, 5, 1536]) - -在Bert中还存在两个特殊的字符[CLS]和[SEP],默认情况下这两个字符是自动加入并且在计算结束之后会自动删除,以使得输入的序列长度和输出的序列 -长度是一致的,但是有些分类的情况,必须需要使用[CLS]的表示,这种情况可以通过在初始化时申明一下需要保留[CLS]的表示,如下例所示 - -.. code-block:: python - - embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased', layers='-1', include_cls_sep=True) - print(embed(words).size()) # 结果将在序列维度上增加2 - # 取出句子的cls表示 - cls_reps = embed(words)[:, 0] # shape: [batch_size, 768] - -输出为:: - - torch.Size([1, 7, 768]) - -在英文Bert模型中,一个英文单词可能会被切分为多个subword,例如"fairness"会被拆分为 ``["fair", "##ness"]`` ,这样一个word对应的将有两个输出, -:class:`~fastNLP.embeddings.BertEmbedding` 会使用pooling方法将一个word的subword的表示合并成一个vector,通过pool_method可以控制 -该pooling方法,支持的有"first"(即使用fair的表示作为fairness的表示), "last"(使用##ness的表示作为fairness的表示), "max"(对fair和 -##ness在每一维上做max),"avg"(对fair和##ness每一维做average)。 - -.. code-block:: python - - embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased', layers='-1', pool_method='max') - print(embed(words).size()) - -输出为:: - - torch.Size([1, 5, 768]) - -另外,根据 `BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding `_ , -Bert在针对具有两句话的任务时(如matching,Q&A任务),句子之间通过[SEP]拼接起来,前一句话的token embedding为0, -后一句话的token embedding为1。BertEmbedding能够自动识别句子中间的[SEP]来正确设置对应的token_type_id的。 - -.. code-block:: python - - vocab = Vocabulary() - vocab.add_word_lst("this is a demo . [SEP] another sentence .".split()) - - embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased', layers='-1', pool_method='max') - words = torch.LongTensor([[vocab.to_index(word) for word in "this is a demo . 
[SEP] another sentence .".split()]]) - print(embed(words).size()) - -输出为:: - - torch.Size([1, 9, 768]) - -在多个[SEP]的情况下,将会使token_type_id不断0,1循环。比如"first sentence [SEP] second sentence [SEP] third sentence", 它们的 -token_type_id将是[0, 0, 0, 1, 1, 1, 0, 0]。但请注意[SEP]一定要大写的,不能是[sep],否则无法识别。 - -更多 :class:`~fastNLP.embedding.BertEmbedding` 的使用,请参考 :doc:`/tutorials/extend_1_bert_embedding` - - -Part VI: 使用character-level的embedding ------------------------------------------------------ - -除了预训练的embedding以外,fastNLP还提供了两种Character Embedding: :class:`~fastNLP.embeddings.CNNCharEmbedding` 和 -:class:`~fastNLP.embeddings.LSTMCharEmbedding` 。一般在使用character embedding时,需要在预处理的时候将word拆分成character,这 -会使得预处理过程变得非常繁琐。在fastNLP中,使用character embedding也只需要传入 :class:`~fastNLP.Vocabulary` 即可,而且该 -Vocabulary与其它Embedding使用的Vocabulary是一致的,下面我们看两个例子。 - -CNNCharEmbedding的使用例子如下: - -.. code-block:: python - - from fastNLP.embeddings import CNNCharEmbedding - from fastNLP import Vocabulary - - vocab = Vocabulary() - vocab.add_word_lst("this is a demo .".split()) - - # character的embedding维度大小为50,返回的embedding结果维度大小为64。 - embed = CNNCharEmbedding(vocab, embed_size=64, char_emb_size=50) - words = torch.LongTensor([[vocab.to_index(word) for word in "this is a demo .".split()]]) - print(embed(words).size()) - -输出为:: - - torch.Size([1, 5, 64]) - -与CNNCharEmbedding类似,LSTMCharEmbedding的使用例子如下: - -.. code-block:: python - - from fastNLP.embeddings import LSTMCharEmbedding - from fastNLP import Vocabulary - - vocab = Vocabulary() - vocab.add_word_lst("this is a demo .".split()) - - # character的embedding维度大小为50,返回的embedding结果维度大小为64。 - embed = LSTMCharEmbedding(vocab, embed_size=64, char_emb_size=50) - words = torch.LongTensor([[vocab.to_index(word) for word in "this is a demo .".split()]]) - print(embed(words).size()) - -输出为:: - - torch.Size([1, 5, 64]) - - -Part VII: 叠加使用多个embedding ------------------------------------------------------ - -单独使用Character Embedding往往效果并不是很好,需要同时结合word embedding。在fastNLP中可以通过 :class:`~fastNLP.embeddings.StackEmbedding` -来叠加embedding,具体的例子如下所示 - -.. code-block:: python - - from fastNLP.embeddings import StaticEmbedding, StackEmbedding, CNNCharEmbedding - from fastNLP import Vocabulary - - vocab = Vocabulary() - vocab.add_word_lst("this is a demo .".split()) - - word_embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-6b-50d') - char_embed = CNNCharEmbedding(vocab, embed_size=64, char_emb_size=50) - embed = StackEmbedding([word_embed, char_embed]) - - words = torch.LongTensor([[vocab.to_index(word) for word in "this is a demo .".split()]]) - print(embed(words).size()) # 输出embedding的维度为50+64=114 - -输出为:: - - torch.Size([1, 5, 114]) - -:class:`~fastNLP.embeddings.StaticEmbedding` , :class:`~fastNLP.embeddings.ElmoEmbedding` , -:class:`~fastNLP.embeddings.CNNCharEmbedding` , :class:`~fastNLP.embeddings.BertEmbedding` 等都可以互相拼接。 -:class:`~fastNLP.embeddings.StackEmbedding` 的使用也是和其它Embedding是一致的,即输出index返回对应的表示。但能够拼接起来的Embedding -必须使用同样的 :class:`~fastNLP.Vocabulary` ,因为只有使用同样的 :class:`~fastNLP.Vocabulary` 才能保证同一个index指向的是同一个词或字 - - - -Part VIII: Embedding的其它说明 ------------------------------------------------------------ - -(1) 获取各种Embedding的dimension - -.. 
code-block:: python
-
-    from fastNLP.embeddings import *
-
-    vocab = Vocabulary()
-    vocab.add_word_lst("this is a demo .".split())
-
-    static_embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-6b-50d')
-    print(static_embed.embedding_dim)  # 50
-    char_embed = CNNCharEmbedding(vocab, embed_size=30)
-    print(char_embed.embedding_dim)  # 30
-    elmo_embed_1 = ElmoEmbedding(vocab, model_dir_or_name='en-small', layers='2')
-    print(elmo_embed_1.embedding_dim)  # 256
-    elmo_embed_2 = ElmoEmbedding(vocab, model_dir_or_name='en-small', layers='1,2')
-    print(elmo_embed_2.embedding_dim)  # 512
-    bert_embed_1 = BertEmbedding(vocab, layers='-1', model_dir_or_name='en-base-cased')
-    print(bert_embed_1.embedding_dim)  # 768
-    bert_embed_2 = BertEmbedding(vocab, layers='2,-1', model_dir_or_name='en-base-cased')
-    print(bert_embed_2.embedding_dim)  # 1536
-    stack_embed = StackEmbedding([static_embed, char_embed])
-    print(stack_embed.embedding_dim)  # 80
-
-(2) Controlling whether an Embedding's weights are updated
-
-.. code-block:: python
-
-    from fastNLP.embeddings import *
-
-    vocab = Vocabulary()
-    vocab.add_word_lst("this is a demo .".split())
-
-    embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased', requires_grad=True)  # declared trainable at initialization
-    embed.requires_grad = False  # switch the BertEmbedding weights to non-trainable
-
-(3) word_dropout and dropout in the various Embeddings
-
-All Embeddings in fastNLP accept word_dropout and dropout parameters. word_dropout is the probability with which an input word is replaced by the unk index; this both lets unk get trained and provides some regularization. dropout is applied after the word representation has been obtained, and zeroes each dimension of the representation with the given probability.
-
-If you use :class:`~fastNLP.embeddings.StackEmbedding` and need word_dropout, it is recommended to set word_dropout on the :class:`~fastNLP.embeddings.StackEmbedding` itself.
-
-
-
-Part IX: Usage tips for StaticEmbedding
------------------------------------------------------------
-
-For English named entity recognition (NER), `Named Entity Recognition with Bidirectional LSTM-CNNs `_ points out that using a CNN character embedding together with a word embedding brings a sizeable improvement. As you saw in the previous part, fastNLP supports combining a :class:`~fastNLP.embeddings.CNNCharEmbedding` and a :class:`~fastNLP.embeddings.StaticEmbedding` into one :class:`~fastNLP.embeddings.StackEmbedding`. Used this way, the preprocessing must not lowercase the text (the character embedding needs the case information) and must not map words below some frequency threshold to unk (the character embedding needs their character shapes). However, the vocabularies of some pretrained word embeddings used by :class:`~fastNLP.embeddings.StaticEmbedding` contain only lowercase words, and some low-frequency words never occur in the pretraining data and are better excluded. In short: (1) the character embedding must preserve case, while the pretrained word vectors may be lowercase-only; (2) the character embedding must keep every character shape, while the static embedding benefits from a minimum-frequency threshold to learn better representations.
-
-(1) How fastNLP handles the case problem
-
-fastNLP addresses this with a lower parameter added to :class:`~fastNLP.embeddings.StaticEmbedding`, as the following example shows
-
-.. code-block:: python
-
-    from fastNLP.embeddings import StaticEmbedding
-    from fastNLP import Vocabulary
-
-    vocab = Vocabulary().add_word_lst("The the a A".split())
-    # a randomly initialized StaticEmbedding is used below; the behaviour with pretrained vectors is the same
-    embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=5)
-    print(embed(torch.LongTensor([vocab.to_index('The')])))
-    print(embed(torch.LongTensor([vocab.to_index('the')])))
-
-The output is::
-
-    tensor([[-0.4685,  0.4572,  0.5159, -0.2618, -0.6871]], grad_fn=<EmbeddingBackward>)
-    tensor([[ 0.2615,  0.1490, -0.2491,  0.4009, -0.3842]], grad_fn=<EmbeddingBackward>)
-
-The vectors of "The" and "the" differ. If we instead set lower to True when initializing the :class:`~fastNLP.embeddings.StaticEmbedding`, the effect is as follows
-
-.. code-block:: python
-
-    from fastNLP.embeddings import StaticEmbedding
-    from fastNLP import Vocabulary
-
-    vocab = Vocabulary().add_word_lst("The the a A".split())
-    # a randomly initialized StaticEmbedding is used below; the behaviour with pretrained vectors is the same
-    embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=5, lower=True)
-    print(embed(torch.LongTensor([vocab.to_index('The')])))
-    print(embed(torch.LongTensor([vocab.to_index('the')])))
-
-The output is::
-
-    tensor([[-0.2237,  0.6825, -0.3459, -0.1795,  0.7516]], grad_fn=<EmbeddingBackward>)
-    tensor([[-0.2237,  0.6825, -0.3459, -0.1795,  0.7516]], grad_fn=<EmbeddingBackward>)
-
-Now "The" and "the" have the same vector; in fact they reference the same underlying vector. Setting lower to True makes :class:`~fastNLP.embeddings.StaticEmbedding` let all words with the same lowercase form share one vector.
-
-(2) How fastNLP handles the min_freq problem
-
-fastNLP addresses this with a min_freq parameter added to :class:`~fastNLP.embeddings.StaticEmbedding`, as the following example shows
-
-.. code-block:: python
-
-    from fastNLP.embeddings import StaticEmbedding
-    from fastNLP import Vocabulary
-
-    vocab = Vocabulary().add_word_lst("the the the a".split())
-    # a randomly initialized StaticEmbedding is used below; the behaviour with pretrained vectors is the same
-    embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=5, min_freq=2)
-    print(embed(torch.LongTensor([vocab.to_index('the')])))
-    print(embed(torch.LongTensor([vocab.to_index('a')])))
-    print(embed(torch.LongTensor([vocab.unknown_idx])))
-
-The output is::
-
-    tensor([[ 0.0454,  0.3375,  0.6758, -0.2026, -0.4715]], grad_fn=<EmbeddingBackward>)
-    tensor([[-0.7602,  0.0149,  0.2733,  0.3974,  0.7371]], grad_fn=<EmbeddingBackward>)
-    tensor([[-0.7602,  0.0149,  0.2733,  0.3974,  0.7371]], grad_fn=<EmbeddingBackward>)
-
-The last line is the vector of the unknown entry. The representation of a is identical to unknown: since a occurs fewer than 2 times, it is pointed at the unknown representation, while the, whose frequency meets the threshold, keeps a vector of its own.
-
-min_freq also takes lower into account, for example
-
-.. code-block:: python
-
-    from fastNLP.embeddings import StaticEmbedding
-    from fastNLP import Vocabulary
-
-    vocab = Vocabulary().add_word_lst("the the the a A".split())
-    # a randomly initialized StaticEmbedding is used below; the behaviour with pretrained vectors is the same
-    embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=5, min_freq=2, lower=True)
-    print(embed(torch.LongTensor([vocab.to_index('the')])))
-    print(embed(torch.LongTensor([vocab.to_index('a')])))
-    print(embed(torch.LongTensor([vocab.to_index('A')])))
-    print(embed(torch.LongTensor([vocab.unknown_idx])))
-
-The output is::
-
-    tensor([[-0.7453, -0.5542,  0.5039,  0.6195, -0.4723]], grad_fn=<EmbeddingBackward>)  # the
-    tensor([[ 0.0170, -0.0995, -0.5743, -0.2469, -0.2095]], grad_fn=<EmbeddingBackward>)  # a
-    tensor([[ 0.0170, -0.0995, -0.5743, -0.2469, -0.2095]], grad_fn=<EmbeddingBackward>)  # A
-    tensor([[ 0.6707, -0.5786, -0.6967,  0.0111,  0.1209]], grad_fn=<EmbeddingBackward>)  # unk
-
-Now a no longer shares the unknown representation in the last line: with lower=True, a and A both count towards the frequency of a, and A uses a's representation.
-
-
------------------------------------
-Download the code
------------------------------------
-
-.. raw:: html
-
-    Click to download the IPython Notebook file
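-As a closing example, the sketch below combines the two parameters from Part IX with the stacking approach from Part VII: a lowercased, frequency-thresholded word embedding plus a case-preserving character embedding. A minimal sketch; the tiny vocabulary is only for illustration.
-
-.. code-block:: python
-
-    import torch
-    from fastNLP import Vocabulary
-    from fastNLP.embeddings import StaticEmbedding, CNNCharEmbedding, StackEmbedding
-
-    vocab = Vocabulary()
-    vocab.add_word_lst("The the a A demo demo .".split())
-
-    # word vectors: lowercase lookup plus a min_freq threshold, as recommended in Part IX
-    word_embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-6b-50d',
-                                 lower=True, min_freq=2)
-    # character embedding: keeps the original casing and every character shape
-    char_embed = CNNCharEmbedding(vocab, embed_size=64, char_emb_size=50)
-    embed = StackEmbedding([word_embed, char_embed])
-
-    words = torch.LongTensor([[vocab.to_index(w) for w in "The demo .".split()]])
-    print(embed(words).size())  # expected: torch.Size([1, 3, 114]), i.e. 50 + 64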
diff --git a/docs/source/tutorials/tutorial_4_load_dataset.rst b/docs/source/tutorials/tutorial_4_load_dataset.rst deleted file mode 100644 index 4fb69d1b..00000000 --- a/docs/source/tutorials/tutorial_4_load_dataset.rst +++ /dev/null @@ -1,219 +0,0 @@ -======================================= -使用Loader和Pipe加载并处理数据集 -======================================= - -这一部分是关于如何加载数据集的教程 - -教程目录: - - - `Part I: 数据集容器DataBundle`_ - - `Part II: 加载的各种数据集的Loader`_ - - `Part III: 使用Pipe对数据集进行预处理`_ - - `Part IV: fastNLP封装好的Loader和Pipe`_ - - `Part V: 不同格式类型的基础Loader`_ - - -Part I: 数据集容器DataBundle ------------------------------------- - -而由于对于同一个任务,训练集,验证集和测试集会共用同一个词表以及具有相同的目标值,所以在fastNLP中我们使用了 :class:`~fastNLP.io.DataBundle` -来承载同一个任务的多个数据集 :class:`~fastNLP.DataSet` 以及它们的词表 :class:`~fastNLP.Vocabulary` 。下面会有例子介绍 :class:`~fastNLP.io.DataBundle` -的相关使用。 - -:class:`~fastNLP.io.DataBundle` 在fastNLP中主要在各个 :class:`~fastNLP.io.Loader` 和 :class:`~fastNLP.io.Pipe` 中被使用。 -下面我们先介绍一下 :class:`~fastNLP.io.Loader` 和 :class:`~fastNLP.io.Pipe` 。 - -Part II: 加载的各种数据集的Loader -------------------------------------- - -在fastNLP中,所有的 :class:`~fastNLP.io.Loader` 都可以通过其文档判断其支持读取的数据格式,以及读取之后返回的 :class:`~fastNLP.DataSet` 的格式, -例如 :class:`~fastNLP.io.ChnSentiCorpLoader` 。 - - - **download()** 函数:自动将该数据集下载到缓存地址,默认缓存地址为~/.fastNLP/datasets/。由于版权等原因,不是所有的Loader都实现了该方法。该方法会返回下载后文件所处的缓存地址。 - - **_load()** 函数:从一个数据文件中读取数据,返回一个 :class:`~fastNLP.DataSet` 。返回的DataSet的格式可从Loader文档判断。 - - **load()** 函数:从文件或者文件夹中读取数据为 :class:`~fastNLP.DataSet` 并将它们组装成 :class:`~fastNLP.io.DataBundle`。支持接受的参数类型有以下的几种 - - - None, 将尝试读取自动缓存的数据,仅支持提供了自动下载数据的Loader - - 文件夹路径, 默认将尝试在该文件夹下匹配文件名中含有 `train` , `test` , `dev` 的文件,如果有多个文件含有相同的关键字,将无法通过该方式读取 - - dict, 例如{'train':"/path/to/tr.conll", 'dev':"/to/validate.conll", "test":"/to/te.conll"}。 - -.. code-block:: python - - from fastNLP.io import CWSLoader - - loader = CWSLoader(dataset_name='pku') - data_bundle = loader.load() - print(data_bundle) - -输出内容为:: - - In total 3 datasets: - dev has 1831 instances. - train has 17223 instances. - test has 1944 instances. - -这里表示一共有3个数据集。其中: - - - 3个数据集的名称分别为train、dev、test,分别有17223、1831、1944个instance - -也可以取出DataSet,并打印DataSet中的具体内容 - -.. 
code-block:: python - - tr_data = data_bundle.get_dataset('train') - print(tr_data[:2]) - -输出为:: - - +--------------------------------------------------------------------------------------+ - | raw_words | - +--------------------------------------------------------------------------------------+ - | 迈向 充满 希望 的 新 世纪 —— 一九九八年 新年 讲话 ( 附 图片 1 张 ) | - | 中共中央 总书记 、 国家 主席 江 泽民 | - +--------------------------------------------------------------------------------------+ - -Part III: 使用Pipe对数据集进行预处理 ------------------------------------------- -通过 :class:`~fastNLP.io.Loader` 可以将文本数据读入,但并不能直接被神经网络使用,还需要进行一定的预处理。 - -在fastNLP中,我们使用 :class:`~fastNLP.io.Pipe` 的子类作为数据预处理的类, :class:`~fastNLP.io.Loader` 和 :class:`~fastNLP.io.Pipe` 一般具备一一对应的关系,该关系可以从其名称判断, -例如 :class:`~fastNLP.io.CWSLoader` 与 :class:`~fastNLP.io.CWSPipe` 是一一对应的。一般情况下Pipe处理包含以下的几个过程,(1)将raw_words或 -raw_chars进行tokenize以切分成不同的词或字; (2) 再建立词或字的 :class:`~fastNLP.Vocabulary` , 并将词或字转换为index; (3)将target -列建立词表并将target列转为index; - -所有的Pipe都可通过其文档查看该Pipe支持处理的 :class:`~fastNLP.DataSet` 以及返回的 :class:`~fastNLP.io.DataBundle` 中的Vocabulary的情况; -如 :class:`~fastNLP.io.OntoNotesNERPipe` - -各种数据集的Pipe当中,都包含了以下的两个函数: - - - process() 函数:对输入的 :class:`~fastNLP.io.DataBundle` 进行处理, 然后返回处理之后的 :class:`~fastNLP.io.DataBundle` 。process函数的文档中包含了该Pipe支持处理的DataSet的格式。 - - process_from_file() 函数:输入数据集所在文件夹,使用对应的Loader读取数据(所以该函数支持的参数类型是由于其对应的Loader的load函数决定的),然后调用相对应的process函数对数据进行预处理。相当于是把Load和process放在一个函数中执行。 - -接着上面 :class:`~fastNLP.io.CWSLoader` 的例子,我们展示一下 :class:`~fastNLP.io.CWSPipe` 的功能: - -.. code-block:: python - - from fastNLP.io import CWSPipe - - data_bundle = CWSPipe().process(data_bundle) - print(data_bundle) - -输出内容为:: - - In total 3 datasets: - dev has 1831 instances. - train has 17223 instances. - test has 1944 instances. - In total 2 vocabs: - chars has 4777 entries. - target has 4 entries. - -表示一共有3个数据集和2个词表。其中: - - - 3个数据集的名称分别为train、dev、test,分别有17223、1831、1944个instance - - 2个词表分别为chars词表与target词表。其中chars词表为句子文本所构建的词表,一共有4777个不同的字;target词表为目标标签所构建的词表,一共有4种标签。 - -相较于之前CWSLoader读取的DataBundle,新增了两个Vocabulary。 我们可以打印一下处理之后的DataSet - -.. code-block:: python - - tr_data = data_bundle.get_dataset('train') - print(tr_data[:2]) - -输出为:: - - +---------------------------------------------------+------------------------------------+------------------------------------+---------+ - | raw_words | chars | target | seq_len | - +---------------------------------------------------+------------------------------------+------------------------------------+---------+ - | 迈向 充满 希望 的 新 世纪 —— 一九九八年... | [1224, 178, 674, 544, 573, 435,... | [0, 1, 0, 1, 0, 1, 2, 2, 0, 1, ... | 29 | - | 中共中央 总书记 、 国家 主席 江 泽民 | [11, 212, 11, 335, 124, 256, 10... | [0, 3, 3, 1, 0, 3, 1, 2, 0, 1, ... | 15 | - +---------------------------------------------------+------------------------------------+------------------------------------+---------+ - -可以看到有两列为int的field: chars和target。这两列的名称同时也是DataBundle中的Vocabulary的名称。可以通过下列的代码获取并查看Vocabulary的 -信息 - -.. code-block:: python - - vocab = data_bundle.get_vocab('target') - print(vocab) - -输出为:: - - Vocabulary(['B', 'E', 'S', 'M']...) - - -Part IV: fastNLP封装好的Loader和Pipe ------------------------------------------- - -fastNLP封装了多种任务/数据集的 :class:`~fastNLP.io.Loader` 和 :class:`~fastNLP.io.Pipe` 并提供自动下载功能,具体参见文档 -`数据集 `_ - - -Part V: 不同格式类型的基础Loader --------------------------------------------------------- - -除了上面提到的针对具体任务的Loader,我们还提供了CSV格式和JSON格式的Loader - -:class:`~fastNLP.io.loader.CSVLoader` 读取CSV类型的数据集文件。例子如下: - - .. 
code-block:: python - - from fastNLP.io.loader import CSVLoader - data_set_loader = CSVLoader( - headers=('raw_words', 'target'), sep='\t' - ) - # 表示将CSV文件中每一行的第一项将填入'raw_words' field,第二项填入'target' field。 - # 其中项之间由'\t'分割开来 - - data_set = data_set_loader._load('path/to/your/file') - - 文件内容样例如下 :: - - But it does not leave you with much . 1 - You could hate it for the same reason . 1 - The performances are an absolute joy . 4 - - 读取之后的DataSet具有以下的field - - .. csv-table:: - :header: raw_words, target - - "But it does not leave you with much .", "1" - "You could hate it for the same reason .", "1" - "The performances are an absolute joy .", "4" - -:class:`~fastNLP.io.JsonLoader` 读取Json类型的数据集文件,数据必须按行存储,每行是一个包含各类属性的Json对象。例子如下: - - .. code-block:: python - - from fastNLP.io.loader import JsonLoader - loader = JsonLoader( - fields={'sentence1': 'raw_words1', 'sentence2': 'raw_words2', 'gold_label': 'target'} - ) - # 表示将Json对象中'sentence1'、'sentence2'和'gold_label'对应的值赋给'raw_words1'、'raw_words2'、'target'这三个fields - - data_set = loader._load('path/to/your/file') - - 数据集内容样例如下 :: - - {"annotator_labels": ["neutral"], "captionID": "3416050480.jpg#4", "gold_label": "neutral", "pairID": "3416050480.jpg#4r1n", "sentence1": "A person on a horse jumps over a broken down airplane.", "sentence1_binary_parse": "( ( ( A person ) ( on ( a horse ) ) ) ( ( jumps ( over ( a ( broken ( down airplane ) ) ) ) ) . ) )", "sentence1_parse": "(ROOT (S (NP (NP (DT A) (NN person)) (PP (IN on) (NP (DT a) (NN horse)))) (VP (VBZ jumps) (PP (IN over) (NP (DT a) (JJ broken) (JJ down) (NN airplane)))) (. .)))", "sentence2": "A person is training his horse for a competition.", "sentence2_binary_parse": "( ( A person ) ( ( is ( ( training ( his horse ) ) ( for ( a competition ) ) ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (DT A) (NN person)) (VP (VBZ is) (VP (VBG training) (NP (PRP$ his) (NN horse)) (PP (IN for) (NP (DT a) (NN competition))))) (. .)))"} - {"annotator_labels": ["contradiction"], "captionID": "3416050480.jpg#4", "gold_label": "contradiction", "pairID": "3416050480.jpg#4r1c", "sentence1": "A person on a horse jumps over a broken down airplane.", "sentence1_binary_parse": "( ( ( A person ) ( on ( a horse ) ) ) ( ( jumps ( over ( a ( broken ( down airplane ) ) ) ) ) . ) )", "sentence1_parse": "(ROOT (S (NP (NP (DT A) (NN person)) (PP (IN on) (NP (DT a) (NN horse)))) (VP (VBZ jumps) (PP (IN over) (NP (DT a) (JJ broken) (JJ down) (NN airplane)))) (. .)))", "sentence2": "A person is at a diner, ordering an omelette.", "sentence2_binary_parse": "( ( A person ) ( ( ( ( is ( at ( a diner ) ) ) , ) ( ordering ( an omelette ) ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (DT A) (NN person)) (VP (VBZ is) (PP (IN at) (NP (DT a) (NN diner))) (, ,) (S (VP (VBG ordering) (NP (DT an) (NN omelette))))) (. .)))"} - {"annotator_labels": ["entailment"], "captionID": "3416050480.jpg#4", "gold_label": "entailment", "pairID": "3416050480.jpg#4r1e", "sentence1": "A person on a horse jumps over a broken down airplane.", "sentence1_binary_parse": "( ( ( A person ) ( on ( a horse ) ) ) ( ( jumps ( over ( a ( broken ( down airplane ) ) ) ) ) . ) )", "sentence1_parse": "(ROOT (S (NP (NP (DT A) (NN person)) (PP (IN on) (NP (DT a) (NN horse)))) (VP (VBZ jumps) (PP (IN over) (NP (DT a) (JJ broken) (JJ down) (NN airplane)))) (. .)))", "sentence2": "A person is outdoors, on a horse.", "sentence2_binary_parse": "( ( A person ) ( ( ( ( is outdoors ) , ) ( on ( a horse ) ) ) . 
) )", "sentence2_parse": "(ROOT (S (NP (DT A) (NN person)) (VP (VBZ is) (ADVP (RB outdoors)) (, ,) (PP (IN on) (NP (DT a) (NN horse)))) (. .)))"} - - 读取之后的DataSet具有以下的field - - .. csv-table:: - :header: raw_words0, raw_words1, target - - "A person on a horse jumps over a broken down airplane.", "A person is training his horse for a competition.", "neutral" - "A person on a horse jumps over a broken down airplane.", "A person is at a diner, ordering an omelette.", "contradiction" - "A person on a horse jumps over a broken down airplane.", "A person is outdoors, on a horse.", "entailment" - - ----------------------------------- -代码下载 ----------------------------------- - -.. raw:: html - - 点击下载 IPython Notebook 文件
diff --git a/docs/source/tutorials/tutorial_5_loss_optimizer.rst b/docs/source/tutorials/tutorial_5_loss_optimizer.rst deleted file mode 100644 index 846a07a5..00000000 --- a/docs/source/tutorials/tutorial_5_loss_optimizer.rst +++ /dev/null @@ -1,248 +0,0 @@ -============================================================================== -使用Trainer和Tester快速训练和测试 -============================================================================== - -我们使用前面介绍过的 :doc:`/tutorials/文本分类` 任务来进行详细的介绍。这里我们把数据集换成了SST2,使用 :class:`~fastNLP.Trainer` 和 :class:`~fastNLP.Tester` 来进行快速训练和测试。 - -.. note:: - - 本教程中的代码没有使用 GPU 。读者可以自行修改代码,扩大数据量并使用 GPU 进行训练。 - -数据读入和处理 ------------------ - -数据读入 - 我们可以使用 fastNLP :mod:`fastNLP.io` 模块中的 :class:`~fastNLP.io.SST2Pipe` 类,轻松地读取以及预处理SST2数据集。:class:`~fastNLP.io.SST2Pipe` 对象的 - :meth:`~fastNLP.io.SST2Pipe.process_from_file` 方法能够对读入的SST2数据集进行数据的预处理,方法的参数为paths, 指要处理的文件所在目录,如果paths为None,则会自动下载数据集,函数默认paths值为None。 - 此函数返回一个 :class:`~fastNLP.io.DataBundle`,包含SST2数据集的训练集、测试集、验证集以及source端和target端的字典。其训练、测试、验证数据集含有四个 :mod:`~fastNLP.core.field` : - - * raw_words: 原source句子 - * target: 标签值 - * words: index之后的raw_words - * seq_len: 句子长度 - - 读入数据代码如下: - - .. code-block:: python - - from fastNLP.io import SST2Pipe - - pipe = SST2Pipe() - databundle = pipe.process_from_file() - vocab = databundle.get_vocab('words') - print(databundle) - print(databundle.get_dataset('train')[0]) - print(databundle.get_vocab('words')) - - - 输出数据如下:: - - In total 3 datasets: - test has 1821 instances. - train has 67349 instances. - dev has 872 instances. - In total 2 vocabs: - words has 16293 entries. - target has 2 entries. - - +-------------------------------------------+--------+--------------------------------------+---------+ - | raw_words | target | words | seq_len | - +-------------------------------------------+--------+--------------------------------------+---------+ - | hide new secretions from the parental ... | 1 | [4111, 98, 12010, 38, 2, 6844, 9042] | 7 | - +-------------------------------------------+--------+--------------------------------------+---------+ - - Vocabulary(['hide', 'new', 'secretions', 'from', 'the']...) - - 除了可以对数据进行读入的Pipe类,fastNLP还提供了读入和下载数据的Loader类,不同数据集的Pipe和Loader及其用法详见 :doc:`/tutorials/tutorial_4_load_dataset` 。 - -数据集分割 - 由于SST2数据集的测试集并不带有标签数值,故我们分割出一部分训练集作为测试集。下面这段代码展示了 :meth:`~fastNLP.DataSet.split` 的使用方法, - 为了能让读者快速运行完整个教程,我们只取了训练集的前5000个数据。 - - .. code-block:: python - - train_data = databundle.get_dataset('train')[:5000] - train_data, test_data = train_data.split(0.015) - dev_data = databundle.get_dataset('dev') - print(len(train_data),len(dev_data),len(test_data)) - - 输出结果为:: - - 4925 872 75 - -数据集 :meth:`~fastNLP.DataSet.set_input` 和 :meth:`~fastNLP.DataSet.set_target` 函数 - :class:`~fastNLP.io.SST2Pipe` 类的 :meth:`~fastNLP.io.SST2Pipe.process_from_file` 方法在预处理过程中还将训练、测试、验证 - 集的 `words` 、`seq_len` :mod:`~fastNLP.core.field` 设定为input,同时将 `target` :mod:`~fastNLP.core.field` 设定 - 为target。我们可以通过 :class:`~fastNLP.core.Dataset` 类的 :meth:`~fastNLP.core.Dataset.print_field_meta` 方法查看各个 :mod:`~fastNLP.core.field` 的设定情况,代码如下: - - .. 
code-block:: python - - train_data.print_field_meta() - - 输出结果为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_words | target | words | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | False | True | True | - | is_target | False | True | False | False | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - 其中is_input和is_target分别表示是否为input和target。ignore_type为true时指使用 :class:`~fastNLP.DataSetIter` 取出batch数 - 据时fastNLP不会进行自动padding,pad_value指对应 :mod:`~fastNLP.core.field` padding所用的值,这两者只有 - 当 :mod:`~fastNLP.core.field` 设定为input或者target的时候才有存在的意义。 - - is_input为true的 :mod:`~fastNLP.core.field` 在 :class:`~fastNLP.DataSetIter` 迭代取出的batch_x 中,而is_target为true - 的 :mod:`~fastNLP.core.field` 在 :class:`~fastNLP.DataSetIter` 迭代取出的 batch_y 中。 - 具体分析见 :doc:`使用DataSetIter实现自定义训练过程 ` 。 - -使用内置模型训练 ---------------------- -模型定义和初始化 - 我们可以导入 fastNLP 内置的文本分类模型 :class:`~fastNLP.models.CNNText` 来对模型进行定义,代码如下: - - .. code-block:: python - - from fastNLP.models import CNNText - - #词嵌入的维度 - EMBED_DIM = 100 - - #使用CNNText的时候第一个参数输入一个tuple,作为模型定义embedding的参数 - #还可以传入 kernel_nums, kernel_sizes, padding, dropout的自定义值 - model_cnn = CNNText((len(vocab),EMBED_DIM), num_classes=2, dropout=0.1) - - 使用fastNLP快速搭建自己的模型详见 :doc:`/tutorials/tutorial_8_modules_models` 。 - -评价指标 - 训练模型需要提供一个评价指标。这里使用准确率做为评价指标。 - - * ``pred`` 参数对应的是模型的 forward 方法返回的 dict 中的一个 key 的名字。 - * ``target`` 参数对应的是 :class:`~fastNLP.DataSet` 中作为标签的 :mod:`~fastNLP.core.field` 的名字。 - - 这里我们用 :class:`~fastNLP.Const` 来辅助命名,如果你自己编写模型中 forward 方法的返回值或 - 数据集中 :mod:`~fastNLP.core.field` 的名字与本例不同, 你可以把 ``pred`` 参数和 ``target`` 参数设定符合自己代码的值。代码如下: - - .. code-block:: python - - from fastNLP import AccuracyMetric - from fastNLP import Const - - # metrics=AccuracyMetric() 在本例中与下面这行代码等价 - metrics=AccuracyMetric(pred=Const.OUTPUT, target=Const.TARGET) - - -损失函数 - 训练模型需要提供一个损失函数 - ,fastNLP中提供了直接可以导入使用的四种loss,分别为: - - * :class:`~fastNLP.CrossEntropyLoss`:包装了torch.nn.functional.cross_entropy()函数,返回交叉熵损失(可以运用于多分类场景) - * :class:`~fastNLP.BCELoss`:包装了torch.nn.functional.binary_cross_entropy()函数,返回二分类的交叉熵 - * :class:`~fastNLP.L1Loss`:包装了torch.nn.functional.l1_loss()函数,返回L1 损失 - * :class:`~fastNLP.NLLLoss`:包装了torch.nn.functional.nll_loss()函数,返回负对数似然损失 - - 下面提供了一个在分类问题中常用的交叉熵损失。注意它的 **初始化参数** 。 - - * ``pred`` 参数对应的是模型的 forward 方法返回的 dict 中的一个 key 的名字。 - * ``target`` 参数对应的是 :class:`~fastNLP.DataSet` 中作为标签的 :mod:`~fastNLP.core.field` 的名字。 - - 这里我们用 :class:`~fastNLP.Const` 来辅助命名,如果你自己编写模型中 forward 方法的返回值或 - 数据集中 :mod:`~fastNLP.core.field` 的名字与本例不同, 你可以把 ``pred`` 参数和 ``target`` 参数设定符合自己代码的值。 - - .. code-block:: python - - from fastNLP import CrossEntropyLoss - - # loss = CrossEntropyLoss() 在本例中与下面这行代码等价 - loss = CrossEntropyLoss(pred=Const.OUTPUT, target=Const.TARGET) - - 除了使用fastNLP已经包装好的了损失函数,也可以通过fastNLP中的LossFunc类来构建自己的损失函数,方法如下: - - .. code-block:: python - - # 这表示构建了一个损失函数类,由func计算损失函数,其中将从模型返回值或者DataSet的target=True的field - # 当中找到一个参数名为`pred`的参数传入func一个参数名为`input`的参数;找到一个参数名为`label`的参数 - # 传入func作为一个名为`target`的参数 - #下面自己构建了一个交叉熵函数,和之后直接使用fastNLP中的交叉熵函数是一个效果 - import torch - from fastNLP import LossFunc - func = torch.nn.functional.cross_entropy - loss_func = LossFunc(func, input=Const.OUTPUT, target=Const.TARGET) - -优化器 - 定义模型运行的时候使用的优化器,可以直接使用torch.optim.Optimizer中的优化器,并在实例化 :class:`~fastNLP.Trainer` 类的时候传入优化器实参 - - .. 
code-block:: python - - import torch.optim as optim - - #使用 torch.optim 定义优化器 - optimizer=optim.RMSprop(model_cnn.parameters(), lr=0.01, alpha=0.99, eps=1e-08, weight_decay=0, momentum=0, centered=False) - -快速训练 - 现在我们对上面定义的模型使用 :class:`~fastNLP.Trainer` 进行训练。 - 除了使用 :class:`~fastNLP.Trainer`进行训练,我们也可以通过使用 :class:`~fastNLP.DataSetIter` 来编写自己的训练过程,具体见 :doc:`/tutorials/tutorial_6_datasetiter` - - .. code-block:: python - - from fastNLP import Trainer - - #训练的轮数和batch size - N_EPOCHS = 10 - BATCH_SIZE = 16 - - #如果在定义trainer的时候没有传入optimizer参数,模型默认的优化器为torch.optim.Adam且learning rate为lr=4e-3 - #这里只使用了loss作为损失函数输入,感兴趣可以尝试其他损失函数(如之前自定义的loss_func)作为输入 - trainer = Trainer(model=model_cnn, train_data=train_data, dev_data=dev_data, loss=loss, metrics=metrics, - optimizer=optimizer,n_epochs=N_EPOCHS, batch_size=BATCH_SIZE) - trainer.train() - - 训练过程的输出如下:: - - input fields after batch(if batch size is 2): - words: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 13]) - seq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) - target fields after batch(if batch size is 2): - target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) - - training epochs started 2020-02-26-16-45-40 - Evaluate data in 0.5 seconds! - Evaluation on dev at Epoch 1/10. Step:308/3080: - AccuracyMetric: acc=0.677752 - - ...... - - Evaluate data in 0.44 seconds! - Evaluation on dev at Epoch 10/10. Step:3080/3080: - AccuracyMetric: acc=0.725917 - - - In Epoch:5/Step:1540, got best dev performance: - AccuracyMetric: acc=0.740826 - Reloaded the best model. - -快速测试 - 与 :class:`~fastNLP.Trainer` 对应,fastNLP 也提供了 :class:`~fastNLP.Tester` 用于快速测试,用法如下 - - .. code-block:: python - - from fastNLP import Tester - - tester = Tester(test_data, model_cnn, metrics=AccuracyMetric()) - tester.test() - - 训练过程输出如下:: - - Evaluate data in 0.43 seconds! - [tester] - AccuracyMetric: acc=0.773333 - ----------------------------------- -代码下载 ----------------------------------- - -.. raw:: html - - 点击下载 IPython Notebook 文件
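-Note that the loss_func built with LossFunc earlier was defined but never passed to the Trainer. As the text suggests, it can be used in place of the packaged loss; a minimal sketch reusing the model, data and optimizer from above:
-
-.. code-block:: python
-
-    from fastNLP import Trainer
-
-    # identical to the earlier Trainer call, but with the hand-built cross-entropy LossFunc
-    trainer = Trainer(model=model_cnn, train_data=train_data, dev_data=dev_data,
-                      loss=loss_func, metrics=metrics, optimizer=optimizer,
-                      n_epochs=N_EPOCHS, batch_size=BATCH_SIZE)
-    trainer.train()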
diff --git a/docs/source/tutorials/tutorial_6_datasetiter.rst b/docs/source/tutorials/tutorial_6_datasetiter.rst deleted file mode 100644 index eab14301..00000000 --- a/docs/source/tutorials/tutorial_6_datasetiter.rst +++ /dev/null @@ -1,423 +0,0 @@ -============================================================================== -使用DataSetIter实现自定义训练过程 -============================================================================== - -我们使用前面介绍过的 :doc:`/tutorials/文本分类` 任务来进行详细的介绍。这里我们把数据集换成了SST2,使用 :class:`~fastNLP.DataSetIter` 类来编写自己的训练过程。 -DataSetIter初探之前的内容与 :doc:`/tutorials/tutorial_5_loss_optimizer` 中的完全一样,如已经阅读过可以跳过。 - -.. note:: - - 本教程中的代码没有使用 GPU 。读者可以自行修改代码,扩大数据量并使用 GPU 进行训练。 - -数据读入和预处理 --------------------- - -数据读入 - 我们可以使用 fastNLP :mod:`fastNLP.io` 模块中的 :class:`~fastNLP.io.SST2Pipe` 类,轻松地读取以及预处理SST2数据集。:class:`~fastNLP.io.SST2Pipe` 对象的 - :meth:`~fastNLP.io.SST2Pipe.process_from_file` 方法能够对读入的SST2数据集进行数据的预处理,方法的参数为paths, 指要处理的文件所在目录,如果paths为None,则会自动下载数 据集,函数默认paths值为None。 - 此函数返回一个 :class:`~fastNLP.io.DataBundle`,包含SST2数据集的训练集、测试集、验证集以及source端和target端的字典。其训练、测试、验证数据集含有四个 :mod:`~fastNLP.core.field` : - - * raw_words: 原source句子 - * target: 标签值 - * words: index之后的raw_words - * seq_len: 句子长度 - - 读入数据代码如下: - - .. code-block:: python - - from fastNLP.io import SST2Pipe - - pipe = SST2Pipe() - databundle = pipe.process_from_file() - vocab = databundle.get_vocab('words') - print(databundle) - print(databundle.get_dataset('train')[0]) - print(databundle.get_vocab('words')) - - - 输出数据如下:: - - In total 3 datasets: - test has 1821 instances. - train has 67349 instances. - dev has 872 instances. - In total 2 vocabs: - words has 16293 entries. - target has 2 entries. - - +-------------------------------------------+--------+--------------------------------------+---------+ - | raw_words | target | words | seq_len | - +-------------------------------------------+--------+--------------------------------------+---------+ - | hide new secretions from the parental ... | 1 | [4111, 98, 12010, 38, 2, 6844, 9042] | 7 | - +-------------------------------------------+--------+--------------------------------------+---------+ - - Vocabulary(['hide', 'new', 'secretions', 'from', 'the']...) - - 除了可以对数据进行读入的Pipe类,fastNLP还提供了读入和下载数据的Loader类,不同数据集的Pipe和Loader及其用法详见 :doc:`/tutorials/tutorial_4_load_dataset` 。 - -数据集分割 - 由于SST2数据集的测试集并不带有标签数值,故我们分割出一部分训练集作为测试集。下面这段代码展示了 :meth:`~fastNLP.DataSet.split` 的使用方法, - 为了能让读者快速运行完整个教程,我们只取了训练集的前5000个数据。 - - .. code-block:: python - - train_data = databundle.get_dataset('train')[:5000] - train_data, test_data = train_data.split(0.015) - dev_data = databundle.get_dataset('dev') - print(len(train_data),len(dev_data),len(test_data)) - - 输出结果为:: - - 4925 872 75 - -数据集 :meth:`~fastNLP.DataSet.set_input` 和 :meth:`~fastNLP.DataSet.set_target` 函数 - :class:`~fastNLP.io.SST2Pipe` 类的 :meth:`~fastNLP.io.SST2Pipe.process_from_file` 方法在预处理过程中还将训练、测试、验证集 - 的 `words` 、`seq_len` :mod:`~fastNLP.core.field` 设定为input,同时将`target` :mod:`~fastNLP.core.field` 设定为target。 - 我们可以通过 :class:`~fastNLP.core.Dataset` 类的 :meth:`~fastNLP.core.Dataset.print_field_meta` 方法查看各个 - :mod:`~fastNLP.core.field` 的设定情况,代码如下: - - .. 
code-block:: python - - train_data.print_field_meta() - - 输出结果为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_words | target | words | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | False | True | True | - | is_target | False | True | False | False | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - 其中is_input和is_target分别表示是否为input和target。ignore_type为true时指使用 :class:`~fastNLP.DataSetIter` 取出batch数 - 据时fastNLP不会进行自动padding,pad_value指对应 :mod:`~fastNLP.core.field` padding所用的值,这两者只有当 - :mod:`~fastNLP.core.field` 设定为input或者target的时候才有存在的意义。 - - is_input为true的 :mod:`~fastNLP.core.field` 在 :class:`~fastNLP.DataSetIter` 迭代取出的 batch_x 中, - 而 is_target为true的 :mod:`~fastNLP.core.field` 在 :class:`~fastNLP.DataSetIter` 迭代取出的 batch_y 中。 - 具体分析见下面DataSetIter的介绍过程。 - - -评价指标 - 训练模型需要提供一个评价指标。这里使用准确率做为评价指标。 - - * ``pred`` 参数对应的是模型的 forward 方法返回的 dict 中的一个 key 的名字。 - * ``target`` 参数对应的是 :class:`~fastNLP.DataSet` 中作为标签的 :mod:`~fastNLP.core.field` 的名字。 - - 这里我们用 :class:`~fastNLP.Const` 来辅助命名,如果你自己编写模型中 forward 方法的返回值或 - 数据集中 :mod:`~fastNLP.core.field` 的名字与本例不同, 你可以把 ``pred`` 参数和 ``target`` 参数设定符合自己代码的值。代码如下: - - .. code-block:: python - - from fastNLP import AccuracyMetric - from fastNLP import Const - - # metrics=AccuracyMetric() 在本例中与下面这行代码等价 - metrics=AccuracyMetric(pred=Const.OUTPUT, target=Const.TARGET) - - -DataSetIter初探 --------------------------- - -DataSetIter - fastNLP定义的 :class:`~fastNLP.DataSetIter` 类,用于定义一个batch,并实现batch的多种功能,在初始化时传入的参数有: - - * dataset: :class:`~fastNLP.DataSet` 对象, 数据集 - * batch_size: 取出的batch大小 - * sampler: 规定使用的 :class:`~fastNLP.Sampler` 若为 None, 使用 :class:`~fastNLP.RandomSampler` (Default: None) - * as_numpy: 若为 True, 输出batch为 `numpy.array`. 否则为 `torch.Tensor` (Default: False) - * prefetch: 若为 True使用多进程预先取出下一batch. (Default: False) - -sampler - fastNLP 实现的采样器有: - - * :class:`~fastNLP.BucketSampler` 可以随机地取出长度相似的元素 【初始化参数: num_buckets:bucket的数量; batch_size:batch大小; seq_len_field_name:dataset中对应序列长度的 :mod:`~fastNLP.core.field` 的名字】 - * SequentialSampler: 顺序取出元素的采样器【无初始化参数】 - * RandomSampler:随机化取元素的采样器【无初始化参数】 - -Padder - 在fastNLP里,pad是与一个 :mod:`~fastNLP.core.field` 绑定的。即不同的 :mod:`~fastNLP.core.field` 可以使用不同的pad方式,比如在英文任务中word需要的pad和 - character的pad方式往往是不同的。fastNLP是通过一个叫做 :class:`~fastNLP.Padder` 的子类来完成的。 - 默认情况下,所有field使用 :class:`~fastNLP.AutoPadder` - 。大多数情况下直接使用 :class:`~fastNLP.AutoPadder` 就可以了。 - 如果 :class:`~fastNLP.AutoPadder` 或 :class:`~fastNLP.EngChar2DPadder` 无法满足需求, - 也可以自己写一个 :class:`~fastNLP.Padder` 。 - -DataSetIter自动padding - 以下代码展示了DataSetIter的简单使用: - - .. 
code-block:: python - - from fastNLP import BucketSampler - from fastNLP import DataSetIter - - tmp_data = dev_data[:10] - # 定义一个Batch,传入DataSet,规定batch_size和去batch的规则。 - # 顺序(Sequential),随机(Random),相似长度组成一个batch(Bucket) - sampler = BucketSampler(batch_size=2, seq_len_field_name='seq_len') - batch = DataSetIter(batch_size=2, dataset=tmp_data, sampler=sampler) - for batch_x, batch_y in batch: - print("batch_x: ",batch_x) - print("batch_y: ", batch_y) - - 输出结果如下:: - - batch_x: {'words': tensor([[ 13, 830, 7746, 174, 3, 47, 6, 83, 5752, 15, - 2177, 15, 63, 57, 406, 84, 1009, 4973, 27, 17, - 13785, 3, 533, 3687, 15623, 39, 375, 8, 15624, 8, - 1323, 4398, 7], - [ 1045, 11113, 16, 104, 5, 4, 176, 1824, 1704, 3, - 2, 18, 11, 4, 1018, 432, 143, 33, 245, 308, - 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0]]), 'seq_len': tensor([33, 21])} - batch_y: {'target': tensor([1, 0])} - batch_x: {'words': tensor([[ 14, 10, 4, 311, 5, 154, 1418, 609, 7], - [ 14, 10, 437, 32, 78, 3, 78, 437, 7]]), 'seq_len': tensor([9, 9])} - batch_y: {'target': tensor([0, 1])} - batch_x: {'words': tensor([[ 4, 277, 685, 18, 7], - [15618, 3204, 5, 1675, 0]]), 'seq_len': tensor([5, 4])} - batch_y: {'target': tensor([1, 1])} - batch_x: {'words': tensor([[ 2, 155, 3, 4426, 3, 239, 3, 739, 5, 1136, - 41, 43, 2427, 736, 2, 648, 10, 15620, 2285, 7], - [ 24, 95, 28, 46, 8, 336, 38, 239, 8, 2133, - 2, 18, 10, 15622, 1421, 6, 61, 5, 387, 7]]), 'seq_len': tensor([20, 20])} - batch_y: {'target': tensor([0, 0])} - batch_x: {'words': tensor([[ 879, 96, 8, 1026, 12, 8067, 11, 13623, 8, 15619, - 4, 673, 662, 15, 4, 1154, 240, 639, 417, 7], - [ 45, 752, 327, 180, 10, 15621, 16, 72, 8904, 9, - 1217, 7, 0, 0, 0, 0, 0, 0, 0, 0]]), 'seq_len': tensor([20, 12])} - batch_y: {'target': tensor([0, 1])} - - 可以看到那些设定为input的 :mod:`~fastNLP.core.field` 都出现在batch_x中,而设定为target的 :mod:`~fastNLP.core.field` 则出现在batch_y中。同时对于同一个batch_x中的两个数据,长度偏短的那个会被自动padding到和长度偏长的句子长度一致,默认的padding值为0。 - -Dataset改变padding值 - 可以通过 :meth:`~fastNLP.core.Dataset.set_pad_val` 方法修改默认的pad值,代码如下: - - .. 
code-block:: python - - tmp_data.set_pad_val('words',-1) - batch = DataSetIter(batch_size=2, dataset=tmp_data, sampler=sampler) - for batch_x, batch_y in batch: - print("batch_x: ",batch_x) - print("batch_y: ", batch_y) - - 输出结果如下:: - - batch_x: {'words': tensor([[ 13, 830, 7746, 174, 3, 47, 6, 83, 5752, 15, - 2177, 15, 63, 57, 406, 84, 1009, 4973, 27, 17, - 13785, 3, 533, 3687, 15623, 39, 375, 8, 15624, 8, - 1323, 4398, 7], - [ 1045, 11113, 16, 104, 5, 4, 176, 1824, 1704, 3, - 2, 18, 11, 4, 1018, 432, 143, 33, 245, 308, - 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1]]), 'seq_len': tensor([33, 21])} - batch_y: {'target': tensor([1, 0])} - batch_x: {'words': tensor([[ 14, 10, 4, 311, 5, 154, 1418, 609, 7], - [ 14, 10, 437, 32, 78, 3, 78, 437, 7]]), 'seq_len': tensor([9, 9])} - batch_y: {'target': tensor([0, 1])} - batch_x: {'words': tensor([[ 2, 155, 3, 4426, 3, 239, 3, 739, 5, 1136, - 41, 43, 2427, 736, 2, 648, 10, 15620, 2285, 7], - [ 24, 95, 28, 46, 8, 336, 38, 239, 8, 2133, - 2, 18, 10, 15622, 1421, 6, 61, 5, 387, 7]]), 'seq_len': tensor([20, 20])} - batch_y: {'target': tensor([0, 0])} - batch_x: {'words': tensor([[ 4, 277, 685, 18, 7], - [15618, 3204, 5, 1675, -1]]), 'seq_len': tensor([5, 4])} - batch_y: {'target': tensor([1, 1])} - batch_x: {'words': tensor([[ 879, 96, 8, 1026, 12, 8067, 11, 13623, 8, 15619, - 4, 673, 662, 15, 4, 1154, 240, 639, 417, 7], - [ 45, 752, 327, 180, 10, 15621, 16, 72, 8904, 9, - 1217, 7, -1, -1, -1, -1, -1, -1, -1, -1]]), 'seq_len': tensor([20, 12])} - batch_y: {'target': tensor([0, 1])} - - 可以看到使用了-1进行padding。 - -Dataset个性化padding - 如果我们希望对某一些 :mod:`~fastNLP.core.field` 进行个性化padding,可以自己构造Padder类,并使用 :meth:`~fastNLP.core.Dataset.set_padder` 函数修改padder来实现。下面通过构造一个将数据padding到固定长度的padder进行展示: - - .. code-block:: python - - from fastNLP.core.field import Padder - import numpy as np - class FixLengthPadder(Padder): - def __init__(self, pad_val=0, length=None): - super().__init__(pad_val=pad_val) - self.length = length - assert self.length is not None, "Creating FixLengthPadder with no specific length!" - - def __call__(self, contents, field_name, field_ele_dtype, dim): - #计算当前contents中的最大长度 - max_len = max(map(len, contents)) - #如果当前contents中的最大长度大于指定的padder length的话就报错 - assert max_len <= self.length, "Fixed padder length smaller than actual length! 
with length {}".format(max_len) - array = np.full((len(contents), self.length), self.pad_val, dtype=field_ele_dtype) - for i, content_i in enumerate(contents): - array[i, :len(content_i)] = content_i - return array - - #设定FixLengthPadder的固定长度为40 - tmp_padder = FixLengthPadder(pad_val=0,length=40) - #利用dataset的set_padder函数设定words field的padder - tmp_data.set_padder('words',tmp_padder) - batch = DataSetIter(batch_size=2, dataset=tmp_data, sampler=sampler) - for batch_x, batch_y in batch: - print("batch_x: ",batch_x) - print("batch_y: ", batch_y) - - 输出结果如下:: - - batch_x: {'words': tensor([[ 45, 752, 327, 180, 10, 15621, 16, 72, 8904, 9, - 1217, 7, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], - [ 879, 96, 8, 1026, 12, 8067, 11, 13623, 8, 15619, - 4, 673, 662, 15, 4, 1154, 240, 639, 417, 7, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]), 'seq_len': tensor([12, 20])} - batch_y: {'target': tensor([1, 0])} - batch_x: {'words': tensor([[ 13, 830, 7746, 174, 3, 47, 6, 83, 5752, 15, - 2177, 15, 63, 57, 406, 84, 1009, 4973, 27, 17, - 13785, 3, 533, 3687, 15623, 39, 375, 8, 15624, 8, - 1323, 4398, 7, 0, 0, 0, 0, 0, 0, 0], - [ 1045, 11113, 16, 104, 5, 4, 176, 1824, 1704, 3, - 2, 18, 11, 4, 1018, 432, 143, 33, 245, 308, - 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]), 'seq_len': tensor([33, 21])} - batch_y: {'target': tensor([1, 0])} - batch_x: {'words': tensor([[ 14, 10, 4, 311, 5, 154, 1418, 609, 7, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0], - [ 14, 10, 437, 32, 78, 3, 78, 437, 7, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0]]), 'seq_len': tensor([9, 9])} - batch_y: {'target': tensor([0, 1])} - batch_x: {'words': tensor([[ 2, 155, 3, 4426, 3, 239, 3, 739, 5, 1136, - 41, 43, 2427, 736, 2, 648, 10, 15620, 2285, 7, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], - [ 24, 95, 28, 46, 8, 336, 38, 239, 8, 2133, - 2, 18, 10, 15622, 1421, 6, 61, 5, 387, 7, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]), 'seq_len': tensor([20, 20])} - batch_y: {'target': tensor([0, 0])} - batch_x: {'words': tensor([[ 4, 277, 685, 18, 7, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], - [15618, 3204, 5, 1675, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]), 'seq_len': tensor([5, 4])} - batch_y: {'target': tensor([1, 1])} - - 在这里所有的 `words` 都被pad成了长度为40的list。 - - -使用DataSetIter自己编写训练过程 ------------------------------------- - 如果你想用类似 PyTorch 的使用方法,自己编写训练过程,可以参考下面这段代码。 - 其中使用了 fastNLP 提供的 :class:`~fastNLP.DataSetIter` 来获得小批量训练的小批量数据, - 使用 :class:`~fastNLP.BucketSampler` 做为 :class:`~fastNLP.DataSetIter` 的参数来选择采样的方式。 - - 以下代码使用BucketSampler作为 :class:`~fastNLP.DataSetIter` 初始化的输入,运用 :class:`~fastNLP.DataSetIter` 自己写训练程序 - - .. 
code-block:: python - - from fastNLP import BucketSampler - from fastNLP import DataSetIter - from fastNLP.models import CNNText - from fastNLP import Tester - import torch - import time - - embed_dim = 100 - model = CNNText((len(vocab),embed_dim), num_classes=2, dropout=0.1) - - def train(epoch, data, devdata): - optimizer = torch.optim.Adam(model.parameters(), lr=0.001) - lossfunc = torch.nn.CrossEntropyLoss() - batch_size = 32 - - # 定义一个Batch,传入DataSet,规定batch_size和去batch的规则。 - # 顺序(Sequential),随机(Random),相似长度组成一个batch(Bucket) - train_sampler = BucketSampler(batch_size=batch_size, seq_len_field_name='seq_len') - train_batch = DataSetIter(batch_size=batch_size, dataset=data, sampler=train_sampler) - - start_time = time.time() - print("-"*5+"start training"+"-"*5) - for i in range(epoch): - loss_list = [] - for batch_x, batch_y in train_batch: - optimizer.zero_grad() - output = model(batch_x['words']) - loss = lossfunc(output['pred'], batch_y['target']) - loss.backward() - optimizer.step() - loss_list.append(loss.item()) - - #这里verbose如果为0,在调用Tester对象的test()函数时不输出任何信息,返回评估信息; 如果为1,打印出验证结果,返回评估信息 - #在调用过Tester对象的test()函数后,调用其_format_eval_results(res)函数,结构化输出验证结果 - tester_tmp = Tester(devdata, model, metrics=AccuracyMetric(), verbose=0) - res=tester_tmp.test() - - print('Epoch {:d} Avg Loss: {:.2f}'.format(i, sum(loss_list) / len(loss_list)),end=" ") - print(tester_tmp._format_eval_results(res),end=" ") - print('{:d}ms'.format(round((time.time()-start_time)*1000))) - loss_list.clear() - - train(10, train_data, dev_data) - #使用tester进行快速测试 - tester = Tester(test_data, model, metrics=AccuracyMetric()) - tester.test() - - 这段代码的输出如下:: - - -----start training----- - - Evaluate data in 2.68 seconds! - Epoch 0 Avg Loss: 0.66 AccuracyMetric: acc=0.708716 29307ms - - Evaluate data in 0.38 seconds! - Epoch 1 Avg Loss: 0.41 AccuracyMetric: acc=0.770642 52200ms - - Evaluate data in 0.51 seconds! - Epoch 2 Avg Loss: 0.16 AccuracyMetric: acc=0.747706 70268ms - - Evaluate data in 0.96 seconds! - Epoch 3 Avg Loss: 0.06 AccuracyMetric: acc=0.741972 90349ms - - Evaluate data in 1.04 seconds! - Epoch 4 Avg Loss: 0.03 AccuracyMetric: acc=0.740826 114250ms - - Evaluate data in 0.8 seconds! - Epoch 5 Avg Loss: 0.02 AccuracyMetric: acc=0.738532 134742ms - - Evaluate data in 0.65 seconds! - Epoch 6 Avg Loss: 0.01 AccuracyMetric: acc=0.731651 154503ms - - Evaluate data in 0.8 seconds! - Epoch 7 Avg Loss: 0.01 AccuracyMetric: acc=0.738532 175397ms - - Evaluate data in 0.36 seconds! - Epoch 8 Avg Loss: 0.01 AccuracyMetric: acc=0.733945 192384ms - - Evaluate data in 0.84 seconds! - Epoch 9 Avg Loss: 0.01 AccuracyMetric: acc=0.744266 214417ms - - Evaluate data in 0.04 seconds! - [tester] - AccuracyMetric: acc=0.786667 - - - ----------------------------------- -代码下载 ----------------------------------- - -.. raw:: html - - 点击下载 IPython Notebook 文件
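-The sampler list earlier demonstrates only BucketSampler. For completeness, here is a minimal sketch of swapping in the other two samplers; it assumes SequentialSampler and RandomSampler are importable from the fastNLP top-level package, as that list implies.
-
-.. code-block:: python
-
-    from fastNLP import DataSetIter, SequentialSampler, RandomSampler
-
-    # deterministic order: convenient when debugging padding behaviour
-    seq_batch = DataSetIter(dataset=tmp_data, batch_size=2, sampler=SequentialSampler())
-    # shuffled order: the usual choice for training
-    rand_batch = DataSetIter(dataset=tmp_data, batch_size=2, sampler=RandomSampler())
-
-    for batch_x, batch_y in seq_batch:
-        print(batch_x['words'].size(), batch_y['target'])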
diff --git a/docs/source/tutorials/tutorial_7_metrics.rst b/docs/source/tutorials/tutorial_7_metrics.rst deleted file mode 100644 index 5ab09c24..00000000 --- a/docs/source/tutorials/tutorial_7_metrics.rst +++ /dev/null @@ -1,135 +0,0 @@ -=============================== -使用Metric快速评测你的模型 -=============================== - -在进行训练时,fastNLP提供了各种各样的 :mod:`~fastNLP.core.metrics` 。 -如前面的教程中所介绍,:class:`~fastNLP.AccuracyMetric` 类的对象被直接传到 :class:`~fastNLP.Trainer` 中用于训练 - -.. code-block:: python - - trainer = Trainer(train_data=train_data, dev_data=dev_data, model=model, - loss=loss, device=device, metrics=metric) - trainer.train() - -除了 :class:`~fastNLP.AccuracyMetric` 之外,:class:`~fastNLP.SpanFPreRecMetric` 也是一种非常见的评价指标, -例如在序列标注问题中,常以span的方式计算 F-measure, precision, recall。 - -另外,fastNLP 还实现了用于抽取式QA(如SQuAD)的metric :class:`~fastNLP.ExtractiveQAMetric`。 -用户可以参考下面这个表格,点击第一列查看各个 :mod:`~fastNLP.core.metrics` 的详细文档。 - -.. csv-table:: - :header: 名称, 介绍 - - :class:`~fastNLP.core.metrics.MetricBase` , 自定义metrics需继承的基类 - :class:`~fastNLP.core.metrics.AccuracyMetric` , 简单的正确率metric - :class:`~fastNLP.core.metrics.SpanFPreRecMetric` , "同时计算 F-measure, precision, recall 值的 metric" - :class:`~fastNLP.core.metrics.ExtractiveQAMetric` , 用于抽取式QA任务 的metric - -更多的 :mod:`~fastNLP.core.metrics` 正在被添加到 fastNLP 当中,敬请期待。 - ------------------------------- -定义自己的metrics ------------------------------- - -在定义自己的metrics类时需继承 fastNLP 的 :class:`~fastNLP.core.metrics.MetricBase`, -并覆盖写入 ``evaluate`` 和 ``get_metric`` 方法。 - - evaluate(xxx) 中传入一个批次的数据,将针对一个批次的预测结果做评价指标的累计 - - get_metric(xxx) 当所有数据处理完毕时调用该方法,它将根据 evaluate函数累计的评价指标统计量来计算最终的评价结果 - -以分类问题中,accuracy 计算为例,假设 model 的 `forward` 返回 dict 中包含 `pred` 这个 key , 并且该 key 需要用于 accuracy:: - - class Model(nn.Module): - def __init__(xxx): - # do something - def forward(self, xxx): - # do something - return {'pred': pred, 'other_keys':xxx} # pred's shape: batch_size x num_classes - -假设dataset中 `target` 这个 field 是需要预测的值,并且该 field 被设置为了 target 对应的 `AccMetric` 可以按如下的定义( Version 1, 只使用这一次):: - - from fastNLP import MetricBase - - class AccMetric(MetricBase): - - def __init__(self): - super().__init__() - # 根据你的情况自定义指标 - self.total = 0 - self.acc_count = 0 - - # evaluate的参数需要和DataSet 中 field 名以及模型输出的结果 field 名一致,不然找不到对应的value - # pred, target 的参数是 fastNLP 的默认配置 - def evaluate(self, pred, target): - # dev或test时,每个batch结束会调用一次该方法,需要实现如何根据每个batch累加metric - self.total += target.size(0) - self.acc_count += target.eq(pred).sum().item() - - def get_metric(self, reset=True): # 在这里定义如何计算metric - acc = self.acc_count/self.total - if reset: # 是否清零以便重新计算 - self.acc_count = 0 - self.total = 0 - return {'acc': acc} - # 需要返回一个dict,key为该metric的名称,该名称会显示到Trainer的progress bar中 - - -如果需要复用 metric,比如下一次使用 `AccMetric` 时,dataset中目标field不叫 `target` 而叫 `y` ,或者model的输出不是 `pred` (Version 2):: - - class AccMetric(MetricBase): - def __init__(self, pred=None, target=None): - """ - 假设在另一场景使用时,目标field叫y,model给出的key为pred_y。则只需要在初始化AccMetric时, - acc_metric = AccMetric(pred='pred_y', target='y')即可。 - 当初始化为acc_metric = AccMetric() 时,fastNLP会直接使用 'pred', 'target' 作为key去索取对应的的值 - """ - - super().__init__() - - # 如果没有注册该则效果与 Version 1 就是一样的 - self._init_param_map(pred=pred, target=target) # 该方法会注册 pred 和 target . 
仅需要注册evaluate()方法会用到的参数名即可 - - # 根据你的情况自定义指标 - self.total = 0 - self.acc_count = 0 - - # evaluate的参数需要和DataSet 中 field 名以及模型输出的结果 field 名一致,不然找不到对应的value - # pred, target 的参数是 fastNLP 的默认配置 - def evaluate(self, pred, target): - # dev或test时,每个batch结束会调用一次该方法,需要实现如何根据每个batch累加metric - self.total += target.size(0) - self.acc_count += target.eq(pred).sum().item() - - def get_metric(self, reset=True): # 在这里定义如何计算metric - acc = self.acc_count/self.total - if reset: # 是否清零以便重新计算 - self.acc_count = 0 - self.total = 0 - return {'acc': acc} - # 需要返回一个dict,key为该metric的名称,该名称会显示到Trainer的progress bar中 - -``MetricBase`` 将会在输入的字典 ``pred_dict`` 和 ``target_dict`` 中进行检查. -``pred_dict`` 是模型当中 ``forward()`` 函数或者 ``predict()`` 函数的返回值. -``target_dict`` 是DataSet当中的ground truth, 判定ground truth的条件是field的 ``is_target`` 被设置为True. - -``MetricBase`` 会进行以下的类型检测: - -1. self.evaluate当中是否有 varargs, 这是不支持的. -2. self.evaluate当中所需要的参数是否既不在 ``pred_dict`` 也不在 ``target_dict`` . -3. self.evaluate当中所需要的参数是否既在 ``pred_dict`` 也在 ``target_dict`` . - -除此以外,在参数被传入self.evaluate以前,这个函数会检测 ``pred_dict`` 和 ``target_dict`` 当中没有被用到的参数 -如果kwargs是self.evaluate的参数,则不会检测 - -self.evaluate将计算一个批次(batch)的评价指标,并累计。 没有返回值 -self.get_metric将统计当前的评价指标并返回评价结果, 返回值需要是一个dict, key是指标名称,value是指标的值 - - ----------------------------------- -代码下载 ----------------------------------- - -.. raw:: html - - 点击下载 IPython Notebook 文件
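-Once it passes these checks, a custom metric plugs into Trainer or Tester exactly like the built-in ones. A minimal sketch, assuming a model and a DataSet prepared as in the earlier tutorials, and using the Version 2 name remapping:
-
-.. code-block:: python
-
-    from fastNLP import Tester
-
-    # remap only if your field/output names differ; with the default names, AccMetric() suffices
-    metric = AccMetric(pred='pred_y', target='y')
-    tester = Tester(data=dev_data, model=model, metrics=metric, verbose=0)
-    res = tester.test()  # a dict of metric results, e.g. {'AccMetric': {'acc': ...}}
-    print(res)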
diff --git a/docs/source/tutorials/tutorial_8_modules_models.rst b/docs/source/tutorials/tutorial_8_modules_models.rst
deleted file mode 100644
index 226c3be5..00000000
--- a/docs/source/tutorials/tutorial_8_modules_models.rst
+++ /dev/null
@@ -1,193 +0,0 @@
-======================================
-Building custom models quickly with Modules and Models
-======================================
-
-:mod:`~fastNLP.modules` and :mod:`~fastNLP.models` are used to build the neural network models fastNLP needs, and they can be used together with models from torch.nn. The three sections below walk through concrete ways to build a model.
-
-
-Using the models in models
-----------------------
-
-fastNLP ships complete models such as :class:`~fastNLP.models.CNNText` and
-:class:`~fastNLP.models.SeqLabeling` in the :mod:`~fastNLP.models` module, ready for direct use.
-Taking text classification as an example, we import the :class:`~fastNLP.models.CNNText` model from models and train with it.
-
-.. code-block:: python
-
-    from fastNLP.models import CNNText
-
-    model_cnn = CNNText((len(vocab),100), num_classes=2, dropout=0.1)
-
-    trainer = Trainer(train_data=train_data, dev_data=dev_data, metrics=metric,
-                      loss=loss, device=device, model=model_cnn)
-    trainer.train()
-
-Typing `model_cnn` in an IPython session shows the network structure of ``model_cnn``
-
-.. parsed-literal::
-
-    CNNText(
-      (embed): Embedding(
-        (embed): Embedding(16292, 100)
-        (dropout): Dropout(p=0.0, inplace=False)
-      )
-      (conv_pool): ConvMaxpool(
-        (convs): ModuleList(
-          (0): Conv1d(100, 30, kernel_size=(1,), stride=(1,), bias=False)
-          (1): Conv1d(100, 40, kernel_size=(3,), stride=(1,), padding=(1,), bias=False)
-          (2): Conv1d(100, 50, kernel_size=(5,), stride=(1,), padding=(2,), bias=False)
-        )
-      )
-      (dropout): Dropout(p=0.1, inplace=False)
-      (fc): Linear(in_features=120, out_features=2, bias=True)
-    )
-
-The models built into fastNLP are listed in the table below; click a name for its detailed API:
-
-.. csv-table::
-   :header: Name, Description
-
-   :class:`~fastNLP.models.CNNText` , CNN-based text classification model
-   :class:`~fastNLP.models.SeqLabeling` , simple sequence labeling model
-   :class:`~fastNLP.models.AdvSeqLabel` , sequence labeling model with a larger network
-   :class:`~fastNLP.models.ESIM` , implementation of the ESIM model
-   :class:`~fastNLP.models.StarTransEnc` , Star-Transformer model with word embedding
-   :class:`~fastNLP.models.STSeqLabel` , Star-Transformer model for sequence labeling
-   :class:`~fastNLP.models.STNLICls` , Star-Transformer model for natural language inference (NLI)
-   :class:`~fastNLP.models.STSeqCls` , Star-Transformer model for classification tasks
-   :class:`~fastNLP.models.BiaffineParser` , implementation of the Biaffine dependency parsing network
-   :class:`~fastNLP.models.BiLSTMCRF` , sequence labeling with BiLSTM and CRF
-
-
-Writing models with torch.nn
-----------------------------
-
-fastNLP fully supports models written in PyTorch. Unlike the usual way of writing PyTorch models, however, the forward function of a model used with fastNLP must return a dict that contains at least a ``pred`` key.
-
-Below is a text classifier written with PyTorch's torch.nn module; note the tensor shapes annotated in the code. Because PyTorch's conventional dimension layout puts the sequence dimension first, forward has to rearrange dimensions several times
-
-.. 
code-block:: python - - import torch - import torch.nn as nn - - class LSTMText(nn.Module): - def __init__(self, vocab_size, embedding_dim, output_dim, hidden_dim=64, num_layers=2, dropout=0.5): - super().__init__() - - self.embedding = nn.Embedding(vocab_size, embedding_dim) - self.lstm = nn.LSTM(embedding_dim, hidden_dim, num_layers=num_layers, bidirectional=True, dropout=dropout) - self.fc = nn.Linear(hidden_dim * 2, output_dim) - self.dropout = nn.Dropout(dropout) - - def forward(self, words): - # (input) words : (batch_size, seq_len) - words = words.permute(1,0) - # words : (seq_len, batch_size) - - embedded = self.dropout(self.embedding(words)) - # embedded : (seq_len, batch_size, embedding_dim) - output, (hidden, cell) = self.lstm(embedded) - # output: (seq_len, batch_size, hidden_dim * 2) - # hidden: (num_layers * 2, batch_size, hidden_dim) - # cell: (num_layers * 2, batch_size, hidden_dim) - - hidden = torch.cat((hidden[-2, :, :], hidden[-1, :, :]), dim=1) - hidden = self.dropout(hidden) - # hidden: (batch_size, hidden_dim * 2) - - pred = self.fc(hidden) - # pred: (batch_size, output_dim) - return {"pred":pred} - -我们同样可以在 IPython 环境中查看这个模型的网络结构 - -.. parsed-literal:: - - LSTMText( - (embedding): Embedding(16292, 100) - (lstm): LSTM(100, 64, num_layers=2, dropout=0.5, bidirectional=True) - (fc): Linear(in_features=128, out_features=2, bias=True) - (dropout): Dropout(p=0.5, inplace=False) - ) - - -使用 modules 编写模型 ---------------------------- - -下面我们使用 :mod:`fastNLP.modules` 中的组件来构建同样的网络。由于 fastNLP 统一把 ``batch_size`` 放在第一维, -在编写代码的过程中会有一定的便利。 - -.. code-block:: python - - from fastNLP.modules import Embedding, LSTM, MLP - - class MyText(nn.Module): - def __init__(self, vocab_size, embedding_dim, output_dim, hidden_dim=64, num_layers=2, dropout=0.5): - super().__init__() - - self.embedding = Embedding((vocab_size, embedding_dim)) - self.lstm = LSTM(embedding_dim, hidden_dim, num_layers=num_layers, bidirectional=True) - self.mlp = MLP([hidden_dim*2,output_dim], dropout=dropout) - - def forward(self, words): - embedded = self.embedding(words) - _,(hidden,_) = self.lstm(embedded) - pred = self.mlp(torch.cat((hidden[-1],hidden[-2]),dim=1)) - return {"pred":pred} - -我们自己编写模型的网络结构如下 - -.. parsed-literal:: - - MyText( - (embedding): Embedding( - (embed): Embedding(16292, 100) - (dropout): Dropout(p=0.0, inplace=False) - ) - (lstm): LSTM( - (lstm): LSTM(100, 64, num_layers=2, batch_first=True, bidirectional=True) - ) - (mlp): MLP( - (hiddens): ModuleList() - (output): Linear(in_features=128, out_features=2, bias=True) - (dropout): Dropout(p=0.5, inplace=False) - ) - ) - -fastNLP 中包含的各种模块如下表,您可以点击具体的名称查看详细的 API,也可以通过 :doc:`/fastNLP.modules` 进行了解。 - -.. 
csv-table:: - :header: 名称, 介绍 - - :class:`~fastNLP.modules.ConvolutionCharEncoder` , char级别的卷积 encoder - :class:`~fastNLP.modules.LSTMCharEncoder` , char级别基于LSTM的 encoder - :class:`~fastNLP.modules.ConvMaxpool` , 结合了Convolution和Max-Pooling于一体的模块 - :class:`~fastNLP.modules.LSTM` , LSTM模块, 轻量封装了PyTorch的LSTM - :class:`~fastNLP.modules.StarTransformer` , Star-Transformer 的encoder部分 - :class:`~fastNLP.modules.TransformerEncoder` , Transformer的encoder模块,不包含embedding层 - :class:`~fastNLP.modules.VarRNN` , Variational Dropout RNN 模块 - :class:`~fastNLP.modules.VarLSTM` , Variational Dropout LSTM 模块 - :class:`~fastNLP.modules.VarGRU` , Variational Dropout GRU 模块 - :class:`~fastNLP.modules.MaxPool` , Max-pooling模块 - :class:`~fastNLP.modules.MaxPoolWithMask` , 带mask矩阵的max pooling。在做 max-pooling的时候不会考虑mask值为0的位置。 - :class:`~fastNLP.modules.AvgPool` , Average-pooling模块 - :class:`~fastNLP.modules.AvgPoolWithMask` , 带mask矩阵的average pooling。在做 average-pooling的时候不会考虑mask值为0的位置。 - :class:`~fastNLP.modules.MultiHeadAttention` , MultiHead Attention 模块 - :class:`~fastNLP.modules.MLP` , 简单的多层感知器模块 - :class:`~fastNLP.modules.ConditionalRandomField` , 条件随机场模块 - :class:`~fastNLP.modules.viterbi_decode` , 给定一个特征矩阵以及转移分数矩阵,计算出最佳的路径以及对应的分数 (与 :class:`~fastNLP.modules.ConditionalRandomField` 配合使用) - :class:`~fastNLP.modules.allowed_transitions` , 给定一个id到label的映射表,返回所有可以跳转的列表(与 :class:`~fastNLP.modules.ConditionalRandomField` 配合使用) - :class:`~fastNLP.modules.TimestepDropout` , 简单包装过的Dropout 组件 - - ----------------------------------- -代码下载 ----------------------------------- - -.. raw:: html - - 点击下载 IPython Notebook 文件
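补充:上表中的组件都可以脱离完整模型单独使用。以本文中已经用到的 ``MLP`` 为例,下面给出一个最小的使用示意(输入输出尺寸均为假设值,参数含义以 API 文档为准):

.. code-block:: python

    import torch
    from fastNLP.modules import MLP

    mlp = MLP([100, 64, 2], dropout=0.1)  # 依次为输入层、隐藏层、输出层的大小
    x = torch.randn(4, 100)               # 4 个样本,每个样本是 100 维向量
    print(mlp(x).shape)                   # torch.Size([4, 2])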
diff --git a/docs/source/tutorials/tutorial_9_callback.rst b/docs/source/tutorials/tutorial_9_callback.rst deleted file mode 100644 index 5ecdb88d..00000000 --- a/docs/source/tutorials/tutorial_9_callback.rst +++ /dev/null @@ -1,140 +0,0 @@ -=================================================== -使用 Callback 自定义你的训练过程 -=================================================== - -- `什么是Callback`_ -- `使用 Callback`_ -- `fastNLP 中的 Callback`_ -- `自定义 Callback`_ - - -什么是Callback --------------------- - -:class:`~fastNLP.core.callback.Callback` 是与 :class:`~fastNLP.core.trainer.Trainer` 紧密结合的模块,利用 Callback 可以在 :class:`~fastNLP.core.trainer.Trainer` 训练时加入自定义的操作,比如梯度裁剪、学习率调节、测试模型的性能等。定义的 Callback 会在训练的特定阶段被调用。 - -fastNLP 中提供了很多常用的 :class:`~fastNLP.core.callback.Callback` ,开箱即用。 - - -使用 Callback --------------------- - -使用 Callback 很简单:把需要的 callback 放入一个 list,通过参数 ``callbacks`` 传入 Trainer。Trainer 在训练时就会自动执行这些 Callback 指定的操作了。 - - -.. code-block:: python - - from fastNLP import (Callback, EarlyStopCallback, - Trainer, CrossEntropyLoss, AccuracyMetric) - from fastNLP.models import CNNText - import torch.cuda - - # prepare data - def get_data(): - from fastNLP.io import ChnSentiCorpPipe as pipe - data = pipe().process_from_file() - print(data) - data.rename_field('chars', 'words') - train_data = data.get_dataset('train') - dev_data = data.get_dataset('dev') - test_data = data.get_dataset('test') - vocab = data.get_vocab('words') - tgt_vocab = data.get_vocab('target') - return train_data, dev_data, test_data, vocab, tgt_vocab - - # prepare model - train_data, dev_data, _, vocab, tgt_vocab = get_data() - device = 'cuda:0' if torch.cuda.is_available() else 'cpu' - model = CNNText((len(vocab),50), num_classes=len(tgt_vocab)) - - # define callback - callbacks=[EarlyStopCallback(5)] - - # pass callbacks to Trainer - def train_with_callback(cb_list): - trainer = Trainer( - device=device, - n_epochs=3, - model=model, - train_data=train_data, - dev_data=dev_data, - loss=CrossEntropyLoss(), - metrics=AccuracyMetric(), - callbacks=cb_list, - check_code_level=-1 - ) - trainer.train() - - train_with_callback(callbacks) - - - -fastNLP 中的 Callback --------------------- - -fastNLP 中提供了很多常用的 Callback,如梯度裁剪、早停、训练中在验证集上测试、fitlog 记录等。具体 Callback 请参考 :mod:`fastNLP.core.callback` - -.. code-block:: python - - from fastNLP import EarlyStopCallback, GradientClipCallback, EvaluateCallback - callbacks = [ - EarlyStopCallback(5), - GradientClipCallback(clip_value=5, clip_type='value'), - EvaluateCallback(dev_data) - ] - - train_with_callback(callbacks) - -自定义 Callback --------------------- - -这里我们以一个简单的 Callback 作为例子,它的作用是打印每一个 epoch 的平均训练 loss。 - -1. 创建 Callback - - 要自定义 Callback,我们要实现一个类,继承 :class:`~fastNLP.core.callback.Callback` 。这里我们定义 ``MyCallBack`` ,继承 fastNLP.Callback 。 - -2. 指定 Callback 调用的阶段 - - Callback 中所有以 `on_` 开头的类方法会在 Trainer 训练的特定阶段被调用。 如 on_train_begin() 会在训练开始时被调用,on_epoch_end() - 会在每个 epoch 结束时调用。 具体有哪些类方法,参见 :class:`~fastNLP.core.callback.Callback` 文档。这里,MyCallBack 在求得 loss 时通过 on_backward_begin() 记录 - 当前 loss,在每一个 epoch 结束时通过 on_epoch_end() 计算当前 epoch 的平均 loss 并输出。 - -3. 使用 Callback 的属性访问 Trainer 的内部信息 - - 为了方便使用,可以通过 :class:`~fastNLP.core.callback.Callback` 的属性,访问 :class:`~fastNLP.core.trainer.Trainer` 中的对应信息,如 optimizer, epoch, n_epochs,分别对应训练时的优化器、 - 当前 epoch 数和总 epoch 数。 具体可访问的属性,参见 :class:`~fastNLP.core.callback.Callback` 。这里,MyCallBack 为了求平均 loss ,需要知道当前 epoch 的总步 - 数,可以通过 self.step 属性得到当前训练了多少步。 - -.. 
code-block:: python - - from fastNLP import Callback - from fastNLP import logger - - class MyCallBack(Callback): - """Print average loss in each epoch""" - def __init__(self): - super().__init__() - self.total_loss = 0 - self.start_step = 0 - - def on_backward_begin(self, loss): - self.total_loss += loss.item() - - def on_epoch_end(self): - n_steps = self.step - self.start_step - avg_loss = self.total_loss / n_steps - logger.info('Avg loss at epoch %d, %.6f', self.epoch, avg_loss) - self.start_step = self.step - - callbacks = [MyCallBack()] - train_with_callback(callbacks) - - ----------------------------------- -代码下载 ----------------------------------- - -.. raw:: html - - 点击下载 IPython Notebook 文件
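补充:利用同样的机制,还可以在训练中调整超参数。下面是一个在每个 epoch 结束时衰减学习率的示意代码,这里假设 ``self.optimizer`` 暴露的是 PyTorch 优化器(以实际版本为准);实际使用中也可以直接采用内置的 ``LRScheduler`` callback 配合 ``torch.optim.lr_scheduler`` 实现同样的效果。

.. code-block:: python

    from fastNLP import Callback

    class LRDecayCallback(Callback):
        """每个 epoch 结束时把学习率衰减为原来的 decay_rate 倍(示意代码)"""
        def __init__(self, decay_rate=0.9):
            super().__init__()
            self.decay_rate = decay_rate

        def on_epoch_end(self):
            # self.optimizer 是 Callback 提供的属性,对应 Trainer 使用的优化器
            for group in self.optimizer.param_groups:
                group['lr'] *= self.decay_rate

    callbacks = [LRDecayCallback(0.9)]
    train_with_callback(callbacks)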
diff --git a/docs/source/tutorials/序列标注.rst b/docs/source/tutorials/序列标注.rst deleted file mode 100644 index 3fec110d..00000000 --- a/docs/source/tutorials/序列标注.rst +++ /dev/null @@ -1,208 +0,0 @@ -===================== -序列标注 -===================== - -这一部分的内容主要展示如何使用fastNLP实现序列标注(Sequence labeling)任务。您可以使用fastNLP的各个组件快捷、方便地完成序列标注任务,达到出色的效果。 -在阅读这篇教程前,希望您已经熟悉了fastNLP的基础使用,尤其是数据的载入以及模型的构建。通过这个小任务,能让您进一步熟悉fastNLP的使用。 - -.. note:: - - 本教程推荐使用 GPU 进行实验 - -命名实体识别(named entity recognition, NER) ------------------------------------------- - -命名实体识别任务是从文本中抽取出具有特殊意义或者指代性非常强的实体,通常包括人名、地名、机构名和时间等。 -如下面的例子中 - - 我来自复旦大学。 - -其中“复旦大学”就是一个机构名,命名实体识别就是要从中识别出“复旦大学”这四个字是一个整体,且属于机构名这个类别。这个问题在实际处理时会被 -转换为序列标注问题。 - -针对"我来自复旦大学"这句话,我们的预测目标将是[O, O, O, B-ORG, I-ORG, I-ORG, I-ORG],其中O表示out,即不是一个实体,B-ORG是ORG( -organization的缩写)这个类别的开头(Begin),I-ORG是ORG类别的中间(Inside)。 - -在本tutorial中我们将通过fastNLP尝试写出一个能够执行以上任务的模型。 - -载入数据 ------------------------------------------- -fastNLP的数据载入主要是由Loader与Pipe两个基类衔接完成的,您可以通过 :doc:`使用Loader和Pipe处理数据 ` -了解如何使用fastNLP提供的数据加载函数。下面我们以微博命名实体识别任务为例,演示如何用fastNLP完成序列标注任务。 - -.. code-block:: python - - from fastNLP.io import WeiboNERPipe - data_bundle = WeiboNERPipe().process_from_file() - print(data_bundle.get_dataset('train')[:2]) - -打印的数据如下 :: - - +-------------------------------------------------+------------------------------------------+------------------------------------------+---------+ - | raw_chars | target | chars | seq_len | - +-------------------------------------------------+------------------------------------------+------------------------------------------+---------+ - | ['一', '节', '课', '的', '时', '间', '真', '... | [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, ... | [8, 211, 775, 3, 49, 245, 89, 26, 101... | 16 | - | ['回', '复', '支', '持', ',', '赞', '成', '... | [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ... | [116, 480, 127, 109, 2, 446, 134, 2, ... | 59 | - +-------------------------------------------------+------------------------------------------+------------------------------------------+---------+ - - -模型构建 --------------------------------- - -首先选择需要使用的Embedding类型。关于Embedding的相关说明可以参见 :doc:`使用Embedding模块将文本转成向量 ` 。 -在这里我们使用通过word2vec预训练的中文汉字embedding。 - -.. code-block:: python - - from fastNLP.embeddings import StaticEmbedding - - embed = StaticEmbedding(vocab=data_bundle.get_vocab('chars'), model_dir_or_name='cn-char-fastnlp-100d') - -选择好Embedding之后,我们可以使用fastNLP中自带的 :class:`fastNLP.models.BiLSTMCRF` 作为模型。 - -.. code-block:: python - - from fastNLP.models import BiLSTMCRF - - data_bundle.rename_field('chars', 'words') # 这是由于BiLSTMCRF模型的forward函数接受的是words,而不是chars,所以需要把这一列重新命名 - model = BiLSTMCRF(embed=embed, num_classes=len(data_bundle.get_vocab('target')), num_layers=1, hidden_size=200, dropout=0.5, - target_vocab=data_bundle.get_vocab('target')) - -进行训练 --------------------------------- - -下面我们选择用来评估模型的metric,以及训练时用到的优化器。 - -.. code-block:: python - - from fastNLP import SpanFPreRecMetric - from torch.optim import Adam - from fastNLP import LossInForward - - metric = SpanFPreRecMetric(tag_vocab=data_bundle.get_vocab('target')) - optimizer = Adam(model.parameters(), lr=1e-2) - loss = LossInForward() - -使用Trainer进行训练,您可以通过修改 device 的值来选择显卡。 - -.. 
code-block:: python - - from fastNLP import Trainer - import torch - - device = 0 if torch.cuda.is_available() else 'cpu' - trainer = Trainer(data_bundle.get_dataset('train'), model, loss=loss, optimizer=optimizer, - dev_data=data_bundle.get_dataset('dev'), metrics=metric, device=device) - trainer.train() - -训练过程输出为:: - - input fields after batch(if batch size is 2): - target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26]) - seq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) - words: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26]) - target fields after batch(if batch size is 2): - target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26]) - seq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) - - training epochs started 2019-09-25-10-43-09 - Evaluate data in 0.62 seconds! - Evaluation on dev at Epoch 1/10. Step:43/430: - SpanFPreRecMetric: f=0.070352, pre=0.100962, rec=0.053985 - - ... - - Evaluate data in 0.61 seconds! - Evaluation on dev at Epoch 10/10. Step:430/430: - SpanFPreRecMetric: f=0.51223, pre=0.581699, rec=0.457584 - - - In Epoch:7/Step:301, got best dev performance: - SpanFPreRecMetric: f=0.515528, pre=0.65098, rec=0.426735 - Reloaded the best model. - -进行测试 --------------------------------- - -训练结束之后,可以通过 :class:`~fastNLP.Tester` 测试其在测试集上的性能 - -.. code-block:: python - - from fastNLP import Tester - - tester = Tester(data_bundle.get_dataset('test'), model, metrics=metric) - tester.test() - -输出为:: - - [tester] - SpanFPreRecMetric: f=0.482399, pre=0.530086, rec=0.442584 - - -使用更强的Bert做序列标注 --------------------------------- - -在fastNLP中使用Bert完成该任务,您只需要把 :class:`fastNLP.embeddings.StaticEmbedding` 切换为 :class:`fastNLP.embeddings.BertEmbedding` (可修改 device 选择显卡)。 - -.. code-block:: python - - from fastNLP.io import WeiboNERPipe - from fastNLP.models import BiLSTMCRF - - data_bundle = WeiboNERPipe().process_from_file() - data_bundle.rename_field('chars', 'words') - - from fastNLP.embeddings import BertEmbedding - embed = BertEmbedding(vocab=data_bundle.get_vocab('words'), model_dir_or_name='cn') - model = BiLSTMCRF(embed=embed, num_classes=len(data_bundle.get_vocab('target')), num_layers=1, hidden_size=200, dropout=0.5, - target_vocab=data_bundle.get_vocab('target')) - - from fastNLP import SpanFPreRecMetric - from torch.optim import Adam - from fastNLP import LossInForward - metric = SpanFPreRecMetric(tag_vocab=data_bundle.get_vocab('target')) - optimizer = Adam(model.parameters(), lr=2e-5) - loss = LossInForward() - - from fastNLP import Trainer - import torch - device = 0 if torch.cuda.is_available() else 'cpu' - trainer = Trainer(data_bundle.get_dataset('train'), model, loss=loss, optimizer=optimizer, batch_size=12, - dev_data=data_bundle.get_dataset('dev'), metrics=metric, device=device) - trainer.train() - - from fastNLP import Tester - tester = Tester(data_bundle.get_dataset('test'), model, metrics=metric) - tester.test() - -输出为:: - - training epochs started 2019-09-25-07-15-43 - Evaluate data in 2.02 seconds! - Evaluation on dev at Epoch 1/10. Step:113/1130: - SpanFPreRecMetric: f=0.0, pre=0.0, rec=0.0 - - ... - - Evaluate data in 2.17 seconds! - Evaluation on dev at Epoch 10/10. Step:1130/1130: - SpanFPreRecMetric: f=0.647332, pre=0.589852, rec=0.717224 - - In Epoch:6/Step:678, got best dev performance: - SpanFPreRecMetric: f=0.669963, pre=0.645238, rec=0.696658 - Reloaded the best model. - - Evaluate data in 1.82 seconds! 
- [tester] - SpanFPreRecMetric: f=0.641774, pre=0.626424, rec=0.657895 - -可以看出,使用Bert后效果有明显的提升:测试集上的 f 值从 48.2 提升到了 64.1。 - - ----------------------------------- -代码下载 ----------------------------------- - -.. raw:: html - - 点击下载 IPython Notebook 文件
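补充:``SpanFPreRecMetric`` 默认只输出整体的 f、pre、rec。如果想查看每种实体类别各自的指标,可以尝试把 ``only_gross`` 设为 ``False`` (该参数是否可用请以您安装版本的文档为准):

.. code-block:: python

    from fastNLP import SpanFPreRecMetric, Tester

    metric = SpanFPreRecMetric(tag_vocab=data_bundle.get_vocab('target'), only_gross=False)
    tester = Tester(data_bundle.get_dataset('test'), model, metrics=metric)
    tester.test()  # 输出中会额外包含各实体类别各自的 f/pre/rec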
diff --git a/docs/source/tutorials/文本分类.rst b/docs/source/tutorials/文本分类.rst deleted file mode 100644 index 30f6cf4f..00000000 --- a/docs/source/tutorials/文本分类.rst +++ /dev/null @@ -1,542 +0,0 @@ -文本分类 -============================= - -文本分类(Text classification)任务是将一句话或一段话划分到某个具体的类别,比如垃圾邮件识别、文本情绪分类等。这篇教程可以带你从零开始了解 fastNLP 的使用。 - -.. note:: - - 本教程推荐使用 GPU 进行实验 - -.. code-block:: text - - 1, 商务大床房,房间很大,床有2M宽,整体感觉经济实惠不错! - -其中开头的1是指这条评论的标签,表示是正面的情绪。我们将使用到的数据可以通过 `此链接 `_ -下载并解压,当然也可以通过fastNLP自动下载该数据。 - -数据中的内容如下图所示。接下来,我们将用fastNLP在这个数据上训练一个分类网络。 - -.. figure:: ./cn_cls_example.png - :alt: jupyter - -步骤 ----- - -一共有以下的几个步骤: - -1. `读取数据 <#id4>`_ - -2. `预处理数据 <#id5>`_ - -3. `选择预训练词向量 <#id6>`_ - -4. `创建模型 <#id7>`_ - -5. `训练模型 <#id8>`_ - -(1) 读取数据 -~~~~~~~~~~~~~~~~~~~~ - -fastNLP提供多种数据的自动下载与自动加载功能,对于这里要用到的数据,我们可以用 :class:`~fastNLP.io.Loader` 自动下载并加载该数据。 -更多有关Loader的使用可以参考 :mod:`~fastNLP.io.loader` - -.. code-block:: python - - from fastNLP.io import ChnSentiCorpLoader - - loader = ChnSentiCorpLoader() # 初始化一个中文情感分类的loader - data_dir = loader.download() # 这一行代码将自动下载数据到默认的缓存地址, 并将该地址返回 - data_bundle = loader.load(data_dir) # 这一行代码将从{data_dir}处读取数据至DataBundle - - -DataBundle的相关介绍,可以参考 :class:`~fastNLP.io.DataBundle` 。我们可以打印该data\_bundle的基本信息。 - -.. code-block:: python - - print(data_bundle) - - -.. code-block:: text - - In total 3 datasets: - dev has 1200 instances. - train has 9600 instances. - test has 1200 instances. - In total 0 vocabs: - - - -可以看出,该data\_bundle中一共含有三个 :class:`~fastNLP.DataSet` 。通过下面的代码,我们可以查看DataSet的基本情况 - -.. code-block:: python - - print(data_bundle.get_dataset('train')[:2]) # 查看Train集前两个sample - - -.. code-block:: text - - +-----------------------------+--------+ - | raw_chars | target | - +-----------------------------+--------+ - | 选择珠江花园的原因就是方... | 1 | - | 15.4寸笔记本的键盘确实爽... | 1 | - +-----------------------------+--------+ - -(2) 预处理数据 -~~~~~~~~~~~~~~~~~~~~ - -在NLP任务中,预处理一般包括: - -(a) 将一整句话切分成汉字或者词; - -(b) 将文本转换为index - -fastNLP中也提供了多种数据集的处理类,这里我们直接使用fastNLP的ChnSentiCorpPipe。更多关于Pipe的说明可以参考 :mod:`~fastNLP.io.pipe` 。 - -.. code-block:: python - - from fastNLP.io import ChnSentiCorpPipe - - pipe = ChnSentiCorpPipe() - data_bundle = pipe.process(data_bundle) # 所有的Pipe都实现了process()方法,且输入输出都为DataBundle类型 - - print(data_bundle) # 打印data_bundle,查看其变化 - - -.. code-block:: text - - In total 3 datasets: - dev has 1200 instances. - train has 9600 instances. - test has 1200 instances. - In total 2 vocabs: - chars has 4409 entries. - target has 2 entries. - - - -可以看到除了之前已经包含的3个 :class:`~fastNLP.DataSet` ,还新增了两个 :class:`~fastNLP.Vocabulary` 。我们可以打印DataSet中的内容 - -.. code-block:: python - - print(data_bundle.get_dataset('train')[:2]) - - -.. code-block:: text - - +-----------------+--------+-----------------+---------+ - | raw_chars | target | chars | seq_len | - +-----------------+--------+-----------------+---------+ - | 选择珠江花园... | 0 | [338, 464, 1... | 106 | - | 15.4寸笔记本... | 0 | [50, 133, 20... | 56 | - +-----------------+--------+-----------------+---------+ - - -新增了一列为数字列表的chars,以及变为数字的target列。可以看出这两列的名称刚好与data\_bundle中两个Vocabulary的名称是一致的,我们可以打印一下Vocabulary看一下里面的内容。 - -.. code-block:: python - - char_vocab = data_bundle.get_vocab('chars') - print(char_vocab) - - -.. code-block:: text - - Vocabulary(['选', '择', '珠', '江', '花']...) - - -Vocabulary是一个记录着词语与index之间映射关系的类,比如 - -.. 
code-block:: python - - index = char_vocab.to_index('选') - print("'选'的index是{}".format(index)) # 这个值与上面打印出来的第一个instance的chars的第一个index是一致的 - print("index:{}对应的汉字是{}".format(index, char_vocab.to_word(index))) - - -.. code-block:: text - - '选'的index是338 - index:338对应的汉字是选 - - -(3) 选择预训练词向量 -~~~~~~~~~~~~~~~~~~~~ - -由于Word2vec, Glove, Elmo, Bert等预训练模型可以增强模型的性能,所以在训练具体任务前,选择合适的预训练词向量非常重要。 -在fastNLP中我们提供了多种Embedding使得加载这些预训练模型的过程变得更加便捷。 -这里我们先给出一个使用word2vec的中文汉字预训练的示例,之后再给出一个使用Bert的文本分类。 -这里使用的预训练词向量为'cn-fastnlp-100d',fastNLP将自动下载该embedding至本地缓存, -fastNLP支持使用名字指定的Embedding以及相关说明可以参见 :mod:`fastNLP.embeddings` - -.. code-block:: python - - from fastNLP.embeddings import StaticEmbedding - - word2vec_embed = StaticEmbedding(char_vocab, model_dir_or_name='cn-char-fastnlp-100d') - - -.. code-block:: text - - Found 4321 out of 4409 compound in the pre-training embedding. - -(4) 创建模型 -~~~~~~~~~~~~ - -.. code-block:: python - - from torch import nn - from fastNLP.modules import LSTM - import torch - - # 定义模型 - class BiLSTMMaxPoolCls(nn.Module): - def __init__(self, embed, num_classes, hidden_size=400, num_layers=1, dropout=0.3): - super().__init__() - self.embed = embed - - self.lstm = LSTM(self.embed.embedding_dim, hidden_size=hidden_size//2, num_layers=num_layers, - batch_first=True, bidirectional=True) - self.dropout_layer = nn.Dropout(dropout) - self.fc = nn.Linear(hidden_size, num_classes) - - def forward(self, chars, seq_len): # 这里的名称必须和DataSet中相应的field对应,比如之前我们DataSet中有chars,这里就必须为chars - # chars:[batch_size, max_len] - # seq_len: [batch_size, ] - chars = self.embed(chars) - outputs, _ = self.lstm(chars, seq_len) - outputs = self.dropout_layer(outputs) - outputs, _ = torch.max(outputs, dim=1) - outputs = self.fc(outputs) - - return {'pred':outputs} # [batch_size,], 返回值必须是dict类型,且预测值的key建议设为pred - - # 初始化模型 - model = BiLSTMMaxPoolCls(word2vec_embed, len(data_bundle.get_vocab('target'))) - -(5) 训练模型 -~~~~~~~~~~~~ - -fastNLP提供了Trainer对象来组织训练过程,包括完成loss计算(所以在初始化Trainer的时候需要指定loss类型),梯度更新(所以在初始化Trainer的时候需要提供优化器optimizer)以及在验证集上的性能验证(所以在初始化时需要提供一个Metric) - -.. code-block:: python - - from fastNLP import Trainer - from fastNLP import CrossEntropyLoss - from torch.optim import Adam - from fastNLP import AccuracyMetric - - loss = CrossEntropyLoss() - optimizer = Adam(model.parameters(), lr=0.001) - metric = AccuracyMetric() - device = 0 if torch.cuda.is_available() else 'cpu' # 如果有gpu的话在gpu上运行,训练速度会更快 - - trainer = Trainer(train_data=data_bundle.get_dataset('train'), model=model, loss=loss, - optimizer=optimizer, batch_size=32, dev_data=data_bundle.get_dataset('dev'), - metrics=metric, device=device) - trainer.train() # 开始训练,训练完成之后默认会加载在dev上表现最好的模型 - - # 在测试集上测试一下模型的性能 - from fastNLP import Tester - print("Performance on test is:") - tester = Tester(data=data_bundle.get_dataset('test'), model=model, metrics=metric, batch_size=64, device=device) - tester.test() - - -.. code-block:: text - - input fields after batch(if batch size is 2): - target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) - chars: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 106]) - seq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) - target fields after batch(if batch size is 2): - target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) - seq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) - - Evaluate data in 0.01 seconds! - training epochs started 2019-09-03-23-57-10 - - Evaluate data in 0.43 seconds! 
- Evaluation on dev at Epoch 1/10. Step:300/3000: - AccuracyMetric: acc=0.81 - - Evaluate data in 0.44 seconds! - Evaluation on dev at Epoch 2/10. Step:600/3000: - AccuracyMetric: acc=0.8675 - - Evaluate data in 0.44 seconds! - Evaluation on dev at Epoch 3/10. Step:900/3000: - AccuracyMetric: acc=0.878333 - - .... - - Evaluate data in 0.48 seconds! - Evaluation on dev at Epoch 9/10. Step:2700/3000: - AccuracyMetric: acc=0.8875 - - Evaluate data in 0.43 seconds! - Evaluation on dev at Epoch 10/10. Step:3000/3000: - AccuracyMetric: acc=0.895833 - - In Epoch:7/Step:2100, got best dev performance: - AccuracyMetric: acc=0.8975 - Reloaded the best model. - - Evaluate data in 0.34 seconds! - [tester] - AccuracyMetric: acc=0.8975 - - {'AccuracyMetric': {'acc': 0.8975}} - - - -PS: 使用Bert进行文本分类 -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. code-block:: python - - # 只需要切换一下Embedding即可 - from fastNLP.embeddings import BertEmbedding - - # 这里为了演示一下效果,所以默认Bert不更新权重 - bert_embed = BertEmbedding(char_vocab, model_dir_or_name='cn', auto_truncate=True, requires_grad=False) - model = BiLSTMMaxPoolCls(bert_embed, len(data_bundle.get_vocab('target'))) - - - import torch - from fastNLP import Trainer - from fastNLP import CrossEntropyLoss - from torch.optim import Adam - from fastNLP import AccuracyMetric - - loss = CrossEntropyLoss() - optimizer = Adam(model.parameters(), lr=2e-5) - metric = AccuracyMetric() - device = 0 if torch.cuda.is_available() else 'cpu' # 如果有gpu的话在gpu上运行,训练速度会更快 - - trainer = Trainer(train_data=data_bundle.get_dataset('train'), model=model, loss=loss, - optimizer=optimizer, batch_size=16, dev_data=data_bundle.get_dataset('test'), - metrics=metric, device=device, n_epochs=3) - trainer.train() # 开始训练,训练完成之后默认会加载在dev上表现最好的模型 - - # 在测试集上测试一下模型的性能 - from fastNLP import Tester - print("Performance on test is:") - tester = Tester(data=data_bundle.get_dataset('test'), model=model, metrics=metric, batch_size=64, device=device) - tester.test() - - -.. code-block:: text - - loading vocabulary file ~/.fastNLP/embedding/bert-chinese-wwm/vocab.txt - Load pre-trained BERT parameters from file ~/.fastNLP/embedding/bert-chinese-wwm/chinese_wwm_pytorch.bin. - Start to generating word pieces for word. - Found(Or segment into word pieces) 4286 words out of 4409. - input fields after batch(if batch size is 2): - target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) - chars: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 106]) - seq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) - target fields after batch(if batch size is 2): - target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) - seq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) - - Evaluate data in 0.05 seconds! - training epochs started 2019-09-04-00-02-37 - - Evaluate data in 15.89 seconds! - Evaluation on dev at Epoch 1/3. Step:1200/3600: - AccuracyMetric: acc=0.9 - - Evaluate data in 15.92 seconds! - Evaluation on dev at Epoch 2/3. Step:2400/3600: - AccuracyMetric: acc=0.904167 - - Evaluate data in 15.91 seconds! - Evaluation on dev at Epoch 3/3. Step:3600/3600: - AccuracyMetric: acc=0.918333 - - In Epoch:3/Step:3600, got best dev performance: - AccuracyMetric: acc=0.918333 - Reloaded the best model. - Performance on test is: - - Evaluate data in 29.24 seconds! 
- [tester] - AccuracyMetric: acc=0.919167 - - {'AccuracyMetric': {'acc': 0.919167}} - - -PS: 基于词进行文本分类 -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -由于汉字中没有显式的字与字的边界,一般需要通过分词器先将句子进行分词操作。 -下面的例子演示了如何不基于fastNLP已有的数据读取、预处理代码进行文本分类。 - -(1) 读取数据 -~~~~~~~~~~~~~~~~~~~~ - -这里我们继续以之前的数据为例,但这次我们不使用fastNLP自带的数据读取代码 - -.. code-block:: python - - from fastNLP.io import ChnSentiCorpLoader - - loader = ChnSentiCorpLoader() # 初始化一个中文情感分类的loader - data_dir = loader.download() # 这一行代码将自动下载数据到默认的缓存地址, 并将该地址返回 - -获取到的data_dir下应该有类似以下的文件 - -.. code-block:: text - - - chn_senti_corp - - train.tsv - - dev.tsv - - test.tsv - -如果打开任何一个文件查看,会发现里面的格式均为 - -.. code-block:: text - - target raw_chars - 1 这个宾馆比较陈旧了,特价的房间也很一般。总体来说一般 - 0 怀着十分激动的心情放映... - -下面我们先定义一个read_file_to_dataset的函数, 即给定一个文件路径,读取其中的内容,并返回一个DataSet。然后我们将所有的DataSet放入到DataBundle对象中来方便接下来的预处理 - -.. code-block:: python - - import os - from fastNLP import DataSet, Instance - from fastNLP.io import DataBundle - - - def read_file_to_dataset(fp): - ds = DataSet() - with open(fp, 'r') as f: - f.readline() # 第一行是title名称,忽略掉 - for line in f: - line = line.strip() - target, chars = line.split('\t') - ins = Instance(target=target, raw_chars=chars) - ds.append(ins) - return ds - - data_bundle = DataBundle() - for name in ['train.tsv', 'dev.tsv', 'test.tsv']: - fp = os.path.join(data_dir, name) - ds = read_file_to_dataset(fp) - data_bundle.set_dataset(name=name.split('.')[0], dataset=ds) - - print(data_bundle) # 查看一下数据集的情况 - # In total 3 datasets: - # train has 9600 instances. - # dev has 1200 instances. - # test has 1200 instances. - -(2) 数据预处理 -~~~~~~~~~~~~~~~~~~~~ - -在这里,我们首先把句子通过 fastHan_ 进行分词操作,然后创建词表,并将词语转换为序号。 - -.. _fastHan: https://gitee.com/fastnlp/fastHan - -.. code-block:: python - - from fastHan import FastHan - from fastNLP import Vocabulary - - model=FastHan() - # model.set_device('cuda') # 取消注释这一行可以使用GPU加快分词速度 - - # 定义分词处理操作 - def word_seg(ins): - raw_chars = ins['raw_chars'] - # 由于有些句子比较长,我们只截取前128个汉字 - raw_words = model(raw_chars[:128], target='CWS')[0] - return raw_words - - for name, ds in data_bundle.iter_datasets(): - # apply函数将对内部的instance依次执行word_seg操作,并把其返回值放入到raw_words这个field - ds.apply(word_seg, new_field_name='raw_words') - # 除了apply函数,fastNLP还支持apply_field, apply_more(可同时创建多个field)等操作 - # 同时我们增加一个seq_len的field - ds.add_seq_len('raw_words') - - vocab = Vocabulary() - - # 对raw_words列创建词表, 建议把非训练集的dataset放在no_create_entry_dataset参数中 - # 也可以通过add_word(), add_word_lst()等建立词表,请参考http://www.fastnlp.top/docs/fastNLP/tutorials/tutorial_2_vocabulary.html - vocab.from_dataset(data_bundle.get_dataset('train'), field_name='raw_words', - no_create_entry_dataset=[data_bundle.get_dataset('dev'), - data_bundle.get_dataset('test')]) - - # 用建立好词表的Vocabulary将raw_words列转换为序号,并把转为序号的列存入到words列 - vocab.index_dataset(data_bundle.get_dataset('train'), data_bundle.get_dataset('dev'), - data_bundle.get_dataset('test'), field_name='raw_words', new_field_name='words') - - # 建立target的词表,target的词表一般不需要padding和unknown - target_vocab = Vocabulary(padding=None, unknown=None) - # 一般情况下我们可以只用训练集建立target的词表 - target_vocab.from_dataset(data_bundle.get_dataset('train'), field_name='target') - # 如果没有传递new_field_name, 则默认覆盖原field - target_vocab.index_dataset(data_bundle.get_dataset('train'), data_bundle.get_dataset('dev'), - data_bundle.get_dataset('test'), field_name='target') - - # 我们可以把词表保存到data_bundle中,方便之后使用 - data_bundle.set_vocab(field_name='words', vocab=vocab) - data_bundle.set_vocab(field_name='target', vocab=target_vocab) - - # 我们把words和target分别设置为input和target,这样它们才会在训练循环中被取出并自动padding, 
有关这部分更多的内容参考 - # http://www.fastnlp.top/docs/fastNLP/tutorials/tutorial_6_datasetiter.html - data_bundle.set_target('target') - data_bundle.set_input('words') # DataSet也有这两个接口 - # 如果您希望某些field被设置为target或者input,但不希望fastNLP自动padding,或需要使用特定的padding方式,请参考 - # http://www.fastnlp.top/docs/fastNLP/fastNLP.core.dataset.html - - print(data_bundle.get_dataset('train')[:2]) # 我们可以看一下当前dataset的内容 - - # +--------+-----------------------+-----------------------+----------------------+ - # | target | raw_chars | raw_words | words | - # +--------+-----------------------+-----------------------+----------------------+ - # | 0 | 选择珠江花园的原因... | ['选择', '珠江', ... | [2, 3, 4, 5, 6, 7... | - # | 0 | 15.4寸笔记本的键盘... | ['15.4', '寸', '笔... | [71, 72, 73, 74, ... | - # +--------+-----------------------+-----------------------+----------------------+ - - # 由于之后需要使用之前定义的BiLSTMMaxPoolCls模型,所以需要将words这个field修改为chars - data_bundle.rename_field('words', 'chars') - -我们可以打印一下vocab,看一下当前词表的内容 - -.. code-block:: python - - print(data_bundle.get_vocab('chars')) - # Vocabulary([选择, 珠江, 花园, 的, 原因]...) - -(3) 选择预训练词向量 -~~~~~~~~~~~~~~~~~~~~ - -这里我们选择腾讯的预训练中文词向量,可以在 腾讯词向量_ 处下载并解压。这里我们不能直接使用BERT,因为BERT是基于中文字符进行预训练的。 - -.. _腾讯词向量: https://ai.tencent.com/ailab/nlp/en/embedding.html - -下面我们使用 :mod:`fastNLP.embeddings` 加载该词向量,fastNLP会抽取vocabulary中包含的词的向量,并随机初始化不包含在文件中的词语的词向量。 - -.. code-block:: python - - from fastNLP.embeddings import StaticEmbedding - - word2vec_embed = StaticEmbedding(data_bundle.get_vocab('chars'), model_dir_or_name='/path/to/Tencent_AILab_ChineseEmbedding.txt') - -之后的模型定义与训练过程与上面是一致的,这里就不再赘述了。 - - ----------------------------------- -代码下载 ----------------------------------- - -.. raw:: html - - 点击下载 IPython Notebook 文件
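补充:训练完成后,可以用与训练时相同的预处理流程对新句子进行预测。下面是一段示意代码,其中 seg_model 指上文的 fastHan 分词模型、clf 指训练好的 BiLSTMMaxPoolCls 模型(这些变量名为演示而设,并非固定 API):

.. code-block:: python

    import torch

    def predict_sentiment(raw_text, seg_model, vocab, target_vocab, clf):
        raw_words = seg_model(raw_text[:128], target='CWS')[0]  # 与训练时一致的分词方式
        indices = [vocab.to_index(w) for w in raw_words]        # 词转序号
        chars = torch.LongTensor([indices])                     # batch_size 为 1
        seq_len = torch.LongTensor([len(indices)])
        clf.eval()
        with torch.no_grad():
            pred = clf(chars, seq_len)['pred'].argmax(dim=-1).item()
        return target_vocab.to_word(pred)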
- diff --git a/docs/source/user/api_update.rst b/docs/source/user/api_update.rst deleted file mode 100644 index 08a6bdbe..00000000 --- a/docs/source/user/api_update.rst +++ /dev/null @@ -1,15 +0,0 @@ -=========================== -API变动列表 -=========================== - -2020.4.14 -======================== - -修改了 :class:`fastNLP.core.callback.ControlC` 的 API。 - -原来的参数 ``quit_all`` 修改为 ``quit_and_do`` ,仍然接收一个 bool 值。新增可选参数 ``action`` ,接收一个待执行的函数, -在 ``quit_and_do`` 的值为 ``True`` 时,退出训练过程后执行该函数。 ``action`` 的默认值是退出整个程序,与原有功能一致。 - -.. note:: - 原有用法 `ControlC(True)` 和 `ControlC(False)` 均可以继续正确执行,但 `ControlC(quit_all=True/False)` 需要修改为 - `ControlC(quit_and_do=True/False)` 。 \ No newline at end of file diff --git a/docs/source/user/example.rst b/docs/source/user/example.rst deleted file mode 100644 index 63535058..00000000 --- a/docs/source/user/example.rst +++ /dev/null @@ -1,162 +0,0 @@ -====== -大标题 -====== - -.. note:: - 中文标题需要符号的数量至少是中文字数的两倍 - -.. warning:: - 符号的数量只可以多,不可以少。 - -小标题1 -########### - -小标题2 -********* - -小标题3(正常使用) -======================== - -小标题4 -------------------- - -推荐使用大标题、小标题3和小标题4 - -官方文档 http://docutils.sourceforge.net/docs/user/rst/quickref.html - -`熟悉markdown的同学推荐参考这篇文章 `_ - -\<\>内表示的是链接地址,\<\>外的是显示到外面的文字 - -常见语法 -============ - -*emphasis* - -**strong** - -`text` - -``inline literal`` - -http://docutils.sf.net/ 孤立的网址会自动生成链接 - -显示为特定的文字的链接 `sohu `_ - -突出显示的 - 上面文字 - -正常缩进 - - 形成锻炼 - - - -特殊模块 -============ - -选项会自动识别 - --v An option --o file Same with value ---delta A long option ---delta=len Same with value - - -图片 - -.. image:: ../figures/procedures.PNG - :height: 200 - :width: 560 - :scale: 50 - :alt: alternate text - :align: center - -显示一个冒号的代码块:: - - 中间要空一行 - -:: - - 不显示冒号的代码块 - -.. code-block:: python - - :linenos: - :emphasize-lines: 1,3 - - print("专业的代码块") - print("") - print("有行号和高亮") - -数学块 -========== - -.. math:: - - H_2O + Na = NaOH + H_2 \uparrow - -复杂表格 -========== - -+------------------------+------------+----------+----------+ -| Header row, column 1 | Header 2 | Header 3 | Header 4 | -| (header rows optional) | | | | -+========================+============+==========+==========+ -| body row 1, column 1 | column 2 | column 3 | column 4 | -+------------------------+------------+----------+----------+ -| body row 2 | Cells may span columns. | -+------------------------+------------+---------------------+ -| body row 3 | Cells may | - Table cells | -+------------------------+ span rows. | - contain | -| body row 4 | | - body elements. | -+------------------------+------------+---------------------+ - -简易表格 -========== - -===== ===== ====== - Inputs Output ------------- ------ - A B A or B -===== ===== ====== -False False False -True True True -===== ===== ====== - -csv 表格 -============ - -.. csv-table:: - :header: sentence, target - - This is the first instance ., 0 - Second instance ., 1 - Third instance ., 1 - ..., ... - - - -[重要]各种链接 -=================== - -各种链接帮助我们连接到fastNLP文档的各个位置 - -\<\>内表示的是链接地址,\<\>外的是显示到外面的文字 - -:doc:`根据文件名链接 ` - -:mod:`~fastNLP.core.batch` - -:class:`~fastNLP.Batch` - -~表示只显示最后一项 - -:meth:`fastNLP.DataSet.apply` - -下面这个代码是不可行的,必须要用 r""" 才行: - -.. code:: - - :param float beta: f_beta分数, :math:`f_{beta} = \frac{(1 + {beta}^{2})*(pre*rec)}{({beta}^{2}*pre + rec)}` . 
常用为 `beta=0.5, 1, 2` 。若为0.5,则精确率的权重高于召回率;若为1,则两者平等;若为2,则召回率权重高于精确率。 - diff --git a/docs/source/user/installation.rst b/docs/source/user/installation.rst deleted file mode 100644 index b4156f6a..00000000 --- a/docs/source/user/installation.rst +++ /dev/null @@ -1,24 +0,0 @@ -=============== -安装指南 -=============== - -.. contents:: - :local: - -fastNLP 依赖如下包:: - - numpy>=1.14.2 - torch>=1.0.0 - tqdm>=4.28.1 - nltk>=3.4.1 - requests - spacy - prettytable>=0.7.2 - -其中torch的安装可能与操作系统及 CUDA 的版本相关,请参见 `PyTorch 官网 `_ 。 -在依赖包安装完成的情况下,您可以在命令行执行如下指令完成安装 - -.. code:: shell - - >>> pip install fastNLP - >>> python -m spacy download en diff --git a/docs/source/user/quickstart.rst b/docs/source/user/quickstart.rst deleted file mode 100644 index 40039af6..00000000 --- a/docs/source/user/quickstart.rst +++ /dev/null @@ -1,14 +0,0 @@ -=============== -快速入门 -=============== - -如果你想用 fastNLP 来快速地解决某类 NLP 问题,你可以参考以下教程之一: - -.. toctree:: - :maxdepth: 1 - - /tutorials/文本分类 - /tutorials/序列标注 - -这些教程简单地介绍了 fastNLP 的使用流程,其中文本分类相对简单,序列标注则较为复杂。更多的教程参见 :doc:`/user/tutorials` - diff --git a/docs/source/user/tutorials.rst b/docs/source/user/tutorials.rst deleted file mode 100644 index 7296ea72..00000000 --- a/docs/source/user/tutorials.rst +++ /dev/null @@ -1,25 +0,0 @@ -======================== -fastNLP 详细使用教程 -======================== - -这里是更详细的使用教程。对于大部分的用户,我们建议你从第一篇开始顺序阅读;如果你只想了解其中的一部分,也可以进行选读。 - -.. toctree:: - :maxdepth: 1 - - 使用DataSet预处理文本 - 使用Vocabulary转换文本与index - 使用Embedding模块将文本转成向量 - 使用Loader和Pipe加载并处理数据集 - 使用Trainer和Tester快速训练和测试 - 使用DataSetIter实现自定义训练过程 - 使用Metric快速评测你的模型 - 使用Modules和Models快速搭建自定义模型 - 使用Callback自定义你的训练过程 - -.. toctree:: - :maxdepth: 1 - - 拓展阅读1:BertEmbedding的各种用法 - 拓展阅读2:分布式训练简介 - 拓展阅读3:使用fitlog 辅助 fastNLP 进行科研 diff --git a/fastNLP/__init__.py b/fastNLP/__init__.py deleted file mode 100644 index efc46888..00000000 --- a/fastNLP/__init__.py +++ /dev/null @@ -1,98 +0,0 @@ -r""" -fastNLP 由 :mod:`~fastNLP.core` 、 :mod:`~fastNLP.io` 、:mod:`~fastNLP.embeddings` 、 :mod:`~fastNLP.modules`、 -:mod:`~fastNLP.models` 等子模块组成,你可以查看每个模块的文档。 - -- :mod:`~fastNLP.core` 是fastNLP 的核心模块,包括 DataSet、 Trainer、 Tester 等组件。详见文档 :mod:`fastNLP.core` -- :mod:`~fastNLP.io` 是实现输入输出的模块,包括了数据集的读取,模型的存取等功能。详见文档 :mod:`fastNLP.io` -- :mod:`~fastNLP.embeddings` 提供用于构建复杂网络模型所需的各种embedding。详见文档 :mod:`fastNLP.embeddings` -- :mod:`~fastNLP.modules` 包含了用于搭建神经网络模型的诸多组件,可以帮助用户快速搭建自己所需的网络。详见文档 :mod:`fastNLP.modules` -- :mod:`~fastNLP.models` 包含了一些使用 fastNLP 实现的完整网络模型,包括 :class:`~fastNLP.models.CNNText` 、 :class:`~fastNLP.models.SeqLabeling` 等常见模型。详见文档 :mod:`fastNLP.models` - -fastNLP 中最常用的组件可以直接从 fastNLP 包中 import ,它们的文档如下: -""" -__all__ = [ - "Instance", - "FieldArray", - - "DataSetIter", - "BatchIter", - "TorchLoaderIter", - - "Vocabulary", - "DataSet", - "Const", - - "Trainer", - "Tester", - - "DistTrainer", - "get_local_rank", - - "Callback", - "GradientClipCallback", - "EarlyStopCallback", - "FitlogCallback", - "EvaluateCallback", - "LRScheduler", - "ControlC", - "LRFinder", - "TensorboardCallback", - "WarmupCallback", - 'SaveModelCallback', - "CallbackException", - "EarlyStopError", - "CheckPointCallback", - - "Padder", - "AutoPadder", - "EngChar2DPadder", - - # "CollateFn", - "ConcatCollateFn", - - "MetricBase", - "AccuracyMetric", - "SpanFPreRecMetric", - "CMRC2018Metric", - "ClassifyFPreRecMetric", - "ConfusionMatrixMetric", - - "Optimizer", - "SGD", - "Adam", - "AdamW", - - "Sampler", - "SequentialSampler", - "BucketSampler", - "RandomSampler", - "SortedSampler", - "ConstantTokenNumSampler", - - "LossFunc", - 
"CrossEntropyLoss", - "MSELoss", - "L1Loss", - "BCELoss", - "NLLLoss", - "LossInForward", - "LossBase", - "CMRC2018Loss", - - "cache_results", - - 'logger', - "init_logger_dist", -] -__version__ = '0.6.0' - -import sys - -from . import embeddings -from . import models -from . import modules -from .core import * -from .doc_utils import doc_process -from .io import loader, pipe - -doc_process(sys.modules[__name__]) diff --git a/fastNLP/core/__init__.py b/fastNLP/core/__init__.py deleted file mode 100644 index d2963d13..00000000 --- a/fastNLP/core/__init__.py +++ /dev/null @@ -1,112 +0,0 @@ -r""" -core 模块里实现了 fastNLP 的核心框架,常用的功能都可以从 fastNLP 包中直接 import。当然你也同样可以从 core 模块的子模块中 import, -例如 :class:`~fastNLP.DataSetIter` 组件有两种 import 的方式:: - - # 直接从 fastNLP 中 import - from fastNLP import DataSetIter - - # 从 core 模块的子模块 batch 中 import DataSetIter - from fastNLP.core.batch import DataSetIter - -对于常用的功能,你只需要在 :mod:`fastNLP` 中查看即可。如果想了解各个子模块的具体作用,您可以在下面找到每个子模块的具体文档。 - -""" -__all__ = [ - "DataSet", - - "Instance", - - "FieldArray", - "Padder", - "AutoPadder", - "EngChar2DPadder", - - "ConcatCollateFn", - - "Vocabulary", - - "DataSetIter", - "BatchIter", - "TorchLoaderIter", - - "Const", - - "Tester", - "Trainer", - - "DistTrainer", - "get_local_rank", - - "cache_results", - "seq_len_to_mask", - "get_seq_len", - "logger", - "init_logger_dist", - - "Callback", - "GradientClipCallback", - "EarlyStopCallback", - "FitlogCallback", - "EvaluateCallback", - "LRScheduler", - "ControlC", - "LRFinder", - "TensorboardCallback", - "WarmupCallback", - 'SaveModelCallback', - "CallbackException", - "EarlyStopError", - "CheckPointCallback", - - "LossFunc", - "CrossEntropyLoss", - "L1Loss", - "BCELoss", - "BCEWithLogits", - "NLLLoss", - "LossInForward", - "CMRC2018Loss", - "MSELoss", - "LossBase", - - "MetricBase", - "AccuracyMetric", - "SpanFPreRecMetric", - "CMRC2018Metric", - "ClassifyFPreRecMetric", - "ConfusionMatrixMetric", - - "Optimizer", - "SGD", - "Adam", - "AdamW", - - "SequentialSampler", - "BucketSampler", - "RandomSampler", - "Sampler", - "SortedSampler", - "ConstantTokenNumSampler" -] - -from ._logger import logger, init_logger_dist -from .batch import DataSetIter, BatchIter, TorchLoaderIter -from .callback import Callback, GradientClipCallback, EarlyStopCallback, FitlogCallback, EvaluateCallback, \ - LRScheduler, ControlC, LRFinder, TensorboardCallback, WarmupCallback, SaveModelCallback, CallbackException, \ - EarlyStopError, CheckPointCallback -from .const import Const -from .dataset import DataSet -from .field import FieldArray, Padder, AutoPadder, EngChar2DPadder -from .instance import Instance -from .losses import LossFunc, CrossEntropyLoss, L1Loss, BCELoss, NLLLoss, \ - LossInForward, CMRC2018Loss, LossBase, MSELoss, BCEWithLogits -from .metrics import AccuracyMetric, SpanFPreRecMetric, CMRC2018Metric, ClassifyFPreRecMetric, MetricBase,\ - ConfusionMatrixMetric -from .optimizer import Optimizer, SGD, Adam, AdamW -from .sampler import SequentialSampler, BucketSampler, RandomSampler, Sampler, SortedSampler, ConstantTokenNumSampler -from .tester import Tester -from .trainer import Trainer -from .utils import cache_results, seq_len_to_mask, get_seq_len -from .vocabulary import Vocabulary -from .collate_fn import ConcatCollateFn -from .dist_trainer import DistTrainer, get_local_rank diff --git a/fastNLP/core/_logger.py b/fastNLP/core/_logger.py deleted file mode 100644 index 9051f700..00000000 --- a/fastNLP/core/_logger.py +++ /dev/null @@ -1,179 +0,0 @@ -r""" -Logger 
是fastNLP中记录日志的模块,logger封装了logging模块的Logger, -具体使用方式与直接使用logging.Logger相同,同时也新增一些简单好用的API -使用方式: -from fastNLP import logger -# -# logger 可以和 logging.Logger 一样使用 -logger.info('your msg') -logger.error('your msg') - -# logger 新增的API -# 将日志输出到文件,以及输出的日志等级 -logger.add_file('/path/to/log', level='INFO') -# 定义在命令行中的显示格式和日志等级 -logger.set_stdout('tqdm', level='WARN') - -""" - -__all__ = [ - 'logger', - 'init_logger_dist' -] - -import logging -import logging.config -import os -import sys -import warnings -from torch import distributed as dist - -ROOT_NAME = 'fastNLP' - -try: - import fitlog -except ImportError: - fitlog = None -try: - from tqdm.auto import tqdm -except ImportError: - tqdm = None - -if tqdm is not None: - class TqdmLoggingHandler(logging.Handler): - def __init__(self, level=logging.INFO): - super().__init__(level) - - def emit(self, record): - try: - msg = self.format(record) - tqdm.write(msg) - self.flush() - except (KeyboardInterrupt, SystemExit): - raise - except: - self.handleError(record) -else: - class TqdmLoggingHandler(logging.StreamHandler): - def __init__(self, level=logging.INFO): - super().__init__(sys.stdout) - self.setLevel(level) - - -def _get_level(level): - if isinstance(level, int): - pass - else: - level = level.lower() - level = {'info': logging.INFO, 'debug': logging.DEBUG, - 'warn': logging.WARN, 'warning': logging.WARN, - 'error': logging.ERROR}[level] - return level - - -def _add_file_handler(logger, path, level='INFO'): - for h in logger.handlers: - if isinstance(h, logging.FileHandler): - if os.path.abspath(path) == h.baseFilename: - # file path already added - return - - # File Handler - if os.path.exists(path): - assert os.path.isfile(path) - warnings.warn('log already exists in {}'.format(path)) - dirname = os.path.abspath(os.path.dirname(path)) - os.makedirs(dirname, exist_ok=True) - - file_handler = logging.FileHandler(path, mode='a') - file_handler.setLevel(_get_level(level)) - file_formatter = logging.Formatter(fmt='%(asctime)s - %(module)s - [%(levelname)s] - %(message)s', - datefmt='%Y/%m/%d %H:%M:%S') - file_handler.setFormatter(file_formatter) - logger.addHandler(file_handler) - - -def _set_stdout_handler(logger, stdout='tqdm', level='INFO'): - level = _get_level(level) - if stdout not in ['none', 'plain', 'tqdm']: - raise ValueError('stdout must in one of {}'.format(['none', 'plain', 'tqdm'])) - # make sure to initialize logger only once - stream_handler = None - for i, h in enumerate(logger.handlers): - if isinstance(h, (logging.StreamHandler, TqdmLoggingHandler)): - stream_handler = h - break - if stream_handler is not None: - logger.removeHandler(stream_handler) - - # Stream Handler - if stdout == 'plain': - stream_handler = logging.StreamHandler(sys.stdout) - elif stdout == 'tqdm': - stream_handler = TqdmLoggingHandler(level) - else: - stream_handler = None - - if stream_handler is not None: - stream_formatter = logging.Formatter('%(message)s') - stream_handler.setLevel(level) - stream_handler.setFormatter(stream_formatter) - logger.addHandler(stream_handler) - - -class FastNLPLogger(logging.getLoggerClass()): - def __init__(self, name): - super().__init__(name) - - def add_file(self, path='./log.txt', level='INFO'): - r"""add log output file and the output level""" - _add_file_handler(self, path, level) - - def set_stdout(self, stdout='tqdm', level='INFO'): - r"""set stdout format and the output level""" - _set_stdout_handler(self, stdout, level) - - -logging.setLoggerClass(FastNLPLogger) - - -# print(logging.getLoggerClass()) -# 
print(logging.getLogger()) - -def _init_logger(path=None, stdout='tqdm', level='INFO'): - r"""initialize logger""" - level = _get_level(level) - - # logger = logging.getLogger() - logger = logging.getLogger(ROOT_NAME) - logger.propagate = False - logger.setLevel(1) # make the logger the lowest level - - _set_stdout_handler(logger, stdout, level) - - # File Handler - if path is not None: - _add_file_handler(logger, path, level) - - return logger - - -def _get_logger(name=None, level='INFO'): - level = _get_level(level) - if name is None: - name = ROOT_NAME - assert isinstance(name, str) - if not name.startswith(ROOT_NAME): - name = '{}.{}'.format(ROOT_NAME, name) - logger = logging.getLogger(name) - logger.setLevel(level) - return logger - - -logger = _init_logger(path=None, level='INFO') - - -def init_logger_dist(): - global logger - rank = dist.get_rank() - logger.setLevel(logging.INFO if rank == 0 else logging.WARNING) diff --git a/fastNLP/core/_parallel_utils.py b/fastNLP/core/_parallel_utils.py deleted file mode 100644 index bcfd3b59..00000000 --- a/fastNLP/core/_parallel_utils.py +++ /dev/null @@ -1,107 +0,0 @@ -r"""undocumented""" - -__all__ = [] - -import threading - -import torch -from torch import nn -from torch.nn.parallel.parallel_apply import get_a_var -from torch.nn.parallel.replicate import replicate -from torch.nn.parallel.scatter_gather import scatter_kwargs, gather - - -def parallel_apply(modules, func_name, inputs, kwargs_tup=None, devices=None): - r"""Applies each `module` in :attr:`modules` in parallel on arguments - contained in :attr:`inputs` (positional) and :attr:`kwargs_tup` (keyword) - on each of :attr:`devices`. - - :attr:`modules`, :attr:`inputs`, :attr:`kwargs_tup` (if given), and - :attr:`devices` (if given) should all have same length. Moreover, each - element of :attr:`inputs` can either be a single object as the only argument - to a module, or a collection of positional arguments. 
- """ - assert len(modules) == len(inputs) - if kwargs_tup is not None: - assert len(modules) == len(kwargs_tup) - else: - kwargs_tup = ({},) * len(modules) - if devices is not None: - assert len(modules) == len(devices) - else: - devices = [None] * len(modules) - - lock = threading.Lock() - results = {} - grad_enabled = torch.is_grad_enabled() - - def _worker(i, module, input, kwargs, device=None): - torch.set_grad_enabled(grad_enabled) - if device is None: - device = get_a_var(input).get_device() - try: - with torch.cuda.device(device): - # this also avoids accidental slicing of `input` if it is a Tensor - if not isinstance(input, (list, tuple)): - input = (input,) - output = getattr(module, func_name)(*input, **kwargs) - with lock: - results[i] = output - except Exception as e: - with lock: - results[i] = e - - if len(modules) > 1: - threads = [threading.Thread(target=_worker, - args=(i, module, input, kwargs, device)) - for i, (module, input, kwargs, device) in - enumerate(zip(modules, inputs, kwargs_tup, devices))] - - for thread in threads: - thread.start() - for thread in threads: - thread.join() - else: - _worker(0, modules[0], inputs[0], kwargs_tup[0], devices[0]) - - outputs = [] - for i in range(len(inputs)): - output = results[i] - if isinstance(output, Exception): - raise output - outputs.append(output) - return outputs - - -def _data_parallel_wrapper(func_name, device_ids, output_device): - r""" - 这个函数是用于对需要多卡执行的函数的wrapper函数。参考的nn.DataParallel的forward函数 - - :param str, func_name: 对network中的这个函数进行多卡运行 - :param device_ids: nn.DataParallel中的device_ids - :param output_device: nn.DataParallel中的output_device - :return: - """ - - def wrapper(network, *inputs, **kwargs): - inputs, kwargs = scatter_kwargs(inputs, kwargs, device_ids, dim=0) - if len(device_ids) == 1: - return getattr(network, func_name)(*inputs[0], **kwargs[0]) - replicas = replicate(network, device_ids[:len(inputs)]) - outputs = parallel_apply(replicas, func_name, inputs, kwargs, device_ids[:len(replicas)]) - return gather(outputs, output_device) - - return wrapper - - -def _model_contains_inner_module(model): - r""" - - :param nn.Module model: 模型文件,判断是否内部包含model.module, 多用于check模型是否是nn.DataParallel, - nn.parallel.DistributedDataParallel。主要是在做形参匹配的时候需要使用最内部的model的function。 - :return: bool - """ - if isinstance(model, nn.Module): - if isinstance(model, (nn.DataParallel, nn.parallel.DistributedDataParallel)): - return True - return False diff --git a/fastNLP/core/batch.py b/fastNLP/core/batch.py deleted file mode 100644 index 94942f09..00000000 --- a/fastNLP/core/batch.py +++ /dev/null @@ -1,465 +0,0 @@ -r""" -batch 模块实现了 fastNLP 所需的 :class:`~fastNLP.core.batch.DataSetIter` 类。 - -""" -__all__ = [ - "BatchIter", - "DataSetIter", - "TorchLoaderIter", -] - -import atexit -import abc - -from numbers import Number -import numpy as np -import torch -import torch.utils.data -from collections import defaultdict - -from .dataset import DataSet -from .sampler import SequentialSampler, Sampler -from ._logger import logger - - -_python_is_exit = False - - -def _set_python_is_exit(): - global _python_is_exit - _python_is_exit = True - - -atexit.register(_set_python_is_exit) - - -def _pad(batch_dict, dataset, as_numpy): - result = {} - for n, vlist in batch_dict.items(): - f = dataset.field_arrays[n] - if f.padder is None: - result[n] = np.array(vlist) - else: - res = f.pad(vlist) - if not as_numpy: - res, _ = _to_tensor(res, field_dtype=f.dtype) - result[n] = res - - return result - - -class DataSetGetter: - r""" - 
传递给torch.utils.data.DataLoader获取数据,DataLoader会传入int的idx获取数据(调用这里的__getitem__()函数)。 - """ - def __init__(self, dataset: DataSet, as_numpy=False): - self.dataset = dataset - self.as_numpy = as_numpy - self.idx_list = list(range(len(dataset))) - - self.x_names = {n for n, f in dataset.get_all_fields().items() if f.is_input} - self.y_names = {n for n, f in dataset.get_all_fields().items() if f.is_target} - - def __getitem__(self, idx: int): - # mapping idx to sampled idx - idx = self.idx_list[idx] - ins = self.dataset[idx] - return idx, ins - - def __len__(self): - return len(self.dataset) - - def collate_fn(self, ins_list: list): - r""" - - :param list ins_list: [(idx1, ins1), (idx2, ins2), ...],即 __getitem__ 返回值组成的list - :return: - """ - indices = [] - sin_x, sin_y = defaultdict(list), defaultdict(list) - # 收集需要关注的field的数据 - for idx, ins in ins_list: - indices.append(idx) - for n, v in ins.items(): - if n in self.x_names: - sin_x[n].append(v) - if n in self.y_names: - sin_y[n].append(v) - # 根据情况,进行pad - sin_x = _pad(sin_x, dataset=self.dataset, as_numpy=self.as_numpy) - sin_y = _pad(sin_y, dataset=self.dataset, as_numpy=self.as_numpy) - - if not self.dataset.collater.is_empty(): - bx, by = self.dataset._collate_batch(ins_list) - sin_x.update(bx) - sin_y.update(by) - - return indices, sin_x, sin_y - - def __getattr__(self, item): - if hasattr(self.dataset, item): - return getattr(self.dataset, item) - else: - raise AttributeError("'DataSetGetter' object has no attribute '{}'".format(item)) - - -class SamplerAdapter(torch.utils.data.Sampler): - r""" - 用于传入torch.utils.data.DataLoader中,DataLoader会调用__iter__()方法获取index(一次只取一个int) - - """ - def __init__(self, sampler, dataset): - super().__init__(dataset) - self.sampler = sampler - self.dataset = dataset - - def __len__(self): - return len(self.dataset) - - def __iter__(self): - return iter(self.sampler(self.dataset)) - - -class BatchIter: - r""" - Trainer用于迭代数据的类。继承该类,并实现get_num_batches(), get_batch_indices(), num_batches(), __iter__()方法以及dataset属性。 - - """ - def __init__(self, dataset, batch_size=1, sampler=None, - num_workers=0, pin_memory=False, drop_last=False, - timeout=0, worker_init_fn=None, collate_fn=None, - batch_sampler=None): - if isinstance(sampler, Sampler): # 如果是fastNLP的sampler,需要adapt一下 - sampler = SamplerAdapter(sampler=sampler or SequentialSampler(), dataset=dataset) - self.sampler = sampler - self.batch_sampler = batch_sampler - - # DataLoader的collate_fn输入是List[],里面的元素是dataset[index]返回的结果 - if collate_fn is None: - # pytorch <= 1.1 中不能设置collate_fn=None - self.dataiter = torch.utils.data.DataLoader( - dataset=dataset, batch_size=batch_size, sampler=self.sampler, - num_workers=num_workers, - pin_memory=pin_memory, drop_last=drop_last, - timeout=timeout, worker_init_fn=worker_init_fn, - batch_sampler=batch_sampler) - else: - self.dataiter = torch.utils.data.DataLoader( - dataset=dataset, batch_size=batch_size, sampler=self.sampler, - collate_fn=collate_fn, num_workers=num_workers, - pin_memory=pin_memory, drop_last=drop_last, - timeout=timeout, worker_init_fn=worker_init_fn, - batch_sampler=batch_sampler) - - # 以sampler的数量为准,因为DistributedSampler的时候每个进程上并不是所有的数据都用上了 - if self.batch_sampler is None: - self._num_batches = self.get_num_batches(len(self.dataiter.sampler), batch_size, drop_last) - else: - self._num_batches = len(self.batch_sampler) - self.batch_size = batch_size - self.cur_batch_indices = None - - @property - def num_batches(self): - return self._num_batches - - @num_batches.setter - def num_batches(self, value): - 
self._num_batches = value - - def init_iter(self): - pass - - @staticmethod - def get_num_batches(num_samples, batch_size, drop_last): - r""" - 计算batch的数量。用于前端显示进度 - - :param int num_samples: - :param int batch_size: - :param bool drop_last: 如果最后一个batch没有batch_size这么多,是否就丢掉。 - :return: - """ - num_batches = num_samples // batch_size - if not drop_last and (num_samples % batch_size > 0): - num_batches += 1 - return num_batches - - def get_batch_indices(self): - r""" - 获取最近输出的batch的index。用于溯源当前batch的数据 - - :return: - """ - return self.cur_batch_indices - - def __len__(self): - return self.num_batches - - @property - def dataset(self): - r""" - 获取正在参与iterate的dataset - - :return: - """ - return self.dataiter.dataset - - @abc.abstractmethod - def __iter__(self): - r""" - 用于实际数据循环的方法,返回值需要为两个dict, 第一个dict中的内容会认为是input, 第二个dict中的内容会认为是target - - :return: - """ - raise NotImplementedError - - -class DataSetIter(BatchIter): - r""" - DataSetIter 用于从 `DataSet` 中按一定的顺序, 依次按 ``batch_size`` 的大小将数据取出,通过使用DataSetIter,可以不需要考虑 - 输入的padding(由DataSet中每列的Padder决定)以及不需要考虑将数据转为tensor。 - 组成 `x` 和 `y`:: - - batch = DataSetIter(data_set, batch_size=16, sampler=SequentialSampler()) - num_batch = len(batch) - for batch_x, batch_y in batch: - # do stuff ... - - """ - def __init__(self, dataset, batch_size=1, sampler=None, as_numpy=False, num_workers=0, pin_memory=False, - drop_last=False, timeout=0, worker_init_fn=None, batch_sampler=None): - r""" - - :param dataset: :class:`~fastNLP.DataSet` 对象, 数据集 - :param int batch_size: 取出的batch大小 - :param sampler: 规定使用的 :class:`~fastNLP.Sampler` 方式. 若为 ``None`` , 使用 :class:`~fastNLP.SequentialSampler`. - - Default: ``None`` - :param bool as_numpy: 若为 ``True`` , 输出batch为 numpy.array. 否则为 :class:`torch.Tensor`. - - Default: ``False`` - :param int num_workers: 使用多少个进程来预处理数据 - :param bool pin_memory: 是否将产生的tensor使用pin memory, 可能会加快速度。 - :param bool drop_last: 如果最后一个batch没有batch_size这么多sample,就扔掉最后一个 - :param timeout: 生成一个batch的timeout值 - :param worker_init_fn: 在每个worker启动时调用该函数,会传入一个值,该值是worker的index。 - :param batch_sampler: 当每次batch取出的数据数量不一致时,可以使用该sampler。batch_sampler每次iter应该输出一个list的index。 - 当batch_sampler不为None时,参数batch_size, sampler, drop_last会被忽略。 - """ - assert isinstance(dataset, DataSet) - dataset = DataSetGetter(dataset, as_numpy) - collate_fn = dataset.collate_fn - if batch_sampler is not None: - batch_size = 1 - sampler = None - drop_last = False - super().__init__( - dataset=dataset, batch_size=batch_size, sampler=sampler, - num_workers=num_workers, pin_memory=pin_memory, - drop_last=drop_last, timeout=timeout, worker_init_fn=worker_init_fn, - collate_fn=collate_fn, batch_sampler=batch_sampler - ) - - def __iter__(self): - self.init_iter() - for indices, batch_x, batch_y in self.dataiter: - self.cur_batch_indices = indices - yield batch_x, batch_y - - -class TorchLoaderIter(BatchIter): - r""" - 与DataSetIter类似,但可以用于非fastNLP的数据容器对象,以及可以实现完全自定义的生成batch的方式,然后与Trainer,Tester可以实现 - 与DataSetIter一样的对接。 - 需要保证传入的数据容器实现了以下的方法 - - Example:: - - import random - from fastNLP import TorchLoaderIter - import torch - class UdfDataSet: - def __init__(self, num_samples): - self.num_samples = num_samples - - def __getitem__(self, idx): # 必须实现的方法,输入参数是一个int,范围为[0, len(self)) - x = [random.random() for _ in range(3)] - y = random.random() - return x,y - - def __len__(self): # 需要实现该方法,返回值需要是一个int - return self.num_samples - - # 需要实现collate_fn将数据转换为tensor - def collate_fn(data_list): - # [(x1,y1), (x2,y2), ...], 这里的输入实际上是将UdfDataSet的__getitem__的返回值组合为list - xs, ys = [], [] - for l in data_list: 
- x, y = l - xs.append(x) - ys.append(y) - # 不需要转移到gpu,Trainer或Tester会将其转移到model所在的device - x,y = torch.FloatTensor(xs), torch.FloatTensor(ys) - return {'x':x, 'y':y}, {'y':y} # 第一个dict中内容类似于DataSet中的input列,第二个dict的内容类似于target列 - - udf_dataset = UdfDataSet(10) - dataset = TorchLoaderIter(udf_dataset, collate_fn=collate_fn) - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(3, 1) - def forward(self, x, y): - return {'loss':torch.pow(self.fc(x).squeeze(-1)-y, 2).sum()} - def predict(self, x): - return {'pred':self.fc(x).squeeze(0)} - model = Model() - trainer = Trainer(train_data=dataset, model=model, loss=None, print_every=2, dev_data=dataset, - metrics=AccuracyMetric(target='y'), use_tqdm=False) - trainer.train(load_best_model=False) - - 除此之外,还可以通过该方法实现OnTheFly的训练,如下面的代码所示 - - Example:: - - import tempfile - import random - import torch - tmp_file_handler, tmp_file_path = tempfile.mkstemp(text=True) - try: - num_samples, data = 10, [] - for _ in range(num_samples): - x, y = [random.random() for _ in range(3)], random.random() - data.append(x + [y]) - with open(tmp_file_path, 'w') as f: - for d in data: - f.write(' '.join(map(str, d)) + '\n') - - class FileDataSet: - def __init__(self, tmp_file): - num_samples = 0 - line_pos = [0] # 对应idx是某一行对应的位置 - self.tmp_file_handler = open(tmp_file, 'r', encoding='utf-8') - line = self.tmp_file_handler.readline() - while line: - if line.strip(): - num_samples += 1 - line_pos.append(self.tmp_file_handler.tell()) - line = self.tmp_file_handler.readline() - self.tmp_file_handler.seek(0) - self.num_samples = num_samples - self.line_pos = line_pos - - def __getitem__(self, idx): - line_start, line_end = self.line_pos[idx], self.line_pos[idx + 1] - self.tmp_file_handler.seek(line_start) - line = self.tmp_file_handler.read(line_end - line_start).strip() - values = list(map(float, line.split())) - x, y = values[:3], values[-1] - return x, y - - def __len__(self): - return self.num_samples - - def collate_fn(data_list): - # [(x1,y1), (x2,y2), ...], 这里的输入实际上是将UdfDataSet的__getitem__输入结合为list - xs, ys = [], [] - for l in data_list: - x, y = l - xs.append(x) - ys.append(y) - x, y = torch.FloatTensor(xs), torch.FloatTensor(ys) - return {'x': x, 'y': y}, {'y': y} # 第一个dict中内容类似于DataSet中的input列,第二个dict的内容类似于target列 - - file_data = FileDataSet(tmp_file_path) - dataset = TorchLoaderIter(file_data, collate_fn=collate_fn) - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(3, 1) - - def forward(self, x, y): - return {'loss': torch.pow(self.fc(x).squeeze(-1) - y, 2).sum()} - - def predict(self, x): - return {'pred': self.fc(x).squeeze(0)} - - model = Model() - trainer = Trainer(train_data=dataset, model=model, loss=None, print_every=2, dev_data=dataset, - metrics=AccuracyMetric(target='y'), use_tqdm=False, n_epochs=2) - trainer.train(load_best_model=False) - - finally: - import os - if os.path.exists(tmp_file_path): - os.remove(tmp_file_path) - - """ - def __init__(self, dataset, collate_fn, batch_size=1, sampler=None, - num_workers=0, pin_memory=False, drop_last=False, - timeout=0, worker_init_fn=None, - batch_sampler=None): - r""" - - :param dataset: 实现了__getitem__和__len__方法的数据容器。 - :param callable collate_fn: 用于将样本组合成batch的函数。输入为[dataset[idx1], dataset[idx2], ...], 即dataset中 - __getitem__返回值组成的list,返回值必须为两个dict,其中第一个dict会被认为是input,第二个dict中的内容被认为是target。 - 需要转换为tensor的数据,需要在collate_fn中转化,但不需要转移到对应device。 - :param int batch_size: 取出的batch大小 - :param sampler: 规定使用的 :class:`~fastNLP.Sampler` 
方式. 若为 ``None`` , 使用 :class:`~fastNLP.SequentialSampler`. - Default: ``None`` - :param int num_workers: 使用多少个进程来预处理数据 - :param bool pin_memory: 是否将产生的tensor使用pin memory, 可能会加快速度。 - :param bool drop_last: 如果最后一个batch没有batch_size这么多sample,就扔掉最后一个 - :param timeout: 生成一个batch的timeout值 - :param worker_init_fn: 在每个worker启动时调用该函数,会传入一个值,该值是worker的index。 - :param batch_sampler: 当每次batch取出的数据数量不一致时,可以使用该sampler。batch_sampler每次iter应该输出一个list的index。 - 当batch_sampler不为None时,参数batch_size, sampler, drop_last会被忽略。 - """ - assert len(dataset) > 0 - assert collate_fn is not None, "You must pass collate_fn to pad the batch." - if batch_sampler is not None: - batch_size = 1 - sampler = None - drop_last = False - - super().__init__( - dataset=dataset, batch_size=batch_size, sampler=sampler, - num_workers=num_workers, pin_memory=pin_memory, - drop_last=drop_last, timeout=timeout, worker_init_fn=worker_init_fn, - collate_fn=collate_fn, batch_sampler=batch_sampler - ) - - def __iter__(self): - self.init_iter() - for batch_x, batch_y in self.dataiter: - self.cur_batch_indices = None - yield batch_x, batch_y - - -def _to_tensor(batch, field_dtype): - r""" - - :param batch: np.array() - :param field_dtype: 数据类型 - :return: batch, flag. 如果传入的数据支持转为tensor,返回的batch就是tensor,且flag为True;如果传入的数据不支持转为tensor, - 返回的batch就是原来的数据,且flag为False - """ - try: - if field_dtype is not None and isinstance(field_dtype, type)\ - and issubclass(field_dtype, Number) \ - and not isinstance(batch, torch.Tensor): - new_batch = torch.as_tensor(batch) - flag = True - else: - new_batch = batch - flag = False - if torch.is_tensor(new_batch): - if 'float' in new_batch.dtype.__repr__(): - new_batch = new_batch.float() - elif 'int' in new_batch.dtype.__repr__(): - new_batch = new_batch.long() - return new_batch, flag - except Exception as e: - raise e diff --git a/fastNLP/core/callback.py b/fastNLP/core/callback.py deleted file mode 100644 index 808ddbf5..00000000 --- a/fastNLP/core/callback.py +++ /dev/null @@ -1,1235 +0,0 @@ -r""" -callback模块实现了 fastNLP 中的许多 callback 类,用于增强 :class:`~fastNLP.Trainer` 类。 - -虽然Trainer本身已经集成了一些功能,但仍然不足以囊括训练过程中可能需要到的功能, -比如负采样,learning rate decay 和 early stop等。 -为了解决这个问题,fastNLP引入了callback的机制,:class:`~fastNLP.Callback` 是一种在Trainer训练过程中特定阶段会运行的函数集合。 -关于 :class:`~fastNLP.Trainer` 的详细文档,请参见 :mod:`trainer 模块` - -我们将 :meth:`~fastNLP.Trainer.train` 这个函数内部分为以下的阶段,在对应阶段会触发相应的调用:: - - callback.on_train_begin() # 开始进行训练 - for i in range(1, n_epochs+1): - callback.on_epoch_begin() # 开始新的epoch - for batch_x, batch_y in Batch: - callback.on_batch_begin(batch_x, batch_y, indices) # batch_x是设置为input的field,batch_y是设置为target的field - 获取模型输出 - callback.on_loss_begin() - 计算loss - callback.on_backward_begin() # 可以进行一些检查,比如loss是否为None - 反向梯度回传 - callback.on_backward_end() # 进行梯度截断等 - 进行参数更新 - callback.on_step_end() - callback.on_batch_end() - # 根据设置进行evaluation,比如这是本epoch最后一个batch或者达到一定step - if do evaluation: - callback.on_valid_begin() - 进行dev data上的验证 - callback.on_valid_end() # 可以进行在其它数据集上进行验证 - callback.on_epoch_end() # epoch结束调用 - callback.on_train_end() # 训练结束 - callback.on_exception() # 这是一个特殊的步骤,在训练过程中遭遇exception会跳转到这里。 - -如下面的例子所示,我们可以使用内置的 callback 组件,或者继承 :class:`~fastNLP.core.callback.Callback` -定义自己的 callback 组件:: - - from fastNLP import Callback, EarlyStopCallback, Trainer, CrossEntropyLoss, AccuracyMetric - from fastNLP.models import CNNText - - start_time = time.time() - - class MyCallback(Callback): - def on_epoch_end(self): - print('{:d}ms\n\n'.format(round((time.time()-start_time)*1000))) - - model = 
CNNText((len(vocab),50), num_classes=5, padding=2, dropout=0.1) - trainer = Trainer(model=model, train_data=train_data, dev_data=dev_data, loss=CrossEntropyLoss(), - metrics=AccuracyMetric(), callbacks=[MyCallback(),EarlyStopCallback(10)]) - trainer.train() - -""" -__all__ = [ - "Callback", - - "GradientClipCallback", - "EarlyStopCallback", - "FitlogCallback", - "EvaluateCallback", - "LRScheduler", - "ControlC", - "LRFinder", - "TensorboardCallback", - "WarmupCallback", - "SaveModelCallback", - - "CallbackException", - "EarlyStopError", - "CheckPointCallback" -] - -import os -import sys -from copy import deepcopy - -import torch - -from .utils import _save_model - -try: - from tensorboardX import SummaryWriter - - tensorboardX_flag = True -except: - tensorboardX_flag = False - -from .dataset import DataSet -from .tester import Tester -from ._logger import logger -from ._parallel_utils import _model_contains_inner_module - -try: - import fitlog -except: - pass - - -class Callback(object): - r""" - Callback是fastNLP中被设计用于增强 :class:`~fastNLP.Trainer` 的类。 - 如果Callback被传递给了 Trainer , 则 Trainer 会在对应的阶段调用Callback的函数, - 具体调用时机可以通过 :mod:`trainer 模块` 查看。 - 这是Callback的基类,所有的callback必须继承自这个类 - - """ - - def __init__(self): - super(Callback, self).__init__() - self._trainer = None # 在Trainer内部被重新赋值 - self._disabled = False - - def __repr__(self): - return self.__class__.__name__ - - @property - def trainer(self): - r""" - 该属性可以通过self.trainer获取到,一般情况下不需要使用这个属性。 - """ - return self._trainer - - @property - def grad_scaler(self): - r""" - float16的gradient scaler - """ - return self._trainer.grad_scaler - - @property - def auto_cast(self): - r""" - float16用的auto cast环境 - """ - return self._trainer.auto_cast - - @property - def step(self): - r"""当前运行到的step, 范围为[1, self.n_steps+1)""" - return self._trainer.step - - @property - def n_steps(self): - r"""Trainer一共会采多少个batch。当Trainer中update_every设置为非1的值时,该值不等于update的次数""" - return self._trainer.n_steps - - @property - def batch_size(self): - r"""train和evaluate时的batch_size为多大""" - return self._trainer.batch_size - - @property - def epoch(self): - r"""当前运行的epoch数,范围是[1, self.n_epochs+1)""" - return self._trainer.epoch - - @property - def n_epochs(self): - r"""一共会运行多少个epoch""" - return self._trainer.n_epochs - - @property - def optimizer(self): - r"""初始化Trainer时传递的Optimizer""" - return self._trainer.optimizer - - @property - def model(self): - r"""正在被Trainer训练的模型""" - return self._trainer.model - - @property - def pbar(self): - r"""如果在Callback中需要打印内容,请使用self.pbar.write(str)。否则可能出现命令行显示效果不太好的问题。在 - on_train_begin(), on_train_end(), on_exception()中请不要使用该属性,通过print输出即可。""" - return self._trainer.pbar - - @property - def update_every(self): - r"""Trainer中的模型多少次反向传播才进行一次梯度更新,在Trainer初始化时传入的。""" - return self._trainer.update_every - - @property - def batch_per_epoch(self): - r"""每个epoch一共有多少个batch,只有在on_epoch_begin之后才能调用该属性。""" - return self._trainer.batch_per_epoch - - @property - def is_master(self): - return self._trainer.is_master - - @property - def disabled(self): - return self._disabled - - @property - def logger(self): - return getattr(self._trainer, 'logger', logger) - - def on_train_begin(self): - r""" - 在Train过程开始之前调用。 - - :return: - """ - pass - - def on_epoch_begin(self): - r""" - 在每个epoch开始之前调用一次 - - :return: - """ - pass - - def on_batch_begin(self, batch_x, batch_y, indices): - r""" - 每次采集到一个batch的数据则调用一次。这里对batch_x或batch_y删除添加内容是可以影响到Trainer中内容的。所以在这一步 - 可以进行一些负采样之类的操作。batch_x和batch_y中的tensor已经被放置到了模型所在的设备上。 - - :param dict batch_x: 
DataSet中被设置为input的field的batch。 - :param dict batch_y: DataSet中被设置为target的field的batch。 - :param list(int) indices: 这次采样使用到的indices,可以通过DataSet[indices]获取出这个batch采出的Instance,在一些 - 情况下可以帮助定位是哪个Sample导致了错误。仅当num_workers=0时有效。 - :return: - """ - pass - - def on_loss_begin(self, batch_y, predict_y): - r""" - 在计算loss前调用,即这里修改batch_y或predict_y的值是可以影响到loss计算的。 - - :param dict batch_y: 在DataSet中被设置为target的field的batch集合。 - :param dict predict_y: 模型的forward()返回的结果。 - :return: - """ - pass - - def on_backward_begin(self, loss): - r""" - 在loss得到之后,但在反向传播之前。可能可以进行loss是否为NaN的检查。 - - :param torch.Tensor loss: 计算得到的loss值 - :return: - """ - pass - - def on_backward_end(self): - r""" - 反向梯度传播已完成,但由于update_every的设置,可能并不是每一次调用都有梯度。到这一步,还没有更新参数。 - - :return: - """ - pass - - def on_step_end(self): - r""" - 到这里模型的参数已经按照梯度更新。但可能受update_every影响,并不是每次都更新了。 - - :return: - """ - pass - - def on_batch_end(self): - r""" - 这一步与on_step_end是紧接着的。只是为了对称性加上了这一步。 - - """ - pass - - def on_valid_begin(self): - r""" - 如果Trainer中设置了验证,则发生验证前会调用该函数 - - :return: - """ - pass - - def on_valid_end(self, eval_result, metric_key, optimizer, is_better_eval): - r""" - 每次执行验证集的evaluation后会调用。 - - :param Dict[str: Dict[str: float]] eval_result: , evaluation的结果。一个例子为{'AccuracyMetric':{'acc':1.0}},即 - 传入的dict是有两层,第一层是metric的名称,第二层是metric的具体指标。 - :param str metric_key: 初始化Trainer时传入的metric_key。 - :param torch.Optimizer optimizer: Trainer中使用的优化器。 - :param bool is_better_eval: 当前dev结果是否比之前的好。 - :return: - """ - pass - - def on_epoch_end(self): - r""" - 每个epoch结束将会调用该方法 - """ - pass - - def on_train_end(self): - r""" - 训练结束,调用该方法 - """ - pass - - def on_exception(self, exception): - r""" - 当训练过程出现异常,会触发该方法 - :param exception: 某种类型的Exception,比如KeyboardInterrupt等 - """ - pass - - -def _transfer(func): - r"""装饰器,将对CallbackManager的调用转发到各个Callback子类. - - :param func: - :return: - """ - - def wrapper(manager, *arg): - returns = [] - for callback in manager.callbacks: - if callback.disabled: - continue - returns.append(getattr(callback, func.__name__)(*arg)) - return returns - - return wrapper - - -class CallbackManager(Callback): - r""" - 内部使用的Callback管理类 - """ - def __init__(self, env, callbacks=None): - r""" - - :param dict env: The key is the name of the Trainer attribute(str). The value is the attribute itself. - :param List[Callback] callbacks: - """ - super(CallbackManager, self).__init__() - # set attribute of trainer environment - self._env = env - self.callbacks = [] - if callbacks: - self.callbacks = self.prepare_callbacks(callbacks) - - def prepare_callbacks(self, callbacks): - if not callbacks: - return [] - if isinstance(callbacks, list): - if all([isinstance(cb, Callback) for cb in callbacks]) is True: - pass - else: - obj = [not isinstance(cb, Callback) for cb in callbacks][0] - raise TypeError(f"Expect sub-classes of Callback. Got {type(obj)}") - else: - raise TypeError(f"Expect callbacks in CallbackManager(callbacks) to be list. 
Got {type(callbacks)}.") - - for env_name, env_val in self._env.items(): - for callback in callbacks: - setattr(callback, '_' + env_name, env_val) # Callback.trainer - return callbacks - - @_transfer - def on_train_begin(self): - pass - - @_transfer - def on_epoch_begin(self): - pass - - @_transfer - def on_batch_begin(self, batch_x, batch_y, indices): - pass - - @_transfer - def on_loss_begin(self, batch_y, predict_y): - pass - - @_transfer - def on_backward_begin(self, loss): - pass - - @_transfer - def on_backward_end(self): - pass - - @_transfer - def on_step_end(self): - pass - - @_transfer - def on_batch_end(self): - pass - - @_transfer - def on_valid_begin(self): - pass - - @_transfer - def on_valid_end(self, eval_result, metric_key, optimizer, is_better_eval): - pass - - @_transfer - def on_validation(self): - pass - - @_transfer - def on_epoch_end(self): - pass - - @_transfer - def on_train_end(self): - pass - - @_transfer - def on_exception(self, exception): - pass - - -class DistCallbackManager(CallbackManager): - def __init__(self, env, callbacks_all=None, callbacks_master=None): - super(DistCallbackManager, self).__init__(env) - assert 'trainer' in env - self._trainer = env['trainer'] - self.callbacks_master = [] - self.callbacks_all = [] - self.add_callback(callbacks_all, master=False) - self.add_callback(callbacks_master, master=True) - - def patch_callback(self, callbacks, disabled): - if not callbacks: - return - if not isinstance(callbacks, (list, tuple)): - callbacks = [callbacks] - for cb in callbacks: - cb._disabled = disabled - - def add_callback(self, cb, master=False): - if master: - self.patch_callback(cb, not self.is_master) - self.callbacks_master += self.prepare_callbacks(cb) - else: - self.callbacks_all += self.prepare_callbacks(cb) - self.callbacks = self.callbacks_all + self.callbacks_master - - -class GradientClipCallback(Callback): - r""" - 每次backward前,将parameter的gradient clip到某个范围。 - """ - - def __init__(self, parameters=None, clip_value=1, clip_type='norm'): - r""" - - :param None,torch.Tensor,List[torch.Tensor] parameters: 一般通过model.parameters()获得。 - 如果为None则默认对Trainer的model中所有参数进行clip - :param float clip_value: 将gradient 限制到[-clip_value, clip_value]。clip_value应该为正数 - :param str clip_type: 支持'norm', 'value' - 两种:: - - 1 'norm', 将gradient的norm rescale到[-clip_value, clip_value] - - 2 'value', 将gradient限制在[-clip_value, clip_value], - 小于-clip_value的gradient被赋值为-clip_value; - 大于clip_value的gradient被赋值为clip_value. 
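-
-        A minimal usage sketch (``model`` and ``train_data`` are assumed to be defined elsewhere)::
-
-            from fastNLP import Trainer, GradientClipCallback
-
-            # rescale the global gradient norm to at most 5 before each parameter update
-            callback = GradientClipCallback(clip_value=5, clip_type='norm')
-            trainer = Trainer(train_data=train_data, model=model,
-                              callbacks=[callback])
-            trainer.train()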
- """ - super().__init__() - - from torch import nn - if clip_type == 'norm': - self.clip_fun = nn.utils.clip_grad_norm_ - elif clip_type == 'value': - self.clip_fun = nn.utils.clip_grad_value_ - else: - raise ValueError("Only supports `norm` or `value` right now.") - if parameters is not None: - self.parameters = list(parameters) - else: - self.parameters = None - self.clip_value = clip_value - - def on_backward_end(self): - if self.step%self.update_every==0: - if self.trainer.fp16: - self.grad_scaler.unscale_(self.optimizer) - if self.parameters is not None: - self.clip_fun(self.parameters, self.clip_value) - else: - self.clip_fun(self.model.parameters(), self.clip_value) - - -class EarlyStopCallback(Callback): - r""" - 多少个epoch没有变好就停止训练,相关类 :class:`~fastNLP.core.callback.EarlyStopError` - """ - - def __init__(self, patience): - r""" - - :param int patience: epoch的数量 - """ - super(EarlyStopCallback, self).__init__() - self.patience = patience - self.wait = 0 - - def on_valid_end(self, eval_result, metric_key, optimizer, is_better_eval): - if not is_better_eval: - # current result is getting worse - if self.wait == self.patience: - raise EarlyStopError("Early stopping raised.") - else: - self.wait += 1 - else: - self.wait = 0 - - def on_exception(self, exception): - if isinstance(exception, EarlyStopError): - logger.info("Early Stopping triggered in epoch {}!".format(self.epoch)) - else: - raise exception # 抛出陌生Error - - -class FitlogCallback(Callback): - r""" - 该callback可将loss和progress写入到fitlog中; 如果Trainer有dev的数据,将自动把dev的结果写入到log中; 同时还支持传入 - 一个(或多个)test数据集进行测试(只有在trainer具有dev时才能使用),每次在dev上evaluate之后会在这些数据集上验证一下。 - 并将验证结果写入到fitlog中。这些数据集的结果是根据dev上最好的结果报道的,即如果dev在第3个epoch取得了最佳,则 - fitlog中记录的关于这些数据集的结果就是来自第三个epoch的结果。 - """ - - def __init__(self, data=None, tester=None, log_loss_every=0, verbose=1, log_exception=False): - r""" - - :param ~fastNLP.DataSet,Dict[~fastNLP.DataSet] data: 传入DataSet对象,会使用多个Trainer中的metric对数据进行验证。如果需要 - 传入多个DataSet请通过dict的方式传入,dict的key将作为对应dataset的name传递给fitlog。data的结果的名称以'data'开头。 - :param ~fastNLP.Tester,Dict[~fastNLP.Tester] tester: Tester对象,将在on_valid_end时调用。tester的结果的名称以'tester'开头 - :param int log_loss_every: 多少个step记录一次loss(记录的是这几个batch的loss平均值),如果数据集较大建议将该值设置得 - 大一些,不然会导致log文件巨大。默认为0, 即不要记录loss。 - :param int verbose: 是否在终端打印evaluation的结果,0不打印。 - :param bool log_exception: fitlog是否记录发生的exception信息 - """ - super().__init__() - self.datasets = {} - self.testers = {} - self._log_exception = log_exception - assert isinstance(log_loss_every, int) and log_loss_every>=0 - if tester is not None: - if isinstance(tester, dict): - for name, test in tester.items(): - if not isinstance(test, Tester): - raise TypeError(f"{name} in tester is not a valid fastNLP.Tester.") - self.testers['tester-' + name] = test - if isinstance(tester, Tester): - self.testers['tester-test'] = tester - for tester in self.testers.values(): - setattr(tester, 'verbose', 0) - - if isinstance(data, dict): - for key, value in data.items(): - assert isinstance(value, DataSet), f"Only DataSet object is allowed, not {type(value)}." 
- for key, value in data.items(): - self.datasets['data-' + key] = value - elif isinstance(data, DataSet): - self.datasets['data-test'] = data - elif data is not None: - raise TypeError("data receives dict[DataSet] or DataSet object.") - - self.verbose = verbose - self._log_loss_every = log_loss_every - self._avg_loss = 0 - - def on_train_begin(self): - if (len(self.datasets) > 0 or len(self.testers) > 0) and self.trainer.dev_data is None: - raise RuntimeError("Trainer has no dev data, you cannot pass extra data to do evaluation.") - - if len(self.datasets) > 0: - for key, data in self.datasets.items(): - tester = Tester(data=data, model=self.model, - batch_size=self.trainer.kwargs.get('dev_batch_size', self.trainer.batch_size), - metrics=self.trainer.metrics, - verbose=0, - use_tqdm=self.trainer.kwargs.get('test_use_tqdm', self.trainer.use_tqdm), - sampler=self.trainer.kwargs.get('test_sampler', None)) - self.testers[key] = tester - fitlog.add_progress(total_steps=self.n_steps) - - def on_backward_begin(self, loss): - if self._log_loss_every>0: - self._avg_loss += loss.item() - if self.step%self._log_loss_every==0: - fitlog.add_loss(self._avg_loss/self._log_loss_every*self.update_every, name='loss', step=self.step, epoch=self.epoch) - self._avg_loss = 0 - - def on_valid_end(self, eval_result, metric_key, optimizer, better_result): - if better_result: - eval_result = deepcopy(eval_result) - eval_result['step'] = self.step - eval_result['epoch'] = self.epoch - fitlog.add_best_metric(eval_result) - fitlog.add_metric(eval_result, step=self.step, epoch=self.epoch) - if len(self.testers) > 0: - for key, tester in self.testers.items(): - try: - eval_result = tester.test() - if self.verbose != 0: - self.pbar.write("FitlogCallback evaluation on {}:".format(key)) - self.pbar.write(tester._format_eval_results(eval_result)) - fitlog.add_metric(eval_result, name=key, step=self.step, epoch=self.epoch) - if better_result: - fitlog.add_best_metric(eval_result, name=key) - except Exception as e: - self.pbar.write("Exception happens when evaluate on DataSet named `{}`.".format(key)) - raise e - - def on_train_end(self): - fitlog.finish() - - def on_exception(self, exception): - fitlog.finish(status=1) - if self._log_exception: - fitlog.add_other(repr(exception), name='except_info') - - -class EvaluateCallback(Callback): - r""" - 通过使用该Callback可以使得Trainer在evaluate dev之外还可以evaluate其它数据集,比如测试集。每一次验证dev之前都会先验证EvaluateCallback - 中的数据。 - """ - - def __init__(self, data=None, tester=None): - r""" - :param ~fastNLP.DataSet,Dict[~fastNLP.DataSet] data: 传入DataSet对象,会使用Trainer中的metric对数据进行验证。如果需要传入多个 - DataSet请通过dict的方式传入。 - :param ~fastNLP.Tester,Dict[~fastNLP.DataSet] tester: Tester对象, 通过使用Tester对象,可以使得验证的metric与Trainer中 - 的metric不一样。 - """ - super().__init__() - self.datasets = {} - self.testers = {} - if tester is not None: - if isinstance(tester, dict): - for name, test in tester.items(): - if not isinstance(test, Tester): - raise TypeError(f"{name} in tester is not a valid fastNLP.Tester.") - self.testers['tester-' + name] = test - if isinstance(tester, Tester): - self.testers['tester-test'] = tester - for tester in self.testers.values(): - setattr(tester, 'verbose', 0) - - if isinstance(data, dict): - for key, value in data.items(): - assert isinstance(value, DataSet), f"Only DataSet object is allowed, not {type(value)}." 
- for key, value in data.items(): - self.datasets['data-' + key] = value - elif isinstance(data, DataSet): - self.datasets['data-test'] = data - elif data is not None: - raise TypeError("data receives dict[DataSet] or DataSet object.") - - def on_train_begin(self): - if len(self.datasets) > 0 and self.trainer.dev_data is None: - raise RuntimeError("Trainer has no dev data, you cannot pass extra DataSet to do evaluation.") - - if len(self.datasets) > 0: - for key, data in self.datasets.items(): - tester = Tester(data=data, model=self.model, - batch_size=self.trainer.kwargs.get('dev_batch_size', self.batch_size), - metrics=self.trainer.metrics, verbose=0, - use_tqdm=self.trainer.test_use_tqdm) - self.testers[key] = tester - - def on_valid_end(self, eval_result, metric_key, optimizer, better_result): - if len(self.testers) > 0: - for key, tester in self.testers.items(): - try: - eval_result = tester.test() - self.logger.info("EvaluateCallback evaluation on {}:".format(key)) - self.logger.info(tester._format_eval_results(eval_result)) - except Exception as e: - self.logger.error("Exception happens when evaluate on DataSet named `{}`.".format(key)) - raise e - -class LRScheduler(Callback): - r""" - 对PyTorch LR Scheduler的包装以使得其可以被Trainer所使用 - """ - - def __init__(self, lr_scheduler): - r""" - :param torch.optim.lr_scheduler._LRScheduler lr_scheduler: PyTorch的lr_scheduler - """ - super(LRScheduler, self).__init__() - import torch.optim - if isinstance(lr_scheduler, torch.optim.lr_scheduler._LRScheduler): - self.scheduler = lr_scheduler - else: - raise ValueError(f"Expect torch.optim.lr_scheduler for LRScheduler. Got {type(lr_scheduler)}.") - - def on_epoch_end(self): - self.scheduler.step(self.epoch) - - -class ControlC(Callback): - r""" - 检测到 control+C 时的反馈 - """ - - @staticmethod - def quit_all(): - import sys - sys.exit(0) # 直接退出程序 - - def __init__(self, quit_and_do, action=quit_all): - r""" - :param bool quit_and_do: 若为True,则检测到control+C 进行后续操作(默认值为:直接退出程序);否则只退出Trainer。 - """ - - super(ControlC, self).__init__() - if type(quit_and_do) != bool: - raise ValueError("In KeyBoardInterrupt, quit_and_do arguemnt must be a bool.") - self.quit_and_do = quit_and_do - self.action = action - - def on_exception(self, exception): - if isinstance(exception, KeyboardInterrupt): - if self.quit_and_do is True: - self.action() - else: - pass - else: - raise exception # 抛出陌生Error - - -class SmoothValue(object): - r"""work for LRFinder""" - - def __init__(self, beta: float): - self.beta, self.n, self.mov_avg = beta, 0, 0 - self.smooth = None - - def add_value(self, val: float) -> None: - r"""Add `val` to calculate updated smoothed value.""" - self.n += 1 - self.mov_avg = self.beta * self.mov_avg + (1 - self.beta) * val - self.smooth = self.mov_avg / (1 - self.beta ** self.n) - - -class LRFinder(Callback): - r""" - 用第一个 epoch 找最佳的学习率,从第二个epoch开始应用它 - """ - - def __init__(self, start_lr=1e-6, end_lr=10): - r""" - - :param float start_lr: 学习率下界 - :param float end_lr: 学习率上界 - """ - super(LRFinder, self).__init__() - self.start_lr, self.end_lr = start_lr, end_lr - - self.stop = False - self.best_loss = 0. 
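-        # best_loss == 0. doubles as "no smoothed loss recorded yet" in on_backward_begin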
- self.best_lr = None - self.loss_history = [] - self.smooth_value = SmoothValue(0.8) - self.opt = None - self.find = None - - @property - def lr_gen(self): - scale = (self.end_lr - self.start_lr) / self.batch_per_epoch - return (self.start_lr + scale * (step + 1) for step in range(self.batch_per_epoch)) - - @property - def num_it(self): - return self.batch_per_epoch - - def on_epoch_begin(self): - if self.epoch == 1: # first epoch - self.opt = self.trainer.optimizer # pytorch optimizer - self.opt.param_groups[0]["lr"] = self.start_lr - # save model - torch.save(self.model.state_dict(), 'tmp') - self.find = True - - def on_backward_begin(self, loss): - if self.find: - if torch.isnan(loss) or self.stop is True: - self.stop = True - return - loss_val = loss.detach().mean().item() - self.loss_history.append(loss_val) - self.smooth_value.add_value(loss_val) - if self.best_loss == 0. or self.smooth_value.smooth < self.best_loss: - self.best_loss = self.smooth_value.smooth - self.best_lr = self.opt.param_groups[0]["lr"] - - def on_batch_end(self, *args): - if self.find: - lr = next(self.lr_gen, None) - if lr is None or self.stop is True or self.loss_history[-1] > 4 * self.best_loss: - self.stop = True - return - self.opt.param_groups[0]["lr"] = lr - # self.loader.load_pytorch(self.trainer.model, "tmp") - - def on_epoch_end(self): - if self.epoch == 1: # first epoch - self.opt.param_groups[0]["lr"] = self.best_lr - self.find = False - # reset model - states = torch.load('tmp') - self.model.load_state_dict(states) - os.remove('tmp') - self.pbar.write("Model reset. \nFind best lr={}".format(self.best_lr)) - - -class TensorboardCallback(Callback): - r""" - 接受以下一个或多个字符串作为参数: - - "model" - - "loss" - - "metric" - - .. warning:: - fastNLP 已停止对此功能的维护,请等待 fastNLP 兼容 PyTorch1.1 的下一个版本。 - 或者使用和 fastNLP 高度配合的 fitlog(参见 :doc:`/tutorials/extend_3_fitlog` )。 - - """ - - def __init__(self, *options): - super(TensorboardCallback, self).__init__() - args = {"model", "loss", "metric"} - for opt in options: - if opt not in args: - raise ValueError("Unrecognized argument {}. 
Expect one of {}".format(opt, args)) - self.options = options - self._summary_writer = None - self.graph_added = False - - def on_train_begin(self): - save_dir = self.trainer.save_path - if save_dir is None: - path = os.path.join("./", 'tensorboard_logs_{}'.format(self.trainer.start_time)) - else: - path = os.path.join(save_dir, 'tensorboard_logs_{}'.format(self.trainer.start_time)) - if tensorboardX_flag: - self._summary_writer = SummaryWriter(path) - else: - self._summary_writer = None - - def on_batch_begin(self, batch_x, batch_y, indices): - if "model" in self.options and self.graph_added is False: - # tesorboardX 这里有大bug,暂时没法画模型图 - # from fastNLP.core.utils import _build_args - # inputs = _build_args(self.trainer.model, **batch_x) - # args = tuple([value for value in inputs.values()]) - # args = args[0] if len(args) == 1 else args - # self._summary_writer.add_graph(self.trainer.model, torch.zeros(32, 2)) - self.graph_added = True - - def on_backward_begin(self, loss): - if "loss" in self.options and self._summary_writer: - self._summary_writer.add_scalar("loss", loss.item(), global_step=self.trainer.step) - - if "model" in self.options and self._summary_writer: - for name, param in self.trainer.model.named_parameters(): - if param.requires_grad: - self._summary_writer.add_scalar(name + "_mean", param.mean(), global_step=self.trainer.step) - # self._summary_writer.add_scalar(name + "_std", param.std(), global_step=self.trainer.step) - self._summary_writer.add_scalar(name + "_grad_mean", param.grad.mean(), - global_step=self.trainer.step) - - def on_valid_end(self, eval_result, metric_key, optimizer, is_better_eval): - if "metric" in self.options and self._summary_writer: - for name, metric in eval_result.items(): - for metric_key, metric_val in metric.items(): - self._summary_writer.add_scalar("valid_{}_{}".format(name, metric_key), metric_val, - global_step=self.trainer.step) - - def on_train_end(self): - if self._summary_writer: - self._summary_writer.close() - del self._summary_writer - - def on_exception(self, exception): - if hasattr(self, "_summary_writer"): - self._summary_writer.close() - del self._summary_writer - - -class CheckPointCallback(Callback): - def __init__(self, save_path, delete_when_train_finish=True, recovery_fitlog=True): - r""" - 用于在每个epoch结束的时候保存一下当前的Trainer状态,可以用于恢复之前的运行。使用最近的一个epoch继续训练 - 一段示例代码 - Example1:: - - >>> callback = CheckPointCallback('chkp.pt') - >>> trainer = Trainer(xxx, callback=callback) - >>> trainer.train() # 如果训练过程没结束就fail,请直接再次运行即可(请务必保证与上次使用了完全相同的数据与超参数) - - Example2:: - - >>> fitlog.set_log_dir('xxx') - >>> callback = CheckPointCallback('chkp.pt') # 一定要在set_log_dir下一行就接着CheckPointCallback - >>> trainer = Trainer(xxx, callback=callback) - >>> trainer.train() # 如果训练过程没结束就fail,请直接再次运行即可(请务必保证与上次使用了完全相同的数据与超参数) - - :param str save_path: 将状态保存到哪个位置。需要指定一个具体的路径,比如'checkpoints/chtp.pt'。如果检查到该文件存在,将在 - Trainer开始训练的时候自动从这个Checkpoint处开始运行。 - :param bool delete_when_train_finish: 如果Train正常运行完毕,是否自动删除。删除该文件可以使得路径自动复用。 - :param bool recovery_fitlog: 是否恢复fitlog为对应的log,如果为True请将本Callback放在fitlog.set_log_dir后面一行初始化。 - 如果为False,将新建一个log folder否则继续使用之前的。 - """ - super().__init__() - self.save_path = os.path.abspath(os.path.expanduser(save_path)) - self.delete_when_train_finish = delete_when_train_finish - self.recover_fitlog = recovery_fitlog - try: - import fitlog - except: - self.recover_fitlog = False - if os.path.exists(os.path.expanduser(self.save_path)): - logger.info("The train will start from the checkpoint saved in {}.".format(self.save_path)) - 
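-            # when a checkpoint exists, optionally point fitlog back at the previous
-            # run's log directory so that resumed training continues the same log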
if self.recover_fitlog: - states = torch.load(self.save_path) - if 'fitlog_log_dir' in states: - try: - import fitlog - log_dir = states['fitlog_log_dir'] - if 'fitlog_save_log_dir' in states: - log_dir = states['fitlog_save_log_dir'] - fitlog.set_log_dir(log_dir, new_log=True) - except: - logger.error("Fail to recovery the fitlog states.") - - def on_train_begin(self): - r""" - 当train开始时,且需要恢复上次训练时,会做以下的操作 - (1) 重新加载model权重 - (2) 重新加载optimizer的状态 - (3) 加载当前epoch数 - (4) 加载当前最佳evaluate的性能 - (5) (optional) 自动将fitlog设置到上次log出继续 - - :return: - """ - if os.path.exists(os.path.expanduser(self.save_path)): - states = torch.load(self.save_path) - model = self.model - if _model_contains_inner_module(model): - model = model.module - model.load_state_dict(states['model']) - self.optimizer.load_state_dict(states['optimizer']) - if 'grad_scaler' in states: - self.grad_scaler.load_state_dict(states['grad_scaler']) - self.trainer.epoch = states['epoch'] + 1 # 因为是结束储存的,所以需要从下一个epoch开始 - self.trainer.step = states['step'] - if 'best_dev_epoch' in states: - self.trainer.best_dev_perf = states['best_dev_perf'] - self.trainer.best_dev_epoch = states['best_dev_epoch'] - self.trainer.best_dev_step = states['best_dev_step'] - self.trainer.best_metric_indicator = states['best_metric_indicator'] - logger.info("Load checkpoint from {}".format(os.path.expanduser(self.save_path))) - - def on_epoch_end(self): - r""" - 保存状态,使得结果可以被恢复 - - :param self: - :return: - """ - states = {} - model = self.model - if _model_contains_inner_module(model): - model = model.module - states['model'] = {name:param.cpu() for name, param in model.state_dict().items()} - states['optimizer'] = self.optimizer.state_dict() - states['grad_scaler'] = self.grad_scaler.state_dict() - states['epoch'] = self.epoch - states['step'] = self.step - if self.trainer.best_dev_epoch is not None: - states['best_dev_epoch'] = self.trainer.best_dev_epoch - states['best_dev_perf'] = self.trainer.best_dev_perf - states['best_dev_step'] = self.trainer.best_dev_step - states['best_metric_indicator'] = self.trainer.best_metric_indicator - if self.recover_fitlog: - try: - import fitlog - if fitlog._logger._log_dir is not None: - states['fitlog_log_dir'] = fitlog._logger._log_dir - if fitlog._logger._save_log_dir is not None: - states['fitlog_save_log_dir'] = fitlog._logger._save_log_dir - except: - pass - torch.save(states, self.save_path) - logger.debug("Checkpoint:{} has been saved in epoch:{}.".format(self.save_path, self.epoch)) - - def on_train_end(self): - # 训练结束,根据情况删除保存的内容 - if self.delete_when_train_finish: - if os.path.exists(self.save_path): - os.remove(self.save_path) - logger.debug("Checkpoint:{} has been removed.".format(self.save_path)) - - -class WarmupCallback(Callback): - r""" - learning rate按照一定的速率从0上升到设置的learning rate。 - """ - def __init__(self, warmup=0.1, schedule='constant'): - r""" - - :param int,float warmup: 如果warmup为int,则在该step之前,learning rate根据schedule的策略变化; 如果warmup为float, - 如0.1, 则前10%的step是按照schedule策略调整learning rate。 - :param str schedule: 以哪种方式调整。 - linear: 前warmup的step上升到指定的learning rate(从Trainer中的optimizer处获取的), 后warmup的step下降到0; - constant前warmup的step上升到指定learning rate,后面的step保持learning rate. - """ - super().__init__() - self.warmup = max(warmup, 0.) 
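-        # a warmup value larger than 1 is interpreted as a step count and is converted
-        # to a fraction of the total number of steps in on_train_begin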
-
-        self.initial_lrs = []  # 存放param_group的learning rate
-        if schedule == 'constant':
-            self.get_lr = self._get_constant_lr
-        elif schedule == 'linear':
-            self.get_lr = self._get_linear_lr
-        else:
-            raise RuntimeError("Only support 'linear', 'constant'.")
-
-    def _get_constant_lr(self, progress):
-        if progress < self.warmup:
-            return progress / self.warmup
-        return 1
-
-    def _get_linear_lr(self, progress):
-        if progress < self.warmup:
-            return progress / self.warmup
-        return max((progress - 1.) / (self.warmup - 1.), 0.)
-
-    def on_train_begin(self):
-        self.t_steps = (len(self.trainer.train_data) // (self.batch_size * self.update_every) +
-                        int(len(self.trainer.train_data) % (self.batch_size * self.update_every) != 0)) * self.n_epochs
-        if self.warmup > 1:
-            self.warmup = self.warmup / self.t_steps
-        self.t_steps = max(2, self.t_steps)  # 不能小于2
-        # 获取param_group的初始learning rate
-        for group in self.optimizer.param_groups:
-            self.initial_lrs.append(group['lr'])
-
-    def on_backward_end(self):
-        if self.step % self.update_every == 0:
-            progress = (self.step / self.update_every) / self.t_steps
-            for lr, group in zip(self.initial_lrs, self.optimizer.param_groups):
-                group['lr'] = lr * self.get_lr(progress)
-
-
-class SaveModelCallback(Callback):
-    r"""
-    由于Trainer在训练过程中只会保存最佳的模型,该callback可实现多种方式的结果存储。
-    会根据训练开始的时间戳在save_dir下建立文件夹,再在文件夹下存放多个模型::
-
-        -save_dir
-            -2019-07-03-15-06-36
-                -epoch:0_step:20_{metric_key}:{evaluate_performance}.pt   # metric是给定的metric_key, evaluate_performance是性能
-                -epoch:1_step:40_{metric_key}:{evaluate_performance}.pt
-            -2019-07-03-15-10-00
-                -epoch:0_step:20_{metric_key}:{evaluate_performance}.pt   # metric是给定的metric_key, evaluate_performance是性能
-    """
-    def __init__(self, save_dir, top=3, only_param=False, save_on_exception=False):
-        r"""
-
-        :param str save_dir: 将模型存放在哪个目录下,会在该目录下创建以时间戳命名的目录,并存放模型。如果save_dir不存在将自动创建
-        :param int top: 保存dev表现top多少模型。-1为保存所有模型。
-        :param bool only_param: 是否只保存模型的权重。
-        :param save_on_exception: 发生exception时,是否保存一份发生exception的模型。模型名称为epoch:x_step:x_Exception:{exception_name}.
-        """
-        super().__init__()
-
-        os.makedirs(save_dir, exist_ok=True)
-        self.save_dir = save_dir
-        if top < 0:
-            self.top = sys.maxsize
-        else:
-            self.top = top
-        self._ordered_save_models = []  # List[Tuple], Tuple[0]是metric, Tuple[1]是path。metric是依次变好的,所以从头删
-
-        self.only_param = only_param
-        self.save_on_exception = save_on_exception
-
-    def on_train_begin(self):
-        self.save_dir = os.path.join(self.save_dir, self.trainer.start_time)
-
-    def on_valid_end(self, eval_result, metric_key, optimizer, is_better_eval):
-        metric_value = list(eval_result.values())[0][metric_key]
-        self._save_this_model(metric_value)
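-    # worked example of the bookkeeping below: with top=3, increase_better=True and
-    # _ordered_save_models == [(0.81, 'a.pt'), (0.85, 'b.pt'), (0.90, 'c.pt')],
-    # a new pair (0.88, 'd.pt') is inserted after (0.85, 'b.pt'), after which
-    # (0.81, 'a.pt') is popped from the head of the list and its file deleted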
-    def _insert_into_ordered_save_models(self, pair):
-        # pair:(metric_value, model_name)
-        # 返回save的模型pair与删除的模型pair. pair中第一个元素是metric的值,第二个元素是模型的名称
-        index = -1
-        for _pair in self._ordered_save_models:
-            if _pair[0] >= pair[0] and self.trainer.increase_better:
-                break
-            if not self.trainer.increase_better and _pair[0] <= pair[0]:
-                break
-            index += 1
-        save_pair = None
-        if len(self._ordered_save_models) < self.top or (len(self._ordered_save_models) >= self.top and index != -1):
-            save_pair = pair
-            self._ordered_save_models.insert(index + 1, pair)
-        delete_pair = None
-        if len(self._ordered_save_models) > self.top:
-            delete_pair = self._ordered_save_models.pop(0)
-        return save_pair, delete_pair
-
-    def _save_this_model(self, metric_value):
-        name = "epoch-{}_step-{}_{}-{:.6f}.pt".format(self.epoch, self.step, self.trainer.metric_key, metric_value)
-        save_pair, delete_pair = self._insert_into_ordered_save_models((metric_value, name))
-        if save_pair:
-            try:
-                _save_model(self.model, model_name=name, save_dir=self.save_dir, only_param=self.only_param)
-            except Exception as e:
-                logger.error(f"The following exception:{e} happens when save model to {self.save_dir}.")
-        if delete_pair:
-            try:
-                delete_model_path = os.path.join(self.save_dir, delete_pair[1])
-                if os.path.exists(delete_model_path):
-                    os.remove(delete_model_path)
-            except Exception as e:
-                logger.error(f"Fail to delete model {name} at {self.save_dir} caused by exception:{e}.")
-
-    def on_exception(self, exception):
-        if self.save_on_exception:
-            name = "epoch-{}_step-{}_Exception-{}.pt".format(self.epoch, self.step, exception.__class__.__name__)
-            _save_model(self.model, model_name=name, save_dir=self.save_dir, only_param=self.only_param)
-
-
-class CallbackException(BaseException):
-    r"""
-    当需要通过callback跳出训练的时候可以通过抛出CallbackException并在on_exception中捕获这个值。
-    """
-
-    def __init__(self, msg):
-        r"""
-
-        :param str msg: Exception的信息。
-        """
-        super(CallbackException, self).__init__(msg)
-
-
-class EarlyStopError(CallbackException):
-    r"""
-    用于EarlyStop时从Trainer训练循环中跳出。
-
-    """
-
-    def __init__(self, msg):
-        super(EarlyStopError, self).__init__(msg)
-
-
-class EchoCallback(Callback):
-    r"""
-    用于测试分布式训练
-
-    """
-    def __init__(self, name, out=sys.stdout):
-        super(EchoCallback, self).__init__()
-        self.name = name
-        self.out = out  # deprecated
-
-    def __getattribute__(self, item):
-        if item.startswith('on_'):
-            logger.info('{}.{} has been called at pid: {}'.format(self.name, item, os.getpid()))
-        return super(EchoCallback, self).__getattribute__(item)
-
-
-class _TesterCallback(Callback):
-    def __init__(self, data, model, metrics, metric_key=None, batch_size=16, num_workers=None, sampler=None,
-                 use_tqdm=True):
-        super(_TesterCallback, self).__init__()
-        self.tester = Tester(data, model,
-                             metrics=metrics, batch_size=batch_size,
-                             num_workers=num_workers, verbose=0, sampler=sampler, use_tqdm=use_tqdm)
-        if metric_key is not None:
-            self.metric_key, self.increase_better = self._parse_metric_key(metric_key)
-        else:
-            self.metric_key = None
-            self.increase_better = True
-        self.score = None
-
-    def on_valid_begin(self):
-        cur_score = self.tester.test()
-        eval_str = "Evaluation at Epoch {}/{}. Step:{}/{}. - {}".format(
-            self.epoch, self.n_epochs, self.step, self.n_steps,
-            self.tester._format_eval_results(cur_score))
-        self.logger.info(eval_str)
-        is_better = self.compare_better(cur_score)
-        if is_better:
-            self.score = cur_score
-        return cur_score, is_better
-
-    @staticmethod
-    def _get_score(metric_dict, key):
-        for metric in metric_dict.values():
-            if key in metric:
-                return metric[key]
-        return None
-
-    @staticmethod
-    def _parse_metric_key(metric_key):
-        # parse metric_key
-        # increase_better is True.
It means the exp result gets better if the indicator increases. - # It is true by default. - increase_better = False if metric_key[0] == "-" else True - metric_key = metric_key[1:] if metric_key[0] == "+" or metric_key[0] == "-" else metric_key - return metric_key, increase_better - - def compare_better(self, a): - if self.score is None: - return True - if self.metric_key is None: - metric_key = list(list(self.score.values())[0].keys())[0] - self.metric_key, self.increase_better = self._parse_metric_key(metric_key) - k = self.metric_key - score = self._get_score(self.score, k) - new_score = self._get_score(a, k) - if score is None or new_score is None: - return False - if self.increase_better: - return score <= new_score - else: - return score >= new_score diff --git a/fastNLP/core/collate_fn.py b/fastNLP/core/collate_fn.py deleted file mode 100644 index 403af270..00000000 --- a/fastNLP/core/collate_fn.py +++ /dev/null @@ -1,147 +0,0 @@ -r"""undocumented""" -from builtins import sorted - -import torch -import numpy as np -from .field import _get_ele_type_and_dim -from .utils import logger -from copy import deepcopy - -__all__ = ['ConcatCollateFn'] - - -def _check_type(batch_dict, fields): - if len(fields) == 0: - raise RuntimeError - types = [] - dims = [] - for f in fields: - t, d = _get_ele_type_and_dim(batch_dict[f]) - types.append(t) - dims.append(d) - diff_types = set(types) - diff_dims = set(dims) - if len(diff_types) > 1 or len(diff_dims) > 1: - raise ValueError - return types[0] - - -def batching(samples, max_len=0, padding_val=0): - if len(samples) == 0: - return samples - if max_len <= 0: - max_len = max(s.shape[0] for s in samples) - batch = np.full((len(samples), max_len), fill_value=padding_val) - for i, s in enumerate(samples): - slen = min(s.shape[0], max_len) - batch[i][:slen] = s[:slen] - return batch - - -class Collater: - r""" - 辅助DataSet管理collate_fn的类 - - """ - def __init__(self): - self.collate_fns = {} - - def add_fn(self, fn, name=None): - r""" - 向collater新增一个collate_fn函数 - - :param callable fn: - :param str,int name: - :return: - """ - if name in self.collate_fns: - logger.warn(f"collate_fn:{name} will be overwritten.") - if name is None: - name = len(self.collate_fns) - self.collate_fns[name] = fn - - def is_empty(self): - r""" - 返回是否包含collate_fn - - :return: - """ - return len(self.collate_fns) == 0 - - def delete_fn(self, name=None): - r""" - 删除collate_fn - - :param str,int name: 如果为None就删除最近加入的collate_fn - :return: - """ - if not self.is_empty(): - if name in self.collate_fns: - self.collate_fns.pop(name) - elif name is None: - last_key = list(self.collate_fns.keys())[0] - self.collate_fns.pop(last_key) - - def collate_batch(self, ins_list): - bx, by = {}, {} - for name, fn in self.collate_fns.items(): - try: - batch_x, batch_y = fn(ins_list) - except BaseException as e: - logger.error(f"Exception:`{e}` happens when call collate_fn:`{name}`.") - raise e - bx.update(batch_x) - by.update(batch_y) - return bx, by - - def copy_from(self, col): - assert isinstance(col, Collater) - new_col = Collater() - new_col.collate_fns = deepcopy(col.collate_fns) - return new_col - - -class ConcatCollateFn: - r""" - field拼接collate_fn,将不同field按序拼接后,padding产生数据。 - - :param List[str] inputs: 将哪些field的数据拼接起来, 目前仅支持1d的field - :param str output: 拼接后的field名称 - :param pad_val: padding的数值 - :param max_len: 拼接后最大长度 - :param is_input: 是否将生成的output设置为input - :param is_target: 是否将生成的output设置为target - """ - - def __init__(self, inputs, output, pad_val=0, max_len=0, is_input=True, 
is_target=False): - super().__init__() - assert isinstance(inputs, list) - self.inputs = inputs - self.output = output - self.pad_val = pad_val - self.max_len = max_len - self.is_input = is_input - self.is_target = is_target - - @staticmethod - def _to_numpy(seq): - if torch.is_tensor(seq): - return seq.numpy() - else: - return np.array(seq) - - def __call__(self, ins_list): - samples = [] - for i, ins in ins_list: - sample = [] - for input_name in self.inputs: - sample.append(self._to_numpy(ins[input_name])) - samples.append(np.concatenate(sample, axis=0)) - batch = batching(samples, max_len=self.max_len, padding_val=self.pad_val) - b_x, b_y = {}, {} - if self.is_input: - b_x[self.output] = batch - if self.is_target: - b_y[self.output] = batch - - return b_x, b_y diff --git a/fastNLP/core/const.py b/fastNLP/core/const.py deleted file mode 100644 index e53c1f92..00000000 --- a/fastNLP/core/const.py +++ /dev/null @@ -1,84 +0,0 @@ -r""" -fastNLP包当中的field命名均符合一定的规范,该规范由fastNLP.Const类进行定义。 -""" - -__all__ = [ - "Const" -] - - -class Const: - r""" - fastNLP中field命名常量。 - - .. todo:: - 把下面这段改成表格 - - 具体列表:: - - INPUT 模型的序列输入 words(具有多列words时,依次使用words1, words2, ) - CHAR_INPUT 模型character输入 chars(具有多列chars时,依次使用chars1, chars2) - INPUT_LEN 序列长度 seq_len(具有多列seq_len时,依次使用seq_len1,seq_len2) - OUTPUT 模型输出 pred(具有多列pred时,依次使用pred1, pred2) - TARGET 真实目标 target(具有多列target时,依次使用target1,target2) - LOSS 损失函数 loss (具有多列loss时,依次使用loss1,loss2) - RAW_WORD 原文的词 raw_words (具有多列raw_words时,依次使用raw_words1, raw_words2) - RAW_CHAR 原文的字 raw_chars (具有多列raw_chars时,依次使用raw_chars1, raw_chars2) - - """ - INPUT = 'words' - CHAR_INPUT = 'chars' - INPUT_LEN = 'seq_len' - OUTPUT = 'pred' - TARGET = 'target' - LOSS = 'loss' - RAW_WORD = 'raw_words' - RAW_CHAR = 'raw_chars' - - @staticmethod - def INPUTS(i): - r"""得到第 i 个 ``INPUT`` 的命名""" - i = int(i) + 1 - return Const.INPUT + str(i) - - @staticmethod - def CHAR_INPUTS(i): - r"""得到第 i 个 ``CHAR_INPUT`` 的命名""" - i = int(i) + 1 - return Const.CHAR_INPUT + str(i) - - @staticmethod - def RAW_WORDS(i): - r"""得到第 i 个 ``RAW_WORDS`` 的命名""" - i = int(i) + 1 - return Const.RAW_WORD + str(i) - - @staticmethod - def RAW_CHARS(i): - r"""得到第 i 个 ``RAW_CHARS`` 的命名""" - i = int(i) + 1 - return Const.RAW_CHAR + str(i) - - @staticmethod - def INPUT_LENS(i): - r"""得到第 i 个 ``INPUT_LEN`` 的命名""" - i = int(i) + 1 - return Const.INPUT_LEN + str(i) - - @staticmethod - def OUTPUTS(i): - r"""得到第 i 个 ``OUTPUT`` 的命名""" - i = int(i) + 1 - return Const.OUTPUT + str(i) - - @staticmethod - def TARGETS(i): - r"""得到第 i 个 ``TARGET`` 的命名""" - i = int(i) + 1 - return Const.TARGET + str(i) - - @staticmethod - def LOSSES(i): - r"""得到第 i 个 ``LOSS`` 的命名""" - i = int(i) + 1 - return Const.LOSS + str(i) diff --git a/fastNLP/core/dataset.py b/fastNLP/core/dataset.py deleted file mode 100644 index 45a488d9..00000000 --- a/fastNLP/core/dataset.py +++ /dev/null @@ -1,1213 +0,0 @@ -r""" -:class:`~fastNLP.core.dataset.DataSet` 是fastNLP中用于承载数据的容器。可以将DataSet看做是一个表格, -每一行是一个sample (在fastNLP中被称为 :mod:`~fastNLP.core.instance` ), -每一列是一个feature (在fastNLP中称为 :mod:`~fastNLP.core.field` )。 - -.. csv-table:: Following is a demo layout of DataSet - :header: "sentence", "words", "seq_len" - - "This is the first instance .", "[This, is, the, first, instance, .]", 6 - "Second instance .", "[Second, instance, .]", 3 - "Third instance .", "[Third, instance, .]", 3 - "...", "[...]", "..." 
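-
-As a quick sketch, the demo layout above corresponds to a DataSet built like this
-(construction is covered in detail in section 1.1 below)::
-
-    from fastNLP import DataSet
-
-    dataset = DataSet({'sentence': ["This is the first instance .", "Second instance .", "Third instance ."],
-                       'words': [['This', 'is', 'the', 'first', 'instance', '.'],
-                                 ['Second', 'instance', '.'],
-                                 ['Third', 'instance', '.']],
-                       'seq_len': [6, 3, 3]})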
- -在fastNLP内部每一行是一个 :class:`~fastNLP.Instance` 对象; 每一列是一个 :class:`~fastNLP.FieldArray` 对象。 - ----------------------------- -1.DataSet的创建 ----------------------------- - -创建DataSet主要有以下的3种方式 - -1.1 传入dict ----------------------------- - - .. code-block:: - - from fastNLP import DataSet - data = {'sentence':["This is the first instance .", "Second instance .", "Third instance ."], - 'words': [['this', 'is', 'the', 'first', 'instance', '.'], ['Second', 'instance', '.'], ['Third', 'instance', '.'], - 'seq_len': [6, 3, 3]} - dataset = DataSet(data) - # 传入的dict的每个key的value应该为具有相同长度的list - -1.2 通过 Instance 构建 ----------------------------- - - .. code-block:: - - from fastNLP import DataSet - from fastNLP import Instance - dataset = DataSet() - instance = Instance(sentence="This is the first instance", - words=['this', 'is', 'the', 'first', 'instance', '.'], - seq_len=6) - dataset.append(instance) - # 可以继续append更多内容,但是append的instance应该和第一个instance拥有完全相同的field - -1.3 通过 List[Instance] 构建 --------------------------------------- - - .. code-block:: - - from fastNLP import DataSet - from fastNLP import Instance - instances = [] - instances.append(Instance(sentence="This is the first instance", - ords=['this', 'is', 'the', 'first', 'instance', '.'], - seq_len=6)) - instances.append(Instance(sentence="Second instance .", - words=['Second', 'instance', '.'], - seq_len=3)) - dataset = DataSet(instances) - --------------------------------------- -2.DataSet与预处理 --------------------------------------- - -常见的预处理有如下几种 - -2.1 从某个文本文件读取内容 --------------------------------------- - - .. code-block:: - - from fastNLP import DataSet - from fastNLP import Instance - dataset = DataSet() - filepath='some/text/file' - # 假设文件中每行内容如下(sentence label): - # This is a fantastic day positive - # The bad weather negative - # ..... - with open(filepath, 'r') as f: - for line in f: - sent, label = line.strip().split('\t') - dataset.append(Instance(sentence=sent, label=label)) - - .. note:: - 直接读取特定数据集的数据请参考 :doc:`/tutorials/tutorial_4_load_dataset` - -2.2 对DataSet中的内容处理 --------------------------------------- - - .. code-block:: - - from fastNLP import DataSet - data = {'sentence':["This is the first instance .", "Second instance .", "Third instance ."]} - dataset = DataSet(data) - # 将句子分成单词形式, 详见DataSet.apply()方法 - dataset.apply(lambda ins: ins['sentence'].split(), new_field_name='words') - # 或使用DataSet.apply_field() - dataset.apply_field(lambda sent:sent.split(), field_name='sentence', new_field_name='words') - # 除了匿名函数,也可以定义函数传递进去 - def get_words(instance): - sentence = instance['sentence'] - words = sentence.split() - return words - dataset.apply(get_words, new_field_name='words') - -2.3 删除DataSet的内容 --------------------------------------- - - .. code-block:: - - from fastNLP import DataSet - dataset = DataSet({'a': list(range(-5, 5))}) - # 返回满足条件的instance,并放入DataSet中 - dropped_dataset = dataset.drop(lambda ins:ins['a']<0, inplace=False) - # 在dataset中删除满足条件的instance - dataset.drop(lambda ins:ins['a']<0) # dataset的instance数量减少 - # 删除第3个instance - dataset.delete_instance(2) - # 删除名为'a'的field - dataset.delete_field('a') - - -2.4 遍历DataSet的内容 --------------------------------------- - - .. code-block:: - - for instance in dataset: - # do something - -2.5 一些其它操作 --------------------------------------- - - .. 
code-block:: - - # 检查是否存在名为'a'的field - dataset.has_field('a') # 或 ('a' in dataset) - # 将名为'a'的field改名为'b' - dataset.rename_field('a', 'b') - # DataSet的长度 - len(dataset) - --------------------------------------- -3.DataSet与自然语言处理(NLP) --------------------------------------- - -在目前深度学习的模型中,大都依赖于随机梯度下降法(SGD)进行模型的优化。随机梯度下降需要将数据切分成一个个的 batch, -一个batch进行一次前向计算(forward)与梯度后向传播(backward)。在自然语言处理的场景下,往往还需要对数据进行pad。这是 -由于句子的长度一般是不同的,但是一次batch中的每个field都必须是一个tensor,所以需要将所有句子都补齐到相同的长度。 - -3.1 DataSet与DataSetIter --------------------------------------- - - 我们先看fastNLP中如何将数据分成一个一个的batch的例子, 这里我们使用随机生成的数据来模拟一个二分类文本分类任务, - words和characters是输入,labels是文本类别 - - .. code-block:: - - from fastNLP import DataSet - from fastNLP import DataSetIter - from fastNLP import SequentialSampler - from fastNLP import EngChar2DPadder - - num_instances = 100 - # 假设每句话最少2个词,最多5个词; 词表的大小是100个; 一共26个字母,每个单词最短1个字母,最长5个字母 - lengths = [random.randint(2, 5) for _ in range(num_instances)] - data = {'words': [[random.randint(1, 100) for _ in range(lengths[idx]) ] for idx in range(num_instances)], - 'chars': [ - [[random.randint(1, 27) for _ in range(random.randint(1, 5))] - for _ in range(lengths[idx])] - for idx in range(num_instances)], - 'label': [random.randint(0, 1) for _ in range(num_instances)]} - - d = DataSet(data) - d.set_padder('chars', EngChar2DPadder()) # 因为英文character的pad方式与word的pad方式不一样 - - d.set_target('label') - d.set_input('words', 'chars') - - for batch_x, batch_y in DataSetIter(d, sampler=SequentialSampler(), batch_size=2): - print("batch_x:", batch_x) - print("batch_y:", batch_y) - break - # 输出为 - # {'words': tensor([[49, 27, 20, 36, 63], - # [53, 82, 23, 11, 0]]), 'chars': tensor([[[13, 3, 14, 25, 1], - # [ 8, 20, 12, 0, 0], - # [27, 8, 0, 0, 0], - # [ 1, 15, 26, 0, 0], - # [11, 24, 17, 0, 0]], - # - # [[ 6, 14, 11, 27, 22], - # [18, 6, 4, 19, 0], - # [19, 22, 9, 0, 0], - # [10, 25, 0, 0, 0], - # [ 0, 0, 0, 0, 0]]])} - # {'label': tensor([0, 0])} - - 其中 :class:`~fastNLP.DataSetIter` 是用于从DataSet中按照batch_size为大小取出batch的迭代器, - :class:`~fastNLP.SequentialSampler` 用于指示 :class:`~fastNLP.DataSetIter` 以怎样的 - 顺序从DataSet中取出instance以组成一个batch, - 更详细的说明请参照 :class:`~fastNLP.DataSetIter` 和 :class:`~fastNLP.SequentialSampler` 文档。 - - 通过 ``DataSet.set_input('words', 'chars')`` , fastNLP将认为 `words` 和 `chars` 这两个field都是input,并将它们都放入迭代器 - 生成的第一个dict中; ``DataSet.set_target('labels')`` , fastNLP将认为 `labels` 这个field是target,并将其放入到迭代器的第 - 二个dict中。如上例中所打印结果。分为input和target的原因是由于它们在被 :class:`~fastNLP.Trainer` 所使用时会有所差异, - 详见 :class:`~fastNLP.Trainer` - - 当把某个field设置为 `target` 或者 `input` 的时候(两者不是互斥的,可以同时设为两种),fastNLP不仅仅只是将其放 - 置到不同的dict中,而还会对被设置为 `input` 或 `target` 的 field 进行类型检查。类型检查的目的是为了看能否把该 field 转为 - pytorch的 :class:`torch.LongTensor` 或 :class:`torch.FloatTensor` 类型 - (也可以在 :class:`~fastNLP.DataSetIter` 中设置输出numpy类型,参考 :class:`~fastNLP.DataSetIter` )。 - - 如上例所示,fastNLP已将 `words` ,`chars` 和 `label` 转为了 :class:`Tensor` 类型。 - 如果 field 在每个 `instance` 都拥有相同的维度(不能超过两维),且最内层的元素都为相同的 type(int, float, np.int*, np.float*), - 则fastNLP默认将对该 field 进行pad。也支持全为str的field作为target和input,这种情况下,fastNLP默认不进行pad。 - 另外,当某个 field 已经被设置为了 target 或者 input 后,之后 `append` 的 - `instance` 对应的 field 必须要和前面已有的内容一致,否则会报错。 - - 可以查看field的dtype:: - - from fastNLP import DataSet - - d = DataSet({'a': [0, 1, 3], 'b':[[1.0, 2.0], [0.1, 0.2], [3]]}) - d.set_input('a', 'b') - d.a.dtype - >> numpy.int64 - d.b.dtype - >> numpy.float64 - # 默认情况下'a'这个field将被转换为torch.LongTensor,但如果需要其为torch.FloatTensor可以手动修改dtype - d.a.dtype = float # 请确保该field的确可以全部转换为float。 - - 
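-
-    To see what the dtype change affects, one can draw a single batch (a sketch reusing
-    ``d`` from the snippet above; the commented dtypes are the expected defaults)::
-
-        from fastNLP import DataSetIter, SequentialSampler
-
-        for batch_x, batch_y in DataSetIter(d, batch_size=3, sampler=SequentialSampler()):
-            print(batch_x['a'].dtype)  # torch.int64; torch.float32 once d.a.dtype = float is set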
如果某个field中出现了多种类型混合(比如一部分为str,一部分为int)的情况,fastNLP无法判断该field的类型,会报如下的 - 错误:: - - from fastNLP import DataSet - - d = DataSet({'data': [1, 'a']}) - d.set_input('data') - >> RuntimeError: Mixed data types in Field data: [, ] - - 可以通过设置以忽略对该field进行类型检查:: - - from fastNLP import DataSet - d = DataSet({'data': [1, 'a']}) - d.set_ignore_type('data') - d.set_input('data') - - 当某个field被设置为忽略type之后,fastNLP将不对其进行pad。 - -3.2 DataSet与pad --------------------------------------- - - 在fastNLP里,pad是与一个field绑定的。即不同的field可以使用不同的pad方式,比如在英文任务中word需要的pad和 - character的pad方式往往是不同的。fastNLP是通过一个叫做 :class:`~fastNLP.Padder` 的子类来完成的。 - 默认情况下,所有field使用 :class:`~fastNLP.AutoPadder` - 。可以通过使用以下方式设置Padder(如果将padder设置为None,则该field不会进行pad操作)。 - 大多数情况下直接使用 :class:`~fastNLP.AutoPadder` 就可以了。 - 如果 :class:`~fastNLP.AutoPadder` 或 :class:`~fastNLP.EngChar2DPadder` 无法满足需求, - 也可以自己写一个 :class:`~fastNLP.Padder` 。 - - .. code-block:: - - from fastNLP import DataSet - from fastNLP import EngChar2DPadder - import random - dataset = DataSet() - max_chars, max_words, sent_num = 5, 10, 20 - contents = [[ - [random.randint(1, 27) for _ in range(random.randint(1, max_chars))] - for _ in range(random.randint(1, max_words)) - ] for _ in range(sent_num)] - # 初始化时传入 - dataset.add_field('chars', contents, padder=EngChar2DPadder()) - # 直接设置 - dataset.set_padder('chars', EngChar2DPadder()) - # 也可以设置pad的value - dataset.set_pad_val('chars', -1) - -3.3 根据DataSet中多个field合成新的field ------------------------------------------------------------- - - DataSet支持在进行batch时,默认只能看到当前的field的值,但在某些训练中可能存在以下的情况: (1)需要两个field拼接成为一个field; - (2)需要在batch中进行负采样。这时候就需要能够同时利用多个field进行batch的操作,DataSet中的add_collate_fn()函数支持添加 - 自定义涉及多个field的collate_fn函数。例如下例中将两个field拼接成一个field的场景 - - .. code-block:: - - from fastNLP import DataSet, DataSetIter - import torch - - data = DataSet({ - 'x1': [[0, 1], - [2]], - 'x2': [[3], - [2, 4, 5]], - 'y': [0, 1] - }) - data.set_target('y') - - # 所有的collate_fn函数都接受list[(ind1, instance1), (ind2, instance2), ...]作为输入,其中ind1/ind2是该instance在dataset中 - # 的index,instance1/instance2是这次batch取出来的数据,包含了所有的field. - def concat_collate_fn(ins_list): - x1 = [ins['x1'] for ind,ins in ins_list] - x2 = [ins['x2'] for ind,ins in ins_list] - xs = [] - for i in range(len(ins_list)): - xs.append(torch.LongTensor(x1[i] + x2[i])) - # 需要自行pad并转换为tensor,但不需要移动到gpu - arr = torch.nn.utils.rnn.pad_sequence(xs, batch_first=True, padding_value=0) - b_x = {'x': arr} - b_y = {} - # 返回值一定是两个dict,第一个dict的值会认为是input,第二个dict的值会认为是target. 
若名称与已有input或target重复,则 - # 采用返回值。 - return b_x, b_y - - data.add_collate_fn(concat_collate_fn) - - for batch_x, batch_y in DataSetIter(data, sampler=SequentialSampler(), batch_size=2): - print("batch_x:", batch_x) - print("batch_y:", batch_y) - # batch_x: {'x': tensor([[0, 1, 3, 0], - # [2, 2, 4, 5]])} - # batch_y: {'y': array([0, 1])} - - # 如果取batch过程含有一些参数,可以通过类来实现 - class ConCollateFn: - def __init__(self, max_len=3): - self.max_len = max_len - - def __call__(self, ins_list): # 实现该类的__call__函数 - x1 = [ins['x1'] for ind, ins in ins_list] - x2 = [ins['x2'] for ind, ins in ins_list] - xs = [] - for i in range(len(ins_list)): - xs.append(torch.LongTensor(x1[i] + x2[i])[:self.max_len]) - arr = torch.nn.utils.rnn.pad_sequence(xs, batch_first=True, padding_value=0) - b_x = {'x': arr} - b_y = {} - return b_x, b_y - data.delete_collate_fn() # 删除之前的collate_fn - data.add_collate_fn(ConCollateFn(max_len=3)) - for batch_x, batch_y in DataSetIter(data, sampler=SequentialSampler(), batch_size=2): - print("batch_x:", batch_x) - print("batch_y:", batch_y) - # batch_x: {'x': tensor([[0, 1, 3], - # [2, 2, 4]])} - # batch_y: {'y': array([0, 1])} - -""" -__all__ = [ - "DataSet", -] - -import _pickle as pickle -from copy import deepcopy - -import numpy as np -from prettytable import PrettyTable - -from ._logger import logger -from .const import Const -from .field import AppendToTargetOrInputException -from .field import AutoPadder -from .field import FieldArray -from .field import SetInputOrTargetException -from .instance import Instance -from .utils import pretty_table_printer -from .collate_fn import Collater -try: - from tqdm.auto import tqdm -except: - from .utils import _pseudo_tqdm as tqdm - - -class ApplyResultException(Exception): - def __init__(self, msg, index=None): - super().__init__(msg) - self.msg = msg - self.index = index # 标示在哪个数据遭遇到问题了 - -class DataSet(object): - r""" - fastNLP的数据容器,详细的使用方法见文档 :mod:`fastNLP.core.dataset` - """ - - def __init__(self, data=None): - r""" - - :param data: 如果为dict类型,则每个key的value应该为等长的list; 如果为list, - 每个元素应该为具有相同field的 :class:`~fastNLP.Instance` 。 - """ - self.field_arrays = {} - if data is not None: - if isinstance(data, dict): - length_set = set() - for key, value in data.items(): - length_set.add(len(value)) - assert len(length_set) == 1, "Arrays must all be same length." 
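-                # each key/value pair becomes one column (a FieldArray); the equal-length
-                # check above keeps the table rectangular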
- for key, value in data.items(): - self.add_field(field_name=key, fields=value) - elif isinstance(data, list): - for ins in data: - assert isinstance(ins, Instance), "Must be Instance type, not {}.".format(type(ins)) - self.append(ins) - - else: - raise ValueError("data only be dict or list type.") - self._collater = Collater() - - @property - def collater(self): - if self._collater is None: - self._collater = Collater() - return self._collater - - @collater.setter - def collater(self, value): - assert isinstance(value, Collater) - self._collater = value - - def __contains__(self, item): - return item in self.field_arrays - - def __iter__(self): - def iter_func(): - for idx in range(len(self)): - yield self[idx] - - return iter_func() - - def _inner_iter(self): - class Iter_ptr: - def __init__(self, dataset, idx): - self.dataset = dataset - self.idx = idx - - def __getitem__(self, item): - assert item in self.dataset.field_arrays, "no such field:{} in Instance {}".format(item, self.dataset[ - self.idx]) - assert self.idx < len(self.dataset.field_arrays[item]), "index:{} out of range".format(self.idx) - return self.dataset.field_arrays[item][self.idx] - - def __setitem__(self, key, value): - raise TypeError("You cannot modify value directly.") - - def items(self): - ins = self.dataset[self.idx] - return ins.items() - - def __repr__(self): - return self.dataset[self.idx].__repr__() - - def inner_iter_func(): - for idx in range(len(self)): - yield Iter_ptr(self, idx) - - return inner_iter_func() - - def __getitem__(self, idx): - r"""给定int的index,返回一个Instance; 给定slice,返回包含这个slice内容的新的DataSet。 - - :param idx: can be int or slice. - :return: If `idx` is int, return an Instance object. - If `idx` is slice, return a DataSet object. - """ - if isinstance(idx, int): - return Instance(**{name: self.field_arrays[name][idx] for name in self.field_arrays}) - elif isinstance(idx, slice): - if idx.start is not None and (idx.start >= len(self) or idx.start <= -len(self)): - raise RuntimeError(f"Start index {idx.start} out of range 0-{len(self) - 1}") - data_set = DataSet() - for field_name, field in self.field_arrays.items(): - data_set.add_field(field_name=field_name, fields=field.content[idx], padder=field.padder, - is_input=field.is_input, is_target=field.is_target, ignore_type=field.ignore_type) - data_set.collater = self.collater.copy_from(self.collater) - return data_set - elif isinstance(idx, str): - if idx not in self: - raise KeyError("No such field called {} in DataSet.".format(idx)) - return self.field_arrays[idx] - elif isinstance(idx, list): - dataset = DataSet() - for i in idx: - assert isinstance(i, int), "Only int index allowed." - instance = self[i] - dataset.append(instance) - for field_name, field in self.field_arrays.items(): - dataset.field_arrays[field_name].to(field) - dataset.collater = self.collater.copy_from(self.collater) - return dataset - else: - raise KeyError("Unrecognized type {} for idx in __getitem__ method".format(type(idx))) - - def __getattr__(self, item): - # Not tested. Don't use !! - if item == "field_arrays": - raise AttributeError - if isinstance(item, str) and item in self.field_arrays: - return self.field_arrays[item] - - def __setstate__(self, state): - self.__dict__ = state - - def __getstate__(self): - return self.__dict__ - - def __len__(self): - r"""Fetch the length of the dataset. 
- - :return length: - """ - if len(self.field_arrays) == 0: - return 0 - field = iter(self.field_arrays.values()).__next__() - return len(field) - - def __repr__(self): - return str(pretty_table_printer(self)) - - def print_field_meta(self): - r""" - 输出当前field的meta信息, 形似下列的输出:: - - +-------------+-------+-------+ - | field_names | x | y | - +=============+=======+=======+ - | is_input | True | False | - | is_target | False | False | - | ignore_type | False | | - | pad_value | 0 | | - +-------------+-------+-------+ - - str field_names: DataSet中field的名称 - bool is_input: field是否为input - bool is_target: field是否为target - bool ignore_type: 是否忽略该field的type, 一般仅在该field至少为input或target时才有意义 - int pad_value: 该field的pad的值,仅在该field为input或target时有意义 - :return: - """ - if len(self.field_arrays)>0: - field_names = ['field_names'] - is_inputs = ['is_input'] - is_targets = ['is_target'] - pad_values = ['pad_value'] - ignore_types = ['ignore_type'] - - for name, field_array in self.field_arrays.items(): - field_names.append(name) - if field_array.is_input: - is_inputs.append(True) - else: - is_inputs.append(False) - if field_array.is_target: - is_targets.append(True) - else: - is_targets.append(False) - - if (field_array.is_input or field_array.is_target) and field_array.padder is not None: - pad_values.append(field_array.padder.get_pad_val()) - else: - pad_values.append(' ') - - if field_array._ignore_type: - ignore_types.append(True) - elif field_array.is_input or field_array.is_target: - ignore_types.append(False) - else: - ignore_types.append(' ') - table = PrettyTable(field_names=field_names) - fields = [is_inputs, is_targets, ignore_types, pad_values] - for field in fields: - table.add_row(field) - logger.info(table) - return table - - def append(self, instance): - r""" - 将一个instance对象append到DataSet后面。 - - :param ~fastNLP.Instance instance: 若DataSet不为空,则instance应该拥有和DataSet完全一样的field。 - - """ - if len(self.field_arrays) == 0: - # DataSet has no field yet - for name, field in instance.fields.items(): - # field = field.tolist() if isinstance(field, np.ndarray) else field - self.field_arrays[name] = FieldArray(name, [field]) # 第一个样本,必须用list包装起来 - else: - if len(self.field_arrays) != len(instance.fields): - raise ValueError( - "DataSet object has {} fields, but attempt to append an Instance object with {} fields." - .format(len(self.field_arrays), len(instance.fields))) - for name, field in instance.fields.items(): - assert name in self.field_arrays - try: - self.field_arrays[name].append(field) - except AppendToTargetOrInputException as e: - logger.error(f"Cannot append to field:{name}.") - raise e - - def add_fieldarray(self, field_name, fieldarray): - r""" - 将fieldarray添加到DataSet中. - - :param str field_name: 新加入的field的名称 - :param ~fastNLP.core.FieldArray fieldarray: 需要加入DataSet的field的内容 - :return: - """ - if not isinstance(fieldarray, FieldArray): - raise TypeError("Only fastNLP.FieldArray supported.") - if len(self) != len(fieldarray): - raise RuntimeError(f"The field to add must have the same size as dataset. 
" - f"Dataset size {len(self)} != field size {len(fieldarray)}") - fieldarray.name = field_name - self.field_arrays[field_name] = fieldarray - - def add_field(self, field_name, fields, padder=AutoPadder(), is_input=False, is_target=False, ignore_type=False): - r""" - 新增一个field - - :param str field_name: 新增的field的名称 - :param list fields: 需要新增的field的内容 - :param None,~fastNLP.Padder padder: 如果为None,则不进行pad,默认使用 :class:`~fastNLP.AutoPadder` 自动判断是否需要做pad。 - :param bool is_input: 新加入的field是否是input - :param bool is_target: 新加入的field是否是target - :param bool ignore_type: 是否忽略对新加入的field的类型检查 - """ - - if len(self.field_arrays) != 0: - if len(self) != len(fields): - raise RuntimeError(f"The field to add must have the same size as dataset. " - f"Dataset size {len(self)} != field size {len(fields)}") - self.field_arrays[field_name] = FieldArray(field_name, fields, is_target=is_target, is_input=is_input, - padder=padder, ignore_type=ignore_type) - - def delete_instance(self, index): - r""" - 删除第index个instance - - :param int index: 需要删除的instance的index,序号从0开始。 - """ - assert isinstance(index, int), "Only integer supported." - if len(self) <= index: - raise IndexError("{} is too large for as DataSet with {} instances.".format(index, len(self))) - if len(self) == 1: - self.field_arrays.clear() - else: - for field in self.field_arrays.values(): - field.pop(index) - return self - - def delete_field(self, field_name): - r""" - 删除名为field_name的field - - :param str field_name: 需要删除的field的名称. - """ - self.field_arrays.pop(field_name) - return self - - def copy_field(self, field_name, new_field_name): - r""" - 深度copy名为field_name的field到new_field_name - - :param str field_name: 需要copy的field。 - :param str new_field_name: copy生成的field名称 - :return: self - """ - if not self.has_field(field_name): - raise KeyError(f"Field:{field_name} not found in DataSet.") - fieldarray = deepcopy(self.get_field(field_name)) - fieldarray.name = new_field_name - self.add_fieldarray(field_name=new_field_name, fieldarray=fieldarray) - return self - - def has_field(self, field_name): - r""" - 判断DataSet中是否有名为field_name这个field - - :param str field_name: field的名称 - :return bool: 表示是否有名为field_name这个field - """ - if isinstance(field_name, str): - return field_name in self.field_arrays - return False - - def get_field(self, field_name): - r""" - 获取field_name这个field - - :param str field_name: field的名称 - :return: :class:`~fastNLP.FieldArray` - """ - if field_name not in self.field_arrays: - raise KeyError("Field name {} not found in DataSet".format(field_name)) - return self.field_arrays[field_name] - - def get_all_fields(self): - r""" - 返回一个dict,key为field_name, value为对应的 :class:`~fastNLP.FieldArray` - - :return dict: 返回如上所述的字典 - """ - return self.field_arrays - - def get_field_names(self) -> list: - r""" - 返回一个list,包含所有 field 的名字 - - :return list: 返回如上所述的列表 - """ - return sorted(self.field_arrays.keys()) - - def get_length(self): - r""" - 获取DataSet的元素数量 - - :return: int: DataSet中Instance的个数。 - """ - return len(self) - - def rename_field(self, field_name, new_field_name): - r""" - 将某个field重新命名. 
- - :param str field_name: 原来的field名称。 - :param str new_field_name: 修改为new_name。 - """ - if field_name in self.field_arrays: - self.field_arrays[new_field_name] = self.field_arrays.pop(field_name) - self.field_arrays[new_field_name].name = new_field_name - else: - raise KeyError("DataSet has no field named {}.".format(field_name)) - return self - - def set_target(self, *field_names, flag=True, use_1st_ins_infer_dim_type=True): - r""" - 将field_names的field设置为target - - Example:: - - dataset.set_target('labels', 'seq_len') # 将labels和seq_len这两个field的target属性设置为True - dataset.set_target('labels', 'seq_lens', flag=False) # 将labels和seq_len的target属性设置为False - - :param str field_names: field的名称 - :param bool flag: 将field_name的target状态设置为flag - :param bool use_1st_ins_infer_dim_type: 如果为True,将不会check该列是否所有数据都是同样的维度,同样的类型。将直接使用第一 - 行的数据进行类型和维度推断本列的数据的类型和维度。 - """ - assert isinstance(flag, bool), "Only bool type supported." - for name in field_names: - if name in self.field_arrays: - try: - self.field_arrays[name]._use_1st_ins_infer_dim_type = bool(use_1st_ins_infer_dim_type) - self.field_arrays[name].is_target = flag - except SetInputOrTargetException as e: - logger.error(f"Cannot set field:{name} as target.") - raise e - else: - raise KeyError("{} is not a valid field name.".format(name)) - return self - - def set_input(self, *field_names, flag=True, use_1st_ins_infer_dim_type=True): - r""" - 将field_names的field设置为input:: - - dataset.set_input('words', 'seq_len') # 将words和seq_len这两个field的input属性设置为True - dataset.set_input('words', flag=False) # 将words这个field的input属性设置为False - - :param str field_names: field的名称 - :param bool flag: 将field_name的input状态设置为flag - :param bool use_1st_ins_infer_dim_type: 如果为True,将不会check该列是否所有数据都是同样的维度,同样的类型。将直接使用第一 - 行的数据进行类型和维度推断本列的数据的类型和维度。 - """ - for name in field_names: - if name in self.field_arrays: - try: - self.field_arrays[name]._use_1st_ins_infer_dim_type = bool(use_1st_ins_infer_dim_type) - self.field_arrays[name].is_input = flag - except SetInputOrTargetException as e: - logger.error(f"Cannot set field:{name} as input, exception happens at the {e.index} value.") - raise e - else: - raise KeyError("{} is not a valid field name.".format(name)) - return self - - def set_ignore_type(self, *field_names, flag=True): - r""" - 将field设置为忽略类型状态。当某个field被设置了ignore_type, 则在被设置为target或者input时将不进行类型检查, - 默认情况下也不进行pad。如果仍需要pad该field,可通过自定义Padder实现,若该field需要转换为tensor,需要在padder - 中转换,但不需要在padder中移动到gpu。 - - :param str field_names: field的名称 - :param bool flag: 将field_name的ignore_type状态设置为flag - :return: - """ - assert isinstance(flag, bool), "Only bool type supported." - for name in field_names: - if name in self.field_arrays: - self.field_arrays[name].ignore_type = flag - else: - raise KeyError("{} is not a valid field name.".format(name)) - return self - - def set_padder(self, field_name, padder): - r""" - 为field_name设置padder:: - - from fastNLP import EngChar2DPadder - padder = EngChar2DPadder() - dataset.set_padder('chars', padder) # 则chars这个field会使用EngChar2DPadder进行pad操作 - - :param str field_name: 设置field的padding方式为padder - :param None,~fastNLP.Padder padder: 设置为None即删除padder, 即对该field不进行pad操作。 - """ - if field_name not in self.field_arrays: - raise KeyError("There is no field named {}.".format(field_name)) - self.field_arrays[field_name].set_padder(padder) - return self - - def set_pad_val(self, field_name, pad_val): - r""" - 为某个field设置对应的pad_val. 
- - :param str field_name: 修改该field的pad_val - :param int pad_val: 该field的padder会以pad_val作为padding index - """ - if field_name not in self.field_arrays: - raise KeyError("There is no field named {}.".format(field_name)) - self.field_arrays[field_name].set_pad_val(pad_val) - return self - - def get_input_name(self): - r""" - 返回所有is_input被设置为True的field名称 - - :return list: 里面的元素为被设置为input的field名称 - """ - return [name for name, field in self.field_arrays.items() if field.is_input] - - def get_target_name(self): - r""" - 返回所有is_target被设置为True的field名称 - - :return list: 里面的元素为被设置为target的field名称 - """ - return [name for name, field in self.field_arrays.items() if field.is_target] - - def apply_field(self, func, field_name, new_field_name=None, **kwargs): - r""" - 将DataSet中的每个instance中的名为 `field_name` 的field传给func,并获取它的返回值。 - - :param callable func: input是instance中名为 `field_name` 的field的内容。 - :param str field_name: 传入func的是哪个field。 - :param None,str new_field_name: 将func返回的内容放入到 `new_field_name` 这个field中,如果名称与已有的field相同,则覆 - 盖之前的field。如果为None则不创建新的field。 - :param optional kwargs: 支持输入is_input,is_target,ignore_type - - 1. is_input: bool, 如果为True则将名为 `new_field_name` 的field设置为input - - 2. is_target: bool, 如果为True则将名为 `new_field_name` 的field设置为target - - 3. ignore_type: bool, 如果为True则将名为 `new_field_name` 的field的ignore_type设置为True, 忽略其类型 - - 4. use_tqdm: bool, 是否使用tqdm显示预处理进度 - - 5. tqdm_desc: str, 当use_tqdm为True时,可以显示当前tqdm正在处理的名称 - - :return List[Any]: 里面的元素为func的返回值,所以list长度为DataSet的长度 - """ - assert len(self) != 0, "Null DataSet cannot use apply_field()." - if not self.has_field(field_name=field_name): - raise KeyError("DataSet has no field named `{}`.".format(field_name)) - return self.apply(func, new_field_name, _apply_field=field_name, **kwargs) - - def apply_field_more(self, func, field_name, modify_fields=True, **kwargs): - r""" - 将 ``DataSet`` 中的每个 ``Instance`` 中的名为 `field_name` 的field 传给 func,并获取它的返回值。 - func 可以返回一个或多个 field 上的结果。 - - .. note:: - ``apply_field_more`` 与 ``apply_field`` 的区别参考 :meth:`~fastNLP.DataSet.apply_more` 中关于 ``apply_more`` 与 - ``apply`` 区别的介绍。 - - :param callable func: 参数是 ``DataSet`` 中每个 ``Instance`` 的名为 `field_name` 的field的内容,返回值是一个字典,key 是field 的名字,value 是对应的结果 - :param str field_name: 传入func的是哪个field。 - :param bool modify_fields: 是否用结果修改 `DataSet` 中的 `Field`, 默认为 True - :param optional kwargs: 支持输入is_input,is_target,ignore_type - - 1. is_input: bool, 如果为True则将被修改的field设置为input - - 2. is_target: bool, 如果为True则将被修改的field设置为target - - 3. ignore_type: bool, 如果为True则将被修改的field的ignore_type设置为True, 忽略其类型 - - 4. use_tqdm: bool, 是否使用tqdm显示预处理进度 - - 5. tqdm_desc: str, 当use_tqdm为True时,可以显示当前tqdm正在处理的名称 - - :return Dict[str:Field]: 返回一个字典 - """ - assert len(self) != 0, "Null DataSet cannot use apply_field_more()."
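- # Usage sketch for the two variants above (comments only; the 'words' field is a made-up example): - # ds.apply_field(len, field_name='words', new_field_name='seq_len') # func receives the field value of each instance - # ds.apply_field_more(lambda ws: {'seq_len': len(ws), 'first_word': ws[0]}, field_name='words') # func returns a dict of new field values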
- if not self.has_field(field_name=field_name): - raise KeyError("DataSet has no field named `{}`.".format(field_name)) - return self.apply_more(func, modify_fields, _apply_field=field_name, **kwargs) - - def _add_apply_field(self, results, new_field_name, kwargs): - r""" - 将results作为加入到新的field中,field名称为new_field_name - - :param List[str] results: 一般是apply*()之后的结果 - :param str new_field_name: 新加入的field的名称 - :param dict kwargs: 用户apply*()时传入的自定义参数 - :return: - """ - extra_param = {} - if 'is_input' in kwargs: - extra_param['is_input'] = kwargs['is_input'] - if 'is_target' in kwargs: - extra_param['is_target'] = kwargs['is_target'] - if 'ignore_type' in kwargs: - extra_param['ignore_type'] = kwargs['ignore_type'] - if new_field_name in self.field_arrays: - # overwrite the field, keep same attributes - old_field = self.field_arrays[new_field_name] - if 'is_input' not in extra_param: - extra_param['is_input'] = old_field.is_input - if 'is_target' not in extra_param: - extra_param['is_target'] = old_field.is_target - if 'ignore_type' not in extra_param: - extra_param['ignore_type'] = old_field.ignore_type - self.add_field(field_name=new_field_name, fields=results, is_input=extra_param["is_input"], - is_target=extra_param["is_target"], ignore_type=extra_param['ignore_type'], - padder=self.get_field(new_field_name).padder) - else: - self.add_field(field_name=new_field_name, fields=results, is_input=extra_param.get("is_input", None), - is_target=extra_param.get("is_target", None), - ignore_type=extra_param.get("ignore_type", False)) - - def apply_more(self, func, modify_fields=True, **kwargs): - r""" - 将 ``DataSet`` 中每个 ``Instance`` 传入到func中,并获取它的返回值。func可以返回一个或多个 field 上的结果。 - - .. note:: - ``apply_more`` 与 ``apply`` 的区别: - - 1. ``apply_more`` 可以返回多个 field 的结果, ``apply`` 只可以返回一个field 的结果; - - 2. ``apply_more`` 的返回值是一个字典,每个 key-value 对中的 key 表示 field 的名字,value 表示计算结果; - - 3. ``apply_more`` 默认修改 ``DataSet`` 中的 field ,``apply`` 默认不修改。 - - :param callable func: 参数是 ``DataSet`` 中的 ``Instance`` ,返回值是一个字典,key 是field 的名字,value 是对应的结果 - :param bool modify_fields: 是否用结果修改 ``DataSet`` 中的 ``Field`` , 默认为 True - :param optional kwargs: 支持输入is_input,is_target,ignore_type - - 1. is_input: bool, 如果为True则将被修改的的field设置为input - - 2. is_target: bool, 如果为True则将被修改的的field设置为target - - 3. ignore_type: bool, 如果为True则将被修改的的field的ignore_type设置为true, 忽略其类型 - - 4. use_tqdm: bool, 是否使用tqdm显示预处理进度 - - 5. tqdm_desc: str, 当use_tqdm为True时,可以显示当前tqdm正在处理的名称 - - :return Dict[str:Field]: 返回一个字典 - """ - # 返回 dict , 检查是否一直相同 - assert callable(func), "The func you provide is not callable." - assert len(self) != 0, "Null DataSet cannot use apply()." 
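- # Contrast sketch (comments only; 'x1'/'x2' are hypothetical fields), matching the note above: - # ds.apply(lambda ins: ins['x1'] + ins['x2'], new_field_name='x') # apply(): one value -> one field - # ds.apply_more(lambda ins: {'len1': len(ins['x1']), 'len2': len(ins['x2'])}) # apply_more(): dict -> several fields, modified in place by default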
- idx = -1 - try: - results = {} - for idx, ins in tqdm(enumerate(self._inner_iter()), total=len(self), dynamic_ncols=True, - desc=kwargs.get('tqdm_desc', ''), - leave=False, disable=not kwargs.get('use_tqdm', False)): - if "_apply_field" in kwargs: - res = func(ins[kwargs["_apply_field"]]) - else: - res = func(ins) - if not isinstance(res, dict): - raise ApplyResultException("The result of func is not a dict", idx) - if idx == 0: - for key, value in res.items(): - results[key] = [value] - else: - for key, value in res.items(): - if key not in results: - raise ApplyResultException("apply results have different fields", idx) - results[key].append(value) - if len(res) != len(results): - raise ApplyResultException("apply results have different fields", idx) - except Exception as e: - if idx != -1: - if isinstance(e, ApplyResultException): - logger.error(e.msg) - logger.error("Exception happens at the `{}`th instance.".format(idx)) - raise e - - if modify_fields is True: - for field, result in results.items(): - self._add_apply_field(result, field, kwargs) - - return results - - def apply(self, func, new_field_name=None, **kwargs): - r""" - 将DataSet中每个instance传入到func中,并获取它的返回值. - - :param callable func: 参数是 ``DataSet`` 中的 ``Instance`` - :param None,str new_field_name: 将func返回的内容放入到 `new_field_name` 这个field中,如果名称与已有的field相同,则覆 - 盖之前的field。如果为None则不创建新的field。 - :param optional kwargs: 支持输入is_input,is_target,ignore_type - - 1. is_input: bool, 如果为True则将 `new_field_name` 的field设置为input - - 2. is_target: bool, 如果为True则将 `new_field_name` 的field设置为target - - 3. ignore_type: bool, 如果为True则将 `new_field_name` 的field的ignore_type设置为true, 忽略其类型 - - 4. use_tqdm: bool, 是否使用tqdm显示预处理进度 - - 5. tqdm_desc: str, 当use_tqdm为True时,可以显示当前tqdm正在处理的名称 - - :return List[Any]: 里面的元素为func的返回值,所以list长度为DataSet的长度 - """ - assert callable(func), "The func you provide is not callable." - assert len(self) != 0, "Null DataSet cannot use apply()." - idx = -1 - try: - results = [] - for idx, ins in tqdm(enumerate(self._inner_iter()), total=len(self), dynamic_ncols=True, leave=False, - desc=kwargs.get('tqdm_desc', ''), - disable=not kwargs.get('use_tqdm', False)): - if "_apply_field" in kwargs: - results.append(func(ins[kwargs["_apply_field"]])) - else: - results.append(func(ins)) - except BaseException as e: - if idx != -1: - logger.error("Exception happens at the `{}`th instance.".format(idx)) - raise e - - if new_field_name is not None: - self._add_apply_field(results, new_field_name, kwargs) - - return results - - def add_seq_len(self, field_name: str, new_field_name=Const.INPUT_LEN): - r""" - 将使用len()直接对field_name中每个元素作用,将其结果作为sequence length, 并放入seq_len这个field。 - - :param field_name: str. - :param new_field_name: str. 
新的field_name - :return: - """ - if self.has_field(field_name=field_name): - self.apply_field(len, field_name, new_field_name=new_field_name) - else: - raise KeyError(f"Field:{field_name} not found.") - return self - - def drop(self, func, inplace=True): - r""" - func接受一个Instance,返回bool值。返回值为True时,该Instance会被移除或者不会包含在返回的DataSet中。 - - :param callable func: 接受一个Instance作为参数,返回bool值。为True时删除该instance - :param bool inplace: 是否在当前DataSet中直接删除instance;如果为False,将返回一个新的DataSet。 - - :return: DataSet - """ - if inplace: - results = [ins for ins in self._inner_iter() if not func(ins)] - for name, old_field in self.field_arrays.items(): - self.field_arrays[name].content = [ins[name] for ins in results] - return self - else: - results = [ins for ins in self if not func(ins)] - if len(results) != 0: - dataset = DataSet(results) - for field_name, field in self.field_arrays.items(): - dataset.field_arrays[field_name].to(field) - return dataset - else: - return DataSet() - - def split(self, ratio, shuffle=True): - r""" - 将DataSet按照ratio的比例拆分,返回两个DataSet - - :param float ratio: 0<ratio<1, 返回的第一个DataSet拥有 `(1-ratio)` 这么多数据,第二个DataSet拥有 `ratio` 这么多数据 - :param bool shuffle: 在split前是否shuffle一下。 - :return: [ :class:`~fastNLP.DataSet` , :class:`~fastNLP.DataSet` ] - """ - assert len(self) > 1, f'DataSet with {len(self)} instance cannot be split.' - assert isinstance(ratio, float) - assert 0 < ratio < 1 - all_indices = [_ for _ in range(len(self))] - if shuffle: - np.random.shuffle(all_indices) - split = int(ratio * len(self)) - if split == 0: - error_msg = f'Dev DataSet has {split} instance after split.' - logger.error(error_msg) - raise IndexError(error_msg) - dev_indices = all_indices[:split] - train_indices = all_indices[split:] - dev_set = DataSet() - train_set = DataSet() - for idx in dev_indices: - dev_set.append(self[idx]) - for idx in train_indices: - train_set.append(self[idx]) - for field_name in self.field_arrays: - train_set.field_arrays[field_name].to(self.field_arrays[field_name]) - dev_set.field_arrays[field_name].to(self.field_arrays[field_name]) - - train_set.collater.copy_from(self.collater) - dev_set.collater.copy_from(self.collater) - return train_set, dev_set - - def save(self, path): - r""" - 保存DataSet. - - :param str path: 将DataSet存在哪个路径 - """ - with open(path, 'wb') as f: - pickle.dump(self, f) - - @staticmethod - def load(path): - r""" - 从保存的DataSet pickle文件的路径中读取DataSet - - :param str path: 从哪里读取DataSet - :return: 读取后的 :class:`~fastNLP.DataSet` 。 - """ - with open(path, 'rb') as f: - d = pickle.load(f) - assert isinstance(d, DataSet), "The object is not DataSet, but {}.".format(type(d)) - return d - - def add_collate_fn(self, fn, name=None): - r""" - 添加 CollateFn,collate_fn允许在生成的batch的过程中动态生成一些数据(在DataSetIter作为迭代器的情况下有效,默认情况下就是用的 - 这个)。支持依次添加多个collate_fn, 如果相同的key,后面的collate_fn的结果覆盖前面的collate_fn的结果。 - - :param callable fn: 传入一个可调用的function, 该function可接受的参数为List[(ind1, instance1), (ind2, instance2)] - (某个batch被选中的所有的indice以及instance),其中ind1/ind2是该instance在dataset中的index,instance1/instance2是 - 这次batch取出来的数据,包含了所有的field。返回值需要为两个dict,第一个dict的值将被认为是input,第二个dict的值被认为是 - target,返回的值至多允许一个空dict。若返回的dict中包含了被设置为input或target的field的名称,将覆盖dataset中的field。 - fastNLP不会将collate_fn的返回结果pad和转换为tensor,需要在collate_fn中完成pad和转换为tensor(不需要将tensor移动到 - gpu中,fastNLP会自动将其移动到特定gpu)。不要修改传入collate_fn中的数据,否则可能导致未知问题。 - :param str,int name: collate_fn的名称,如果不传入,默认使用自增长的数字作为key。相同的name会覆盖之前的collate_fn。 - """ - assert callable(fn), "You must pass in a callable object."
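- # Registration sketch (comments only): giving a collate_fn a name lets it be replaced or removed later, e.g. - # ds.add_collate_fn(concat_collate_fn, name='concat') # a later fn overrides an earlier one on duplicate keys - # ds.delete_collate_fn(name='concat') # removes exactly this fn; with name=None the most recently added fn is removed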
- self.collater.add_fn(fn, name=name) - - def delete_collate_fn(self, name=None): - r""" - 删除某个collate_fn - - :param str,int name: 如果为None,则删除最近加入的collate_fn - :return: - """ - self.collater.delete_fn(name) - - def _collate_batch(self, ins_list): - return self.collater.collate_batch(ins_list) - - def concat(self, dataset, inplace=True, field_mapping=None): - """ - 将当前dataset与输入的dataset结合成一个更大的dataset,需要保证两个dataset都包含了相同的field。结合后的dataset的input,target - 以及collate_fn以当前dataset为准。当dataset中包含的field多于当前的dataset,则多余的field会被忽略;若dataset中未包含所有 - 当前dataset含有field,则会报错。 - - :param DataSet, dataset: 需要和当前dataset concat的dataset - :param bool, inplace: 是否直接将dataset组合到当前dataset中 - :param dict, field_mapping: 当dataset中的field名称和当前dataset不一致时,需要通过field_mapping把输入的dataset中的field - 名称映射到当前field. field_mapping为dict类型,key为dataset中的field名称,value是需要映射成的名称 - - :return: DataSet - """ - assert isinstance(dataset, DataSet), "Can only concat two datasets." - - fns_in_this_dataset = set(self.get_field_names()) - fns_in_other_dataset = dataset.get_field_names() - reverse_field_mapping = {} - if field_mapping is not None: - fns_in_other_dataset = [field_mapping.get(fn, fn) for fn in fns_in_other_dataset] - reverse_field_mapping = {v:k for k, v in field_mapping.items()} - fns_in_other_dataset = set(fns_in_other_dataset) - fn_not_seen = list(fns_in_this_dataset - fns_in_other_dataset) - - if fn_not_seen: - raise RuntimeError(f"The following fields are not provided in the dataset:{fn_not_seen}") - - if inplace: - ds = self - else: - ds = deepcopy(self) - - for fn in fns_in_this_dataset: - ds.get_field(fn).content.extend(deepcopy(dataset.get_field(reverse_field_mapping.get(fn, fn)).content)) - - return ds diff --git a/fastNLP/core/dist_trainer.py b/fastNLP/core/dist_trainer.py deleted file mode 100644 index 74ac7028..00000000 --- a/fastNLP/core/dist_trainer.py +++ /dev/null @@ -1,521 +0,0 @@ -r""" -分布式 Trainer -使用步骤 -1. 在代码中调用 DistTrainer,类似 Trainer,传入模型和数据等等参数 -2. 
在命令行中,将 python your_script.py 替换为 python -m torch.distributed.launch --nproc_per_node=N your_script.py -""" -import logging -import os -import time -from datetime import datetime - -import contextlib -import torch -import torch.cuda -import torch.distributed as dist -import torch.optim -from torch.serialization import default_restore_location -from pkg_resources import parse_version -from torch.nn.parallel import DistributedDataParallel as DDP -from torch.utils.data.distributed import DistributedSampler -from tqdm import tqdm -import time - -from ._logger import logger, init_logger_dist -from .batch import DataSetIter, BatchIter -from .callback import DistCallbackManager, CallbackException -from .callback import _TesterCallback -from .dataset import DataSet -from .losses import _prepare_losser -from .optimizer import Optimizer -from .utils import _build_args -from .utils import _build_fp16_env -from .utils import _get_func_signature -from .utils import _move_dict_value_to_device -from .sampler import Sampler - -__all__ = [ - 'get_local_rank', - 'DistTrainer', -] - -def get_local_rank(): - r""" - 返回当前进程的 local rank, 0 到 N-1 ,N为当前分布式总进程数 - """ - if 'LOCAL_RANK' in os.environ: - return int(os.environ['LOCAL_RANK']) - from argparse import ArgumentParser - parser = ArgumentParser() - parser.add_argument('--local_rank', type=int) - args, _ = parser.parse_known_args() - if 'local_rank' in args and args.local_rank: - os.environ['LOCAL_RANK'] = str(args.local_rank) # for multiple calls for this function - return args.local_rank - raise RuntimeError('Please use "python -m torch.distributed.launch --nproc_per_node=N train_script.py') - - -class DistTrainer: - r""" - 分布式的 Trainer,支持分布式训练和混合精度的训练。具体实现原理请阅读 pytorch 官方文档。 - - Note: 使用分布式 Trainer 时会同时有多个进程执行训练代码。因此将单进程的训练代码改为多进程之前, - 请仔细检查,确保训练代码中的同步和互斥操作能正确执行(如模型保持,打印日志等) - """ - def __init__(self, train_data, model, optimizer=None, loss=None, - callbacks_all=None, callbacks_master=None, - batch_size_per_gpu=8, n_epochs=1, - num_workers=1, drop_last=False, - dev_data=None, metrics=None, metric_key=None, - update_every=1, print_every=10, validate_every=-1, - save_path=None, device='auto', - fp16=False, use_tqdm=True, sampler=None, **kwargs): - r""" - - :param train_data: 训练集, :class:`~fastNLP.DataSet` 类型。 - :param nn.modules, DDP model: 待训练的模型 - :param optimizer: `torch.optim.Optimizer` 优化器。如果为None,则Trainer使用默认的Adam(model.parameters(), lr=4e-3)这个优化器 - :param loss: 使用的 :class:`~fastNLP.core.losses.LossBase` 对象。当为None时,默认使用 :class:`~fastNLP.LossInForward` - :param list callbacks_all: 用于在train过程中起调节作用的回调函数,作用于所有训练进程中。 - 可使用的callback参见 :mod:`callback模块 ` - :param list callbacks_master: 用于在train过程中起调节作用的回调函数,只作用于其中一个进程( Master 进程)。 - 可使用的callback参见 :mod:`callback模块 ` - :param int batch_size_per_gpu: 训练时,每个进程的 batch 大小。 - :param int n_epochs: 需要优化迭代多少次。 - :param num_workers: int, 有多少个线程来进行数据pad处理。 - :param drop_last: 如果最后一个batch没有正好为batch_size这么多数据,就扔掉最后一个batch - :param dev_data: 用于做验证的DataSet, :class:`~fastNLP.DataSet` 类型。 - :param metrics: 验证的评估函数。可以只使用一个 :class:`Metric` , - 也可以使用多个 :class:`Metric` ,通过列表传入。 - 如验证时取得了更好的验证结果(如果有多个Metric,以列表中第一个Metric为准),且save_path不为None, - 则保存当前模型。Metric种类详见 :mod:`metrics模块 ` 。仅在传入dev_data时有效。 - :param str,None metric_key: :class:`Metric` 有时会有多个指标, - 比如 :class:`~fastNLP.core.metrics.SpanFPreRecMetric` 中包含了'f', 'pre', 'rec'。此时需 - 要指定以哪个指标为准。另外有些指标是越小效果越好,比如语言模型的困惑度,这种情况下,在key前面增加一个'-'来表 - 明验证时,值越小越好(比如: "-ppl")。仅在传入dev_data时有效。 - :param update_every: int, 多少步更新一次梯度。用于希望累计梯度的场景,比如需要128的batch_size, 但是直接设为128 - 
会导致内存不足,通过设置batch_size=32, update_every=4达到目的。当optimizer为None时,该参数无效。 - :param int print_every: 多少次反向传播更新tqdm显示的loss; 如果use_tqdm=False, 则多少次反向传播打印loss。 - :param int validate_every: 多少个step在验证集上验证一次; 如果为-1,则每个epoch结束验证一次。仅在传入dev_data时有效。 - :param str,None save_path: 将模型保存路径,如果路径不存在,将自动创建文件夹。如果为None,则不保存模型。如果dev_data为None,则保存 - 最后一次迭代的模型。保存的时候不仅保存了参数,还保存了模型结构。即便使用DataParallel,这里也只保存模型。 - :param str device: 指定 device,可以是 gpu,cpu 或 auto - :param bool fp16: 指定是否使用半精度训练。 - :param bool use_tqdm: 是否使用tqdm来显示训练进度; 如果为False,则将loss打印在终端中。 - :param Sampler sampler: 使用的sampler,如果不指定,默认使用的DistributedSampler。使用这个参数的情况一般为,明确修改了每个 - rank的Dataset,使得每个rank上的dataset虽然sample数量一样多,但是sample其实不一样。 - :param kwargs: 支持配置可选参数 - bool test_use_tqdm: 在dev上验证的时候是否开启tqdm - Sampler test_sampler: 在evaluate的时候使用的sampler - int dev_batch_size: 在evaluate时,使用的evaluate的batch大小 - bool test_use_fp16: test时使用fp16 - bool set_grad_to_none: zero_grad时将grad设为None而不是0 - GradScaler grad_scaler: 自定义的梯度 scaler - bool pin_memory: 是否将产生的tensor使用pin memory, 可能会加快数据速度。一般在tensor较多或tensor维度较大时,有速度增益。 - bool find_unused_parameters: 在将model转化为DistributedDataParallel类型的时候,需要填入该参数,除非model内确实有 - forward没用上的参数,否则应该不需要用到该参数。 - """ - assert device in ['auto', 'cuda', 'cpu'], "Please set correct device in [auto', 'cuda', 'cpu']" - if device == 'auto': - device = 'cuda' if torch.cuda.is_available() else 'cpu' - - # init distributed - if device == 'cuda': - torch.cuda.set_device(get_local_rank()) - self.device = torch.device("cuda", get_local_rank()) - else: - self.device = torch.device(device) - - init_logger_dist() - - self.world_size = dist.get_world_size() - self.rank = dist.get_rank() # unique id for each process - - self.train_data = train_data - self.kwargs = kwargs - if kwargs.get('batch_size', None): - batch_size_per_gpu = int(kwargs.get('batch_size')) - self.batch_size_per_gpu = int(batch_size_per_gpu) - self.n_epochs = int(n_epochs) - self.num_data_workers = int(num_workers) - self.drop_last = drop_last - self.update_every = int(update_every) - self.print_every = int(print_every) - self.validate_every = int(validate_every) - self.save_path = save_path - self.losser = _prepare_losser(loss) - self.fp16 = fp16 - self.local_rank = get_local_rank() - self.callback_manager = DistCallbackManager( - env={"trainer": self}, callbacks_all=callbacks_all, - callbacks_master=callbacks_master) - self.test_manager = DistCallbackManager(env={'trainer': self}) - self.metric_key = metric_key - self.use_tqdm = use_tqdm - - # init fp16, must before DataParallel init - autocast, GradScaler = _build_fp16_env(dummy=not self.fp16) - self.auto_cast = autocast - user_grad_scaler = kwargs.get('grad_scaler', None) - if user_grad_scaler is not None: - assert self.fp16, "must set fp16=True to enable grad_scaler" - grad_scaler = user_grad_scaler - else: - grad_scaler = GradScaler() - self.grad_scaler = grad_scaler - - self.set_grad_to_none = kwargs.get('set_grad_to_none', False) - # init DataParallel - if isinstance(model, DDP): - self.ddp_model = model - else: - model.to(self.device) - if parse_version(torch.__version__)>=parse_version('1.1'): - self.ddp_model = DDP(model, device_ids=[self.local_rank], - output_device=self.local_rank, - find_unused_parameters=kwargs.get('find_unused_parameters', False)) - else: - self.ddp_model = DDP(model, device_ids=[self.local_rank], - output_device=self.local_rank) - self.model = self.ddp_model.module - - self._forward_func = self.model.forward - self.model.to(self.device) - - optimizer = self._get_optimizer(optimizer) - self.optimizer = 
optimizer - if isinstance(self.train_data, DataSet): - if sampler is None: - self.sampler = DistributedSampler(self.train_data) - else: - # sampler check (sampler is known to be non-None in this branch) - if not isinstance(sampler, (Sampler, torch.utils.data.Sampler)): - raise ValueError( - f"The type of sampler should be fastNLP.BaseSampler or pytorch's Sampler, got {type(sampler)}") - if hasattr(sampler, 'set_batch_size'): - sampler.set_batch_size(batch_size_per_gpu) - self.sampler = sampler - # concerning issue from https://github.com/pytorch/pytorch/issues/57273 - self.pin_memory = kwargs.get('pin_memory', False if parse_version(torch.__version__)==parse_version('1.9') else True) - self.data_iterator = self._get_data_iter(self.train_data) - self.batch_size = self.world_size * self.batch_size_per_gpu - self.n_steps = self._get_n_steps() - - self.dev_data = dev_data - self.metrics = metrics - self.test_use_tqdm = kwargs.get('test_use_tqdm', self.use_tqdm) - dev_batch_size = kwargs.get('dev_batch_size', batch_size_per_gpu) - - # for evaluation, only run eval on master proc - if dev_data and metrics: - cb = _TesterCallback( - dev_data, self.model, metrics, - batch_size=dev_batch_size, num_workers=num_workers, sampler=kwargs.get('test_sampler', None), - use_tqdm=self.test_use_tqdm) - self.test_manager.add_callback([cb], master=True) - # Setup logging - # 同步start_time - sync_time = torch.tensor(time.time(), dtype=torch.double).to(self.device) - dist.broadcast(sync_time, src=0) - self.start_time = datetime.fromtimestamp(sync_time.item()).strftime('%Y-%m-%d-%H-%M-%S-%f') - # print('sync_time: {}, start_time: {}'.format(sync_time, self.start_time)) - - if self.save_path: - self.cp_save_path = self.save_path - else: - self.cp_save_path = None - # use INFO in the master, WARN for others - self.logger = logger - self.logger.info("Setup Distributed Trainer") - self.logger.warning("Process pid: {}, rank: {}, local rank: {}, device: {}, fp16: {}".format( - os.getpid(), self.rank, self.local_rank, self.device, self.fp16)) - self.logger.info("Num of processes: {}".format(self.world_size)) - self.logger.info("Use device: {}".format(device)) - - def _get_n_steps(self): - return len(self.data_iterator) * self.n_epochs - - def _get_data_iter(self, dataset): - if isinstance(dataset, DataSet): - return DataSetIter(dataset=dataset, batch_size=self.batch_size_per_gpu, sampler=self.sampler, - num_workers=self.num_data_workers, drop_last=self.drop_last, - pin_memory=self.pin_memory) - elif isinstance(dataset, BatchIter): - return dataset - else: - raise TypeError("train_data type {} is not supported".format(type(dataset))) - - def _get_optimizer(self, optimizer): - if isinstance(optimizer, torch.optim.Optimizer): - return optimizer - elif isinstance(optimizer, Optimizer): - return optimizer.construct_from_pytorch(self.ddp_model.parameters()) - elif optimizer is None: - return torch.optim.Adam(self.ddp_model.parameters(), lr=4e-3) - else: - if not (hasattr(optimizer, 'step') and callable(optimizer.step)): - raise TypeError("optimizer must have a callable step() function.") - # return the object (instead of only assigning self.optimizer) so the caller's assignment keeps it - return optimizer - - @property - def is_master(self): - r"""是否是主进程""" - return self.rank == 0 - - def train(self, load_best_model=True, on_exception='auto'): - r""" - 使用该函数使Trainer开始训练。 - - :param str on_exception: 在训练过程遭遇exception,并被 :py:class:Callback 的on_exception()处理后,是否继续抛出异常。 - 支持'ignore','raise', 'auto': 'ignore'将捕获异常,写在Trainer.train()后面的代码将继续运行; 'raise'将异常抛出; - 'auto'将ignore以下两种Exception: 
CallbackException与KeyboardInterrupt, raise其它exception. - :return dict: 返回一个字典类型的数据, - 内含以下内容:: - - seconds: float, 表示训练时长 - 以下三个内容只有在提供了dev_data的情况下会有。 - best_eval: Dict of Dict, 表示evaluation的结果。第一层的key为Metric的名称, - 第二层的key为具体的Metric - best_epoch: int,在第几个epoch取得的最佳值 - best_step: int, 在第几个step(batch)更新取得的最佳值 - - """ - try: - self.logger.info("###### Training epochs started ######") - self.logger.info('Total epochs: %d'% self.n_epochs) - self.logger.info('Total steps: %d'% self.n_steps) - self.logger.info('Num instances per GPU: %d'% self.batch_size_per_gpu) - self.logger.info('Num of steps per update: %d' % self.update_every) - self.logger.info('Total batch_size: %d'% - (self.batch_size_per_gpu * dist.get_world_size() * self.update_every)) - self.logger.info('Total num of samples: %d'% len(self.train_data)) - self.logger.info("Num of callbacks for all workers: {}".format( - len(self.callback_manager.callbacks_all))) - self.logger.info("Num of callbacks for master workers: {}".format( - len(self.callback_manager.callbacks_master))) - self.logger.info("Callbacks for all workers: {}".format( - [repr(cb) for cb in self.callback_manager.callbacks_all])) - self.logger.info("Callbacks for master workers: {}".format( - [repr(cb) for cb in self.callback_manager.callbacks_master])) - - start_time = time.time() - results = {} - if self.n_epochs <= 0: - self.logger.info("Training epoch is {}, nothing was done.".format(self.n_epochs)) - results['seconds'] = 0. - return results - - try: - self.callback_manager.on_train_begin() - self._train() - self.callback_manager.on_train_end() - - except BaseException as e: - self.callback_manager.on_exception(e) - if on_exception == 'auto': - if not isinstance(e, (CallbackException, KeyboardInterrupt)): - raise e - else: - self.logger.info('Catch {}, ignored.'.format(e.__class__.__name__)) - elif on_exception == 'raise': - raise e - - results['seconds'] = round(time.time() - start_time, 2) - self.logger.info("###### Train finished ######") - self.logger.info('Total train time: {} seconds.'. 
format(results['seconds'])) - if load_best_model and self.cp_save_path and len(self.test_manager.callbacks): - self.load_check_point(self._best_save_name()) - finally: - pass - dist.barrier() - return results - - def _train(self): - dist.barrier() - if not self.use_tqdm: - from .utils import _pseudo_tqdm as inner_tqdm - else: - inner_tqdm = tqdm - - self.step = 0 - self.epoch = 0 - self.pbar = inner_tqdm(total=self.n_steps, postfix='loss:{0:<6.5f}', - leave=False, dynamic_ncols=True, disable=not self.is_master) - pbar = self.pbar - avg_loss = 0 - data_iterator = self.data_iterator - self.ddp_model.zero_grad() - self.batch_per_epoch = self.data_iterator.num_batches - for epoch in range(1, self.n_epochs + 1): - self.epoch = epoch - pbar.set_description_str(desc="Epoch {}/{}".format(epoch, self.n_epochs)) - # early stopping - self.callback_manager.on_epoch_begin() - for batch_x, batch_y in data_iterator: - self.step += 1 - if self.step%self.update_every!=0: - no_sync = self.ddp_model.no_sync - else: - no_sync = contextlib.ExitStack - with no_sync(): - self.ddp_model.train() - _move_dict_value_to_device(batch_x, batch_y, device=self.device, non_blocking=self.pin_memory) - indices = data_iterator.get_batch_indices() - # negative sampling; replace unknown; re-weight batch_y - self.callback_manager.on_batch_begin(batch_x, batch_y, indices) - with self.auto_cast(): - prediction = self._data_forward(self.ddp_model, batch_x) - # edit prediction - self.callback_manager.on_loss_begin(batch_y, prediction) - loss = self._compute_loss(prediction, batch_y) - - avg_loss += loss.detach() - - # Is loss NaN or inf? requires_grad = False - self.callback_manager.on_backward_begin(loss) - self._grad_backward(loss) - self.callback_manager.on_backward_end() - self._update() - self.callback_manager.on_step_end() - - if self.step % self.print_every == 0: - avg_loss = float(avg_loss) / self.print_every - print_output = "loss:{:<6.5f}".format(avg_loss) - pbar.update(self.print_every) - pbar.set_postfix_str(print_output) - avg_loss = 0 - - self.callback_manager.on_batch_end() - - if (self.validate_every > 0 and self.step % self.validate_every == 0) and len(self.test_manager.callbacks): - self._do_validation() - - # ================= mini-batch end ==================== # - if self.validate_every < 0 and len(self.test_manager.callbacks): - self._do_validation() - - # lr decay; early stopping - self.callback_manager.on_epoch_end() - # =============== epochs end =================== # - pbar.close() - self.pbar = None - # ============ tqdm end ============== # - - def _clear_grad(self, optimizer): - if self.set_grad_to_none: - for group in optimizer.param_groups: - for p in group['params']: - if p.grad is not None: - p.grad = None - else: - optimizer.zero_grad() - - def _grad_backward(self, loss): - r"""Compute gradient with link rules. - - :param loss: a scalar where back-prop starts - - For PyTorch, just do "loss.backward()" - """ - if (self.step-1) % self.update_every == 0: - self._clear_grad(self.optimizer) - self.grad_scaler.scale(loss).backward() - - def _update(self): - r"""Perform weight update on a model. 
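- Note: the optimizer is stepped only on calls where step % update_every == 0, and the step goes through self.grad_scaler so that fp16 loss scaling stays consistent.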
- - """ - if self.step % self.update_every == 0: - self.grad_scaler.step(self.optimizer) - self.grad_scaler.update() - - def _data_forward(self, network, x): - x = _build_args(self._forward_func, **x) - y = network(**x) - if not isinstance(y, dict): - raise TypeError( - f"The return value of {_get_func_signature(self._forward_func)} should be dict, got {type(y)}.") - return y - - def _compute_loss(self, predict, truth): - r"""Compute loss given prediction and ground truth. - - :param predict: prediction dict, produced by model.forward - :param truth: ground truth dict, produced by batch_y - :return: a scalar - """ - loss = self.losser(predict, truth) - if self.update_every > 1: - loss = loss / self.update_every - if loss.dim() > 0: - loss = loss.mean() - return loss - - def save_check_point(self, name=None, only_params=False): - r"""保存当前模型""" - # only master save models - if name is None: - name = 'checkpoint-{}.bin'.format(self.step) - os.makedirs(self.cp_save_path, exist_ok=True) - path = os.path.join(self.cp_save_path, name) - self.logger.info("Save checkpoint to {}".format(path)) - model_to_save = self.ddp_model.module - if only_params: - model_to_save = model_to_save.state_dict() - if self.is_master: - torch.save(model_to_save, path) - - def load_check_point(self, name): - path = os.path.join(self.cp_save_path, name) - self.logger.info('reload best model from %s', path) - model_load = torch.load( - path, - map_location=lambda s, l: default_restore_location(s, "cpu")) - if not isinstance(model_load, dict): - model_load = model_load.state_dict() - self.model.load_state_dict(model_load) - - def _best_save_name(self, auto_fix=True): - best_name = "best_" + "_".join([self.model.__class__.__name__, str(self.metric_key), self.start_time]) - return best_name - - def _do_validation(self): - with self.ddp_model.no_sync(): - # 因为模型参数不更新,可以关闭同步 - self.callback_manager.on_valid_begin() - eval_res = self.test_manager.on_valid_begin() - eval_res = list(filter(lambda x: x is not None, eval_res)) - if len(eval_res): - eval_res, is_better = list(zip(*eval_res)) - eval_res = eval_res[0] - is_better = is_better[0] - else: - eval_res, is_better = None, None - if self.metric_key is None and eval_res is not None: - eval_res0 = list(eval_res.values())[0] - self.metric_key = list(eval_res0.keys())[0] - # logger.info('{}, {}'.format(eval_res, is_better)) - # save better model on master node - if is_better is not None and self.cp_save_path: - if is_better: - self.save_check_point(self._best_save_name(), only_params=False) - dist.barrier() - - if not self.is_master and self.metric_key is None: - # 主进程自动得到了metric_key,而其它进程没有 - prefix = 'best_' + self.model.__class__.__name__ - suffix = self.start_time - fn_list = os.listdir(self.cp_save_path) - fn_list = [fn for fn in fn_list if fn.startswith(prefix) and fn.endswith(suffix)] - if len(fn_list) == 1: - best_name = fn_list[0] - self.metric_key = best_name[len(prefix):-len(suffix)].strip('_') - # print('RANK {} metric_key {}'.format(self.rank, self.metric_key)) - self.callback_manager.on_valid_end( - eval_res, self.metric_key, self.optimizer, is_better) - self.ddp_model.train() - - def close(self): - r"""关闭Trainer,销毁进程""" - dist.destroy_process_group() diff --git a/fastNLP/core/field.py b/fastNLP/core/field.py deleted file mode 100644 index 9834a653..00000000 --- a/fastNLP/core/field.py +++ /dev/null @@ -1,696 +0,0 @@ -r""" -.. 
todo:: - doc -""" - -__all__ = [ - "Padder", - "AutoPadder", - "EngChar2DPadder", -] - -from abc import abstractmethod -from collections import Counter -from copy import deepcopy -from numbers import Number -from typing import Any - -import numpy as np -import torch - -from ._logger import logger -from .utils import _is_iterable - - -class SetInputOrTargetException(Exception): - def __init__(self, msg, index=None, field_name=None): - super().__init__(msg) - self.msg = msg - self.index = index # 标示在哪个数据遭遇到问题了 - self.field_name = field_name # 标示当前field的名称 - - -class AppendToTargetOrInputException(Exception): - def __init__(self, msg, index=None, field_name=None): - super().__init__(msg) - self.msg = msg - self.index = index # 标示在哪个数据遭遇到问题了 - self.field_name = field_name # 标示当前field的名称 - - -def _get_ele_type_and_dim(cell: Any, dim=0): - r""" - 识别cell的类别与dimension的数量 - - numpy scalar type:https://docs.scipy.org/doc/numpy-1.13.0/reference/arrays.scalars.html - :param cell: - :param dim: - :return: - """ - if isinstance(cell, (str, Number, np.bool_)): - if hasattr(cell, 'dtype'): - return cell.dtype.type, dim - return type(cell), dim - elif isinstance(cell, list): - dim += 1 - res = [_get_ele_type_and_dim(cell_i, dim) for cell_i in cell] - types = set([i for i, j in res]) - dims = set([j for i, j in res]) - if len(types) > 1: - raise SetInputOrTargetException("Mixed types detected: {}.".format(list(types))) - elif len(types) == 0: - raise SetInputOrTargetException("Empty value encountered.") - if len(dims) > 1: - raise SetInputOrTargetException("Mixed dimension detected: {}.".format(list(dims))) - return types.pop(), dims.pop() - elif isinstance(cell, torch.Tensor): - return cell.dtype, cell.dim() + dim # 如果是torch.mean的结果是0 - elif isinstance(cell, np.ndarray): - if cell.dtype != np.dtype('O'): # 如果不是object的话说明是well-formatted的了 - return cell.dtype.type, cell.ndim + dim # dtype.type返回的会是np.int32, np.float等 - # 否则需要继续往下iterate - dim += 1 - res = [_get_ele_type_and_dim(cell_i, dim) for cell_i in cell] - types = set([i for i, j in res]) - dims = set([j for i, j in res]) - if len(types) > 1: - raise SetInputOrTargetException("Mixed types detected: {}.".format(list(types))) - elif len(types) == 0: - raise SetInputOrTargetException("Empty value encountered.") - if len(dims) > 1: - raise SetInputOrTargetException("Mixed dimension detected: {}.".format(list(dims))) - return types.pop(), dims.pop() - else: # 包含tuple, set, dict以及其它的类型 - raise SetInputOrTargetException(f"Cannot process type:{type(cell)}.") - - -class Padder: - r""" - 所有padder都需要继承这个类,并覆盖__call__方法。 - 用于对batch进行padding操作。传入的element是inplace的,即直接修改element可能导致数据变化,建议inplace修改之前deepcopy一份。 - - .. 
py:function:: __call__(self, contents, field_name, field_ele_dtype): - - """ - - def __init__(self, pad_val=0, **kwargs): - r""" - - :param List[Any] contents: 传入的element是inplace的,即直接修改element可能导致数据变化,建议inplace修改之前 - deepcopy一份。 - :param str, field_name: field的名称。 - :param np.int64,np.float64,np.str,None, field_ele_dtype: 该field的内层元素的类型。如果该field的ignore_type为True,该这个值为None。 - :return: np.array([padded_element]) - """ - self.pad_val = pad_val - - def set_pad_val(self, pad_val): - self.pad_val = pad_val - - def get_pad_val(self): - return self.pad_val - - @abstractmethod - def __call__(self, contents, field_name, field_ele_dtype, dim: int): - r""" - 传入的是List内容。假设有以下的DataSet。 - - :param List[Any] contents: 传入的element是inplace的,即直接修改element可能导致数据变化,建议inplace修改之前 - deepcopy一份。 - :param str, field_name: field的名称。 - :param np.int64,np.float64,np.str,None, field_ele_dtype: 该field的内层元素的类型。如果该field的ignore_type为True, - 该这个值为None。 - :param dim: 这个field的维度。当ignore_type为True时,该值为None - :return: np.array([padded_element]) - - Example:: - - from fastNLP import DataSet - from fastNLP import Instance - dataset = DataSet() - dataset.append(Instance(sent='this is a demo', length=4, - chars=[['t', 'h', 'i', 's'], ['i', 's'], ['a'], ['d', 'e', 'm', 'o']])) - dataset.append(Instance(sent='another one', length=2, - chars=[['a', 'n', 'o', 't', 'h', 'e', 'r'], ['o', 'n', 'e']])) - 如果调用 - batch = dataset.get([0,1], pad=True) - sent这个field的padder的__call__会接收到的内容会是 - [ - 'this is a demo', - 'another one' - ] - - length这个field的padder的__call__会接收到的内容会是 - [4, 2] - - chars这个field的padder的__call__会接收到的内容会是 - [ - [['t', 'h', 'i', 's'], ['i', 's'], ['a'], ['d', 'e', 'm', 'o']], - [['a', 'n', 'o', 't', 'h', 'e', 'r'], ['o', 'n', 'e']] - ] - - 即把每个instance中某个field的内容合成一个List传入 - - """ - raise NotImplementedError - - -class AutoPadder(Padder): - r""" - 根据contents的数据自动判定是否需要做padding。 - - 1 如果元素类型(元素类型是指field中最里层元素的数据类型, 可以通过FieldArray.dtype查看,比如['This', 'is', ...]的元素类 - 型为str, [[1,2], ...]的元素类型为int)的数据不为数值类型则不会进行pad - - 2 如果元素类型为数值类型,比如np.int64, np.float64, int, float, torch.int64等 - - 2.1 如果该field的内容为数值类型(包括int, float等),比如为seq_len, 则不进行padding - - 2.2 如果该field的内容等价于一维list, 那么会将Batch中的List pad为一样长。 - - 2.3 如果该field的内容等价于二维list,那么会按照英语character padding的方式进行padding。如果是character padding建议使用 - :class: fastNLP.EngChar2DPadder. 
- - 2.4 如果该field的内容等价于三维list,则如果每个instance在每个维度上相等,会组成一个batch的tensor返回,这种情况应该是为图片 - 的情况。 - - 3 其它情况不进行处理,返回一个np.array类型。 - """ - - def __init__(self, pad_val=0): - super().__init__(pad_val=pad_val) - - def __call__(self, contents, field_name, field_ele_dtype, dim): - if field_ele_dtype: - if dim > 3: - return np.array(contents) - if isinstance(field_ele_dtype, type) and \ - (issubclass(field_ele_dtype, np.number) or issubclass(field_ele_dtype, Number)): - if dim == 0: - array = np.array(contents, dtype=field_ele_dtype) - elif dim == 1: - max_len = max(map(len, contents)) - array = np.full((len(contents), max_len), self.pad_val, dtype=field_ele_dtype) - for i, content_i in enumerate(contents): - array[i, :len(content_i)] = content_i - elif dim == 2: - max_len = max(map(len, contents)) - max_word_len = max([max([len(content_ii) for content_ii in content_i]) for - content_i in contents]) - array = np.full((len(contents), max_len, max_word_len), self.pad_val, dtype=field_ele_dtype) - for i, content_i in enumerate(contents): - for j, content_ii in enumerate(content_i): - array[i, j, :len(content_ii)] = content_ii - else: - shape = np.shape(contents) - if len(shape) == 4: # 说明各dimension是相同的大小 - array = np.array(contents, dtype=field_ele_dtype) - else: - raise RuntimeError( - f"Field:{field_name} has 3 dimensions, every sample should have the same shape.") - return array - elif str(field_ele_dtype).startswith('torch'): - if dim == 0: - tensor = torch.tensor(contents).to(field_ele_dtype) - elif dim == 1: - max_len = max(map(len, contents)) - tensor = torch.full((len(contents), max_len), fill_value=self.pad_val, dtype=field_ele_dtype) - for i, content_i in enumerate(contents): - tensor[i, :len(content_i)] = content_i.clone().detach() - elif dim == 2: - max_len = max(map(len, contents)) - max_word_len = max([max([len(content_ii) for content_ii in content_i]) for - content_i in contents]) - tensor = torch.full((len(contents), max_len, max_word_len), fill_value=self.pad_val, - dtype=field_ele_dtype) - for i, content_i in enumerate(contents): - for j, content_ii in enumerate(content_i): - tensor[i, j, :len(content_ii)] = content_ii.clone().detach() - else: - shapes = set([np.shape(content_i) for content_i in contents]) - if len(shapes) > 1: - raise RuntimeError( - f"Field:{field_name} has 3 dimensions, every sample should have the same shape.") - shape = shapes.pop() - if len(shape) == 3: - tensor = torch.full([len(contents)] + list(shape), fill_value=self.pad_val, - dtype=field_ele_dtype) - for i, content_i in enumerate(contents): - tensor[i] = content_i.clone().detach().to(field_ele_dtype) - else: - raise RuntimeError( - f"Field:{field_name} has 3 dimensions, every sample should have the same shape.") - return tensor - else: - return np.array(contents) # 不进行任何操作 - else: - return np.array(contents) - - -class EngChar2DPadder(Padder): - r""" - 用于为英语执行character级别的2D padding操作。对应的field内容应该类似[['T', 'h', 'i', 's'], ['a'], ['d', 'e', 'm', 'o']], - 但这个Padder只能处理index为int的情况。 - - padded过后的batch内容,形状为(batch_size, max_sentence_length, max_word_length). 
max_sentence_length为这个batch中最大句 - 子长度;max_word_length为这个batch中最长的word的长度:: - - from fastNLP import DataSet - from fastNLP import EngChar2DPadder - from fastNLP import Vocabulary - dataset = DataSet({'sent': ['This is the first demo', 'This is the second demo']}) - dataset.apply(lambda ins:[list(word) for word in ins['sent'].split()], new_field_name='chars') - vocab = Vocabulary() - vocab.from_dataset(dataset, field_name='chars') - vocab.index_dataset(dataset, field_name='chars') - dataset.set_input('chars') - padder = EngChar2DPadder() - dataset.set_padder('chars', padder) # chars这个field的设置为了EnChar2DPadder - - """ - - def __init__(self, pad_val=0, pad_length=0): - r""" - :param pad_val: int, pad的位置使用该index - :param pad_length: int, 如果为0则取一个batch中最大的单词长度作为padding长度。如果为大于0的数,则将所有单词的长度 - 都pad或截取到该长度. - """ - super().__init__(pad_val=pad_val) - - self.pad_length = pad_length - - def __call__(self, contents, field_name, field_ele_dtype, dim): - r""" - 期望输入类似于 - [ - [[0, 2], [2, 3, 4], ..], - [[9, 8, 2, 4], [1, 2,], ...], - .... - ] - - :param contents: - :param field_name: - :param field_ele_dtype - :return: - """ - if field_ele_dtype not in (np.int64, np.float64, int, float): - raise TypeError('dtype of Field:{} should be np.int64 or np.float64 to do 2D padding, get {}.'.format( - field_name, field_ele_dtype - )) - assert dim == 2, f"Field:{field_name} has {dim}, EngChar2DPadder only supports input with 2 dimensions." - if self.pad_length < 1: - max_char_length = max([max(len(char_lst) for char_lst in word_lst) for word_lst in contents]) - else: - max_char_length = self.pad_length - max_sent_length = max(len(word_lst) for word_lst in contents) - batch_size = len(contents) - dtype = type(contents[0][0][0]) - - padded_array = np.full((batch_size, max_sent_length, max_char_length), fill_value=self.pad_val, - dtype=dtype) - for b_idx, word_lst in enumerate(contents): - for c_idx, char_lst in enumerate(word_lst): - chars = char_lst[:max_char_length] - padded_array[b_idx, c_idx, :len(chars)] = chars - - return padded_array - - -class FieldArray: - def __init__(self, name, content, is_target=False, is_input=False, padder=AutoPadder(), ignore_type=False, - use_1st_ins_infer_dim_type=True): - if len(content) == 0: - raise RuntimeError("Empty fieldarray is not allowed.") - _content = content - try: - _content = list(_content) - except BaseException as e: - logger.error(f"Cannot convert content(of type:{type(content)}) into list.") - raise e - self.name = name - self.content = _content - self._ignore_type = ignore_type - # 根据input的情况设置input,target等 - self._cell_ndim = None # 多少维度, 如果value是1, dim为0; 如果value是[1, 2], dim=2 - self.dtype = None # 最内层的element都是什么类型的 - self._use_1st_ins_infer_dim_type = bool(use_1st_ins_infer_dim_type) - self._is_input = False - self._is_target = False - - if is_input: - self.is_input = is_input - if is_target: - self.is_target = is_target - - self.set_padder(padder) - - @property - def ignore_type(self): - return self._ignore_type - - @ignore_type.setter - def ignore_type(self, value): - if value: - self._cell_ndim = None - self.dtype = None - self._ignore_type = value - - @property - def is_input(self): - return self._is_input - - @is_input.setter - def is_input(self, value): - r""" - 当 field_array.is_input = True / False 时被调用 - """ - # 如果(value为True)且(_is_input和_is_target都是False)且(ignore_type为False) - if value is True and \ - self._is_target is False and \ - self._ignore_type is False: - self._check_dtype_and_ndim(only_check_1st_ins_dim_type=self._use_1st_ins_infer_dim_type) 
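- # Inference sketch (comment only): for content like [[1, 2], [3]] the check above records dtype=int and _cell_ndim=1; - # when _use_1st_ins_infer_dim_type is True these are taken from the first cell alone instead of scanning every cell.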
- if value is False and self._is_target is False: - self.dtype = None - self._cell_ndim = None - self._is_input = value - - @property - def is_target(self): - return self._is_target - - @is_target.setter - def is_target(self, value): - r""" - 当 field_array.is_target = True / False 时被调用 - """ - if value is True and \ - self._is_input is False and \ - self._ignore_type is False: - self._check_dtype_and_ndim(only_check_1st_ins_dim_type=self._use_1st_ins_infer_dim_type) - if value is False and self._is_input is False: - self.dtype = None - self._cell_ndim = None - self._is_target = value - - def _check_dtype_and_ndim(self, only_check_1st_ins_dim_type=True): - r""" - 检查当前content所有的element是否是同一个类型,且是否每个元素具有相同的维度。通过的话,设置_cell_ndim与_ele_type属性;没有 - 通过将直接报错. - - :param bool only_check_1st_ins_dim_type: 是否只检查第一个元素的type和dim - :return: - """ - cell_0 = self.content[0] - index = 0 - try: - type_0, dim_0 = _get_ele_type_and_dim(cell_0) - if not only_check_1st_ins_dim_type: - for cell in self.content[1:]: - index += 1 - type_i, dim_i = _get_ele_type_and_dim(cell) - if type_i != type_0: - raise SetInputOrTargetException( - "Type:{} in index {} is different from the first element with type:{}." - ".".format(type_i, index, type_0)) - if dim_0 != dim_i: - raise SetInputOrTargetException( - "Dimension:{} in index {} is different from the first element with " - "dimension:{}.".format(dim_i, index, dim_0)) - self._cell_ndim = dim_0 - self.dtype = type_0 - except SetInputOrTargetException as e: - e.index = index - raise e - - def append(self, val: Any): - r""" - :param val: 把该val append到fieldarray。 - :return: - """ - if (self._is_target or self._is_input) and self._ignore_type is False and not self._use_1st_ins_infer_dim_type: - type_, dim_ = _get_ele_type_and_dim(val) - if self.dtype != type_: - raise AppendToTargetOrInputException(f"Value(type:{type_}) are of different types with " - f"previous values(type:{self.dtype}).") - if self._cell_ndim != dim_: - raise AppendToTargetOrInputException(f"Value(dim:{dim_}) are of different dimensions with " - f"previous values(dim:{self._cell_ndim}).") - self.content.append(val) - else: - self.content.append(val) - - def pop(self, index): - r""" - 删除该field中index处的元素 - :param int index: 从0开始的数据下标。 - :return: - """ - self.content.pop(index) - - def __getitem__(self, indices): - return self.get(indices, pad=False) - - def __setitem__(self, idx, val): - assert isinstance(idx, int) - if (self._is_target or self._is_input) and self.ignore_type is False: # 需要检测类型 - type_, dim_ = _get_ele_type_and_dim(val) - if self.dtype != type_: - raise RuntimeError(f"Value(type:{type_}) are of different types with " - f"other values(type:{self.dtype}).") - if self._cell_ndim != dim_: - raise RuntimeError(f"Value(dim:{dim_}) are of different dimensions with " - f"previous values(dim:{self._cell_ndim}).") - self.content[idx] = val - - def get(self, indices, pad=True): - r""" - 根据给定的indices返回内容。 - - :param int,List[int] indices: 获取indices对应的内容。 - :param bool pad: 是否对返回的结果进行padding。仅对: (1) indices为List[int]; (2)padder不为None; (3)field设置了input - 或target,有效 - :return: 根据给定的indices返回的内容,可能是单个值或ndarray - """ - if isinstance(indices, int): - return self.content[indices] - - contents = [self.content[i] for i in indices] - if self.padder is None or pad is False: - return np.array(contents) - elif self.is_input or self.is_target: - return self.pad(contents) - else: - return np.array(contents) - - def pad(self, contents): - r""" - 传入list的contents,将contents使用padder进行padding,contents必须为从本FieldArray中取出的。 - - 
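# --- Illustrative sketch (not from the original file): how FieldArray.get/pad above behave, assuming
# --- FieldArray is importable as below and that AutoPadder (the default padder) pads int lists with 0.
from fastNLP.core.field import FieldArray

fa = FieldArray('words', [[1, 2], [3, 4, 5], [6]], is_input=True)
batch = fa.get([0, 2])   # list of indices + default padder -> padded ndarray [[1, 2], [6, 0]]
cell = fa[1]             # single index -> the raw cell [3, 4, 5], no padding applied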
:param list contents: - :return: - """ - return self.padder(contents, field_name=self.name, field_ele_dtype=self.dtype, dim=self._cell_ndim) - - def set_padder(self, padder): - r""" - 设置padder,在这个field进行pad的时候用这个padder进行pad,如果为None则不进行pad。 - - :param padder: :class:`~fastNLP.Padder` 类型,设置为None即删除padder。 - """ - if padder is not None: - assert isinstance(padder, Padder), "padder must be of type `fastNLP.core.Padder`." - self.padder = deepcopy(padder) - else: - self.padder = None - - def set_pad_val(self, pad_val): - r""" - 修改padder的pad_val. - - :param int pad_val: 该field的pad值设置为该值。 - """ - if self.padder is not None: - self.padder.set_pad_val(pad_val) - return self - - def __len__(self): - r""" - Returns the size of FieldArray. - - :return int length: - """ - return len(self.content) - - def to(self, other): - r""" - 将other的属性复制给本FieldArray(other必须为FieldArray类型). - 属性包括 is_input, is_target, padder, ignore_type - - :param other: :class:`~fastNLP.FieldArray` 从哪个field拷贝属性 - :return: :class:`~fastNLP.FieldArray` - """ - assert isinstance(other, FieldArray), "Only supports fastNLP.FieldArray type, not {}.".format(type(other)) - - self.ignore_type = other.ignore_type - self.is_input = other.is_input - self.is_target = other.is_target - self.padder = other.padder - - return self - - def split(self, sep: str = None, inplace: bool = True): - r""" - 依次对自身的元素使用.split()方法,应该只有当本field的元素为str时,该方法才有用。将返回值 - - :param sep: 分割符,如果为None则直接调用str.split()。 - :param inplace: 如果为True,则将新生成值替换本field。否则返回list。 - :return: List[List[str]] or self - """ - new_contents = [] - for index, cell in enumerate(self.content): - try: - new_contents.append(cell.split(sep)) - except Exception as e: - logger.error(f"Exception happens when process value in index {index}.") - raise e - return self._after_process(new_contents, inplace=inplace) - - def int(self, inplace: bool = True): - r""" - 将本field中的值调用int(cell). 支持field中内容为以下两种情况(1)['1', '2', ...](即field中每个值为str的), - (2) [['1', '2', ..], ['3', ..], ...](即field中每个值为一个list,list中的值会被依次转换。) - - :param inplace: 如果为True,则将新生成值替换本field。否则返回list。 - :return: List[int], List[List[int]], self - """ - new_contents = [] - for index, cell in enumerate(self.content): - try: - if isinstance(cell, list): - new_contents.append([int(value) for value in cell]) - else: - new_contents.append(int(cell)) - except Exception as e: - logger.error(f"Exception happens when process value in index {index}.") - raise e - return self._after_process(new_contents, inplace=inplace) - - def float(self, inplace=True): - r""" - 将本field中的值调用float(cell). 支持field中内容为以下两种情况(1)['1', '2', ...](即field中每个值为str的), - (2) [['1', '2', ..], ['3', ..], ...](即field中每个值为一个list,list中的值会被依次转换。) - - :param inplace: 如果为True,则将新生成值替换本field。否则返回list。 - :return: - """ - new_contents = [] - for index, cell in enumerate(self.content): - try: - if isinstance(cell, list): - new_contents.append([float(value) for value in cell]) - else: - new_contents.append(float(cell)) - except Exception as e: - logger.error(f"Exception happens when process value in index {index}.") - raise e - return self._after_process(new_contents, inplace=inplace) - - def bool(self, inplace=True): - r""" - 将本field中的值调用bool(cell). 
Supports the following two kinds of field content: (1) ['1', '2', ...] (every value in the field is a str);
-        (2) [['1', '2', ..], ['3', ..], ...] (every value is a list whose elements are converted one by one).
-
-        :param inplace: If True, replace the content of this field with the newly generated values; otherwise return them as a list.
-        :return:
-        """
-        new_contents = []
-        for index, cell in enumerate(self.content):
-            try:
-                if isinstance(cell, list):
-                    new_contents.append([bool(value) for value in cell])
-                else:
-                    new_contents.append(bool(cell))
-            except Exception as e:
-                logger.error(f"Exception happens when process value in index {index}.")
-                raise e
-
-        return self._after_process(new_contents, inplace=inplace)
-
-    def lower(self, inplace=True):
-        r"""
-        Call cell.lower() on every value in this field. Supports the following two kinds of field content:
-        (1) ['1', '2', ...] (every value is a str); (2) [['1', '2', ..], ['3', ..], ...] (every value is a
-        list whose elements are converted one by one).
-
-        :param inplace: If True, replace the content of this field with the newly generated values; otherwise return them as a list.
-        :return: List[str], List[List[str]] or self
-        """
-        new_contents = []
-        for index, cell in enumerate(self.content):
-            try:
-                if isinstance(cell, list):
-                    new_contents.append([value.lower() for value in cell])
-                else:
-                    new_contents.append(cell.lower())
-            except Exception as e:
-                logger.error(f"Exception happens when process value in index {index}.")
-                raise e
-        return self._after_process(new_contents, inplace=inplace)
-
-    def upper(self, inplace=True):
-        r"""
-        Call cell.upper() on every value in this field. Supports the following two kinds of field content:
-        (1) ['1', '2', ...] (every value is a str); (2) [['1', '2', ..], ['3', ..], ...] (every value is a
-        list whose elements are converted one by one).
-
-        :param inplace: If True, replace the content of this field with the newly generated values; otherwise return them as a list.
-        :return: List[str], List[List[str]] or self
-        """
-        new_contents = []
-        for index, cell in enumerate(self.content):
-            try:
-                if isinstance(cell, list):
-                    new_contents.append([value.upper() for value in cell])
-                else:
-                    new_contents.append(cell.upper())
-            except Exception as e:
-                logger.error(f"Exception happens when process value in index {index}.")
-                raise e
-        return self._after_process(new_contents, inplace=inplace)
-
-    def value_count(self):
-        r"""
-        Count how often each distinct value occurs in this field; mostly used for counting labels.
-
-        :return: Counter whose keys are the values (e.g. labels) and whose values are occurrence counts
-        """
-        count = Counter()
-
-        def cum(cell):
-            if _is_iterable(cell) and not isinstance(cell, str):
-                for cell_ in cell:
-                    cum(cell_)
-            else:
-                count[cell] += 1
-
-        for cell in self.content:
-            cum(cell)
-        return count
-
-    def _after_process(self, new_contents, inplace):
-        r"""
-        Decide whether to replace the field's content after one of the processing functions has run.
-
-        :param new_contents:
-        :param inplace:
-        :return: self, or the newly generated contents
-        """
-        if inplace:
-            self.content = new_contents
-            try:
-                # re-assign through the properties so that the dtype/dim checks run on the new contents
-                self.is_input = self.is_input
-                self.is_target = self.is_target
-            except SetInputOrTargetException as e:
-                logger.error("The newly generated field cannot be set as input or target.")
-                raise e
-            return self
-        else:
-            return new_contents
diff --git a/fastNLP/core/instance.py b/fastNLP/core/instance.py
deleted file mode 100644
index 83e3903e..00000000
--- a/fastNLP/core/instance.py
+++ /dev/null
@@ -1,61 +0,0 @@
-r"""
-The instance module implements the Instance class, which corresponds to a single sample in fastNLP;
-a sample can be thought of as an object of type Instance. For an intuitive example, see the table in
-the documentation of :mod:`fastNLP.core.dataset`.
-
-"""
-
-__all__ = [
-    "Instance"
-]
-
-from .utils import pretty_table_printer
-
-
-class Instance(object):
-    r"""
-    Instance is the fastNLP class that corresponds to one sample; every sample in fastNLP is an Instance object.
-    Instance is generally used together with :class:`~fastNLP.DataSet`; it is initialized as shown in the Example::
-
-        >>> from fastNLP import Instance
-        >>> ins = Instance(field_1=[1, 1, 1], field_2=[2, 2, 2])
-        >>> ins["field_1"]
-        [1, 1, 1]
-        >>> ins.add_field("field_3", [3, 3, 3])
-        >>> ins = Instance(**{'x1': 1, 'x2': np.zeros((3, 4))})
-    """
-
-    def __init__(self, **fields):
-
-        self.fields = fields
-
-    def
add_field(self, field_name, field): - r""" - 向Instance中增加一个field - - :param str field_name: 新增field的名称 - :param Any field: 新增field的内容 - """ - self.fields[field_name] = field - - def items(self): - r""" - 返回一个迭代器,迭代器返回两个内容,第一个内容是field_name, 第二个内容是field_value - - :return: 一个迭代器 - """ - return self.fields.items() - - def __contains__(self, item): - return item in self.fields - - def __getitem__(self, name): - if name in self.fields: - return self.fields[name] - else: - raise KeyError("{} not found".format(name)) - - def __setitem__(self, name, field): - return self.add_field(name, field) - - def __repr__(self): - return str(pretty_table_printer(self)) diff --git a/fastNLP/core/losses.py b/fastNLP/core/losses.py deleted file mode 100644 index 3bce8733..00000000 --- a/fastNLP/core/losses.py +++ /dev/null @@ -1,480 +0,0 @@ -r""" -losses 模块定义了 fastNLP 中所需的各种损失函数,一般做为 :class:`~fastNLP.Trainer` 的参数使用。 - -""" -__all__ = [ - "LossBase", - - "LossFunc", - "LossInForward", - - "CrossEntropyLoss", - "BCELoss", - "BCEWithLogits", - "L1Loss", - "NLLLoss", - "MSELoss", - - "CMRC2018Loss" - -] - -import inspect -from collections import defaultdict - -import torch -import torch.nn.functional as F - -from .utils import _CheckError -from .utils import _CheckRes -from .utils import _build_args -from .utils import _check_arg_dict_list -from .utils import _check_function_or_method -from .utils import _get_func_signature -from .utils import seq_len_to_mask -from ..core.const import Const - - -class LossBase(object): - r""" - 所有loss的基类。如果需要结合到Trainer之中需要实现get_loss方法 - """ - - def __init__(self): - self._param_map = {} # key是fun的参数,value是以该值从传入的dict取出value - self._checked = False - - @property - def param_map(self): - if len(self._param_map) == 0: # 如果为空说明还没有初始化 - func_spect = inspect.getfullargspec(self.get_loss) - func_args = [arg for arg in func_spect.args if arg != 'self'] - for arg in func_args: - self._param_map[arg] = arg - return self._param_map - - def get_loss(self, *args, **kwargs): - """ - - :param args: - :param kwargs: - :return: torch.Tensor - """ - raise NotImplementedError - - def _init_param_map(self, key_map=None, **kwargs): - r"""检查key_map和其他参数map,并将这些映射关系添加到self._param_map - - :param dict key_map: 表示key的映射关系 - :param kwargs: key word args里面的每一个的键-值对都会被构造成映射关系 - :return: None - """ - value_counter = defaultdict(set) - if key_map is not None: - if not isinstance(key_map, dict): - raise TypeError("key_map must be `dict`, got {}.".format(type(key_map))) - for key, value in key_map.items(): - if value is None: - self._param_map[key] = key - continue - if not isinstance(key, str): - raise TypeError(f"key in key_map must be `str`, not `{type(key)}`.") - if not isinstance(value, str): - raise TypeError(f"value in key_map must be `str`, not `{type(value)}`.") - self._param_map[key] = value - value_counter[value].add(key) - for key, value in kwargs.items(): - if value is None: - self._param_map[key] = key - continue - if not isinstance(value, str): - raise TypeError(f"in {key}={value}, value must be `str`, not `{type(value)}`.") - self._param_map[key] = value - value_counter[value].add(key) - for value, key_set in value_counter.items(): - if len(key_set) > 1: - raise ValueError(f"Several parameters:{key_set} are provided with one output {value}.") - - # check consistence between signature and _param_map - func_spect = inspect.getfullargspec(self.get_loss) - func_args = [arg for arg in func_spect.args if arg != 'self'] - for func_param, input_param in self._param_map.items(): - if func_param not in 
func_args: - raise NameError( - f"Parameter `{func_param}` is not in {_get_func_signature(self.get_loss)}. Please check the " - f"initialization parameters, or change its signature.") - - # evaluate should not have varargs. - # if func_spect.varargs: - # raise NameError(f"Delete `*{func_spect.varargs}` in {get_func_signature(self.get_loss)}(Do not use " - # f"positional argument.).") - - def __call__(self, pred_dict, target_dict, check=False): - r""" - :param dict pred_dict: 模型的forward函数返回的dict - :param dict target_dict: DataSet.batch_y里的键-值对所组成的dict - :param Boolean check: 每一次执行映射函数的时候是否检查映射表,默认为不检查 - :return: - """ - - if not self._checked: - # 1. check consistence between signature and _param_map - func_spect = inspect.getfullargspec(self.get_loss) - func_args = set([arg for arg in func_spect.args if arg != 'self']) - for func_arg, input_arg in self._param_map.items(): - if func_arg not in func_args: - raise NameError(f"`{func_arg}` not in {_get_func_signature(self.get_loss)}.") - - # 2. only part of the _param_map are passed, left are not - for arg in func_args: - if arg not in self._param_map: - self._param_map[arg] = arg # This param does not need mapping. - self._evaluate_args = func_args - self._reverse_param_map = {input_arg: func_arg for func_arg, input_arg in self._param_map.items()} - - mapped_pred_dict = {} - mapped_target_dict = {} - for input_arg, mapped_arg in self._reverse_param_map.items(): - if input_arg in pred_dict: - mapped_pred_dict[mapped_arg] = pred_dict[input_arg] - if input_arg in target_dict: - mapped_target_dict[mapped_arg] = target_dict[input_arg] - - # missing - if not self._checked: - duplicated = [] - for input_arg, mapped_arg in self._reverse_param_map.items(): - if input_arg in pred_dict and input_arg in target_dict: - duplicated.append(input_arg) - check_res = _check_arg_dict_list(self.get_loss, [mapped_pred_dict, mapped_target_dict]) - # replace missing. 
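# --- Illustrative sketch (not from the original file): the key remapping LossBase.__call__ performs.
# --- _param_map maps get_loss() argument names to dict keys; the reversed map renames incoming keys.
param_map = {'pred': 'output', 'target': 'label'}     # get_loss argument -> key provided by model/dataset
reverse_map = {v: k for k, v in param_map.items()}    # provided key -> get_loss argument
pred_dict, target_dict = {'output': 0.3}, {'label': 1.0}
kwargs = {reverse_map[k]: v for d in (pred_dict, target_dict) for k, v in d.items() if k in reverse_map}
# kwargs == {'pred': 0.3, 'target': 1.0}, i.e. exactly what get_loss(pred=..., target=...) receives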
- missing = check_res.missing - replaced_missing = list(missing) - for idx, func_arg in enumerate(missing): - # Don't delete `` in this information, nor add `` - replaced_missing[idx] = f"{self._param_map[func_arg]}" + f"(assign to `{func_arg}` " \ - f"in `{self.__class__.__name__}`)" - - check_res = _CheckRes(missing=replaced_missing, - unused=check_res.unused, - duplicated=duplicated, - required=check_res.required, - all_needed=check_res.all_needed, - varargs=check_res.varargs) - - if check_res.missing or check_res.duplicated: - raise _CheckError(check_res=check_res, - func_signature=_get_func_signature(self.get_loss)) - self._checked = True - - refined_args = _build_args(self.get_loss, **mapped_pred_dict, **mapped_target_dict) - - loss = self.get_loss(**refined_args) - self._checked = True - - return loss - - -class LossFunc(LossBase): - r""" - 提供给用户使用自定义损失函数的类 - - :param func: 用户自行定义的损失函数,应当为一个函数。 - :param dict key_map: 参数映射表。键为Model/DataSet参数名,值为损失函数参数名。 - fastNLP的trainer将在训练时从模型返回值或者训练数据DataSet的target=True的field中 - 找到相对应的参数名为value的参数,并传入func中作为参数名为key的参数 - :param kwargs: 除了参数映射表以外可以用key word args的方式设置参数映射关系 - - 使用方法:: - - import torch.nn.functional as F - loss_func = LossFunc(F.cross_entropy, input="pred", target="label") - # 这表示构建了一个损失函数类,由func计算损失函数,其中将从模型返回值或者DataSet的target=True的field - # 当中找到一个参数名为`pred`的参数传入func一个参数名为`input`的参数;找到一个参数名为`label`的参数 - # 传入func作为一个名为`target`的参数 - - """ - - def __init__(self, func, key_map=None, **kwargs): - - super(LossFunc, self).__init__() - _check_function_or_method(func) - self.get_loss = func - if key_map is not None: - if not isinstance(key_map, dict): - raise RuntimeError(f"Loss error: key_map except a {type({})} but got a {type(key_map)}") - self._init_param_map(key_map, **kwargs) - - -class CrossEntropyLoss(LossBase): - r""" - 交叉熵损失函数 - - :param pred: 参数映射表中 `pred` 的映射关系,None表示映射关系为 `pred` -> `pred` - :param target: 参数映射表中 `target` 的映射关系,None表示映射关系为 `target` -> `target` - :param seq_len: 句子的长度, 长度之外的token不会计算loss。 - :param int class_in_dim: 在序列标注的场景中,pred可能的shape为(batch_size, max_len, num_classes) - 或(batch_size, num_classes, max_len), CrossEntropyLoss需要知道哪一维是class的维度以计算loss。如果为-1,就根据pred的第 - 二维是否等于target的第二维来判断是否需要交换pred的第二维和第三维,因为target的第二维是length的维度,如果这一维度上和pred相等, - 那么pred可能第二维也是长度维(存在误判的可能,如果有误判的情况,请显示设置该值)。其它大于0的值则认为该维度是class的维度。 - :param ignore_idx: padding的index,在计算loss时将忽略target中标号为padding_idx的内容, 可以通过该值代替 - 传入seq_len. - :param str reduction: 支持 `mean` ,`sum` 和 `none` . 
- - Example:: - - loss = CrossEntropyLoss(pred='pred', target='label', padding_idx=0) - - """ - - def __init__(self, pred=None, target=None, seq_len=None, class_in_dim=-1, ignore_idx=-100, reduction='mean', **kwargs): - super(CrossEntropyLoss, self).__init__() - self._init_param_map(pred=pred, target=target, seq_len=seq_len) - ignore_idx = kwargs.pop('padding_idx', ignore_idx) - self.ignore_idx = ignore_idx - assert reduction in ('mean', 'sum', 'none') - self.reduction = reduction - self.class_in_dim = class_in_dim - - def get_loss(self, pred, target, seq_len=None): - if seq_len is not None and target.dim()>1: - mask = seq_len_to_mask(seq_len, max_len=target.size(1)).eq(False) - target = target.masked_fill(mask, self.ignore_idx) - - if pred.dim() > 2: - if self.class_in_dim == -1: - if pred.size(1) != target.size(1): # 有可能顺序替换了 - pred = pred.transpose(1, 2) - else: - pred = pred.transpose(-1, self.class_in_dim) - pred = pred.reshape(-1, pred.size(-1)) - target = target.reshape(-1) - - return F.cross_entropy(input=pred, target=target, - ignore_index=self.ignore_idx, reduction=self.reduction) - - -class L1Loss(LossBase): - r""" - L1损失函数 - - :param pred: 参数映射表中 `pred` 的映射关系,None表示映射关系为 `pred` -> `pred` - :param target: 参数映射表中 `target` 的映射关系,None表示映射关系为 `target` >`target` - :param str reduction: 支持'mean','sum'和'none'. - - """ - - def __init__(self, pred=None, target=None, reduction='mean'): - super(L1Loss, self).__init__() - self._init_param_map(pred=pred, target=target) - assert reduction in ('mean', 'sum', 'none') - self.reduction = reduction - - def get_loss(self, pred, target): - return F.l1_loss(input=pred, target=target, reduction=self.reduction) - - -class MSELoss(LossBase): - r""" - MSE损失函数 - - :param pred: 参数映射表中 `pred` 的映射关系,None表示映射关系为 `pred` -> `pred` - :param target: 参数映射表中 `target` 的映射关系,None表示映射关系为 `target` >`target` - :param str reduction: 支持'mean','sum'和'none'. - - """ - - def __init__(self, pred=None, target=None, reduction='mean'): - super(MSELoss, self).__init__() - self._init_param_map(pred=pred, target=target) - assert reduction in ('mean', 'sum', 'none') - self.reduction = reduction - - def get_loss(self, pred, target): - return F.mse_loss(input=pred, target=target, reduction=self.reduction) - - -class BCELoss(LossBase): - r""" - 二分类交叉熵损失函数 - - :param pred: 参数映射表中 `pred` 的映射关系,None表示映射关系为 `pred` -> `pred` - :param target: 参数映射表中 `target` 的映射关系,None表示映射关系为 `target` -> `target` - :param str reduction: 支持 `mean` ,`sum` 和 `none` . - """ - - def __init__(self, pred=None, target=None, reduction='mean'): - super(BCELoss, self).__init__() - self._init_param_map(pred=pred, target=target) - assert reduction in ('mean', 'sum', 'none') - self.reduction = reduction - - def get_loss(self, pred, target): - return F.binary_cross_entropy(input=pred, target=target, reduction=self.reduction) - - -class BCEWithLogits(LossBase): - r""" - 二分类交叉熵损失函数, 传入数据之前不需要做sigmoid操作 - - :param pred: 参数映射表中 `pred` 的映射关系,None表示映射关系为 `pred` -> `pred` - :param target: 参数映射表中 `target` 的映射关系,None表示映射关系为 `target` -> `target` - :param int class_in_dim: 在序列标注的场景中,pred可能的shape为(batch_size, max_len, num_classes) - 或(batch_size, num_classes, max_len), BCEWithLogits需要知道哪一维是class的维度以计算loss。如果为-1,就根据pred的第 - 二维是否等于target的第二维来判断是否需要交换pred的第二维和第三维,因为target的第二维是length的维度,如果这一维度上和pred相等, - 那么pred可能第二维也是长度维(存在误判的可能,如果有误判的情况,请显示设置该值)。其它大于0的值则认为该维度是class的维度。 - :param str reduction: 支持 `mean` ,`sum` 和 `none` . 
- """ - - def __init__(self, pred=None, target=None, class_in_dim=-1, reduction='mean'): - super(BCEWithLogits, self).__init__() - self._init_param_map(pred=pred, target=target) - assert reduction in ('mean', 'sum', 'none') - self.reduction = reduction - self.class_in_dim = class_in_dim - - def get_loss(self, pred, target): - if pred.dim() > 2: - if self.class_in_dim == -1: - if pred.size(1) != target.size(1): # 有可能顺序替换了 - pred = pred.transpose(1, 2) - else: - pred = pred.transpose(-1, self.class_in_dim) - pred = pred.reshape(-1) - target = target.reshape(-1) - - return F.binary_cross_entropy_with_logits(input=pred, target=target, reduction=self.reduction) - - -class NLLLoss(LossBase): - r""" - 负对数似然损失函数 - """ - - def __init__(self, pred=None, target=None, seq_len=None, class_in_dim=-1, ignore_idx=-100, reduction='mean'): - r""" - - :param pred: 参数映射表中 `pred` 的映射关系,None表示映射关系为 `pred` -> `pred` - :param target: 参数映射表中 `target` 的映射关系,None表示映射关系为 `target` -> `target` - :param seq_len: 句子的长度, 长度之外的token不会计算loss。仅在输出为3d时需要 - :param int class_in_dim: 在序列标注的场景中,pred可能的shape为(batch_size, max_len, num_classes) - 或(batch_size, num_classes, max_len), CrossEntropyLoss需要知道哪一维是class的维度以计算loss。如果为-1,就根据pred的第 - 二维是否等于target的第二维来判断是否需要交换pred的第二维和第三维,因为target的第二维是length的维度,如果这一维度上和pred相等, - 那么pred可能第二维也是长度维(存在误判的可能,如果有误判的情况,请显示设置该值)。其它大于0的值则认为该维度是class的维度。 - :param ignore_idx: ignore的index,在计算loss时将忽略target中标号为ignore_idx的内容, 可以通过该值代替 - 传入seq_len. - :param str reduction: 支持 `mean` ,`sum` 和 `none` . - """ - super(NLLLoss, self).__init__() - self._init_param_map(pred=pred, target=target, seq_len=seq_len) - assert reduction in ('mean', 'sum', 'none') - self.reduction = reduction - self.ignore_idx = ignore_idx - self.class_in_dim = class_in_dim - - def get_loss(self, pred, target, seq_len=None): - if seq_len is not None and target.dim()>1: - mask = seq_len_to_mask(seq_len, max_len=target.size(1)).eq(False) - target = target.masked_fill(mask, self.ignore_idx) - - if pred.dim() > 2: - if self.class_in_dim == -1: - if pred.size(1) != target.size(1): # 有可能顺序替换了 - pred = pred.transpose(1, 2) - else: - pred = pred.transpose(-1, self.class_in_dim) - pred = pred.reshape(-1, pred.size(-1)) - target = target.reshape(-1) - - return F.nll_loss(input=pred, target=target, ignore_index=self.ignore_idx, reduction=self.reduction) - - -class LossInForward(LossBase): - r""" - 从forward()函数返回结果中获取loss - """ - - def __init__(self, loss_key=Const.LOSS): - r""" - - :param str loss_key: 在forward函数中loss的键名,默认为loss - """ - super().__init__() - if not isinstance(loss_key, str): - raise TypeError(f"Only str allowed for loss_key, got {type(loss_key)}.") - self.loss_key = loss_key - - def get_loss(self, **kwargs): - if self.loss_key not in kwargs: - check_res = _CheckRes( - missing=[self.loss_key + f"(assign to `{self.loss_key}` in `{self.__class__.__name__}`"], - unused=[], - duplicated=[], - required=[], - all_needed=[], - varargs=[]) - raise _CheckError(check_res=check_res, func_signature=_get_func_signature(self.get_loss)) - return kwargs[self.loss_key] - - def __call__(self, pred_dict, target_dict, check=False): - - loss = self.get_loss(**pred_dict) - - if not (isinstance(loss, torch.Tensor) and len(loss.size()) == 0): - if not isinstance(loss, torch.Tensor): - raise TypeError(f"Loss excepted to be a torch.Tensor, got {type(loss)}") - loss = torch.sum(loss) / (loss.view(-1)).size(0) - # raise RuntimeError(f"The size of loss excepts to be torch.Size([]), got {loss.size()}") - - return loss - - -class CMRC2018Loss(LossBase): - r""" - 
用于计算CMRC2018中文问答任务。 - - """ - def __init__(self, target_start=None, target_end=None, context_len=None, pred_start=None, pred_end=None, - reduction='mean'): - super().__init__() - - assert reduction in ('mean', 'sum') - - self._init_param_map(target_start=target_start, target_end=target_end, context_len=context_len, - pred_start=pred_start, pred_end=pred_end) - self.reduction = reduction - - def get_loss(self, target_start, target_end, context_len, pred_start, pred_end): - r""" - - :param target_start: batch_size - :param target_end: batch_size - :param context_len: batch_size - :param pred_start: batch_size x max_len - :param pred_end: batch_size x max_len - :return: - """ - batch_size, max_len = pred_end.size() - mask = seq_len_to_mask(context_len, max_len).eq(False) - - pred_start = pred_start.masked_fill(mask, float('-inf')) - pred_end = pred_end.masked_fill(mask, float('-inf')) - - start_loss = F.cross_entropy(pred_start, target_start, reduction='sum') - end_loss = F.cross_entropy(pred_end, target_end, reduction='sum') - - loss = start_loss + end_loss - - if self.reduction == 'mean': - loss = loss / batch_size - - return loss/2 - -def _prepare_losser(losser): - if losser is None: - losser = LossInForward() - return losser - elif isinstance(losser, LossBase): - return losser - else: - raise TypeError(f"Type of loss should be `fastNLP.LossBase`, got {type(losser)}") diff --git a/fastNLP/core/metrics.py b/fastNLP/core/metrics.py deleted file mode 100644 index 31f69cb9..00000000 --- a/fastNLP/core/metrics.py +++ /dev/null @@ -1,1246 +0,0 @@ -r""" -metrics 模块实现了 fastNLP 所需的各种常用衡量指标,一般做为 :class:`~fastNLP.Trainer` 的参数使用。 - -""" -__all__ = [ - "MetricBase", - "AccuracyMetric", - "SpanFPreRecMetric", - "CMRC2018Metric", - "ClassifyFPreRecMetric", - "ConfusionMatrixMetric" -] - -import inspect -import warnings -from abc import abstractmethod -from collections import defaultdict -from typing import Union -from copy import deepcopy -import re - -import numpy as np -import torch - -from .utils import _CheckError -from .utils import _CheckRes -from .utils import _build_args -from .utils import _check_arg_dict_list -from .utils import _get_func_signature -from .utils import seq_len_to_mask -from .vocabulary import Vocabulary -from .utils import ConfusionMatrix - - -class MetricBase(object): - r""" - 所有metrics的基类,所有的传入到Trainer, Tester的Metric需要继承自该对象,需要覆盖写入evaluate(), get_metric()方法。 - - evaluate(xxx)中传入的是一个batch的数据。 - - get_metric(xxx)当所有数据处理完毕,调用该方法得到最终的metric值 - - 以分类问题中,Accuracy计算为例 - 假设model的forward返回dict中包含 `pred` 这个key, 并且该key需要用于Accuracy:: - - class Model(nn.Module): - def __init__(xxx): - # do something - def forward(self, xxx): - # do something - return {'pred': pred, 'other_keys':xxx} # pred's shape: batch_size x num_classes - - 假设dataset中 `label` 这个field是需要预测的值,并且该field被设置为了target - 对应的AccMetric可以按如下的定义, version1, 只使用这一次:: - - class AccMetric(MetricBase): - def __init__(self): - super().__init__() - - # 根据你的情况自定义指标 - self.corr_num = 0 - self.total = 0 - - def evaluate(self, label, pred): # 这里的名称需要和dataset中target field与model返回的key是一样的,不然找不到对应的value - # dev或test时,每个batch结束会调用一次该方法,需要实现如何根据每个batch累加metric - self.total += label.size(0) - self.corr_num += label.eq(pred).sum().item() - - def get_metric(self, reset=True): # 在这里定义如何计算metric - acc = self.corr_num/self.total - if reset: # 是否清零以便重新计算 - self.corr_num = 0 - self.total = 0 - return {'acc': acc} # 需要返回一个dict,key为该metric的名称,该名称会显示到Trainer的progress bar中 - - - 
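# --- Illustrative sketch (not from the original file): driving the version-1 AccMetric above by hand
# --- with toy tensors, outside of a Trainer; it assumes the AccMetric class above has been defined.
import torch
metric = AccMetric()
metric.evaluate(label=torch.tensor([1, 0, 1]), pred=torch.tensor([1, 1, 1]))  # batch 1: 2/3 correct
metric.evaluate(label=torch.tensor([0, 0]), pred=torch.tensor([0, 1]))        # batch 2: 1/2 correct
print(metric.get_metric())  # {'acc': 0.6}: 3 correct out of 5; the counters are reset afterwards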
version 2. If the metric needs to be reused -- say the next time AccMetric is used, the target field in the
-    dataset is called `y` instead of `label`, or the model's output key is not `pred`::
-
-        class AccMetric(MetricBase):
-            def __init__(self, label=None, pred=None):
-                # Suppose that in another scenario the target field is called `y` and the model's output key is
-                # `pred_y`. Then it suffices to initialize the metric as acc_metric = AccMetric(label='y', pred='pred_y').
-                # When initialized as acc_metric = AccMetric(), i.e. label=None and pred=None, fastNLP will simply
-                # use 'label' and 'pred' as the keys to fetch the corresponding values.
-                super().__init__()
-                self._init_param_map(label=label, pred=pred)  # registers label and pred; only the parameter names
-                # used by evaluate() need to be registered. Without this registration the behaviour is the same as version 1.
-
-                # customize the statistics for your own case
-                self.corr_num = 0
-                self.total = 0
-
-            def evaluate(self, label, pred):  # the parameter names here must match those registered via self._init_param_map()
-                # called once per batch during dev or test; implement how the metric accumulates over each batch
-                self.total += label.size(0)
-                self.corr_num += label.eq(pred).sum().item()
-
-            def get_metric(self, reset=True):  # define how the final metric is computed
-                acc = self.corr_num/self.total
-                if reset:  # whether to clear the counters so the metric can be computed afresh
-                    self.corr_num = 0
-                    self.total = 0
-                return {'acc': acc}  # must return a dict; the key is the metric's name, shown in the Trainer's progress bar
-
-
-    ``MetricBase`` performs checks on the input dicts ``pred_dict`` and ``target_dict``.
-    ``pred_dict`` is the return value of the model's ``forward()`` or ``predict()`` function.
-    ``target_dict`` is the ground truth from the DataSet; a field counts as ground truth when its ``is_target`` is set to True.
-
-    ``MetricBase`` performs the following type checks:
-
-    1. whether self.evaluate has varargs, which is not supported.
-    2. whether any parameter required by self.evaluate is missing from both ``pred_dict`` and ``target_dict``.
-    3. whether any parameter required by self.evaluate is present in both ``pred_dict`` and ``target_dict``.
-
-    In addition, before the parameters are passed to self.evaluate, this function checks ``pred_dict`` and
-    ``target_dict`` for parameters that are not used. If kwargs is a parameter of self.evaluate, no check is performed.
-
-
-    self.evaluate computes the metric over one batch and accumulates it; it has no return value.
-    self.get_metric aggregates the current statistics and returns the result; the return value must be a dict
-    whose keys are the metric names and whose values are the metric values.
-
-    """
-
-    def __init__(self):
-        self._param_map = {}  # key is param in function, value is input param.
-        self._checked = False
-        self._metric_name = self.__class__.__name__
-
-    @property
-    def param_map(self):
-        if len(self._param_map) == 0:  # empty means it has not been initialized yet
-            func_spect = inspect.getfullargspec(self.evaluate)
-            func_args = [arg for arg in func_spect.args if arg != 'self']
-            for arg in func_args:
-                self._param_map[arg] = arg
-        return self._param_map
-
-    @abstractmethod
-    def evaluate(self, *args, **kwargs):
-        raise NotImplementedError
-
-    @abstractmethod
-    def get_metric(self, reset=True):
-        raise NotImplementedError
-
-    def set_metric_name(self, name: str):
-        r"""
-        Set the metric's name; the default is the Metric's class name.
- - :param str name: - :return: self - """ - self._metric_name = name - return self - - def get_metric_name(self): - r""" - 返回metric的名称 - - :return: - """ - return self._metric_name - - def _init_param_map(self, key_map=None, **kwargs): - r"""检查key_map和其他参数map,并将这些映射关系添加到self._param_map - - :param dict key_map: 表示key的映射关系 - :param kwargs: key word args里面的每一个的键-值对都会被构造成映射关系 - :return: None - """ - value_counter = defaultdict(set) - if key_map is not None: - if not isinstance(key_map, dict): - raise TypeError("key_map must be `dict`, got {}.".format(type(key_map))) - for key, value in key_map.items(): - if value is None: - self._param_map[key] = key - continue - if not isinstance(key, str): - raise TypeError(f"key in key_map must be `str`, not `{type(key)}`.") - if not isinstance(value, str): - raise TypeError(f"value in key_map must be `str`, not `{type(value)}`.") - self._param_map[key] = value - value_counter[value].add(key) - for key, value in kwargs.items(): - if value is None: - self._param_map[key] = key - continue - if not isinstance(value, str): - raise TypeError(f"in {key}={value}, value must be `str`, not `{type(value)}`.") - self._param_map[key] = value - value_counter[value].add(key) - for value, key_set in value_counter.items(): - if len(key_set) > 1: - raise ValueError(f"Several parameters:{key_set} are provided with one output {value}.") - - # check consistence between signature and _param_map - func_spect = inspect.getfullargspec(self.evaluate) - func_args = [arg for arg in func_spect.args if arg != 'self'] - for func_param, input_param in self._param_map.items(): - if func_param not in func_args: - raise NameError( - f"Parameter `{func_param}` is not in {_get_func_signature(self.evaluate)}. Please check the " - f"initialization parameters, or change its signature.") - - def __call__(self, pred_dict, target_dict): - r""" - 这个方法会调用self.evaluate 方法. - 在调用之前,会进行以下检测: - 1. self.evaluate当中是否有varargs, 这是不支持的. - 2. self.evaluate当中所需要的参数是否既不在``pred_dict``也不在``target_dict``. - 3. self.evaluate当中所需要的参数是否既在``pred_dict``也在``target_dict``. - - 除此以外,在参数被传入self.evaluate以前,这个函数会检测``pred_dict``和``target_dict``当中没有被用到的参数 - 如果kwargs是self.evaluate的参数,则不会检测 - :param pred_dict: 模型的forward函数或者predict函数返回的dict - :param target_dict: DataSet.batch_y里的键-值对所组成的dict(即is_target=True的fields的内容) - :return: - """ - - if not self._checked: - if not callable(self.evaluate): - raise TypeError(f"{self.__class__.__name__}.evaluate has to be callable, not {type(self.evaluate)}.") - # 1. check consistence between signature and _param_map - func_spect = inspect.getfullargspec(self.evaluate) - func_args = set([arg for arg in func_spect.args if arg != 'self']) - for func_arg, input_arg in self._param_map.items(): - if func_arg not in func_args: - raise NameError(f"`{func_arg}` not in {_get_func_signature(self.evaluate)}.") - - # 2. only part of the _param_map are passed, left are not - for arg in func_args: - if arg not in self._param_map: - self._param_map[arg] = arg # This param does not need mapping. - self._evaluate_args = func_args - self._reverse_param_map = {input_arg: func_arg for func_arg, input_arg in self._param_map.items()} - - # need to wrap inputs in dict. 
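# --- Illustrative sketch (not from the original file): the calling contract that MetricBase.__call__
# --- implements; AccuracyMetric is real fastNLP API, while the tensors here are toy values.
import torch
from fastNLP import AccuracyMetric

metric = AccuracyMetric()  # default mapping: evaluate's pred/target come from the keys 'pred'/'target'
for pred, target in [([1, 0], [1, 1]), ([0, 1], [0, 1])]:
    metric(pred_dict={'pred': torch.tensor(pred)}, target_dict={'target': torch.tensor(target)})
print(metric.get_metric(reset=True))  # {'acc': 0.75}, accumulated over both batches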
- mapped_pred_dict = {} - mapped_target_dict = {} - for input_arg, mapped_arg in self._reverse_param_map.items(): - if input_arg in pred_dict: - mapped_pred_dict[mapped_arg] = pred_dict[input_arg] - if input_arg in target_dict: - mapped_target_dict[mapped_arg] = target_dict[input_arg] - - # missing - if not self._checked: - duplicated = [] - for input_arg, mapped_arg in self._reverse_param_map.items(): - if input_arg in pred_dict and input_arg in target_dict: - duplicated.append(input_arg) - check_res = _check_arg_dict_list(self.evaluate, [mapped_pred_dict, mapped_target_dict]) - # only check missing. - # replace missing. - missing = check_res.missing - replaced_missing = list(missing) - for idx, func_arg in enumerate(missing): - # Don't delete `` in this information, nor add `` - replaced_missing[idx] = f"{self._param_map[func_arg]}" + f"(assign to `{func_arg}` " \ - f"in `{self.__class__.__name__}`)" - - check_res = _CheckRes(missing=replaced_missing, - unused=check_res.unused, - duplicated=duplicated, - required=check_res.required, - all_needed=check_res.all_needed, - varargs=check_res.varargs) - - if check_res.missing or check_res.duplicated: - raise _CheckError(check_res=check_res, - func_signature=_get_func_signature(self.evaluate)) - self._checked = True - refined_args = _build_args(self.evaluate, **mapped_pred_dict, **mapped_target_dict) - - self.evaluate(**refined_args) - - return - - -class ConfusionMatrixMetric(MetricBase): - r""" - 分类问题计算混淆矩阵的Metric(其它的Metric参见 :mod:`fastNLP.core.metrics` ) - 最后返回结果为:: - - dict,{'confusion_matrix': ConfusionMatrix实例} - - ConfusionMatrix实例的print()函数将输出矩阵字符串。 - - .. code :: - - pred_dict = {"pred": torch.Tensor([2,1,3])} - target_dict = {'target': torch.Tensor([2,2,1])} - metric = ConfusionMatrixMetric() - metric(pred_dict=pred_dict, target_dict=target_dict, ) - print(metric.get_metric()) - - .. code :: - - {'confusion_matrix': - target 1.0 2.0 3.0 all - pred - 1.0 0 1 0 1 - 2.0 0 1 0 1 - 3.0 1 0 0 1 - all 1 2 0 3 - } - - """ - def __init__(self, - vocab=None, - pred=None, - target=None, - seq_len=None, - print_ratio=False - ): - r""" - :param vocab: vocab词表类,要求有to_word()方法。 - :param pred: 参数映射表中 `pred` 的映射关系,None表示映射关系为 `pred` -> `pred` - :param target: 参数映射表中 `target` 的映射关系,None表示映射关系为 `target` -> `target` - :param seq_len: 参数映射表中 `seq_len` 的映射关系,None表示映射关系为 `seq_len` -> `seq_len` - :param print_ratio: 限制print的输出,false only for result, true for result, percent(dim=0), percent(dim = 1) - """ - super().__init__() - self._init_param_map(pred=pred, target=target, seq_len=seq_len) - self.confusion_matrix = ConfusionMatrix( - vocab=vocab, - print_ratio=print_ratio, - ) - - def evaluate(self, pred, target, seq_len=None): - r""" - evaluate函数将针对一个批次的预测结果做评价指标的累计 - - :param torch.Tensor pred: 预测的tensor, tensor的形状可以是torch.Size([B,]), torch.Size([B, n_classes]), - torch.Size([B, max_len]), 或者torch.Size([B, max_len, n_classes]) - :param torch.Tensor target: 真实值的tensor, tensor的形状可以是Element's can be: torch.Size([B,]), - torch.Size([B,]), torch.Size([B, max_len]), 或者torch.Size([B, max_len]) - :param torch.Tensor seq_len: 序列长度标记, 标记的形状可以是None, torch.Size([B]), 或者torch.Size([B]). 
- """ - if not isinstance(pred, torch.Tensor): - raise TypeError( - f"`pred` in {_get_func_signature(self.evaluate)} must be torch.Tensor," - f"got {type(pred)}.") - if not isinstance(target, torch.Tensor): - raise TypeError( - f"`target` in {_get_func_signature(self.evaluate)} must be torch.Tensor," - f"got {type(target)}.") - - if seq_len is not None and not isinstance(seq_len, torch.Tensor): - raise TypeError( - f"`seq_lens` in {_get_func_signature(self.evaluate)} must be torch.Tensor," - f"got {type(seq_len)}.") - - if pred.dim() == target.dim(): - if torch.numel(pred) !=torch.numel(target): - raise RuntimeError(f"In {_get_func_signature(self.evaluate)}, when pred have same dimensions with target, they should have same element numbers. while target have " - f"element numbers:{torch.numel(target)}, pred have element numbers: {torch.numel(pred)}") - - pass - elif pred.dim() == target.dim() + 1: - pred = pred.argmax(dim=-1) - if seq_len is None and target.dim() > 1: - warnings.warn("You are not passing `seq_len` to exclude pad.") - else: - raise RuntimeError( - f"In {_get_func_signature(self.evaluate)}, when pred have " - f"size:{pred.size()}, target should have size: {pred.size()} or " - f"{pred.size()[:-1]}, got {target.size()}.") - - target = target.to(pred) - if seq_len is not None and target.dim() > 1: - for p, t, l in zip(pred.tolist(), target.tolist(), - seq_len.tolist()): - l = int(l) - self.confusion_matrix.add_pred_target(p[:l], t[:l]) - elif target.dim() > 1: #对于没有传入seq_len,但是又是高维的target,按全长输出 - for p, t in zip(pred.tolist(), target.tolist()): - self.confusion_matrix.add_pred_target(p, t) - else: - self.confusion_matrix.add_pred_target(pred.tolist(), - target.tolist()) - - def get_metric(self, reset=True): - r""" - get_metric函数将根据evaluate函数累计的评价指标统计量来计算最终的评价结果. - :param bool reset: 在调用完get_metric后是否清空评价指标统计量. - :return dict evaluate_result: {"confusion_matrix": ConfusionMatrix} - """ - confusion = {'confusion_matrix': deepcopy(self.confusion_matrix)} - if reset: - self.confusion_matrix.clear() - return confusion - - - - - -class AccuracyMetric(MetricBase): - r""" - 准确率Metric(其它的Metric参见 :mod:`fastNLP.core.metrics` ) - """ - - def __init__(self, pred=None, target=None, seq_len=None): - r""" - - :param pred: 参数映射表中 `pred` 的映射关系,None表示映射关系为 `pred` -> `pred` - :param target: 参数映射表中 `target` 的映射关系,None表示映射关系为 `target` -> `target` - :param seq_len: 参数映射表中 `seq_len` 的映射关系,None表示映射关系为 `seq_len` -> `seq_len` - """ - - super().__init__() - - self._init_param_map(pred=pred, target=target, seq_len=seq_len) - - self.total = 0 - self.acc_count = 0 - - def evaluate(self, pred, target, seq_len=None): - r""" - evaluate函数将针对一个批次的预测结果做评价指标的累计 - - :param torch.Tensor pred: 预测的tensor, tensor的形状可以是torch.Size([B,]), torch.Size([B, n_classes]), - torch.Size([B, max_len]), 或者torch.Size([B, max_len, n_classes]) - :param torch.Tensor target: 真实值的tensor, tensor的形状可以是Element's can be: torch.Size([B,]), - torch.Size([B,]), torch.Size([B, max_len]), 或者torch.Size([B, max_len]) - :param torch.Tensor seq_len: 序列长度标记, 标记的形状可以是None, None, torch.Size([B]), 或者torch.Size([B]). - 如果mask也被传进来的话seq_len会被忽略. 
- - """ - # TODO 这里报错需要更改,因为pred是啥用户并不知道。需要告知用户真实的value - if not isinstance(pred, torch.Tensor): - raise TypeError(f"`pred` in {_get_func_signature(self.evaluate)} must be torch.Tensor," - f"got {type(pred)}.") - if not isinstance(target, torch.Tensor): - raise TypeError(f"`target` in {_get_func_signature(self.evaluate)} must be torch.Tensor," - f"got {type(target)}.") - - if seq_len is not None and not isinstance(seq_len, torch.Tensor): - raise TypeError(f"`seq_lens` in {_get_func_signature(self.evaluate)} must be torch.Tensor," - f"got {type(seq_len)}.") - - if seq_len is not None and target.dim() > 1: - max_len = target.size(1) - masks = seq_len_to_mask(seq_len=seq_len, max_len=max_len) - else: - masks = None - - if pred.dim() == target.dim(): - if torch.numel(pred) !=torch.numel(target): - raise RuntimeError(f"In {_get_func_signature(self.evaluate)}, when pred have same dimensions with target, they should have same element numbers. while target have " - f"element numbers:{torch.numel(target)}, pred have element numbers: {torch.numel(pred)}") - - pass - elif pred.dim() == target.dim() + 1: - pred = pred.argmax(dim=-1) - if seq_len is None and target.dim() > 1: - warnings.warn("You are not passing `seq_len` to exclude pad when calculate accuracy.") - else: - raise RuntimeError(f"In {_get_func_signature(self.evaluate)}, when pred have " - f"size:{pred.size()}, target should have size: {pred.size()} or " - f"{pred.size()[:-1]}, got {target.size()}.") - - target = target.to(pred) - if masks is not None: - self.acc_count += torch.sum(torch.eq(pred, target).masked_fill(masks.eq(False), 0)).item() - self.total += torch.sum(masks).item() - else: - self.acc_count += torch.sum(torch.eq(pred, target)).item() - self.total += np.prod(list(pred.size())) - - def get_metric(self, reset=True): - r""" - get_metric函数将根据evaluate函数累计的评价指标统计量来计算最终的评价结果. - - :param bool reset: 在调用完get_metric后是否清空评价指标统计量. - :return dict evaluate_result: {"acc": float} - """ - evaluate_result = {'acc': round(float(self.acc_count) / (self.total + 1e-12), 6)} - if reset: - self.acc_count = 0 - self.total = 0 - return evaluate_result - -class ClassifyFPreRecMetric(MetricBase): - r""" - 分类问题计算FPR值的Metric(其它的Metric参见 :mod:`fastNLP.core.metrics` ) - - 最后得到的metric结果为:: - - { - 'f': xxx, # 这里使用f考虑以后可以计算f_beta值 - 'pre': xxx, - 'rec':xxx - } - - 若only_gross=False, 即还会返回各个label的metric统计值:: - - { - 'f': xxx, - 'pre': xxx, - 'rec':xxx, - 'f-label': xxx, - 'pre-label': xxx, - 'rec-label':xxx, - ... - } - - """ - - def __init__(self, tag_vocab=None, pred=None, target=None, seq_len=None, ignore_labels=None, - only_gross=True, f_type='micro', beta=1): - r""" - - :param tag_vocab: 标签的 :class:`~fastNLP.Vocabulary` . 默认值为None。若为None则使用数字来作为标签内容,否则使用vocab来作为标签内容。 - :param str pred: 用该key在evaluate()时从传入dict中取出prediction数据。 为None,则使用 `pred` 取数据 - :param str target: 用该key在evaluate()时从传入dict中取出target数据。 为None,则使用 `target` 取数据 - :param str seq_len: 用该key在evaluate()时从传入dict中取出sequence length数据。为None,则使用 `seq_len` 取数据。 - :param list ignore_labels: str 组成的list. 这个list中的class不会被用于计算。例如在POS tagging时传入['NN'],则不会计算'NN'个label - :param bool only_gross: 是否只计算总的f1, precision, recall的值;如果为False,不仅返回总的f1, pre, rec, 还会返回每个label的f1, pre, rec - :param str f_type: `micro` 或 `macro` . `micro` :通过先计算总体的TP,FN和FP的数量,再计算f, precision, recall; `macro` : 分布计算每个类别的f, precision, recall,然后做平均(各类别f的权重相同) - :param float beta: f_beta分数, :math:`f_{beta} = \frac{(1 + {beta}^{2})*(pre*rec)}{({beta}^{2}*pre + rec)}` . 
Common values are `beta=0.5, 1, 2`; with 0.5, precision is weighted more heavily than recall, with 1 they
-        are weighted equally, and with 2, recall is weighted more heavily than precision.
-        """
-
-        if tag_vocab:
-            if not isinstance(tag_vocab, Vocabulary):
-                raise TypeError("tag_vocab can only be fastNLP.Vocabulary, not {}.".format(type(tag_vocab)))
-        if f_type not in ('micro', 'macro'):
-            raise ValueError("f_type only supports `micro` or `macro`, got {}.".format(f_type))
-
-        self.ignore_labels = ignore_labels
-        self.f_type = f_type
-        self.beta = beta
-        self.beta_square = self.beta ** 2
-        self.only_gross = only_gross
-
-        super().__init__()
-        self._init_param_map(pred=pred, target=target, seq_len=seq_len)
-
-        self.tag_vocab = tag_vocab
-
-        self._tp, self._fp, self._fn = defaultdict(int), defaultdict(int), defaultdict(int)
-        # tp: truth=T, classify=T; fp: truth=F, classify=T; fn: truth=T, classify=F
-
-    def evaluate(self, pred, target, seq_len=None):
-        r"""
-        Accumulate the evaluation statistics for one batch of predictions.
-
-        :param torch.Tensor pred: prediction tensor of shape torch.Size([B,]), torch.Size([B, n_classes]),
-            torch.Size([B, max_len]) or torch.Size([B, max_len, n_classes])
-        :param torch.Tensor target: ground-truth tensor of shape torch.Size([B,]) or torch.Size([B, max_len])
-        :param torch.Tensor seq_len: sequence lengths; either None or of shape torch.Size([B]).
-            If a mask is passed in as well, seq_len is ignored.
-
-        """
-        # TODO: improve this error message; the user does not know what `pred` refers to here,
-        #       so the actual offending value should be reported.
-        if not isinstance(pred, torch.Tensor):
-            raise TypeError(f"`pred` in {_get_func_signature(self.evaluate)} must be torch.Tensor,"
-                            f"got {type(pred)}.")
-        if not isinstance(target, torch.Tensor):
-            raise TypeError(f"`target` in {_get_func_signature(self.evaluate)} must be torch.Tensor,"
-                            f"got {type(target)}.")
-
-        if seq_len is not None and not isinstance(seq_len, torch.Tensor):
-            raise TypeError(f"`seq_lens` in {_get_func_signature(self.evaluate)} must be torch.Tensor,"
-                            f"got {type(seq_len)}.")
-
-        if seq_len is not None and target.dim() > 1:
-            max_len = target.size(1)
-            masks = seq_len_to_mask(seq_len=seq_len, max_len=max_len)
-        else:
-            masks = torch.ones_like(target).long().to(target.device)
-
-        masks = masks.eq(1)
-
-        if pred.dim() == target.dim():
-            if torch.numel(pred) != torch.numel(target):
-                raise RuntimeError(f"In {_get_func_signature(self.evaluate)}, when pred has the same number of "
-                                   f"dimensions as target, they should have the same number of elements; target "
-                                   f"has {torch.numel(target)} elements, pred has {torch.numel(pred)}.")
-        elif pred.dim() == target.dim() + 1:
-            pred = pred.argmax(dim=-1)
-            if seq_len is None and target.dim() > 1:
-                warnings.warn("You are not passing `seq_len` to exclude pad when calculating accuracy.")
-        else:
-            raise RuntimeError(f"In {_get_func_signature(self.evaluate)}, when pred has "
-                               f"size:{pred.size()}, target should have size: {pred.size()} or "
-                               f"{pred.size()[:-1]}, got {target.size()}.")
-
-        target = target.to(pred)
-        target = target.masked_select(masks)
-        pred = pred.masked_select(masks)
-        target_idxes = set(target.reshape(-1).tolist())
-        for target_idx in target_idxes:
-            self._tp[target_idx] += torch.sum((pred == target_idx).long().masked_fill(target != target_idx, 0)).item()
-            self._fp[target_idx] += torch.sum((pred == target_idx).long().masked_fill(target == target_idx, 0)).item()
-            self._fn[target_idx] += torch.sum((pred != target_idx).long().masked_fill(target != target_idx, 0)).item()
-
-    def get_metric(self, reset=True):
-        r"""
-        Compute the final evaluation result from the statistics accumulated by evaluate().
-
-        :param bool reset: whether to clear the accumulated statistics once get_metric() has been called.
- :return dict evaluate_result: {"acc": float} - """ - evaluate_result = {} - if not self.only_gross or self.f_type == 'macro': - tags = set(self._fn.keys()) - tags.update(set(self._fp.keys())) - tags.update(set(self._tp.keys())) - f_sum = 0 - pre_sum = 0 - rec_sum = 0 - for tag in tags: - if self.tag_vocab is not None: - tag_name = self.tag_vocab.to_word(tag) - else: - tag_name = int(tag) - tp = self._tp[tag] - fn = self._fn[tag] - fp = self._fp[tag] - f, pre, rec = _compute_f_pre_rec(self.beta_square, tp, fn, fp) - f_sum += f - pre_sum += pre - rec_sum += rec - if not self.only_gross and tag != '': # tag!=''防止无tag的情况 - f_key = 'f-{}'.format(tag_name) - pre_key = 'pre-{}'.format(tag_name) - rec_key = 'rec-{}'.format(tag_name) - evaluate_result[f_key] = f - evaluate_result[pre_key] = pre - evaluate_result[rec_key] = rec - - if self.f_type == 'macro': - evaluate_result['f'] = f_sum / len(tags) - evaluate_result['pre'] = pre_sum / len(tags) - evaluate_result['rec'] = rec_sum / len(tags) - - if self.f_type == 'micro': - f, pre, rec = _compute_f_pre_rec(self.beta_square, - sum(self._tp.values()), - sum(self._fn.values()), - sum(self._fp.values())) - evaluate_result['f'] = f - evaluate_result['pre'] = pre - evaluate_result['rec'] = rec - - if reset: - self._tp = defaultdict(int) - self._fp = defaultdict(int) - self._fn = defaultdict(int) - - for key, value in evaluate_result.items(): - evaluate_result[key] = round(value, 6) - - return evaluate_result - - -def _bmes_tag_to_spans(tags, ignore_labels=None): - r""" - 给定一个tags的lis,比如['S-song', 'B-singer', 'M-singer', 'E-singer', 'S-moive', 'S-actor']。 - 返回[('song', (0, 1)), ('singer', (1, 4)), ('moive', (4, 5)), ('actor', (5, 6))] (左闭右开区间) - 也可以是单纯的['S', 'B', 'M', 'E', 'B', 'M', 'M',...]序列 - - :param tags: List[str], - :param ignore_labels: List[str], 在该list中的label将被忽略 - :return: List[Tuple[str, List[int, int]]]. [(label,[start, end])] - """ - ignore_labels = set(ignore_labels) if ignore_labels else set() - - spans = [] - prev_bmes_tag = None - for idx, tag in enumerate(tags): - tag = tag.lower() - bmes_tag, label = tag[:1], tag[2:] - if bmes_tag in ('b', 's'): - spans.append((label, [idx, idx])) - elif bmes_tag in ('m', 'e') and prev_bmes_tag in ('b', 'm') and label == spans[-1][0]: - spans[-1][1][1] = idx - else: - spans.append((label, [idx, idx])) - prev_bmes_tag = bmes_tag - return [(span[0], (span[1][0], span[1][1] + 1)) - for span in spans - if span[0] not in ignore_labels - ] - - -def _bmeso_tag_to_spans(tags, ignore_labels=None): - r""" - 给定一个tags的lis,比如['O', 'B-singer', 'M-singer', 'E-singer', 'O', 'O']。 - 返回[('singer', (1, 4))] (左闭右开区间) - - :param tags: List[str], - :param ignore_labels: List[str], 在该list中的label将被忽略 - :return: List[Tuple[str, List[int, int]]]. 
[(label,[start, end])] - """ - ignore_labels = set(ignore_labels) if ignore_labels else set() - - spans = [] - prev_bmes_tag = None - for idx, tag in enumerate(tags): - tag = tag.lower() - bmes_tag, label = tag[:1], tag[2:] - if bmes_tag in ('b', 's'): - spans.append((label, [idx, idx])) - elif bmes_tag in ('m', 'e') and prev_bmes_tag in ('b', 'm') and label == spans[-1][0]: - spans[-1][1][1] = idx - elif bmes_tag == 'o': - pass - else: - spans.append((label, [idx, idx])) - prev_bmes_tag = bmes_tag - return [(span[0], (span[1][0], span[1][1] + 1)) - for span in spans - if span[0] not in ignore_labels - ] - - -def _bioes_tag_to_spans(tags, ignore_labels=None): - r""" - 给定一个tags的lis,比如['O', 'B-singer', 'I-singer', 'E-singer', 'O', 'O']。 - 返回[('singer', (1, 4))] (左闭右开区间) - - :param tags: List[str], - :param ignore_labels: List[str], 在该list中的label将被忽略 - :return: List[Tuple[str, List[int, int]]]. [(label,[start, end])] - """ - ignore_labels = set(ignore_labels) if ignore_labels else set() - - spans = [] - prev_bioes_tag = None - for idx, tag in enumerate(tags): - tag = tag.lower() - bioes_tag, label = tag[:1], tag[2:] - if bioes_tag in ('b', 's'): - spans.append((label, [idx, idx])) - elif bioes_tag in ('i', 'e') and prev_bioes_tag in ('b', 'i') and label == spans[-1][0]: - spans[-1][1][1] = idx - elif bioes_tag == 'o': - pass - else: - spans.append((label, [idx, idx])) - prev_bioes_tag = bioes_tag - return [(span[0], (span[1][0], span[1][1] + 1)) - for span in spans - if span[0] not in ignore_labels - ] - - -def _bio_tag_to_spans(tags, ignore_labels=None): - r""" - 给定一个tags的lis,比如['O', 'B-singer', 'I-singer', 'I-singer', 'O', 'O']。 - 返回[('singer', (1, 4))] (左闭右开区间) - - :param tags: List[str], - :param ignore_labels: List[str], 在该list中的label将被忽略 - :return: List[Tuple[str, List[int, int]]]. [(label,[start, end])] - """ - ignore_labels = set(ignore_labels) if ignore_labels else set() - - spans = [] - prev_bio_tag = None - for idx, tag in enumerate(tags): - tag = tag.lower() - bio_tag, label = tag[:1], tag[2:] - if bio_tag == 'b': - spans.append((label, [idx, idx])) - elif bio_tag == 'i' and prev_bio_tag in ('b', 'i') and label == spans[-1][0]: - spans[-1][1][1] = idx - elif bio_tag == 'o': # o tag does not count - pass - else: - spans.append((label, [idx, idx])) - prev_bio_tag = bio_tag - return [(span[0], (span[1][0], span[1][1] + 1)) for span in spans if span[0] not in ignore_labels] - - -def _get_encoding_type_from_tag_vocab(tag_vocab: Union[Vocabulary, dict]) -> str: - r""" - 给定Vocabulary自动判断是哪种类型的encoding, 支持判断bmes, bioes, bmeso, bio - - :param tag_vocab: 支持传入tag Vocabulary; 或者传入形如{0:"O", 1:"B-tag1"},即index在前,tag在后的dict。 - :return: - """ - tag_set = set() - unk_token = '' - pad_token = '' - if isinstance(tag_vocab, Vocabulary): - unk_token = tag_vocab.unknown - pad_token = tag_vocab.padding - tag_vocab = tag_vocab.idx2word - for idx, tag in tag_vocab.items(): - if tag in (unk_token, pad_token): - continue - tag = tag[:1].lower() - tag_set.add(tag) - - bmes_tag_set = set('bmes') - if tag_set == bmes_tag_set: - return 'bmes' - bio_tag_set = set('bio') - if tag_set == bio_tag_set: - return 'bio' - bmeso_tag_set = set('bmeso') - if tag_set == bmeso_tag_set: - return 'bmeso' - bioes_tag_set = set('bioes') - if tag_set == bioes_tag_set: - return 'bioes' - raise RuntimeError("encoding_type cannot be inferred automatically. 
Only support " - "'bio', 'bmes', 'bmeso', 'bioes' type.") - - -def _check_tag_vocab_and_encoding_type(tag_vocab: Union[Vocabulary, dict], encoding_type: str): - r""" - 检查vocab中的tag是否与encoding_type是匹配的 - - :param tag_vocab: 支持传入tag Vocabulary; 或者传入形如{0:"O", 1:"B-tag1"},即index在前,tag在后的dict。 - :param encoding_type: bio, bmes, bioes, bmeso - :return: - """ - tag_set = set() - unk_token = '' - pad_token = '' - if isinstance(tag_vocab, Vocabulary): - unk_token = tag_vocab.unknown - pad_token = tag_vocab.padding - tag_vocab = tag_vocab.idx2word - for idx, tag in tag_vocab.items(): - if tag in (unk_token, pad_token): - continue - tag = tag[:1].lower() - tag_set.add(tag) - - tags = encoding_type - for tag in tag_set: - assert tag in tags, f"{tag} is not a valid tag in encoding type:{encoding_type}. Please check your " \ - f"encoding_type." - tags = tags.replace(tag, '') # 删除该值 - if tags: # 如果不为空,说明出现了未使用的tag - warnings.warn(f"Tag:{tags} in encoding type:{encoding_type} is not presented in your Vocabulary. Check your " - "encoding_type.") - - -class SpanFPreRecMetric(MetricBase): - r""" - 在序列标注问题中,以span的方式计算F, pre, rec. - 比如中文Part of speech中,会以character的方式进行标注,句子 `中国在亚洲` 对应的POS可能为(以BMES为例) - ['B-NN', 'E-NN', 'S-DET', 'B-NN', 'E-NN']。该metric就是为类似情况下的F1计算。 - 最后得到的metric结果为:: - - { - 'f': xxx, # 这里使用f考虑以后可以计算f_beta值 - 'pre': xxx, - 'rec':xxx - } - - 若only_gross=False, 即还会返回各个label的metric统计值:: - - { - 'f': xxx, - 'pre': xxx, - 'rec':xxx, - 'f-label': xxx, - 'pre-label': xxx, - 'rec-label':xxx, - ... - } - """ - - def __init__(self, tag_vocab, pred=None, target=None, seq_len=None, encoding_type=None, ignore_labels=None, - only_gross=True, f_type='micro', beta=1): - r""" - - :param tag_vocab: 标签的 :class:`~fastNLP.Vocabulary` 。支持的标签为"B"(没有label);或"B-xxx"(xxx为某种label,比如POS中的NN), - 在解码时,会将相同xxx的认为是同一个label,比如['B-NN', 'E-NN']会被合并为一个'NN'. - :param str pred: 用该key在evaluate()时从传入dict中取出prediction数据。 为None,则使用 `pred` 取数据 - :param str target: 用该key在evaluate()时从传入dict中取出target数据。 为None,则使用 `target` 取数据 - :param str seq_len: 用该key在evaluate()时从传入dict中取出sequence length数据。为None,则使用 `seq_len` 取数据。 - :param str encoding_type: 目前支持bio, bmes, bmeso, bioes。默认为None,通过tag_vocab自动判断. - :param list ignore_labels: str 组成的list. 这个list中的class不会被用于计算。例如在POS tagging时传入['NN'],则不会计算'NN'个label - :param bool only_gross: 是否只计算总的f1, precision, recall的值;如果为False,不仅返回总的f1, pre, rec, 还会返回每个label的f1, pre, rec - :param str f_type: `micro` 或 `macro` . `micro` :通过先计算总体的TP,FN和FP的数量,再计算f, precision, recall; `macro` : 分布计算每个类别的f, precision, recall,然后做平均(各类别f的权重相同) - :param float beta: f_beta分数, :math:`f_{beta} = \frac{(1 + {beta}^{2})*(pre*rec)}{({beta}^{2}*pre + rec)}` . 
常用为 `beta=0.5, 1, 2` 若为0.5则精确率的权重高于召回率;若为1,则两者平等;若为2,则召回率权重高于精确率。 - """ - - if not isinstance(tag_vocab, Vocabulary): - raise TypeError("tag_vocab can only be fastNLP.Vocabulary, not {}.".format(type(tag_vocab))) - if f_type not in ('micro', 'macro'): - raise ValueError("f_type only supports `micro` or `macro`', got {}.".format(f_type)) - - if encoding_type: - encoding_type = encoding_type.lower() - _check_tag_vocab_and_encoding_type(tag_vocab, encoding_type) - self.encoding_type = encoding_type - else: - self.encoding_type = _get_encoding_type_from_tag_vocab(tag_vocab) - - if self.encoding_type == 'bmes': - self.tag_to_span_func = _bmes_tag_to_spans - elif self.encoding_type == 'bio': - self.tag_to_span_func = _bio_tag_to_spans - elif self.encoding_type == 'bmeso': - self.tag_to_span_func = _bmeso_tag_to_spans - elif self.encoding_type == 'bioes': - self.tag_to_span_func = _bioes_tag_to_spans - else: - raise ValueError("Only support 'bio', 'bmes', 'bmeso', 'bioes' type.") - - self.ignore_labels = ignore_labels - self.f_type = f_type - self.beta = beta - self.beta_square = self.beta ** 2 - self.only_gross = only_gross - - super().__init__() - self._init_param_map(pred=pred, target=target, seq_len=seq_len) - - self.tag_vocab = tag_vocab - - self._true_positives = defaultdict(int) - self._false_positives = defaultdict(int) - self._false_negatives = defaultdict(int) - - def evaluate(self, pred, target, seq_len): - r"""evaluate函数将针对一个批次的预测结果做评价指标的累计 - - :param pred: [batch, seq_len] 或者 [batch, seq_len, len(tag_vocab)], 预测的结果 - :param target: [batch, seq_len], 真实值 - :param seq_len: [batch] 文本长度标记 - :return: - """ - if not isinstance(pred, torch.Tensor): - raise TypeError(f"`pred` in {_get_func_signature(self.evaluate)} must be torch.Tensor," - f"got {type(pred)}.") - if not isinstance(target, torch.Tensor): - raise TypeError(f"`target` in {_get_func_signature(self.evaluate)} must be torch.Tensor," - f"got {type(target)}.") - - if not isinstance(seq_len, torch.Tensor): - raise TypeError(f"`seq_lens` in {_get_func_signature(self.evaluate)} must be torch.Tensor," - f"got {type(seq_len)}.") - - if pred.size() == target.size() and len(target.size()) == 2: - pass - elif len(pred.size()) == len(target.size()) + 1 and len(target.size()) == 2: - num_classes = pred.size(-1) - pred = pred.argmax(dim=-1) - if (target >= num_classes).any(): - raise ValueError("A gold label passed to SpanBasedF1Metric contains an " - "id >= {}, the number of classes.".format(num_classes)) - else: - raise RuntimeError(f"In {_get_func_signature(self.evaluate)}, when pred have " - f"size:{pred.size()}, target should have size: {pred.size()} or " - f"{pred.size()[:-1]}, got {target.size()}.") - - batch_size = pred.size(0) - pred = pred.tolist() - target = target.tolist() - for i in range(batch_size): - pred_tags = pred[i][:int(seq_len[i])] - gold_tags = target[i][:int(seq_len[i])] - - pred_str_tags = [self.tag_vocab.to_word(tag) for tag in pred_tags] - gold_str_tags = [self.tag_vocab.to_word(tag) for tag in gold_tags] - - pred_spans = self.tag_to_span_func(pred_str_tags, ignore_labels=self.ignore_labels) - gold_spans = self.tag_to_span_func(gold_str_tags, ignore_labels=self.ignore_labels) - - for span in pred_spans: - if span in gold_spans: - self._true_positives[span[0]] += 1 - gold_spans.remove(span) - else: - self._false_positives[span[0]] += 1 - for span in gold_spans: - self._false_negatives[span[0]] += 1 - - def get_metric(self, reset=True): - r"""get_metric函数将根据evaluate函数累计的评价指标统计量来计算最终的评价结果.""" - evaluate_result = {} - 
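# --- Illustrative sketch (not from the original file): the span decoding SpanFPreRecMetric relies on.
# --- _bio_tag_to_spans (defined above) turns BIO tags into (label, (start, end)) with end exclusive.
tags = ['O', 'B-per', 'I-per', 'O', 'B-loc']
assert _bio_tag_to_spans(tags) == [('per', (1, 3)), ('loc', (4, 5))]
# A predicted span that also appears among the gold spans counts as a true positive for its label;
# the leftovers become false positives (pred side) or false negatives (gold side), per the loop above.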
if not self.only_gross or self.f_type == 'macro': - tags = set(self._false_negatives.keys()) - tags.update(set(self._false_positives.keys())) - tags.update(set(self._true_positives.keys())) - f_sum = 0 - pre_sum = 0 - rec_sum = 0 - for tag in tags: - tp = self._true_positives[tag] - fn = self._false_negatives[tag] - fp = self._false_positives[tag] - f, pre, rec = _compute_f_pre_rec(self.beta_square, tp, fn, fp) - f_sum += f - pre_sum += pre - rec_sum += rec - if not self.only_gross and tag != '': # tag!=''防止无tag的情况 - f_key = 'f-{}'.format(tag) - pre_key = 'pre-{}'.format(tag) - rec_key = 'rec-{}'.format(tag) - evaluate_result[f_key] = f - evaluate_result[pre_key] = pre - evaluate_result[rec_key] = rec - - if self.f_type == 'macro': - evaluate_result['f'] = f_sum / len(tags) - evaluate_result['pre'] = pre_sum / len(tags) - evaluate_result['rec'] = rec_sum / len(tags) - - if self.f_type == 'micro': - f, pre, rec = _compute_f_pre_rec(self.beta_square, - sum(self._true_positives.values()), - sum(self._false_negatives.values()), - sum(self._false_positives.values())) - evaluate_result['f'] = f - evaluate_result['pre'] = pre - evaluate_result['rec'] = rec - - if reset: - self._true_positives = defaultdict(int) - self._false_positives = defaultdict(int) - self._false_negatives = defaultdict(int) - - for key, value in evaluate_result.items(): - evaluate_result[key] = round(value, 6) - - return evaluate_result - - -def _compute_f_pre_rec(beta_square, tp, fn, fp): - r""" - - :param tp: int, true positive - :param fn: int, false negative - :param fp: int, false positive - :return: (f, pre, rec) - """ - pre = tp / (fp + tp + 1e-13) - rec = tp / (fn + tp + 1e-13) - f = (1 + beta_square) * pre * rec / (beta_square * pre + rec + 1e-13) - - return f, pre, rec - - -def _prepare_metrics(metrics): - r""" - - Prepare list of Metric based on input - :param metrics: - :return: List[fastNLP.MetricBase] - """ - _metrics = [] - if metrics: - if isinstance(metrics, list): - for metric in metrics: - if isinstance(metric, type): - metric = metric() - if isinstance(metric, MetricBase): - metric_name = metric.__class__.__name__ - if not callable(metric.evaluate): - raise TypeError(f"{metric_name}.evaluate must be callable, got {type(metric.evaluate)}.") - if not callable(metric.get_metric): - raise TypeError(f"{metric_name}.get_metric must be callable, got {type(metric.get_metric)}.") - _metrics.append(metric) - else: - raise TypeError( - f"The type of metric in metrics must be `fastNLP.MetricBase`, not `{type(metric)}`.") - elif isinstance(metrics, MetricBase): - _metrics = [metrics] - else: - raise TypeError(f"The type of metrics should be `list[fastNLP.MetricBase]` or `fastNLP.MetricBase`, " - f"got {type(metrics)}.") - return _metrics - - -def _accuracy_topk(y_true, y_prob, k=1): - r"""Compute accuracy of y_true matching top-k probable labels in y_prob. - - :param y_true: ndarray, true label, [n_samples] - :param y_prob: ndarray, label probabilities, [n_samples, n_classes] - :param k: int, k in top-k - :returns acc: accuracy of top-k - - """ - y_pred_topk = np.argsort(y_prob, axis=-1)[:, -1:-k - 1:-1] - y_true_tile = np.tile(np.expand_dims(y_true, axis=1), (1, k)) - y_match = np.any(y_pred_topk == y_true_tile, axis=-1) - acc = np.sum(y_match) / y_match.shape[0] - return acc - - -def _pred_topk(y_prob, k=1): - r"""Return top-k predicted labels and corresponding probabilities. 
-
-    :param y_prob: ndarray, size [n_samples, n_classes], probabilities on labels
-    :param k: int, k of top-k
-    :returns (y_pred_topk, y_prob_topk):
-        y_pred_topk: ndarray, size [n_samples, k], predicted top-k labels
-        y_prob_topk: ndarray, size [n_samples, k], probabilities for top-k labels
-
-    """
-    y_pred_topk = np.argsort(y_prob, axis=-1)[:, -1:-k - 1:-1]
-    x_axis_index = np.tile(
-        np.arange(len(y_prob))[:, np.newaxis],
-        (1, k))
-    y_prob_topk = y_prob[x_axis_index, y_pred_topk]
-    return y_pred_topk, y_prob_topk
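# A quick numeric check of the two top-k helpers above (a sketch with toy data):
#
#   y_prob = np.array([[0.1, 0.7, 0.2],
#                      [0.5, 0.3, 0.2]])
#   y_true = np.array([1, 1])
#   _accuracy_topk(y_true, y_prob, k=1)  # -> 0.5 (row 0 predicts 1, row 1 predicts 0)
#   _accuracy_topk(y_true, y_prob, k=2)  # -> 1.0 (label 1 is in the top-2 of both rows)
#   _pred_topk(y_prob, k=2)              # -> labels [[1, 2], [0, 1]], probs [[0.7, 0.2], [0.5, 0.3]]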
-
-
-class CMRC2018Metric(MetricBase):
-    r"""
-    Evaluation metric for the CMRC2018 task.
-    """
-    def __init__(self, answers=None, raw_chars=None, context_len=None, pred_start=None, pred_end=None):
-        super().__init__()
-        self._init_param_map(answers=answers, raw_chars=raw_chars, context_len=context_len, pred_start=pred_start,
-                             pred_end=pred_end)
-        self.em = 0
-        self.total = 0
-        self.f1 = 0
-
-    def evaluate(self, answers, raw_chars, pred_start, pred_end, context_len=None):
-        r"""
-
-        :param list[str] answers: e.g. [["answer 1", "answer 2", "answer 3"], [...], ...]
-        :param list[str] raw_chars: [["这", "是", ...], [...]]
-        :param tensor pred_start: batch_size x length, or batch_size
-        :param tensor pred_end: batch_size x length, or batch_size (a closed interval: the end position is included)
-        :param tensor context_len: context lengths, batch_size
-        :return:
-        """
-        if pred_start.dim() > 1:
-            batch_size, max_len = pred_start.size()
-            context_mask = seq_len_to_mask(context_len, max_len=max_len).eq(False)
-            pred_start.masked_fill_(context_mask, float('-inf'))
-            pred_end.masked_fill_(context_mask, float('-inf'))
-            max_pred_start, pred_start_index = pred_start.max(dim=-1, keepdim=True)  # batch_size,
-            pred_start_mask = pred_start.eq(max_pred_start).cumsum(dim=-1).eq(0)  # the end may only be predicted at or after the start
-            pred_end.masked_fill_(pred_start_mask, float('-inf'))
-            pred_end_index = pred_end.argmax(dim=-1) + 1
-        else:
-            pred_start_index = pred_start
-            pred_end_index = pred_end + 1
-        pred_ans = []
-        for index, (start, end) in enumerate(zip(pred_start_index.flatten().tolist(), pred_end_index.tolist())):
-            pred_ans.append(''.join(raw_chars[index][start:end]))
-        for answer, pred_an in zip(answers, pred_ans):
-            pred_an = pred_an.strip()
-            self.f1 += _calc_cmrc2018_f1_score(answer, pred_an)
-            self.total += 1
-            self.em += _calc_cmrc2018_em_score(answer, pred_an)
-
-    def get_metric(self, reset=True):
-        eval_res = {'f1': round(self.f1 / self.total * 100, 2), 'em': round(self.em / self.total * 100, 2)}
-        if reset:
-            self.em = 0
-            self.total = 0
-            self.f1 = 0
-        return eval_res
-
-# split Chinese
-def _cn_segmentation(in_str, rm_punc=False):
-    in_str = str(in_str).lower().strip()
-    segs_out = []
-    temp_str = ""
-    sp_char = {'-', ':', '_', '*', '^', '/', '\\', '~', '`', '+', '=', ',', '。', ':', '?', '!', '“', '”', ';', '’', '《',
-               '》', '……', '·', '、', '「', '」', '(', ')', '-', '~', '『', '』'}
-    for char in in_str:
-        if rm_punc and char in sp_char:
-            continue
-        if re.search(r'[\u4e00-\u9fa5]', char) or char in sp_char:
-            if temp_str != "":
-                ss = list(temp_str)
-                segs_out.extend(ss)
-                temp_str = ""
-            segs_out.append(char)
-        else:
-            temp_str += char
-
-    # handling last part
-    if temp_str != "":
-        ss = list(temp_str)
-        segs_out.extend(ss)
-
-    return segs_out
-
-
-# remove punctuation
-def _remove_punctuation(in_str):
-    in_str = str(in_str).lower().strip()
-    sp_char = ['-', ':', '_', '*', '^', '/', '\\', '~', '`', '+', '=',
-               ',', '。', ':', '?', '!', '“', '”', ';', '’', '《', '》', '……', '·', '、',
-               '「', '」', '(', ')', '-', '~', '『', '』']
-    out_segs = []
-    for char in in_str:
-        if char in sp_char:
-            continue
-        else:
-            out_segs.append(char)
-    return ''.join(out_segs)
-
-
-# find longest common string
-def _find_lcs(s1, s2):
-    m = [[0 for i in range(len(s2) + 1)] for j in range(len(s1) + 1)]
-    mmax = 0
-    p = 0
-    for i in range(len(s1)):
-        for j in range(len(s2)):
-            if s1[i] == s2[j]:
-                m[i + 1][j + 1] = m[i][j] + 1
-                if m[i + 1][j + 1] > mmax:
-                    mmax = m[i + 1][j + 1]
-                    p = i + 1
-    return s1[p - mmax:p], mmax
-
-
-def _calc_cmrc2018_f1_score(answers, prediction):
-    f1_scores = []
-    for ans in answers:
-        ans_segs = _cn_segmentation(ans, rm_punc=True)
-        prediction_segs = _cn_segmentation(prediction, rm_punc=True)
-        lcs, lcs_len = _find_lcs(ans_segs, prediction_segs)
-        if lcs_len == 0:
-            f1_scores.append(0)
-            continue
-        precision = 1.0 * lcs_len / len(prediction_segs)
-        recall = 1.0 * lcs_len / len(ans_segs)
-        f1 = (2 * precision * recall) / (precision + recall)
-        f1_scores.append(f1)
-    return max(f1_scores)
-
-
-def _calc_cmrc2018_em_score(answers, prediction):
-    em = 0
-    for ans in answers:
-        ans_ = _remove_punctuation(ans)
-        prediction_ = _remove_punctuation(prediction)
-        if ans_ == prediction_:
-            em = 1
-            break
-    return em
diff --git a/fastNLP/core/optimizer.py b/fastNLP/core/optimizer.py
deleted file mode 100644
index 8c53176a..00000000
--- a/fastNLP/core/optimizer.py
+++ /dev/null
@@ -1,227 +0,0 @@
-r"""
-The optimizer module defines the optimizers needed by fastNLP; they are typically used as arguments to :class:`~fastNLP.Trainer`.
-
-"""
-__all__ = [
-    "Optimizer",
-    "SGD",
-    "Adam",
-    "AdamW"
-]
-
-import math
-
-import torch
-from torch.optim.optimizer import Optimizer as TorchOptimizer
-
-
-class Optimizer(object):
-    r"""
-    Optimizer
-    """
-
-    def __init__(self, model_params, **kwargs):
-        r"""
-
-        :param model_params: a generator. E.g. ``model.parameters()`` for PyTorch models.
-        :param kwargs: additional parameters.
-        """
-        if model_params is not None and not hasattr(model_params, "__next__"):
-            raise RuntimeError("model parameters should be a generator, rather than {}.".format(type(model_params)))
-        self.model_params = model_params
-        self.settings = kwargs
-
-    def construct_from_pytorch(self, model_params):
-        raise NotImplementedError
-
-    @staticmethod
-    def _get_require_grads_param(params):
-        r"""
-        Drop the parameters in ``params`` that do not require gradients.
-
-        :param iterable params: parameters
-        :return: list(nn.Parameters)
-        """
-        return [param for param in params if param.requires_grad]
-
-
-class NullOptimizer(Optimizer):
-    r"""
-    Pass this optimizer in when you do not want the Trainer to update the optimizer; make sure the parameters are updated through callbacks instead.
-
-    """
-    def __init__(self):
-        super().__init__(None)
-
-    def construct_from_pytorch(self, model_params):
-        return self
-
-    def __getattr__(self, item):
-        def pass_func(*args, **kwargs):
-            pass
-
-        return pass_func
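# A sketch of how the wrapper classes defined below are consumed (the two-step
# pattern is this module's contract; `net` is an assumed stand-in model):
#
#   import torch.nn as nn
#   net = nn.Linear(4, 2)
#   opt = SGD(lr=0.01, momentum=0.9)                      # stores settings only, no params yet
#   torch_opt = opt.construct_from_pytorch(net.parameters())
#   assert isinstance(torch_opt, torch.optim.SGD)         # a real torch optimizer comes out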
-
-
-class SGD(Optimizer):
-    r"""
-    SGD
-    """
-
-    def __init__(self, lr=0.001, momentum=0, model_params=None):
-        r"""
-        :param float lr: learning rate. Default: 0.001
-        :param float momentum: momentum. Default: 0
-        :param model_params: a generator. E.g. ``model.parameters()`` for PyTorch models.
-        """
-        if not isinstance(lr, float):
-            raise TypeError("learning rate has to be float.")
-        super(SGD, self).__init__(model_params, lr=lr, momentum=momentum)
-
-    def construct_from_pytorch(self, model_params):
-        if self.model_params is None:
-            # careful! generator cannot be assigned.
-            return torch.optim.SGD(self._get_require_grads_param(model_params), **self.settings)
-        else:
-            return torch.optim.SGD(self._get_require_grads_param(self.model_params), **self.settings)
-
-
-class Adam(Optimizer):
-    r"""
-    Adam
-    """
-
-    def __init__(self, lr=0.001, weight_decay=0, betas=(0.9, 0.999), eps=1e-8, amsgrad=False, model_params=None):
-        r"""
-
-        :param float lr: learning rate
-        :param float weight_decay: weight decay coefficient
-        :param eps: term added to the denominator for numerical stability
-        :param amsgrad: whether to use the AMSGrad variant
-        :param model_params: a generator. E.g. ``model.parameters()`` for PyTorch models.
-        """
-        if not isinstance(lr, float):
-            raise TypeError("learning rate has to be float.")
-        super(Adam, self).__init__(model_params, lr=lr, betas=betas, eps=eps, amsgrad=amsgrad,
-                                   weight_decay=weight_decay)
-
-    def construct_from_pytorch(self, model_params):
-        if self.model_params is None:
-            # careful! generator cannot be assigned.
-            return torch.optim.Adam(self._get_require_grads_param(model_params), **self.settings)
-        else:
-            return torch.optim.Adam(self._get_require_grads_param(self.model_params), **self.settings)
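# Before the AdamW implementation below, a sketch of what "decoupled" weight
# decay means in its update (toy scalars, not a fastNLP API):
#
#   p <- p * (1 - lr * weight_decay)               # decay applied to the weight itself
#   p <- p - lr * m_hat / (sqrt(v_hat) + eps)      # then the usual Adam step on the raw gradient
#
# e.g. with lr=0.1 and weight_decay=0.01, a weight of 1.0 first shrinks to 0.999.
# Classic L2 regularization would instead fold weight_decay * p into the gradient
# before the adaptive rescaling, which is not an equivalent update.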
- """ - loss = None - if closure is not None: - loss = closure() - - for group in self.param_groups: - for p in group['params']: - if p.grad is None: - continue - - # Perform stepweight decay - p.data.mul_(1 - group['lr'] * group['weight_decay']) - - # Perform optimization step - grad = p.grad.data - if grad.is_sparse: - raise RuntimeError('Adam does not support sparse gradients, please consider SparseAdam instead') - amsgrad = group['amsgrad'] - - state = self.state[p] - - # State initialization - if len(state) == 0: - state['step'] = 0 - # Exponential moving average of gradient values - state['exp_avg'] = torch.zeros_like(p.data) - # Exponential moving average of squared gradient values - state['exp_avg_sq'] = torch.zeros_like(p.data) - if amsgrad: - # Maintains max of all exp. moving avg. of sq. grad. values - state['max_exp_avg_sq'] = torch.zeros_like(p.data) - - exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq'] - if amsgrad: - max_exp_avg_sq = state['max_exp_avg_sq'] - beta1, beta2 = group['betas'] - - state['step'] += 1 - - # Decay the first and second moment running average coefficient - exp_avg.mul_(beta1).add_(1 - beta1, grad) - exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad) - if amsgrad: - # Maintains the maximum of all 2nd moment running avg. till now - torch.max(max_exp_avg_sq, exp_avg_sq, out=max_exp_avg_sq) - # Use the max. for normalizing running avg. of gradient - denom = max_exp_avg_sq.sqrt().add_(group['eps']) - else: - denom = exp_avg_sq.sqrt().add_(group['eps']) - - bias_correction1 = 1 - beta1 ** state['step'] - bias_correction2 = 1 - beta2 ** state['step'] - step_size = group['lr'] * math.sqrt(bias_correction2) / bias_correction1 - - p.data.addcdiv_(-step_size, exp_avg, denom) - - return loss diff --git a/fastNLP/core/predictor.py b/fastNLP/core/predictor.py deleted file mode 100644 index 613a4993..00000000 --- a/fastNLP/core/predictor.py +++ /dev/null @@ -1,83 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "Predictor" -] - -from collections import defaultdict - -import torch - -from . import DataSet -from . import DataSetIter -from . import SequentialSampler -from .utils import _build_args, _move_dict_value_to_device, _get_model_device - - -class Predictor(object): - r""" - 一个根据训练模型预测输出的预测器(Predictor) - - 与测试器(Tester)不同的是,predictor不关心模型性能的评价指标,只做inference。 - 这是一个fastNLP调用的高级模型包装器。它与Trainer、Tester不共享任何操作。 - """ - - def __init__(self, network): - r""" - - :param torch.nn.Module network: 用来完成预测任务的模型 - """ - if not isinstance(network, torch.nn.Module): - raise ValueError( - "Only fastNLP.models.BaseModel or torch.nn,Module is allowed, not {}".format(type(network))) - self.network = network - self.batch_size = 1 - self.batch_output = [] - - def predict(self, data: DataSet, seq_len_field_name=None): - r"""用已经训练好的模型进行inference. 
- - :param fastNLP.DataSet data: 待预测的数据集 - :param str seq_len_field_name: 表示序列长度信息的field名字 - :return: dict dict里面的内容为模型预测的结果 - """ - if not isinstance(data, DataSet): - raise ValueError("Only Dataset class is allowed, not {}.".format(type(data))) - if seq_len_field_name is not None and seq_len_field_name not in data.field_arrays: - raise ValueError("Field name {} not found in DataSet {}.".format(seq_len_field_name, data)) - - prev_training = self.network.training - self.network.eval() - network_device = _get_model_device(self.network) - batch_output = defaultdict(list) - data_iterator = DataSetIter(data, batch_size=self.batch_size, sampler=SequentialSampler(), as_numpy=False) - - if hasattr(self.network, "predict"): - predict_func = self.network.predict - else: - predict_func = self.network.forward - - with torch.no_grad(): - for batch_x, _ in data_iterator: - _move_dict_value_to_device(batch_x, _, device=network_device) - refined_batch_x = _build_args(predict_func, **batch_x) - prediction = predict_func(**refined_batch_x) - - if seq_len_field_name is not None: - seq_lens = batch_x[seq_len_field_name].tolist() - - for key, value in prediction.items(): - value = value.cpu().numpy() - if len(value.shape) == 1 or (len(value.shape) == 2 and value.shape[1] == 1): - batch_output[key].extend(value.tolist()) - else: - if seq_len_field_name is not None: - tmp_batch = [] - for idx, seq_len in enumerate(seq_lens): - tmp_batch.append(value[idx, :seq_len]) - batch_output[key].extend(tmp_batch) - else: - batch_output[key].append(value) - - self.network.train(prev_training) - return batch_output diff --git a/fastNLP/core/sampler.py b/fastNLP/core/sampler.py deleted file mode 100644 index 8ad10e26..00000000 --- a/fastNLP/core/sampler.py +++ /dev/null @@ -1,418 +0,0 @@ -r""" -sampler 子类实现了 fastNLP 所需的各种采样器。 -""" -__all__ = [ - "Sampler", - "BucketSampler", - "SequentialSampler", - "RandomSampler", - "SortedSampler", - "ConstantTokenNumSampler" -] - -from itertools import chain - -import numpy as np - - -class Sampler(object): - r""" - `Sampler` 类的基类. 规定以何种顺序取出data中的元素 - - 子类必须实现 ``__call__`` 方法. 输入 `DataSet` 对象, 返回其中元素的下标序列 - """ - - def __call__(self, data_set): - r""" - :param DataSet data_set: `DataSet` 对象, 需要Sample的数据 - :return result: list(int) 其中元素的下标序列, ``data_set`` 中元素会按 ``result`` 中顺序取出 - """ - raise NotImplementedError - - -class SequentialSampler(Sampler): - r""" - 顺序取出元素的 `Sampler` - - """ - - def __call__(self, data_set): - return list(range(len(data_set))) - - -class RandomSampler(Sampler): - r""" - 随机化取元素的 `Sampler` - - """ - - def __call__(self, data_set): - return list(np.random.permutation(len(data_set))) - - -class BucketSampler(Sampler): - r""" - 带Bucket的 `Random Sampler`. 可以随机地取出长度相似的元素 - """ - - def __init__(self, num_buckets=10, batch_size=None, seq_len_field_name='seq_len'): - r""" - - :param int num_buckets: bucket的数量 - :param int batch_size: batch的大小. 
-            Defaults to None; Trainer/Tester will set it correctly when they invoke BucketSampler. When used outside
-            Trainer/Tester, the value must be passed explicitly.
-        :param str seq_len_field_name: name of the `field` holding the sequence lengths
-        """
-        self.num_buckets = num_buckets
-        self.batch_size = batch_size
-        self.seq_len_field_name = seq_len_field_name
-
-    def set_batch_size(self, batch_size):
-        r"""
-
-        :param int batch_size: the size of each batch
-        :return:
-        """
-        self.batch_size = batch_size
-
-    def __call__(self, data_set):
-        if self.batch_size is None:
-            raise RuntimeError("batch_size is None.")
-        seq_lens = data_set.get_all_fields()[self.seq_len_field_name].content
-        total_sample_num = len(seq_lens)
-
-        bucket_indexes = []
-        assert total_sample_num >= self.num_buckets, "The number of samples is smaller than the number of buckets."
-        num_sample_per_bucket = total_sample_num // self.num_buckets
-        for i in range(self.num_buckets):
-            bucket_indexes.append([num_sample_per_bucket * i, num_sample_per_bucket * (i + 1)])
-        bucket_indexes[-1][1] = total_sample_num
-
-        sorted_seq_lens = list(sorted([(idx, seq_len) for
-                                       idx, seq_len in zip(range(total_sample_num), seq_lens)],
-                                      key=lambda x: x[1]))
-
-        batchs = []
-
-        left_init_indexes = []
-        for b_idx in range(self.num_buckets):
-            start_idx = bucket_indexes[b_idx][0]
-            end_idx = bucket_indexes[b_idx][1]
-            sorted_bucket_seq_lens = sorted_seq_lens[start_idx:end_idx]
-            left_init_indexes.extend([tup[0] for tup in sorted_bucket_seq_lens])
-            num_batch_per_bucket = len(left_init_indexes) // self.batch_size
-            np.random.shuffle(left_init_indexes)
-            for i in range(num_batch_per_bucket):
-                batchs.append(left_init_indexes[i * self.batch_size:(i + 1) * self.batch_size])
-            left_init_indexes = left_init_indexes[num_batch_per_bucket * self.batch_size:]
-        if len(left_init_indexes) != 0:  # append the leftover indices, if any
-            batchs.append(left_init_indexes)
-        np.random.shuffle(batchs)
-
-        return list(chain(*batchs))
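# A sketch of what BucketSampler yields (toy numbers; assumes a DataSet with a
# 'seq_len' field): with 100 samples, num_buckets=10 and batch_size=10, each
# bucket collects the 10 samples closest in length, batches are cut inside a
# bucket, and both the intra-bucket order and the batch order are shuffled:
#
#   sampler = BucketSampler(num_buckets=10, batch_size=10)
#   order = sampler(dataset)  # 100 indices; consecutive groups of 10 have similar seq_len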
-
-
-class ConstTokenNumSampler(Sampler):
    """
-    Tries to keep the number of input tokens in each batch roughly constant.
-
-    Example
-        >>> # Assume tr_data exists and has a field seq_len storing the token count of each instance
-        >>> from fastNLP import DataSetIter, Trainer
-        >>> sampler = ConstTokenNumSampler('src_seq_len', max_token=4096)
-        >>>
-        >>> # Pass the sampler directly to the Trainer; the batch_size argument is then ignored
-        >>> trainer = Trainer(tr_data, model, optimizer=optimizer, loss=TranslationLoss(),
-        >>>             batch_size=1, sampler=sampler, drop_last=False, update_every=1)
-    """
-    def __init__(self, seq_len_field_name, max_token=4096, max_sentence=-1, need_be_multiple_of=1, num_bucket=-1):
-        """
-
-        :param str seq_len_field_name: the field indicating each sample's length
-        :param int max_token: maximum number of tokens per batch
-        :param int max_sentence: maximum number of instances per batch; -1 means it is determined by max_token
-        :param int need_be_multiple_of: the number of instances in each generated batch must be a multiple of this value; useful with DataParallel
-        :param int num_bucket: split the data into num_bucket buckets by length; samples are combined into batches within a bucket as far as possible, which reduces padding
-        """
-        assert (max_sentence!=-1 and max_sentence>=need_be_multiple_of) or max_sentence<1
-        self.seq_len_field_name = seq_len_field_name
-        self.num_bucket = num_bucket
-        self.max_token = max_token
-        self._max_sentence = max_sentence
-        self.need_be_multiple_of = need_be_multiple_of
-
-    def __call__(self, data_set):
-        assert len(data_set)>self.num_bucket, "The number of samples should be larger than buckets."
-        seq_len = data_set.get_field(self.seq_len_field_name)
-        self.seq_len = seq_len
-        seq_len_indice = [(length, i) for i, length in enumerate(seq_len)]
-        seq_len_indice.sort(key=lambda x: x[0])
-        indice_in_buckets = []
-        if self.num_bucket>0:
-            sample_per_bucket = len(seq_len_indice)//self.num_bucket
-            i = 0
-            while len(indice_in_buckets)<len(seq_len_indice):
-                indice_in_buckets.append(seq_len_indice[i * sample_per_bucket:(i + 1) * sample_per_bucket])
-                i += 1
-        else:
-            indice_in_buckets = [seq_len_indice]
-        self.indice_in_buckets = indice_in_buckets
-        self.get_new_order()
-
-    @property
-    def max_sentence(self):
-        if self._max_sentence < 1:
-            return 100000000
-        return self._max_sentence
-
-    def get_new_order(self):
-        np.random.shuffle(self.indice_in_buckets)
-        for bucket in self.indice_in_buckets:
-            np.random.shuffle(bucket)
-        indices = list(chain(*self.indice_in_buckets))
-        indices = [i for length, i in indices]
-        batches = []
-        cur_max_len = 0
-        batch = []
-        for i in indices:
-            length = self.seq_len[i]
-            max_len = max(length, cur_max_len)
-            if max_len * (len(batch) + 1) > self.max_token or len(batch) >= self.max_sentence:
-                left_sample = len(batch) % self.need_be_multiple_of
-                add_samples = batch.copy()
-                cur_max_len = length
-                if left_sample != 0:
-                    add_samples = add_samples[:-left_sample]
-                    batch = batch[-left_sample:]
-                    # track the longest sequence kept in the carried-over batch
-                    cur_max_len = max(cur_max_len, max(self.seq_len[_i] for _i in batch))
-                else:
-                    batch = []
-                if len(add_samples) == 0:
-                    raise RuntimeError(
-                        f"The sample `{i}` is too long to make a batch with {self.need_be_multiple_of} samples.")
-                batches.append(add_samples)
-            else:
-                cur_max_len = max_len
-            batch.append(i)
-        if batch:
-            left_sample = len(batch) % self.need_be_multiple_of
-            add_samples = batch.copy()
-            if left_sample != 0:
-                add_samples = add_samples[:-left_sample].copy()
-            if add_samples:
-                batches.append(add_samples)
-        np.random.shuffle(batches)
-        self.batches = batches
-
-    def __iter__(self):
-        for batch in self.batches:
-            yield batch
-        self.get_new_order()
-
-    def __len__(self):
-        return len(self.batches)
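# How the batch-closing rule above plays out on toy lengths (a sketch):
# with max_token=10 and a per-bucket order of [2, 3, 3, 4, 9], a batch closes
# as soon as max_len * (len(batch) + 1) would exceed max_token:
#   batch=[2, 3, 3], then 4 arrives: max(4, 3) * 4 = 16 > 10 -> emit [2, 3, 3]
#   batch=[4],       then 9 arrives: max(9, 4) * 2 = 18 > 10 -> emit [4]
#   batch=[9] is emitted as the final leftover batch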
-
-
-class ConstantTokenNumSampler:
-    """
-    Tries to keep the number of input tokens in each batch roughly constant.
-
-    Example
-        >>> # Assume tr_data exists and has a field seq_len storing the token count of each instance
-        >>> from fastNLP import DataSetIter, Trainer
-        >>> sampler = ConstantTokenNumSampler(tr_data.get_field('seq_len').content, max_token=4096)
-        >>> tr_iter = DataSetIter(tr_data,
-        >>>                       batch_size=1, sampler=None, as_numpy=False, num_workers=0, pin_memory=False,
-        >>>                       drop_last=False, timeout=0, worker_init_fn=None,
-        >>>                       batch_sampler=sampler)
-        >>>
-        >>> # Pass tr_iter directly to the Trainer; the batch_size argument is then ignored
-        >>> trainer = Trainer(tr_iter, model, optimizer=optimizer, loss=TranslationLoss(),
-        >>>             batch_size=1, sampler=None, drop_last=False, update_every=1)
-    """
-    def __init__(self, seq_len, max_token=4096, max_sentence=-1, need_be_multiple_of=1, num_bucket=-1):
-        """
-
-        :param List[int] seq_len: list[int], the length of each sample; usually obtained via dataset.get_field('seq_len').content
-        :param int max_token: maximum number of tokens per batch
-        :param int max_sentence: maximum number of instances per batch; -1 means it is determined by max_token
-        :param int need_be_multiple_of: the number of instances in each generated batch must be a multiple of this value; useful with DataParallel
-        :param int num_bucket: split the data into num_bucket buckets by length; samples are combined into batches within a bucket as far as possible, which reduces padding
-        """
-        assert (max_sentence!=-1 and max_sentence>=need_be_multiple_of) or max_sentence<1
-        assert len(seq_len)>num_bucket, "The number of samples should be larger than buckets."
-        self.seq_len = seq_len
-        self.max_token = max_token
-        self._max_sentence = max_sentence
-        self.need_be_multiple_of = need_be_multiple_of
-        seq_len_indice = [(length, i) for i, length in enumerate(seq_len)]
-        seq_len_indice.sort(key=lambda x: x[0])
-        indice_in_buckets = []
-        if num_bucket>0:
-            sample_per_bucket = len(seq_len_indice)//num_bucket
-            i = 0
-            while len(indice_in_buckets)<len(seq_len_indice):
-                indice_in_buckets.append(seq_len_indice[i * sample_per_bucket:(i + 1) * sample_per_bucket])
-                i += 1
-        else:
-            indice_in_buckets = [seq_len_indice]
-        self.indice_in_buckets = indice_in_buckets
-        self.get_new_order()
-
-    @property
-    def max_sentence(self):
-        if self._max_sentence < 1:
-            return 100000000
-        return self._max_sentence
-
-    def get_new_order(self):
-        np.random.shuffle(self.indice_in_buckets)
-        for bucket in self.indice_in_buckets:
-            np.random.shuffle(bucket)
-        indices = list(chain(*self.indice_in_buckets))
-        indices = [i for length, i in indices]
-        batches = []
-        cur_max_len = 0
-        batch = []
-        for i in indices:
-            length = self.seq_len[i]
-            max_len = max(length, cur_max_len)
-            if max_len * (len(batch) + 1) > self.max_token or len(batch) >= self.max_sentence:
-                left_sample = len(batch) % self.need_be_multiple_of
-                add_samples = batch.copy()
-                cur_max_len = length
-                if left_sample != 0:
-                    add_samples = add_samples[:-left_sample]
-                    batch = batch[-left_sample:]
-                    # track the longest sequence kept in the carried-over batch
-                    cur_max_len = max(cur_max_len, max(self.seq_len[_i] for _i in batch))
-                else:
-                    batch = []
-                if len(add_samples) == 0:
-                    raise RuntimeError(
-                        f"The sample `{i}` is too long to make a batch with {self.need_be_multiple_of} samples.")
-                batches.append(add_samples)
-            else:
-                cur_max_len = max_len
-            batch.append(i)
-        if batch:
-            left_sample = len(batch) % self.need_be_multiple_of
-            add_samples = batch.copy()
-            if left_sample != 0:
-                add_samples = add_samples[:-left_sample].copy()
-            if add_samples:
-                batches.append(add_samples)
-        np.random.shuffle(batches)
-        self.batches = batches
-
-    def __iter__(self):
-        for batch in self.batches:
-            yield batch
-        self.get_new_order()
-
-    def __len__(self):
-        return len(self.batches)
-
-
-class SortedSampler(Sampler):
-    r"""
-    Sorts the samples by length; mainly used at test time, where it speeds up evaluation by reducing padding.
-    """
-    def __init__(self, seq_len_field_name='seq_len', descending=True):
-        """
-
-        :param str seq_len_field_name: the field to sort by. If the field holds numbers, sort by them directly;
-            otherwise sort by the length of the field's content.
-        :param bool descending: whether to sort in descending order
-        """
-        self.seq_len_field_name = seq_len_field_name
-        self.descending = descending
-
-    def __call__(self, data_set):
-        seq_lens = data_set.get_field(self.seq_len_field_name).content
-        try:
-            seq_lens = list(map(len, seq_lens))
-        except TypeError:  # the field already holds numbers
-            pass
-
-        orders = np.argsort(seq_lens).tolist()  # ascending order
-        if self.descending:
-            orders = orders[::-1]
-        return orders
-
-
-def simple_sort_bucketing(lengths):
-    r"""
-
-    :param lengths: list of int, the lengths of all examples.
-    :return data: 2-level list
-    ::
-
-        [
-            [index_11, index_12, ...],  # bucket 1
-            [index_21, index_22, ...],  # bucket 2
-            ...
-        ]
-
-    """
-    lengths_mapping = [(idx, length) for idx, length in enumerate(lengths)]
-    sorted_lengths = sorted(lengths_mapping, key=lambda x: x[1])
-    # TODO: need to return buckets
-    return [idx for idx, _ in sorted_lengths]
-
-
-def k_means_1d(x, k, max_iter=100):
-    r"""Perform k-means on 1-D data.
-
-    :param x: list of int, representing points in 1-D.
-    :param k: the number of clusters required.
-    :param max_iter: maximum iteration
-    :return centroids: numpy array, centroids of the k clusters
-        assignment: numpy array, 1-D, the bucket id assigned to each example.
- """ - sorted_x = sorted(list(set(x))) - x = np.array(x) - if len(sorted_x) < k: - raise ValueError("too few buckets") - gap = len(sorted_x) / k - - centroids = np.array([sorted_x[int(x * gap)] for x in range(k)]) - assign = None - - for i in range(max_iter): - # Cluster Assignment step - assign = np.array([np.argmin([np.absolute(x_i - x) for x in centroids]) for x_i in x]) - # Move centroids step - new_centroids = np.array([x[assign == k].mean() for k in range(k)]) - if (new_centroids == centroids).all(): - centroids = new_centroids - break - centroids = new_centroids - return np.array(centroids), assign - - -def k_means_bucketing(lengths, buckets): - r"""Assign all instances into possible buckets using k-means, such that instances in the same bucket have similar lengths. - - :param lengths: list of int, the length of all samples. - :param buckets: list of int. The length of the list is the number of buckets. Each integer is the maximum length - threshold for each bucket (This is usually None.). - :return data: 2-level list - :: - - [ - [index_11, index_12, ...], # bucket 1 - [index_21, index_22, ...], # bucket 2 - ... - ] - - """ - bucket_data = [[] for _ in buckets] - num_buckets = len(buckets) - _, assignments = k_means_1d(lengths, num_buckets) - - for idx, bucket_id in enumerate(assignments): - if buckets[bucket_id] is None or lengths[idx] <= buckets[bucket_id]: - bucket_data[bucket_id].append(idx) - return bucket_data diff --git a/fastNLP/core/tester.py b/fastNLP/core/tester.py deleted file mode 100644 index cb05f82d..00000000 --- a/fastNLP/core/tester.py +++ /dev/null @@ -1,250 +0,0 @@ -r""" -tester模块实现了 fastNLP 所需的Tester类,能在提供数据、模型以及metric的情况下进行性能测试。 - -.. code-block:: - - import numpy as np - import torch - from torch import nn - from fastNLP import Tester - from fastNLP import DataSet - from fastNLP import AccuracyMetric - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(1, 1) - def forward(self, a): - return {'pred': self.fc(a.unsqueeze(1)).squeeze(1)} - - model = Model() - - dataset = DataSet({'a': np.arange(10, dtype=float), 'b':np.arange(10, dtype=float)*2}) - - dataset.set_input('a') - dataset.set_target('b') - - tester = Tester(dataset, model, metrics=AccuracyMetric()) - eval_results = tester.test() - -这里Metric的映射规律是和 :class:`fastNLP.Trainer` 中一致的,具体使用请参考 :mod:`trainer 模块` 的1.3部分。 -Tester在验证进行之前会调用model.eval()提示当前进入了evaluation阶段,即会关闭nn.Dropout()等,在验证结束之后会调用model.train()恢复到训练状态。 - - -""" -import time - -import torch -import torch.nn as nn - -try: - from tqdm.auto import tqdm -except: - from .utils import _pseudo_tqdm as tqdm - -from .batch import BatchIter, DataSetIter -from .dataset import DataSet -from .metrics import _prepare_metrics -from .sampler import SequentialSampler -from .utils import _CheckError -from .utils import _build_args -from .utils import _check_loss_evaluate -from .utils import _move_dict_value_to_device -from .utils import _get_func_signature -from .utils import _get_model_device -from .utils import _move_model_to_device -from .utils import _build_fp16_env -from .utils import _can_use_fp16 -from ._parallel_utils import _data_parallel_wrapper -from ._parallel_utils import _model_contains_inner_module -from functools import partial -from ._logger import logger -from .sampler import Sampler - -__all__ = [ - "Tester" -] - - -class Tester(object): - r""" - Tester是在提供数据,模型以及metric的情况下进行性能测试的类。需要传入模型,数据以及metric进行验证。 - """ - - def __init__(self, data, model, metrics, batch_size=16, num_workers=0, device=None, 
verbose=1, use_tqdm=True, - fp16=False, **kwargs): - r""" - - :param ~fastNLP.DataSet,~fastNLP.BatchIter data: 需要测试的数据集 - :param torch.nn.Module model: 使用的模型 - :param ~fastNLP.core.metrics.MetricBase,List[~fastNLP.core.metrics.MetricBase] metrics: 测试时使用的metrics - :param int batch_size: evaluation时使用的batch_size有多大。 - :param str,int,torch.device,list(int) device: 将模型load到哪个设备。默认为None,即Trainer不对模型 - 的计算位置进行管理。支持以下的输入: - - 1. str: ['cpu', 'cuda', 'cuda:0', 'cuda:1', ...] 依次为'cpu'中, 可见的第一个GPU中,可见的第一个GPU中,可见的第二个GPU中; - - 2. torch.device:将模型装载到torch.device上。 - - 3. int: 将使用device_id为该值的gpu进行训练 - - 4. list(int):如果多于1个device,将使用torch.nn.DataParallel包裹model, 并使用传入的device。 - - 5. None. 为None则不对模型进行任何处理,如果传入的model为torch.nn.DataParallel该值必须为None。 - - 如果模型是通过predict()进行预测的话,那么将不能使用多卡(DataParallel)进行验证,只会使用第一张卡上的模型。 - :param int verbose: 如果为0不输出任何信息; 如果为1,打印出验证结果。 - :param bool use_tqdm: 是否使用tqdm来显示测试进度; 如果为False,则不会显示任何内容。 - :param bool fp16: 是否使用float16进行验证 - :param kwargs: - Sampler sampler: 支持传入sampler控制测试顺序 - bool pin_memory: 是否将产生的tensor使用pin memory, 可能会加快数据速度。 - """ - super(Tester, self).__init__() - - if not isinstance(model, nn.Module): - raise TypeError(f"The type of model must be `torch.nn.Module`, got `{type(model)}`.") - - self.metrics = _prepare_metrics(metrics) - - self.data = data - self._model = _move_model_to_device(model, device=device) - self.batch_size = batch_size - self.verbose = verbose - self.use_tqdm = use_tqdm - self.logger = logger - self.pin_memory = kwargs.get('pin_memory', True) - - if isinstance(data, DataSet): - sampler = kwargs.get('sampler', None) - if sampler is None: - sampler = SequentialSampler() - elif not isinstance(sampler, (Sampler, torch.utils.data.Sampler)): - raise ValueError(f"The type of sampler should be fastNLP.BaseSampler or pytorch's Sampler, got {type(sampler)}") - if hasattr(sampler, 'set_batch_size'): - sampler.set_batch_size(batch_size) - self.data_iterator = DataSetIter(dataset=data, batch_size=batch_size, sampler=sampler, - num_workers=num_workers, - pin_memory=self.pin_memory) - elif isinstance(data, BatchIter): - self.data_iterator = data - else: - raise TypeError("data type {} not support".format(type(data))) - - # check predict - if (hasattr(self._model, 'predict') and callable(self._model.predict)) or \ - (_model_contains_inner_module(self._model) and hasattr(self._model.module, 'predict') and - callable(self._model.module.predict)): - if isinstance(self._model, nn.DataParallel): - self._predict_func_wrapper = partial(_data_parallel_wrapper('predict', - self._model.device_ids, - self._model.output_device), - network=self._model.module) - self._predict_func = self._model.module.predict # 用于匹配参数 - elif isinstance(self._model, nn.parallel.DistributedDataParallel): - self._predict_func = self._model.module.predict - self._predict_func_wrapper = self._model.module.predict # 用于调用 - else: - self._predict_func = self._model.predict - self._predict_func_wrapper = self._model.predict - else: - if _model_contains_inner_module(self._model): - self._predict_func_wrapper = self._model.forward - self._predict_func = self._model.module.forward - else: - self._predict_func = self._model.forward - self._predict_func_wrapper = self._model.forward - - if fp16: - _can_use_fp16(model=model, device=device, func=self._predict_func) - self.auto_cast, _grad_scaler = _build_fp16_env(not fp16) - - def test(self): - r"""开始进行验证,并返回验证结果。 - - :return Dict[Dict]: dict的二层嵌套结构,dict的第一层是metric的名称; 第二层是这个metric的指标。一个AccuracyMetric的例子为{'AccuracyMetric': {'acc': 1.0}}。 - """ - # 
turn on the testing mode; clean up the history - self._model_device = _get_model_device(self._model) - network = self._model - self._mode(network, is_test=True) - data_iterator = self.data_iterator - eval_results = {} - try: - with torch.no_grad(): - if not self.use_tqdm: - from .utils import _pseudo_tqdm as inner_tqdm - else: - inner_tqdm = tqdm - with inner_tqdm(total=len(data_iterator), leave=False, dynamic_ncols=True) as pbar: - pbar.set_description_str(desc="Test") - - start_time = time.time() - - for batch_x, batch_y in data_iterator: - _move_dict_value_to_device(batch_x, batch_y, device=self._model_device, - non_blocking=self.pin_memory) - with self.auto_cast(): - pred_dict = self._data_forward(self._predict_func, batch_x) - if not isinstance(pred_dict, dict): - raise TypeError(f"The return value of {_get_func_signature(self._predict_func)} " - f"must be `dict`, got {type(pred_dict)}.") - for metric in self.metrics: - metric(pred_dict, batch_y) - - if self.use_tqdm: - pbar.update() - - for metric in self.metrics: - eval_result = metric.get_metric() - if not isinstance(eval_result, dict): - raise TypeError(f"The return value of {_get_func_signature(metric.get_metric)} must be " - f"`dict`, got {type(eval_result)}") - metric_name = metric.get_metric_name() - eval_results[metric_name] = eval_result - pbar.close() - end_time = time.time() - test_str = f'Evaluate data in {round(end_time - start_time, 2)} seconds!' - if self.verbose >= 0: - self.logger.info(test_str) - except _CheckError as e: - prev_func_signature = _get_func_signature(self._predict_func) - _check_loss_evaluate(prev_func_signature=prev_func_signature, func_signature=e.func_signature, - check_res=e.check_res, pred_dict=pred_dict, target_dict=batch_y, - dataset=self.data, check_level=0) - finally: - self._mode(network, is_test=False) - if self.verbose >= 1: - logger.info("[tester] \n{}".format(self._format_eval_results(eval_results))) - return eval_results - - def _mode(self, model, is_test=False): - r"""Train mode or Test mode. This is for PyTorch currently. - - :param model: a PyTorch model - :param is_test: bool, whether in test mode or not. - - """ - if is_test: - model.eval() - else: - model.train() - - def _data_forward(self, func, x): - r"""A forward pass of the model. """ - x = _build_args(func, **x) - y = self._predict_func_wrapper(**x) - return y - - def _format_eval_results(self, results): - r"""Override this method to support more print formats. - - :param results: dict, (str: float) is (metrics name: value) - - """ - _str = '' - for metric_name, metric_result in results.items(): - _str += metric_name + ': ' - _str += ", ".join([str(key) + "=" + str(value) for key, value in metric_result.items()]) - _str += '\n' - return _str[:-1] diff --git a/fastNLP/core/trainer.py b/fastNLP/core/trainer.py deleted file mode 100644 index f4f8a093..00000000 --- a/fastNLP/core/trainer.py +++ /dev/null @@ -1,1038 +0,0 @@ -r""" -Trainer在fastNLP中用于组织单任务的训练过程,可以避免用户在不同训练任务中重复撰以下步骤的代码 - - (1) epoch循环; - - (2) 将数据分成不同的Batch; - - (3) 对Batch进行pad; - - (4) 每个epoch结束或一定step后进行验证集验证; - - (5) 保存获得更好验证性能的模型。 - - ----------------------------- -1. Trainer的基本使用 ----------------------------- - -下面的例子是使用神经网络来进行预测一个序列中是否有偶数个1。 - -.. 
code-block:: python - - import numpy as np - from torch import nn - import torch - import torch.nn.functional as F - from torch.optim import SGD - - from fastNLP import DataSet - from fastNLP import Trainer - from fastNLP import CrossEntropyLoss - from fastNLP import AccuracyMetric - from fastNLP.modules.decoder import MLP - - # 模型 - class Model(nn.Module): - def __init__(self, input_num): - super().__init__() - self.fcs = MLP([input_num, 40, 40, 2], 'relu') - - def forward(self, x): - x = self.fcs(x) - return {'pred': x} - model = Model(10) - - # 生成数据 - def generate_psedo_dataset(num_samples): - dataset = DataSet() - data = np.random.randint(2, size=(num_samples, 10)) - label = np.sum(data, axis=1)%2 - dataset = DataSet({'x':data.astype(float), 'label': label}) - dataset.set_input('x') - dataset.set_target('label') - return dataset - tr_dataset = generate_psedo_dataset(1000) - dev_data = generate_psedo_dataset(100) - - # 训练 - trainer = Trainer(tr_dataset, model, loss=CrossEntropyLoss(target='label'), - optimizer=SGD(model.parameters(), lr=0.1),n_epochs=1000, - dev_data = dev_data, metrics=AccuracyMetric(target='label')) - trainer.train() - -由上面的例子可以看出通过使用Trainer,可以使得训练部分的代码大幅减少。 -使用Trainer需要满足以下几个条件: - -1.1 模型 ----------------------------- - -1 模型的forward()的参数名需要与DataSet中的名字对应。实际上fastNLP在将DataSet中的数据传递给模型forward()时,是 -通过匹配名称实现的。所以上例中,如果Model的forward函数修改为forward(self, data), 则DataSet中的'x'这个field就应该 -改名为'data'。 - -2 传递给forward()的参数是DataSet中被设置为input的那些field。但如果forward()中没有对应的参数,则不会将数据传递 -给forward()。例如,DataSet中'x1', 'x2'都是input,但是模型的函数为forward(self, x1), 那么'x2'不会传递给forward()。 - -3 模型的forward()返回值需要为一个dict。 - -1.2 Loss ----------------------------- - -fastNLP中的为了不限制forward函数的返回内容数量(比如一些复杂任务需要返回多个内容,如Dependency Parsing, -:mod:`Loss` 与 :mod:`Metric` 都使用了通过名称来匹配相应内容的策略。如上面的例子中 - -.. code-block:: python - - trainer = Trainer(tr_dataset, model, loss=CrossEntropyLoss(target='label'), - optimizer=SGD(model.parameters(), lr=0.1),n_epochs=1000, - dev_data = dev_data, metrics=AccuracyMetric(target='label')) - -loss被设置为了 :class:`~fastNLP.CrossEntropyLoss` , 但在初始化的时候传入了target='label'这个参数, -:class:`~fastNLP.CrossEntropyLoss` 的初始化参数为(pred=None, target=None, padding_idx=-100)。 - -这里的两个参数分别为计算CrossEntropy时需要使用到的模型的预测值与真实值。 -其中 `pred` 一般来自于模型forward()的返回结果,`target` 一般是来自于DataSet中被设置为target的field。 -由于每个人对真实值或者model的返回值取名并不一样,所以fastNLP的 :mod:`Loss` 提供一种类似于映射的机制来匹配对应的值, -比如这里 :class:`~fastNLP.CrossEntropyLoss` 将尝试找到名为'label'的内容来作为真实值得到loss; -而pred=None, 则 :class:`~fastNLP.CrossEntropyLoss` 使用'pred'作为名称匹配预测值, -正好forward的返回值也叫pred,所以这里不需要申明pred。 - -尽管fastNLP使用了映射机制来使得loss的计算变得比较灵活,但有些情况下loss必须在模型中进行计算,比如使用了CRF的模型。 -fastNLP中提供了 :class:`~fastNLP.LossInForward` 这个loss。 -这个loss的原理是直接在forward()的返回结果中找到loss_key(默认寻找'loss')指定的那个tensor,并使用它作为loss。 -如果Trainer初始化没有提供loss则默认使用 :class:`~fastNLP.LossInForward` 。 - -.. todo:: - 补充一个例子 详细例子可以参照 - -1.3 Metric ----------------------------- - -:mod:`Metric` 使用了与上述Loss一样的策略,即使用名称进行匹配。 -AccuracyMetric(target='label')的情况与CrossEntropyLoss 是同理的。 - -在进行验证时,可能用到的计算与forward()中不太一致,没有办法直接从forward()的结果中得到预测值,这时模型可以提供一个predict()方法, -如果提供的模型具有predict方法,则在模型验证时将调用predict()方法获取预测结果, -传入到predict()的参数也是从DataSet中被设置为input的field中选择出来的; -与forward()一样,返回值需要为一个dict。 - -.. todo:: - 补充一个例子 具体例子可以参考 - ----------------------------- -2. Trainer的代码检查 ----------------------------- - -由于在fastNLP中采取了映射的机制,所以难免可能存在对应出错的情况。Trainer提供一种映射检查机制,可以通过check_code_level来进行控制 -比如下面的例子中,由于各种原因产生的报错 - -Example2.1 ----------------------------- - -.. 
code-block:: python - - import numpy as np - from torch import nn - import torch - from torch.optim import SGD - from fastNLP import Trainer - from fastNLP import DataSet - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(1, 1) - def forward(self, x, b): - loss = torch.mean((self.fc(x)-b)**2) - return {'loss': loss} - model = Model() - - dataset = DataSet({'a': np.arange(10), 'b':np.arange(10)*2}) - dataset.set_input('a', 'b') - - trainer = Trainer(dataset, model, loss=None, optimizer=SGD(model.parameters(), lr=0.001)) - - trainer = Trainer(dataset, model, SGD(model.parameters())) - # 会报以下的错误 - # input fields after batch(if batch size is 2): - # a: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) - # b: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) - # There is no target field. - # .... - # NameError: - # Problems occurred when calling Model.forward(self, x, b) - # missing param: ['x'] - # unused field: ['a'] - # Suggestion: You need to provide ['x'] in DataSet and set it as input. - -这里就是由于在Trainer初始化的时候,fastNLP会尝试使用一个batch_size=2的batch去运行一遍forward()以及backward()。这里有两类 -信息可以为你提供参考 - -1 'input fields after batch...'这部分显示的是train dataset经过Batch操作后,每个field对应的类型以及进行shape。这里 -因为train dataset没有target所以没有显示。根据这里可以看出是否正确将需要的内容设置为了input或target。 - -2 NameError,NameError发生在映射出错的情况。这里报错的原因是由于尝试进行forward计算时(可以通过Model.forward(self, x, b)判断 -出当前是在调取forward),却没有获取到forward()函数中需要的'x';在报错信息中同时指出了缺'x',而'a'没有被使用,那么可能 -就是由于field的名称不对。这里将dataset中'a'这个field的名称改为'x',或者model的参数从'x'修改为'a'都可以解决问题。 - -下面的例子是由于loss计算的时候找不到需要的值 - -Example2.2 ----------------------------- - -.. code-block:: python - - import numpy as np - from torch import nn - from torch.optim import SGD - from fastNLP import Trainer - from fastNLP import DataSet - from fastNLP import L1Loss - import torch - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(1, 1) - def forward(self, a): - return {'pred_b': self.fc(a.unsqueeze(1)).squeeze(1), 'No use':1} - - model = Model() - - dataset = DataSet({'a': np.arange(10, dtype=float), 'b':np.arange(10, dtype=float)*2}) - - dataset.set_input('a') - dataset.set_target('b') - - trainer = Trainer(dataset, model, loss=L1Loss(target='label'), optimizer=SGD(model.parameters(), lr=0.001)) - # 报错信息如下 - # input fields after batch(if batch size is 2): - # a: (1)type:torch.Tensor (2)dtype:torch.float32, (3)shape:torch.Size([2]) - # target fields after batch(if batch size is 2): - # b: (1)type:torch.Tensor (2)dtype:torch.float32, (3)shape:torch.Size([2]) - # .... - # NameError: - # Problems occurred when calling L1Loss.get_loss(self, pred, target) - # missing param: ['pred(assign to `pred` in `L1Loss`)', 'label(assign to `target` in `L1Loss`)'] - # unused field: ['b'] - # unused param: ['pred_b', 'No use'] - # target field: ['b'] - # param from Model.forward(self, a): ['pred_b', 'No use'] - # Suggestion: (1). Check key assignment for `target` when initialize L1Loss. Or provide `label` in DataSet or output of Model.forward(self, a). - # (2). Check key assignment for `pred` when initialize L1Loss. Or provide `pred` in DataSet or output of Model.forward(self, a). 
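
按照报错信息的修复方法如下。Following the two suggestions in the error message, Example2.2 runs once L1Loss is told which keys to read (a minimal fix that keeps the model unchanged):

.. code-block:: python

    trainer = Trainer(dataset, model, loss=L1Loss(pred='pred_b', target='b'),
                      optimizer=SGD(model.parameters(), lr=0.001))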
- -报错信息也包含两部分: - -1 第一部分与上面是一样的 - -2 这里报错的原因是由于计算loss的时候找不到相应的值(通过L1Loss.get_loss(self, pred, target)判断出来的); -报错的原因是因为 `pred` 和 `label` (我们在初始化L1Loss时将target指定为了label)都没有找到。 -这里'unused field'是DataSet中出现了,但却没有被设置为input或者target的field; -'unused param'是forward()中返回且没有被使用到的内容;'target field'是被设置为了target的field; -'param from Model.forward(self, a)'是forward()返回的所有key。"Suggestion"是关于当前错误处理的建议。 - -但是在一些情况下,比如forward()返回值只有一个,target也只有一个,fastNLP不会进行匹配,而直接将forward()的结果作为pred, -将DataSet中的target设置为target。上面的例子在返回值中加入了一个'No use'则只是为了使得Loss去匹配结果。 - - -下面是带有dev dataset时如果出现错误会发生的报错, - -Example2.3 ----------------------------- - -.. code-block:: python - - import numpy as np - from torch import nn - from torch.optim import SGD - from fastNLP import Trainer - from fastNLP import DataSet - from fastNLP import AccuracyMetric - import torch - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(1, 1) - def forward(self, a, b): - loss = torch.mean((self.fc(a.float().unsqueeze(1))-b.float())**2) - return {'loss': loss} - def predict(self, a): # 使用predict()进行验证 - return {'output':self.fc(a.float().unsqueeze(1))} #这里return的值不包含'pred'这个key - model = Model() - - dataset = DataSet({'a': np.arange(10), 'b':np.arange(10)*2}) - dev_data = DataSet({'a': np.arange(10, 20), 'b':np.arange(10, 20)*2}) - - dataset.set_input('a', 'b') - dev_data.set_input('a') # 这里没有设置target - - trainer = Trainer(dataset, model, loss=None, optimizer=SGD(model.parameters(), lr=0.001), - dev_data=dev_data, metrics=AccuracyMetric()) - - # 报错信息 - # ... - # NameError: - # Problems occurred when calling AccuracyMetric.evaluate(self, pred, target, seq_len=None) - # missing param: ['pred(assign to `pred` in `AccuracyMetric`)', 'target(assign to `target` in `AccuracyMetric`)'] - # unused param: ['output'] - # target field: [] - # param from Model.predict(self, a): ['output'] - # Suggestion: (1). Check key assignment for `pred` when initialize AccuracyMetric. Or provide `pred` in DataSet or output of Model.predict(self, a). - # (2). Check key assignment for `target` when initialize AccuracyMetric. Or provide `target` in DataSet or output of Model.predict(self, a). - -报错信息和前面都是类似的,但是可以通过'AccuracyMetric.evaluate(self, pred, target, seq_len=None)'看出这里是evaluation -的时候发生了错误。这样避免了需要在完成一整个epoch的训练才能发现evaluation弄错的情况。这里的修改是通过在初始化metric的时候 -指明通过'output'获取`pred`, 即AccuracyMetric(pred='output')。 - -可以通过check_code_level调节检查的强度。默认为0,即进行检查。 - ----------------------------- -3. 
Trainer与callback ----------------------------- - -虽然Trainer本身已经集成了一些功能,但仍然不足以囊括训练过程中可能需要到的功能,比如负采样,learning rate decay, Early Stop等。 -为了解决这个问题fastNLP引入了callback的机制,:class:`~fastNLP.Callback` 是一种在Trainer训练过程中特定阶段会运行的函数集合, -所有的 :class:`~fastNLP.Callback` 都具有on_*(比如on_train_start, on_backward_begin)等函数。 -如果 Callback 实现了该函数,则Trainer运行至对应阶段,会进行调用,例如:: - - from fastNLP import Callback, EarlyStopCallback, Trainer, CrossEntropyLoss, AccuracyMetric - from fastNLP.models import CNNText - - start_time = time.time() - - class MyCallback(Callback): - def on_epoch_end(self): - print('{:d}ms\n\n'.format(round((time.time()-start_time)*1000))) - - model = CNNText((len(vocab),50), num_classes=5, padding=2, dropout=0.1) - trainer = Trainer(model=model, train_data=train_data, dev_data=dev_data, loss=CrossEntropyLoss(), - metrics=AccuracyMetric(), callbacks=[MyCallback(),EarlyStopCallback(10)]) - trainer.train() - -这里,我们通过继承 :class:`~fastNLP.Callback` 类定义了自己的 callback 的,并和内置的 :class:`~fastNLP.EarlyStopCallback` -一起传给了 :class:`~fastNLP.Trainer` ,增强了 :class:`~fastNLP.Trainer` 的功能 - -fastNLP已经自带了很多callback函数供使用,可以参考 :mod:`fastNLP.core.callback` 。 - -""" -__all__ = [ - "Trainer" -] - -import os -import time -from datetime import datetime, timedelta - -import numpy as np -import torch -import torch.nn as nn - -try: - from tqdm.auto import tqdm -except: - from .utils import _pseudo_tqdm as tqdm -import warnings -from pkg_resources import parse_version - -from .batch import DataSetIter, BatchIter -from .callback import CallbackManager, CallbackException, Callback -from .dataset import DataSet -from .losses import _prepare_losser -from .metrics import _prepare_metrics -from .optimizer import Optimizer -from .sampler import Sampler -from .sampler import RandomSampler, ConstTokenNumSampler -from .tester import Tester -from .utils import _CheckError -from .utils import _build_args -from .utils import _check_forward_error -from .utils import _check_loss_evaluate -from .utils import _move_dict_value_to_device -from .utils import _get_func_signature -from .utils import _get_model_device -from .utils import _move_model_to_device -from .utils import _build_fp16_env -from .utils import _can_use_fp16 -from ._parallel_utils import _model_contains_inner_module -from ._logger import logger - - -class Trainer(object): - r""" - Trainer在fastNLP中用于组织单任务的训练过程,可以避免用户在不同训练任务中重复撰写 - (1) epoch循环; - (2) 将数据分成不同的Batch; - (3) 对Batch进行pad; - (4) 每个epoch结束或一定step后进行验证集验证; - (5) 保存获得更好验证性能的模型等。 - - 详细的介绍参见 :mod:`fastNLP.core.trainer` - """ - - def __init__(self, train_data, model, optimizer=None, loss=None, - batch_size=32, sampler=None, drop_last=False, update_every=1, - num_workers=0, n_epochs=10, print_every=5, - dev_data=None, metrics=None, metric_key=None, - validate_every=-1, save_path=None, use_tqdm=True, device=None, - callbacks=None, check_code_level=0, fp16=False, **kwargs): - r""" - :param train_data: 训练集, :class:`~fastNLP.DataSet` 类型或 :class:`~fastNLP.BatchIter` 的子类 - :param nn.modules model: 待训练的模型 - :param optimizer: `torch.optim.Optimizer` 优化器。如果为None,则Trainer使用默认的Adam(model.parameters(), lr=4e-3)这个优化器 - :param int batch_size: 训练和验证的时候的batch大小。 - :param loss: 使用的 :class:`~fastNLP.core.losses.LossBase` 对象。当为None时,默认使用 :class:`~fastNLP.LossInForward` - :param sampler: Batch数据生成的顺序, :class:`~fastNLP.Sampler` 类型。如果为None,默认使用 :class:`~fastNLP.RandomSampler` - :param drop_last: 如果最后一个batch没有正好为batch_size这么多数据,就扔掉最后一个batch - :param num_workers: int, 有多少个线程来进行数据pad处理。 - :param update_every: int, 多少步更新一次梯度。用于希望累计梯度的场景,比如需要128的batch_size, 
但是直接设为128 - 会导致内存不足,通过设置batch_size=32, update_every=4达到目的。当optimizer为None时,该参数无效。 - :param int n_epochs: 需要优化迭代多少次。 - :param int print_every: 多少次反向传播更新tqdm显示的loss; 如果use_tqdm=False, 则多少次反向传播打印loss。 - :param dev_data: 用于做验证的DataSet, :class:`~fastNLP.DataSet` 类型。 - :param metrics: 验证的评估函数。可以只使用一个 :class:`Metric` , - 也可以使用多个 :class:`Metric` ,通过列表传入。 - 如验证时取得了更好的验证结果(如果有多个Metric,以列表中第一个Metric为准),且save_path不为None, - 则保存当前模型。Metric种类详见 :mod:`metrics模块 ` 。仅在传入dev_data时有效。 - :param str,None metric_key: :class:`Metric` 有时会有多个指标, - 比如 :class:`~fastNLP.core.metrics.SpanFPreRecMetric` 中包含了'f', 'pre', 'rec'。此时需 - 要指定以哪个指标为准。另外有些指标是越小效果越好,比如语言模型的困惑度,这种情况下,在key前面增加一个'-'来表 - 明验证时,值越小越好(比如: "-ppl")。仅在传入dev_data时有效。 - :param int validate_every: 多少个step在验证集上验证一次; 如果为-1,则每个epoch结束验证一次。仅在传入dev_data时有效。 - :param str,None save_path: 将模型保存路径,如果路径不存在,将自动创建文件夹。如果为None,则不保存模型。如果dev_data为None,则保存 - 最后一次迭代的模型。保存的时候不仅保存了参数,还保存了模型结构。即便使用DataParallel,这里也只保存模型。 - :param bool use_tqdm: 是否使用tqdm来显示训练进度; 如果为False,则将loss打印在终端中。 - :param str,int,torch.device,list(int) device: 将模型load到哪个设备。默认为None,即Trainer不对模型 - 的计算位置进行管理。支持以下的输入: - - 1. str: ['cpu', 'cuda', 'cuda:0', 'cuda:1', ...] 依次为'cpu'中, 可见的第一个GPU中, 可见的第一个GPU中, - 可见的第二个GPU中; - - 2. torch.device:将模型装载到torch.device上。 - - 3. int: 将使用device_id为该值的gpu进行训练 - - 4. list(int):如果多于1个device,将使用torch.nn.DataParallel包裹model, 并使用传入的device。 - - 5. None. 为None则不对模型进行任何处理,如果传入的model为torch.nn.DataParallel该值必须为None。 - - 已知可能会出现的问题:Adagrad优化器可能无法正常使用这个参数,请手动管理模型位置。 - - :param list(callbacks) callbacks: 用于在train过程中起调节作用的回调函数。比如early stop,negative sampling等可以 - 通过callback机制实现。 可使用的callback参见 :mod:`callback模块 ` - :param int check_code_level: 模型检查等级. -1: 不进行检查; 0: 仅出现错误时停止; 1: 如果有field没有被使用, - 报告警告信息; 2: 有任何field没有被使用都报错. 检查的原理是通过使用很小的batch(默认2个sample)来运行代码,但是 - 这个过程理论上不会修改任何参数,只是会检查能否运行。但如果(1)模型中存在将batch_size写为某个固定值的情况; - (2)模型中存在累加前向计算次数的,可能会多计算1次。以上情况建议将check_code_level设置为-1。 - :param bool fp16: 是否使用fp16进行训练。 - :param kwargs: 支持配置可选参数 - bool test_use_tqdm: 在dev上验证的时候是否开启tqdm - Sampler test_sampler: 在evaluate的时候使用的sampler - bool test_use_fp16: evalute的时候是否使用fp16测试,默认与fp16相同的取值。 - bool set_grad_to_none: 在zero_grad的时候是否将gradient设置为None,而不是设置为zero - GradScaler grad_scaler: 仅在fp16为True时有效,如果不使用torch.cuda.amp.GradScaler的初始化参数,可传入一个已经初始化后的 - grad_scaler。 - bool pin_memory: 是否将产生的tensor使用pin memory, 可能会加快数据速度。 - """ - super(Trainer, self).__init__() - if not isinstance(model, nn.Module): - raise TypeError(f"The type of model must be torch.nn.Module, got {type(model)}.") - - # check metrics and dev_data - if (not metrics) and dev_data is not None: - raise ValueError("No metric for dev_data evaluation.") - if metrics and (dev_data is None): - raise ValueError("No dev_data for evaluations, pass dev_data or set metrics to None. ") - - # check update every - assert update_every >= 1, "update_every must be no less than 1." - self.update_every = int(update_every) - - # check save_path - if not (save_path is None or isinstance(save_path, str)): - raise ValueError("save_path can only be None or `str`.") - # prepare evaluate - metrics = _prepare_metrics(metrics) - - # parse metric_key - # increase_better is True. It means the exp result gets better if the indicator increases. - # It is true by default. 
- self.increase_better = True - if metric_key is not None: - self.increase_better = False if metric_key[0] == "-" else True - self.metric_key = metric_key[1:] if metric_key[0] == "+" or metric_key[0] == "-" else metric_key - else: - self.metric_key = None - # prepare loss - losser = _prepare_losser(loss) - - if isinstance(train_data, BatchIter): - if sampler is not None: - warnings.warn("sampler is ignored when train_data is a BatchIter.") - if num_workers>0: - warnings.warn("num_workers is ignored when train_data is BatchIter.") - if drop_last: - warnings.warn("drop_last is ignored when train_data is BatchIter.") - # concerning issue from https://github.com/pytorch/pytorch/issues/57273 - self.pin_memory = kwargs.get('pin_memory', False if parse_version(torch.__version__)==parse_version('1.9') else True) - if isinstance(model, nn.parallel.DistributedDataParallel): # 如果是分布式的 - # device为None - if device is not None: - warnings.warn("device is ignored when model is nn.parallel.DistributedDataParallel.") - device = None - # Sampler要是分布式的 - if sampler is None: - sampler = torch.utils.data.DistributedSampler(train_data) - elif not isinstance(sampler, torch.utils.data.DistributedSampler): - raise TypeError("When using nn.parallel.DistributedDataParallel, " - "sampler must be None or torch.utils.data.DistributedSampler.") - # 不能保存模型 - if save_path: - raise RuntimeError("Saving model in Distributed situation is not allowed right now.") - else: - # sampler check - if sampler is not None and not isinstance(sampler, (Sampler, torch.utils.data.Sampler)): - raise ValueError(f"The type of sampler should be fastNLP.BaseSampler or pytorch's Sampler, got {type(sampler)}") - if sampler is None: - sampler = RandomSampler() - elif hasattr(sampler, 'set_batch_size'): - sampler.set_batch_size(batch_size) - if isinstance(sampler, ConstTokenNumSampler): # 直接使用固定token数量的Sampler - assert isinstance(train_data, - DataSet), f"When sampler is `ConstTokenNumSampler`, the train_data must" \ - f" be `DataSet`." 
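            # What this branch does (a sketch): calling sampler(train_data) makes the
            # sampler pre-compute its token-budget batches; the sampler is then handed
            # to DataSetIter as batch_sampler, so the batch_size passed to the Trainer
            # is effectively ignored for batching.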
- sampler(train_data) - train_data = DataSetIter(train_data, - batch_size=1, sampler=None, as_numpy=False, num_workers=num_workers, - pin_memory=self.pin_memory, drop_last=drop_last, timeout=0, worker_init_fn=None, - batch_sampler=sampler) - - if isinstance(train_data, DataSet): - self.data_iterator = DataSetIter(dataset=train_data, batch_size=batch_size, sampler=sampler, - num_workers=num_workers, drop_last=drop_last, - pin_memory=self.pin_memory) - elif isinstance(train_data, BatchIter): - self.data_iterator = train_data - train_data = train_data.dataset - check_code_level = -1 # 强制跳过校验 - else: - raise TypeError("train_data type {} not support".format(type(train_data))) - - model.train() - self.model = _move_model_to_device(model, device=device) - if _model_contains_inner_module(self.model): - self._forward_func = self.model.module.forward - else: - self._forward_func = self.model.forward - - self.fp16 = fp16 - self.verbose = kwargs.get('verbose', 0) - - # check fp16相关的设置 - self.auto_cast, _grad_scaler = _build_fp16_env(dummy=not fp16) - self.grad_scaler = _grad_scaler() - if self.fp16: - _can_use_fp16(device=device, model=model, func=self._forward_func) - grad_scaler = kwargs.get('grad_scaler', None) - if grad_scaler is not None: - self.grad_scaler = grad_scaler - else: - self.grad_scaler = _grad_scaler() - self.test_use_fp16 = kwargs.get('test_use_fp16', fp16) - self.set_grad_to_none = kwargs.get('set_grad_to_none', True) - - if check_code_level > -1: - # _check_code 是 fastNLP 帮助你检查代码是否正确的方法 。如果你在错误栈中看到这行注释,请认真检查你的field名与模型的输入 - # 名是否匹配 - dev_dataset = dev_data - if isinstance(dev_data, BatchIter): - dev_dataset = None - warnings.warn("dev_data is of BatchIter type, ignore validation checking.") - check_batch_size = min(batch_size, DEFAULT_CHECK_BATCH_SIZE) - if isinstance(self.model, nn.DataParallel): - _num_devices = len(self.model.device_ids) - if batch_size//_num_devices>1: # 如果多卡是每个卡可以分多个数据的,则用每个卡给两个sample - check_batch_size = max(len(self.model.device_ids)*2, check_batch_size) - else: - check_batch_size = max(len(self.model.device_ids), check_batch_size) - _check_code(dataset=train_data, model=self.model, losser=losser, forward_func=self._forward_func, metrics=metrics, - dev_data=dev_dataset, metric_key=self.metric_key, check_level=check_code_level, - batch_size=check_batch_size) - - self.train_data = train_data - self.dev_data = dev_data # If None, No validation. 
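        # The optimizer dispatch below accepts three forms (a sketch of the contract):
        #   torch.optim.Optimizer  -> used as-is
        #   fastNLP Optimizer      -> materialized via construct_from_pytorch(self.model.parameters())
        #   None                   -> falls back to torch.optim.Adam(self.model.parameters(), lr=4e-3)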
- self.losser = losser - self.metrics = metrics - self.n_epochs = int(n_epochs) - self.batch_size = int(batch_size) - self.save_path = save_path - self.print_every = int(print_every) - self.validate_every = int(validate_every) if validate_every != 0 else -1 - self.best_metric_indicator = None - self.best_dev_epoch = None - self.best_dev_step = None - self.best_dev_perf = None - self.n_steps = len(self.data_iterator) * self.n_epochs - - if isinstance(optimizer, torch.optim.Optimizer): - self.optimizer = optimizer - elif isinstance(optimizer, Optimizer): - self.optimizer = optimizer.construct_from_pytorch(self.model.parameters()) - elif optimizer is None: - self.optimizer = torch.optim.Adam(self.model.parameters(), lr=4e-3) - else: - if not (hasattr(optimizer, 'step') and callable(optimizer.step)): - raise TypeError("optimizer must have a callable step() function.") - else: - self.optimizer = optimizer - - self.logger = logger - - self.use_tqdm = use_tqdm - self.test_use_tqdm = kwargs.get('test_use_tqdm', self.use_tqdm) - self.pbar = None - self.print_every = abs(self.print_every) - self.kwargs = kwargs - if self.dev_data is not None: - self.tester = Tester(model=self.model, - data=self.dev_data, - metrics=self.metrics, - batch_size=kwargs.get("dev_batch_size", self.batch_size), - device=None, # device was already handled above - verbose=0, - use_tqdm=self.test_use_tqdm, - sampler=kwargs.get('test_sampler', None), - fp16=self.test_use_fp16, - num_workers=num_workers, - pin_memory=self.pin_memory) - - self.start_time = None # start timestamp - - if isinstance(callbacks, Callback): - callbacks = [callbacks] - - self.callback_manager = CallbackManager(env={"trainer": self}, - callbacks=callbacks) - - def train(self, load_best_model=True, on_exception='auto', **kwargs): - r""" - Call this method to start training. - - :param bool load_best_model: only takes effect when dev_data was provided at initialization; if True, the trainer - reloads the parameters of the model that performed best on dev before returning. - :param str on_exception: whether to keep raising an exception met during training after it has been handled by the - on_exception() of :py:class:Callback. Supports 'ignore', 'raise', 'auto': 'ignore' swallows the exception, so code - written after Trainer.train() keeps running; 'raise' re-raises it; 'auto' ignores the two exception types - CallbackException and KeyboardInterrupt, and raises any other exception. - :param kwargs: - int verbose: if 1, print the indices (within the dataset) of the samples in the current batch when an exception occurs - :return dict: a dict with the following content:: - - seconds: float, the training time - the following three entries are only present when dev_data was provided. - best_eval: Dict of Dict, the evaluation results; the first-level key is the Metric's name, - the second-level key is the concrete metric - best_epoch: int, the epoch in which the best value was reached - best_step: int, the step (batch) at which the best value was reached - - """ - results = {} - verbose = kwargs.get('verbose', 0) - if self.n_epochs <= 0: - self.logger.info(f"training epoch is {self.n_epochs}, nothing was done.") - results['seconds'] = 0.
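Taken together, the initialization above supports a call pattern roughly like the following sketch, where `train_ds`, `dev_ds`, `model`, `loss` and `metrics` stand in for a fastNLP DataSet, model, loss and metric objects::

    trainer = Trainer(train_data=train_ds, model=model, loss=loss,
                      metrics=metrics, dev_data=dev_ds,
                      optimizer=None,        # falls back to Adam(lr=4e-3)
                      metric_key='-loss')    # smaller dev loss counts as better
    results = trainer.train(load_best_model=True)
    print(results['seconds'], results.get('best_epoch'), results.get('best_step'))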
- return results - try: - self._model_device = _get_model_device(self.model) - self._mode(self.model, is_test=False) - self._load_best_model = load_best_model - # include milliseconds so that two saves close in time get distinct names - self.start_time = str(datetime.now().strftime('%Y-%m-%d-%H-%M-%S-%f')) - start_time = time.time() - self.logger.info("training epochs started " + self.start_time) - self.step = 0 - self.epoch = 1 - try: - self.callback_manager.on_train_begin() - self._train() - self.callback_manager.on_train_end() - - except BaseException as e: - self.callback_manager.on_exception(e) - if verbose>0: - self.logger.info(f"The data indices for current batch are: {self.data_iterator.cur_batch_indices}.") - if on_exception == 'auto': - if not isinstance(e, (CallbackException, KeyboardInterrupt)): - raise e - elif on_exception == 'raise': - raise e - - if self.dev_data is not None and self.best_dev_perf is not None and load_best_model: - model_name = "best_" + "_".join([self.model.__class__.__name__, self.metric_key, self.start_time]) - load_succeed = self._load_model(self.model, model_name) - if load_succeed: - self.logger.info("Reloaded the best model.") - else: - self.logger.info("Failed to reload the best model.") - - if self.dev_data is None and self.save_path is not None: - model_name = "_".join([self.model.__class__.__name__, self.start_time]) - self._save_model(self.model, model_name) - - finally: - if self.dev_data is not None and self.best_dev_perf is not None: - self.logger.info( - "\nIn Epoch:{}/Step:{}, got best dev performance:".format(self.best_dev_epoch, self.best_dev_step)) - self.logger.info(self.tester._format_eval_results(self.best_dev_perf)) - results['best_eval'] = self.best_dev_perf - results['best_epoch'] = self.best_dev_epoch - results['best_step'] = self.best_dev_step - - results['seconds'] = round(time.time() - start_time, 2) - - return results - - def _train(self): - if not self.use_tqdm: - from .utils import _pseudo_tqdm as inner_tqdm - else: - inner_tqdm = tqdm - start = time.time() - with inner_tqdm(total=self.n_steps, postfix='loss:{0:<6.5f}', leave=False, dynamic_ncols=True, - initial=self.step) as pbar: - self.pbar = pbar - avg_loss = 0 - self.batch_per_epoch = self.data_iterator.num_batches - for epoch in range(self.epoch, self.n_epochs + 1): - self.epoch = epoch - pbar.set_description_str(desc="Epoch {}/{}".format(epoch, self.n_epochs)) - # early stopping - self.callback_manager.on_epoch_begin() - for batch_x, batch_y in self.data_iterator: - self.step += 1 - _move_dict_value_to_device(batch_x, batch_y, device=self._model_device) - indices = self.data_iterator.get_batch_indices() - # negative sampling; replace unknown; re-weight batch_y - self.callback_manager.on_batch_begin(batch_x, batch_y, indices) - prediction = self._data_forward(self.model, batch_x) - - # edit prediction - self.callback_manager.on_loss_begin(batch_y, prediction) - with self.auto_cast(): - loss = self._compute_loss(prediction, batch_y).mean() - loss = loss / self.update_every - avg_loss += loss.item() - - # Is loss NaN or inf?
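The division by `self.update_every` above is the gradient-accumulation trick: several mini-batch gradients are summed before a single optimizer step, approximating a larger batch. Stripped of the fastNLP machinery, and assuming `model`, `criterion`, `optimizer` and `loader` are already set up, the pattern is::

    update_every = 4
    optimizer.zero_grad()
    for step, (x, y) in enumerate(loader, start=1):
        # rescale so the summed gradients match those of one big batch
        loss = criterion(model(x), y) / update_every
        loss.backward()
        if step % update_every == 0:
            optimizer.step()
            optimizer.zero_grad()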
requires_grad = False - self.callback_manager.on_backward_begin(loss) - self._grad_backward(loss) - self.callback_manager.on_backward_end() - - self._update() - self.callback_manager.on_step_end() - - if self.step % self.print_every == 0: - avg_loss = float(avg_loss) / self.print_every - if self.use_tqdm: - print_output = "loss:{:<6.5f}".format(avg_loss) - pbar.update(self.print_every) - else: - end = time.time() - diff = timedelta(seconds=round(end - start)) - print_output = "[epoch: {:>3} step: {:>4}] train loss: {:>4.6} time: {}".format( - epoch, self.step, avg_loss, diff) - pbar.set_postfix_str(print_output) - avg_loss = 0 - self.callback_manager.on_batch_end() - - if (self.validate_every > 0 and self.step % self.validate_every == 0) \ - and self.dev_data is not None: - eval_res = self._do_validation(epoch=epoch, step=self.step) - eval_str = "Evaluation on dev at Epoch {}/{}. Step:{}/{}: ".format(epoch, self.n_epochs, self.step, - self.n_steps) - # pbar.write(eval_str + '\n') - self.logger.info(eval_str) - self.logger.info(self.tester._format_eval_results(eval_res)+'\n') - # ================= mini-batch end ==================== # - if self.validate_every<0 and self.dev_data is not None: # 在epoch结束之后的evaluate - eval_res = self._do_validation(epoch=epoch, step=self.step) - eval_str = "Evaluation on dev at Epoch {}/{}. Step:{}/{}: ".format(epoch, self.n_epochs, self.step, - self.n_steps) - # pbar.write(eval_str + '\n') - self.logger.info(eval_str) - self.logger.info(self.tester._format_eval_results(eval_res) + '\n') - # lr decay; early stopping - self.callback_manager.on_epoch_end() - # =============== epochs end =================== # - if self.dev_data is not None and (self.validate_every>0 and self.n_steps%self.validate_every!=0): - eval_res = self._do_validation(epoch=epoch, step=self.step) - eval_str = "Evaluation on dev at Epoch {}/{}. Step:{}/{}: ".format(epoch, self.n_epochs, self.step, - self.n_steps) - # pbar.write(eval_str + '\n') - self.logger.info(eval_str) - self.logger.info(self.tester._format_eval_results(eval_res) + '\n') - pbar.close() - self.pbar = None - # ============ tqdm end ============== # - - def _do_validation(self, epoch, step): - self.callback_manager.on_valid_begin() - res = self.tester.test() - - is_better_eval = False - if self._better_eval_result(res): - if self.save_path is not None: - self._save_model(self.model, - "best_" + "_".join([self.model.__class__.__name__, self.metric_key, self.start_time])) - elif self._load_best_model: - self._best_model_states = {name: param.cpu().clone() for name, param in self.model.state_dict().items()} - self.best_dev_perf = res - self.best_dev_epoch = epoch - self.best_dev_step = step - is_better_eval = True - # get validation results; adjust optimizer - self.callback_manager.on_valid_end(res, self.metric_key, self.optimizer, is_better_eval) - return res - - def _mode(self, model, is_test=False): - r"""Train mode or Test mode. This is for PyTorch currently. - - :param model: a PyTorch model - :param bool is_test: whether in test mode or not. - - """ - if is_test: - model.eval() - else: - model.train() - - def _update(self): - r"""Perform weight update on a model. 
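The three validation branches above implement one schedule: a positive `validate_every` evaluates every that many steps, a negative value evaluates once at the end of each epoch (a value of 0 was coerced to -1 at init), and the trailing check catches a final step that does not land on a `validate_every` boundary::

    # validate_every = 100  -> evaluate at step 100, 200, ..., plus a final catch-up
    # validate_every = -1   -> evaluate after every epoch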
- - """ - if self.step % self.update_every == 0: - self.grad_scaler.step(self.optimizer) - self.grad_scaler.update() - - def _data_forward(self, network, x): - x = _build_args(self._forward_func, **x) - with self.auto_cast(): - y = network(**x) - if not isinstance(y, dict): - raise TypeError( - f"The return value of {_get_func_signature(self._forward_func)} should be dict, got {type(y)}.") - return y - - def _grad_backward(self, loss): - r"""Compute gradient with link rules. - - :param loss: a scalar where back-prop starts - - For PyTorch, just do "loss.backward()" - """ - if (self.step-1) % self.update_every == 0: - self._clear_grad(self.optimizer, self.set_grad_to_none) - self.grad_scaler.scale(loss).backward() - - def _clear_grad(self, optimizer, set_to_none=True): - param_groups = optimizer.param_groups - for group in param_groups: - for p in group['params']: - if p.grad is not None: - if set_to_none: - p.grad = None - else: - if p.grad.grad_fn is not None: - p.grad.detach_() - else: - p.grad.requires_grad_(False) - p.grad.zero_() - - def _compute_loss(self, predict, truth): - r"""Compute loss given prediction and ground truth. - - :param predict: prediction dict, produced by model.forward - :param truth: ground truth dict, produced by batch_y - :return: a scalar - """ - return self.losser(predict, truth) - - def _save_model(self, model, model_name, only_param=False): - r""" 存储不含有显卡信息的state_dict或model - :param model: - :param model_name: - :param only_param: - :return: - """ - if self.save_path is not None: - model_path = os.path.join(self.save_path, model_name) - if not os.path.exists(self.save_path): - os.makedirs(self.save_path, exist_ok=True) - if _model_contains_inner_module(model): - model = model.module - if only_param: - state_dict = model.state_dict() - for key in state_dict: - state_dict[key] = state_dict[key].cpu() - torch.save(state_dict, model_path) - else: - model.cpu() - torch.save(model, model_path) - model.to(self._model_device) - - def _load_model(self, model, model_name, only_param=False): - # 返回bool值指示是否成功reload模型 - if self.save_path is not None: - model_path = os.path.join(self.save_path, model_name) - if only_param: - states = torch.load(model_path) - else: - states = torch.load(model_path).state_dict() - if _model_contains_inner_module(model): - model.module.load_state_dict(states) - else: - model.load_state_dict(states) - elif hasattr(self, "_best_model_states"): - model.load_state_dict(self._best_model_states) - else: - return False - return True - - def _better_eval_result(self, metrics): - r"""Check if the current epoch yields better validation results. - - :return bool value: True means current results on dev set is the best. - """ - indicator, indicator_val = _check_eval_results(metrics, self.metric_key, self.metrics) - if self.metric_key is None: - self.metric_key = indicator - is_better = True - if self.best_metric_indicator is None: - # first-time validation - self.best_metric_indicator = indicator_val - else: - if self.increase_better is True: - if indicator_val > self.best_metric_indicator: - self.best_metric_indicator = indicator_val - else: - is_better = False - else: - if indicator_val < self.best_metric_indicator: - self.best_metric_indicator = indicator_val - else: - is_better = False - return is_better - - @property - def is_master(self): - r"""是否是主进程""" - return True - -DEFAULT_CHECK_BATCH_SIZE = 2 -DEFAULT_CHECK_NUM_BATCH = 2 - - -def _get_value_info(_dict): - # given a dict value, return information about this dict's value. 
Return list of str - strs = [] - for key, value in _dict.items(): - _str = '' - if isinstance(value, torch.Tensor): - _str += "\t{}: (1)type:torch.Tensor (2)dtype:{}, (3)shape:{} ".format(key, - value.dtype, value.size()) - elif isinstance(value, np.ndarray): - _str += "\t{}: (1)type:numpy.ndarray (2)dtype:{}, (3)shape:{} ".format(key, - value.dtype, value.shape) - else: - _str += "\t{}: type:{}".format(key, type(value)) - strs.append(_str) - return strs - - -def _check_code(dataset, model, losser, metrics, forward_func, batch_size=DEFAULT_CHECK_BATCH_SIZE, - dev_data=None, metric_key=None, check_level=0): - # check get_loss 方法 - model_device = _get_model_device(model=model) - _iter = DataSetIter(dataset, batch_size=batch_size, sampler=None) - - for batch_count, (batch_x, batch_y) in enumerate(_iter): - _move_dict_value_to_device(batch_x, batch_y, device=model_device) - # forward check - if batch_count == 0: - info_str = "" - input_fields = _get_value_info(batch_x) - target_fields = _get_value_info(batch_y) - if len(input_fields) > 0: - info_str += "input fields after batch(if batch size is {}):\n".format(batch_size) - info_str += "\n".join(input_fields) - info_str += '\n' - else: - raise RuntimeError("There is no input field.") - if len(target_fields) > 0: - info_str += "target fields after batch(if batch size is {}):\n".format(batch_size) - info_str += "\n".join(target_fields) - info_str += '\n' - else: - info_str += 'There is no target field.' - logger.info(info_str) - _check_forward_error(forward_func=forward_func, dataset=dataset, - batch_x=batch_x, check_level=check_level) - refined_batch_x = _build_args(forward_func, **batch_x) - pred_dict = model(**refined_batch_x) - func_signature = _get_func_signature(forward_func) - if not isinstance(pred_dict, dict): - raise TypeError(f"The return value of {func_signature} should be `dict`, not `{type(pred_dict)}`.") - - # loss check - try: - loss = losser(pred_dict, batch_y) - # check loss output - if batch_count == 0: - if not isinstance(loss, torch.Tensor): - raise TypeError( - f"The return value of {_get_func_signature(losser.get_loss)} should be `torch.Tensor`, " - f"but got `{type(loss)}`.") - if len(loss.size()) != 0: - raise ValueError( - f"The size of return value of {_get_func_signature(losser.get_loss)} is {loss.size()}, " - f"should be torch.size([])") - loss.backward() - except _CheckError as e: - # TODO: another error raised if _CheckError caught - pre_func_signature = _get_func_signature(forward_func) - _check_loss_evaluate(prev_func_signature=pre_func_signature, func_signature=e.func_signature, - check_res=e.check_res, pred_dict=pred_dict, target_dict=batch_y, - dataset=dataset, check_level=check_level) - model.zero_grad() - if batch_count + 1 >= DEFAULT_CHECK_NUM_BATCH: - break - - if dev_data is not None: - tester = Tester(data=dev_data[:batch_size * DEFAULT_CHECK_NUM_BATCH], model=model, metrics=metrics, - batch_size=batch_size, verbose=-1, use_tqdm=False) - evaluate_results = tester.test() - _check_eval_results(metrics=evaluate_results, metric_key=metric_key, metric_list=metrics) - - -def _check_eval_results(metrics, metric_key, metric_list): - # metrics: tester返回的结果 - # metric_key: 一个用来做筛选的指标,来自Trainer的初始化 - # metric_list: 多个用来做评价的指标,来自Trainer的初始化 - if isinstance(metrics, tuple): - loss, metrics = metrics - - if isinstance(metrics, dict): - metric_dict = list(metrics.values())[0] # 取第一个metric - - if metric_key is None: - indicator_val, indicator = list(metric_dict.values())[0], list(metric_dict.keys())[0] - else: - # 
metric_key is set - if metric_key not in metric_dict: - raise RuntimeError(f"metric key {metric_key} not found in {metric_dict}") - indicator_val = metric_dict[metric_key] - indicator = metric_key - else: - raise RuntimeError("Invalid metrics type. Expect {}, got {}".format((tuple, dict), type(metrics))) - return indicator, indicator_val diff --git a/fastNLP/core/utils.py b/fastNLP/core/utils.py deleted file mode 100644 index a7a286d0..00000000 --- a/fastNLP/core/utils.py +++ /dev/null @@ -1,1120 +0,0 @@ -r""" -utils模块实现了 fastNLP 内部和外部所需的很多工具。其中用户可以使用的是 :func:`cache_results` 修饰器。 -""" - -__all__ = [ - "cache_results", - "seq_len_to_mask", - "get_seq_len" -] - -import inspect -import os -import warnings -from collections import Counter, namedtuple -from typing import List - -import _pickle -import numpy as np -import torch.nn as nn -from prettytable import PrettyTable - -from ._logger import logger -from ._parallel_utils import _model_contains_inner_module -# from .vocabulary import Vocabulary -import torch -import contextlib -from pkg_resources import parse_version - - -_CheckRes = namedtuple('_CheckRes', ['missing', 'unused', 'duplicated', 'required', 'all_needed', - 'varargs']) - - -class ConfusionMatrix: - r"""a dict can provide Confusion Matrix""" - def __init__(self, show_result=None,vocab=None, print_ratio=False): - r""" - :param show_result: list type, 数据类型需要和target保持一致 - :param vocab: 需要有to_word方法,建议直接使用Fastnlp.core.Vocabulary。 - :param print_ratio: 限制print的输出,False只输出数量Confusion Matrix, True还会输出百分比Confusion Matrix, 分别为行/列 - """ - if vocab and not hasattr(vocab, "to_word"): - raise TypeError( - f"`vocab` in {_get_func_signature(self.__init__)} must be Fastnlp.core.Vocabulary," - f"got {type(vocab)}.") - self.confusiondict = {} # key: pred index, value:target word ocunt - self.predcount = {} # key:pred index, value:count - self.targetcount = {} # key:target index, value:count - self.show_result = show_result - self.vocab = vocab - self.print_ratio = print_ratio - - def add_pred_target(self, pred, target): # 一组结果 - r""" - 通过这个函数向ConfusionMatrix加入一组预测结果 - :param list pred: 预测的标签列表 - :param list target: 真实值的标签列表 - :return ConfusionMatrix - confusion=ConfusionMatrix() - pred = [2,1,3] - target = [2,2,1] - confusion.add_pred_target(pred, target) - print(confusion) - - target 1 2 3 all - pred - 1 0 1 0 1 - 2 0 1 0 1 - 3 1 0 0 1 - all 1 2 0 3 - """ - for p, t in zip(pred, target): # - self.predcount[p] = self.predcount.get(p, 0) + 1 - self.targetcount[t] = self.targetcount.get(t, 0) + 1 - if p in self.confusiondict: - self.confusiondict[p][t] = self.confusiondict[p].get(t, 0) + 1 - else: - self.confusiondict[p] = {} - self.confusiondict[p][t] = 1 - return self.confusiondict - - def clear(self): - r""" - 清空ConfusionMatrix,等待再次新加入 - :return: - """ - self.confusiondict = {} - self.targetcount = {} - self.predcount = {} - - def get_result(self): - r""" - :return list output: ConfusionMatrix content,具体值与汇总统计 - """ - row2idx = {} - idx2row = {} - # 已知的所有键/label - totallabel = sorted( - list( - set(self.targetcount.keys()).union(set( - self.predcount.keys())))) - lenth = len(totallabel) - - for label, idx in zip(totallabel, range(lenth)): - idx2row[ - label] = idx # 建立一个临时字典,key:vocab的index, value: 行列index 1,3,5...->0,1,2,... - row2idx[ - idx] = label # 建立一个临时字典,value:vocab的index, key: 行列index 0,1,2...->1,3,5,... 
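The two temporary dicts above have easily confused names: `idx2row` maps a label id to its row/column position, while `row2idx` maps the position back to the label id. For labels {1, 3, 5} the mapping would be::

    totallabel = [1, 3, 5]            # sorted union of predicted and target labels
    idx2row = {1: 0, 3: 1, 5: 2}      # label id -> row/column position
    row2idx = {0: 1, 1: 3, 2: 5}      # row/column position -> label id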
- output = [] - for i in row2idx.keys(): # 第i行 - p = row2idx[i] - l = [0 for _ in range(lenth)] - if self.confusiondict.get(p, None): - for t, c in self.confusiondict[p].items(): - l[idx2row[t]] = c # 完成一行 - l = [n for n in l] + [sum(l)] - output.append(l) - tail = [self.targetcount.get(row2idx[k], 0) for k in row2idx.keys()] - tail += [sum(tail)] - output.append(tail) - return output - - def get_percent(self, dim=0): - r""" - :param dim int: 0/1, 0 for row,1 for column - :return list output: ConfusionMatrix content,具体值与汇总统计 - """ - result = self.get_result() - if dim == 0: - tmp = np.array(result) - tmp = tmp / (tmp[:, -1].reshape([len(result), -1])) - tmp[np.isnan(tmp)] = 0 - tmp = tmp * 100 - elif dim == 1: - tmp = np.array(result).T - tmp = tmp / (tmp[:, -1].reshape([len(result), -1]) + 1e-12) - tmp = tmp.T * 100 - tmp = np.around(tmp, decimals=2) - return tmp.tolist() - - def get_aligned_table(self, data, flag="result"): - r""" - :param data: highly recommend use get_percent/ get_result return as dataset here, or make sure data is a n*n list type data - :param flag: only difference between result and other words is whether "%" is in output string - :return: an aligned_table ready to print out - """ - row2idx = {} - idx2row = {} - # 已知的所有键/label - totallabel = sorted( - list( - set(self.targetcount.keys()).union(set( - self.predcount.keys())))) - lenth = len(totallabel) - # namedict key :label idx value: str label name/label idx - namedict = dict([ - (k, str(k if self.vocab == None else self.vocab.to_word(k))) - for k in totallabel - ]) - for label, lineidx in zip(totallabel, range(lenth)): - idx2row[ - label] = lineidx # 建立一个临时字典,key:vocab的index, value: 行列index 1,3,5...->0,1,2,... - row2idx[ - lineidx] = label # 建立一个临时字典,key: 行列index 0,1,2...->1,3,5,...,value:vocab的index, - # 这里打印东西 - out = str() - output = [] - # 表头 - head = (["target"] + - [str(namedict[row2idx[k]]) for k in row2idx.keys()] + ["all"]) - col_lenths = [len(h) for h in head] - output.append(head) - output.append(["pred"]) - # 内容 - for i in row2idx.keys(): # 第i行 - p = row2idx[i] - h = namedict[p] - l = [h] + [[str(n) + "%", str(n)][flag == "result"] - for n in data[i]] - col_lenths = [ - max(col_lenths[idx], [len(i) for i in l][idx]) - for idx in range(len(col_lenths)) - ] - output.append(l) - - tail = ["all"] + [[str(n) + "%", str(n)][flag == "result"] - for n in data[-1]] - col_lenths = [ - max(col_lenths[idx], [len(i) for i in tail][idx]) - for idx in range(len(col_lenths)) - ] - output.append(tail) - - if self.show_result: - missing_item=[] - missing_item = [i for i in self.show_result if i not in idx2row] - self.show_result = [i for i in self.show_result if i in idx2row] - if missing_item: - print(f"Noticing label(s) which is/are not in target list appeared, final output string will not contain{str(missing_item)}") - if self.show_result: - show_col = [0] + [i + 1 for i in [idx2row[i] for i in self.show_result]] - show_row = [0]+[i+2 for i in [idx2row[i] for i in self.show_result]] - output = [[row[col] for col in show_col] for row in [output[row] for row in show_row]] - output.insert(1,["pred"]) - for line in output: - for colidx in range(len(line)): - out += "%*s" % (col_lenths[colidx], line[colidx]) + "\t" - out += "\n" - return "\n" + out - - def __repr__(self): - r""" - :return string output: ConfusionMatrix的格式化输出,包括表头各标签字段,具体值与汇总统计。 - """ - result = self.get_result() - o0 = self.get_aligned_table(result, flag="result") - - out = str() - if self.print_ratio: - p1 = self.get_percent() - o1 = "\nNotice the row 
direction\n" + self.get_aligned_table( - p1, flag="percent") - p2 = self.get_percent(dim=1) - o2 = "\nNotice the column direction\n" + self.get_aligned_table( - p2, flag="percent") - out = out + o0 + o1 + o2 - else: - out = o0 - return out - - - -class Option(dict): - r"""a dict can treat keys as attributes""" - - def __getattr__(self, item): - try: - return self.__getitem__(item) - except KeyError: - raise AttributeError(item) - - def __setattr__(self, key, value): - if key.startswith('__') and key.endswith('__'): - raise AttributeError(key) - self.__setitem__(key, value) - - def __delattr__(self, item): - try: - self.pop(item) - except KeyError: - raise AttributeError(item) - - def __getstate__(self): - return self - - def __setstate__(self, state): - self.update(state) - - -def _prepare_cache_filepath(filepath): - r""" - 检查filepath是否可以作为合理的cache文件. 如果可以的话,会自动创造路径 - :param filepath: str. - :return: None, if not, this function will raise error - """ - _cache_filepath = os.path.abspath(filepath) - if os.path.isdir(_cache_filepath): - raise RuntimeError("The cache_file_path must be a file, not a directory.") - cache_dir = os.path.dirname(_cache_filepath) - if not os.path.exists(cache_dir): - os.makedirs(cache_dir, exist_ok=True) - - -def cache_results(_cache_fp, _refresh=False, _verbose=1): - r""" - cache_results是fastNLP中用于cache数据的装饰器。通过下面的例子看一下如何使用:: - - import time - import numpy as np - from fastNLP import cache_results - - @cache_results('cache.pkl') - def process_data(): - # 一些比较耗时的工作,比如读取数据,预处理数据等,这里用time.sleep()代替耗时 - time.sleep(1) - return np.random.randint(10, size=(5,)) - - start_time = time.time() - print("res =",process_data()) - print(time.time() - start_time) - - start_time = time.time() - print("res =",process_data()) - print(time.time() - start_time) - - # 输出内容如下,可以看到两次结果相同,且第二次几乎没有花费时间 - # Save cache to cache.pkl. - # res = [5 4 9 1 8] - # 1.0042750835418701 - # Read cache from cache.pkl. - # res = [5 4 9 1 8] - # 0.0040721893310546875 - - 可以看到第二次运行的时候,只用了0.0001s左右,是由于第二次运行将直接从cache.pkl这个文件读取数据,而不会经过再次预处理:: - - # 还是以上面的例子为例,如果需要重新生成另一个cache,比如另一个数据集的内容,通过如下的方式调用即可 - process_data(_cache_fp='cache2.pkl') # 完全不影响之前的‘cache.pkl' - - 上面的_cache_fp是cache_results会识别的参数,它将从'cache2.pkl'这里缓存/读取数据,即这里的'cache2.pkl'覆盖默认的 - 'cache.pkl'。如果在你的函数前面加上了@cache_results()则你的函数会增加三个参数[_cache_fp, _refresh, _verbose]。 - 上面的例子即为使用_cache_fp的情况,这三个参数不会传入到你的函数中,当然你写的函数参数名也不可能包含这三个名称:: - - process_data(_cache_fp='cache2.pkl', _refresh=True) # 这里强制重新生成一份对预处理的cache。 - # _verbose是用于控制输出信息的,如果为0,则不输出任何内容;如果为1,则会提醒当前步骤是读取的cache还是生成了新的cache - - :param str _cache_fp: 将返回结果缓存到什么位置;或从什么位置读取缓存。如果为None,cache_results没有任何效用,除非在 - 函数调用的时候传入_cache_fp这个参数。 - :param bool _refresh: 是否重新生成cache。 - :param int _verbose: 是否打印cache的信息。 - :return: - """ - - def wrapper_(func): - signature = inspect.signature(func) - for key, _ in signature.parameters.items(): - if key in ('_cache_fp', '_refresh', '_verbose'): - raise RuntimeError("The function decorated by cache_results cannot have keyword `{}`.".format(key)) - - def wrapper(*args, **kwargs): - if '_cache_fp' in kwargs: - cache_filepath = kwargs.pop('_cache_fp') - assert isinstance(cache_filepath, str), "_cache_fp can only be str." - else: - cache_filepath = _cache_fp - if '_refresh' in kwargs: - refresh = kwargs.pop('_refresh') - assert isinstance(refresh, bool), "_refresh can only be bool." - else: - refresh = _refresh - if '_verbose' in kwargs: - verbose = kwargs.pop('_verbose') - assert isinstance(verbose, int), "_verbose can only be integer." 
- else: - verbose = _verbose - refresh_flag = True - - if cache_filepath is not None and refresh is False: - # load data - if os.path.exists(cache_filepath): - with open(cache_filepath, 'rb') as f: - results = _pickle.load(f) - if verbose == 1: - logger.info("Read cache from {}.".format(cache_filepath)) - refresh_flag = False - - if refresh_flag: - results = func(*args, **kwargs) - if cache_filepath is not None: - if results is None: - raise RuntimeError("The return value is None. Delete the decorator.") - _prepare_cache_filepath(cache_filepath) - with open(cache_filepath, 'wb') as f: - _pickle.dump(results, f) - logger.info("Save cache to {}.".format(cache_filepath)) - - return results - - return wrapper - - return wrapper_ - - -def _save_model(model, model_name, save_dir, only_param=False): - r""" 存储不含有显卡信息的state_dict或model - :param model: - :param model_name: - :param save_dir: 保存的directory - :param only_param: - :return: - """ - model_path = os.path.join(save_dir, model_name) - if not os.path.isdir(save_dir): - os.makedirs(save_dir, exist_ok=True) - if _model_contains_inner_module(model): - model = model.module - if only_param: - state_dict = model.state_dict() - for key in state_dict: - state_dict[key] = state_dict[key].cpu() - torch.save(state_dict, model_path) - else: - _model_device = _get_model_device(model) - model.cpu() - torch.save(model, model_path) - model.to(_model_device) - - -def _move_model_to_device(model, device): - r""" - 将model移动到device - - :param model: torch.nn.DataParallel or torch.nn.Module. 当为torch.nn.DataParallel, 则只是调用一次cuda。device必须为 - None。 - :param str,int,torch.device,list(int),list(torch.device) device: 将模型load到哪个设备。默认为None,即Trainer不对模型 - 的计算位置进行管理。支持以下的输入: - - 1. str: ['cpu', 'cuda', 'cuda:0', 'cuda:1', ...] 依次为'cpu'中, 可见的第一个GPU中, 可见的第一个GPU中, - 可见的第二个GPU中; - - 2. torch.device:将模型装载到torch.device上。 - - 3. int: 将使用device_id为该值的gpu进行训练 - - 4. list(int):如果多于1个device,将使用torch.nn.DataParallel包裹model, 并使用传入的device。 - - 5. None. 为None则不对模型进行任何处理,如果传入的model为torch.nn.DataParallel该值必须为None。 - - :return: torch.nn.DataParallel or torch.nn.Module - """ - # if isinstance(model, torch.nn.parallel.DistributedDataParallel): - # raise RuntimeError("model of `torch.nn.parallel.DistributedDataParallel` is not supported right now.") - - if device is None: - if isinstance(model, torch.nn.DataParallel): - model.cuda(model.device_ids[0]) - return model - else: - if not torch.cuda.is_available() and ((isinstance(device, str) and device!='cpu') or - (isinstance(device, torch.device) and device.type != 'cpu')): - raise ValueError("There is no usable gpu. 
set `device` as `cpu` or `None`.") - - if isinstance(model, torch.nn.DataParallel): - raise RuntimeError("When model is `torch.nn.DataParallel`, the device has to be `None`.") - - if isinstance(device, int): - assert device > -1, "device can only be non-negative integer" - assert torch.cuda.device_count() > device, "Only has {} gpus, cannot use device {}.".format( - torch.cuda.device_count(), - device) - device = torch.device('cuda:{}'.format(device)) - elif isinstance(device, str): - device = torch.device(device) - if device.type == 'cuda' and device.index is not None: - assert device.index < torch.cuda.device_count(), "Only has {} gpus, cannot use device cuda:{}.".format( - torch.cuda.device_count(), - device) - elif isinstance(device, torch.device): - if device.type == 'cuda' and device.index is not None: - assert device.index < torch.cuda.device_count(), "Only has {} gpus, cannot use device cuda:{}.".format( - torch.cuda.device_count(), - device) - elif isinstance(device, list): - types = set([type(d) for d in device]) - assert len(types) == 1, "Mixed type in device, only `int` allowed." - assert list(types)[0] == int, "Only int supported for multiple devices." - assert len(set(device)) == len(device), "Duplicated device id found in device." - for d in device: - assert d > -1, "Only non-negative device id allowed." - if len(device) > 1: - output_device = device[0] - model = nn.DataParallel(model, device_ids=device, output_device=output_device) - device = torch.device(device[0]) - else: - raise TypeError("Unsupported device type.") - model = model.to(device) - return model - - -def _get_model_device(model): - r""" - 传入一个nn.Module的模型,获取它所在的device - - :param model: nn.Module - :return: torch.device,None 如果返回值为None,说明这个模型没有任何参数。 - """ - # TODO 这个函数存在一定的风险,因为同一个模型可能存在某些parameter不在显卡中,比如BertEmbedding. 或者跨显卡 - assert isinstance(model, nn.Module) - - parameters = list(model.parameters()) - if len(parameters) == 0: - return None - else: - return parameters[0].device - - -def _build_args(func, **kwargs): - r""" - 根据func的初始化参数,从kwargs中选择func需要的参数 - - :param func: callable - :param kwargs: 参数 - :return:dict. 
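The `device` argument accepted by `_move_model_to_device` above comes in several equivalent spellings; assuming the corresponding GPUs exist, the following calls are all valid::

    _move_model_to_device(model, 'cuda:0')                # device string
    _move_model_to_device(model, 0)                       # gpu id as int
    _move_model_to_device(model, torch.device('cuda:0'))  # torch.device
    _move_model_to_device(model, [0, 1])                  # wraps model in nn.DataParallel
    _move_model_to_device(model, None)                    # leaves placement untouched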
func中用到的参数 - """ - spect = inspect.getfullargspec(func) - if spect.varkw is not None: - return kwargs - needed_args = set(spect.args) - defaults = [] - if spect.defaults is not None: - defaults = [arg for arg in spect.defaults] - start_idx = len(spect.args) - len(defaults) - output = {name: default for name, default in zip(spect.args[start_idx:], defaults)} - output.update({name: val for name, val in kwargs.items() if name in needed_args}) - return output - - -def _map_args(maps: dict, **kwargs): - # maps: key=old name, value= new name - output = {} - for name, val in kwargs.items(): - if name in maps: - assert isinstance(maps[name], str) - output.update({maps[name]: val}) - else: - output.update({name: val}) - for keys in maps.keys(): - if keys not in output.keys(): - pass - return output - - -def _get_arg_list(func): - assert callable(func) - spect = inspect.getfullargspec(func) - if spect.defaults is not None: - args = spect.args[: -len(spect.defaults)] - defaults = spect.args[-len(spect.defaults):] - defaults_val = spect.defaults - else: - args = spect.args - defaults = None - defaults_val = None - varargs = spect.varargs - kwargs = spect.varkw - return args, defaults, defaults_val, varargs, kwargs - - -# check args -def _check_arg_dict_list(func, args): - if isinstance(args, dict): - arg_dict_list = [args] - else: - arg_dict_list = args - assert callable(func) and isinstance(arg_dict_list, (list, tuple)) - assert len(arg_dict_list) > 0 and isinstance(arg_dict_list[0], dict) - spect = inspect.getfullargspec(func) - all_args = set([arg for arg in spect.args if arg != 'self']) - defaults = [] - if spect.defaults is not None: - defaults = [arg for arg in spect.defaults] - start_idx = len(spect.args) - len(defaults) - default_args = set(spect.args[start_idx:]) - require_args = all_args - default_args - input_arg_count = Counter() - for arg_dict in arg_dict_list: - input_arg_count.update(arg_dict.keys()) - duplicated = [name for name, val in input_arg_count.items() if val > 1] - input_args = set(input_arg_count.keys()) - missing = list(require_args - input_args) - unused = list(input_args - all_args) - varargs = [] if not spect.varargs else [spect.varargs] - return _CheckRes(missing=missing, - unused=unused, - duplicated=duplicated, - required=list(require_args), - all_needed=list(all_args), - varargs=varargs) - - -def _get_func_signature(func): - r""" - - Given a function or method, return its signature. - For example: - - 1 function:: - - def func(a, b='a', *args): - xxxx - get_func_signature(func) # 'func(a, b='a', *args)' - - 2 method:: - - class Demo: - def __init__(self): - xxx - def forward(self, a, b='a', **args) - demo = Demo() - get_func_signature(demo.forward) # 'Demo.forward(self, a, b='a', **args)' - - :param func: a function or a method - :return: str or None - """ - if inspect.ismethod(func): - class_name = func.__self__.__class__.__name__ - signature = inspect.signature(func) - signature_str = str(signature) - if len(signature_str) > 2: - _self = '(self, ' - else: - _self = '(self' - signature_str = class_name + '.' 
+ func.__name__ + _self + signature_str[1:] - return signature_str - elif inspect.isfunction(func): - signature = inspect.signature(func) - signature_str = str(signature) - signature_str = func.__name__ + signature_str - return signature_str - - -def _is_function_or_method(func): - r""" - - :param func: - :return: - """ - if not inspect.ismethod(func) and not inspect.isfunction(func): - return False - return True - - -def _check_function_or_method(func): - if not _is_function_or_method(func): - raise TypeError(f"{type(func)} is not a method or function.") - - -def _move_dict_value_to_device(*args, device: torch.device, non_blocking=False): - r""" - - move data to model's device, element in *args should be dict. This is a inplace change. - :param device: torch.device - :param non_blocking: bool, 是否异步将数据转移到cpu, 需要tensor使用pin_memory() - :param args: - :return: - """ - if not torch.cuda.is_available() or device is None: - return - - if not isinstance(device, torch.device): - raise TypeError(f"device must be `torch.device`, got `{type(device)}`") - - for arg in args: - if isinstance(arg, dict): - for key, value in arg.items(): - if isinstance(value, torch.Tensor): - arg[key] = value.to(device, non_blocking=non_blocking) - else: - raise TypeError("Only support `dict` type right now.") - - -class _CheckError(Exception): - r""" - - _CheckError. Used in losses.LossBase, metrics.MetricBase. - """ - - def __init__(self, check_res: _CheckRes, func_signature: str): - errs = [f'Problems occurred when calling `{func_signature}`'] - - if check_res.varargs: - errs.append(f"\tvarargs: {check_res.varargs}(Does not support pass positional arguments, please delete it)") - if check_res.missing: - errs.append(f"\tmissing param: {check_res.missing}") - if check_res.duplicated: - errs.append(f"\tduplicated param: {check_res.duplicated}") - if check_res.unused: - errs.append(f"\tunused param: {check_res.unused}") - - Exception.__init__(self, '\n'.join(errs)) - - self.check_res = check_res - self.func_signature = func_signature - - -IGNORE_CHECK_LEVEL = 0 -WARNING_CHECK_LEVEL = 1 -STRICT_CHECK_LEVEL = 2 - - -def _check_loss_evaluate(prev_func_signature: str, func_signature: str, check_res: _CheckRes, - pred_dict: dict, target_dict: dict, dataset, check_level=0): - errs = [] - unuseds = [] - _unused_field = [] - _unused_param = [] - suggestions = [] - # if check_res.varargs: - # errs.append(f"\tvarargs: *{check_res.varargs}") - # suggestions.append(f"Does not support pass positional arguments, please delete *{check_res.varargs}.") - - if check_res.unused: - for _unused in check_res.unused: - if _unused in target_dict: - _unused_field.append(_unused) - else: - _unused_param.append(_unused) - if _unused_field: - unuseds.append(f"\tunused field: {_unused_field}") - if _unused_param: - unuseds.append(f"\tunused param: {_unused_param}") # output from predict or forward - - module_name = func_signature.split('.')[0] - if check_res.missing: - errs.append(f"\tmissing param: {check_res.missing}") - import re - mapped_missing = [] # 提供了映射的参数 - unmapped_missing = [] # 没有指定映射的参数 - input_func_map = {} - for _miss_ in check_res.missing: - # they shoudl like 'SomeParam(assign to xxx)' - _miss = _miss_.split('(')[0] - matches = re.findall("(?<=`)[a-zA-Z0-9]*?(?=`)", _miss_) - if len(matches) == 2: - fun_arg, module_name = matches - input_func_map[_miss] = fun_arg - if fun_arg == _miss: - unmapped_missing.append(_miss) - else: - mapped_missing.append(_miss) - else: - unmapped_missing.append(_miss) - - for _miss in mapped_missing + 
unmapped_missing: - if _miss in dataset: - suggestions.append(f"Set `{_miss}` as target.") - else: - _tmp = '' - if check_res.unused: - _tmp = f"Check key assignment for `{input_func_map.get(_miss,_miss)}` when initialize {module_name}." - if _tmp: - _tmp += f' Or provide `{_miss}` in DataSet or the output of {prev_func_signature}. ' - else: - _tmp = f'Provide `{_miss}` in DataSet or the output of {prev_func_signature}.' - if not dataset.collater.is_empty(): - _tmp += f'Or you need to add `{_miss}` in the output of your collate_fn. ' - suggestions.append(_tmp) - - if check_res.duplicated: - errs.append(f"\tduplicated param: {check_res.duplicated}.") - suggestions.append(f"Delete {check_res.duplicated} in the output of " - f"{prev_func_signature} or do not set {check_res.duplicated} as targets. ") - - if len(errs) > 0: - errs.extend(unuseds) - elif check_level == STRICT_CHECK_LEVEL: - errs.extend(unuseds) - - if len(errs) > 0: - errs.insert(0, f'Problems occurred when calling {func_signature}') - sugg_str = "" - if len(suggestions) > 1: - for idx, sugg in enumerate(suggestions): - if idx > 0: - sugg_str += '\t\t\t' - sugg_str += f'({idx + 1}). {sugg}\n' - sugg_str = sugg_str[:-1] - else: - sugg_str += suggestions[0] - errs.append(f'\ttarget field: {list(target_dict.keys())}') - errs.append(f'\tparam from {prev_func_signature}: {list(pred_dict.keys())}') - err_str = '\n' + '\n'.join(errs) + '\n\tSuggestion: ' + sugg_str - raise NameError(err_str) - if check_res.unused: - if check_level == WARNING_CHECK_LEVEL: - if not module_name: - module_name = func_signature.split('.')[0] - _unused_warn = f'{check_res.unused} is not used by {module_name}.' - warnings.warn(message=_unused_warn) - - -def _check_forward_error(forward_func, batch_x, dataset, check_level): - check_res = _check_arg_dict_list(forward_func, batch_x) - func_signature = _get_func_signature(forward_func) - - errs = [] - suggestions = [] - _unused = [] - - # if check_res.varargs: - # errs.append(f"\tvarargs: {check_res.varargs}") - # suggestions.append(f"Does not support pass positional arguments, please delete *{check_res.varargs}.") - if check_res.missing: - errs.append(f"\tmissing param: {check_res.missing}") - _miss_in_dataset = [] - _miss_out_dataset = [] - for _miss in check_res.missing: - if _miss in dataset: - _miss_in_dataset.append(_miss) - else: - _miss_out_dataset.append(_miss) - if _miss_in_dataset: - suggestions.append(f"You might need to set `{_miss_in_dataset}` as input. ") - if _miss_out_dataset: - _tmp = f"You need to provide `{_miss_out_dataset}` in DataSet and set it as input. " - if not dataset.collater.is_empty(): - _tmp += f'Or you need to add `{_miss_out_dataset}` in the output of your collate_fn. ' - suggestions.append(_tmp) - - if check_res.unused: - _unused = [f"\tunused field: {check_res.unused}"] - if len(errs) > 0: - errs.extend(_unused) - elif check_level == STRICT_CHECK_LEVEL: - errs.extend(_unused) - - if len(errs) > 0: - errs.insert(0, f'Problems occurred when calling {func_signature}') - sugg_str = "" - if len(suggestions) > 1: - for idx, sugg in enumerate(suggestions): - sugg_str += f'({idx + 1}). {sugg}' - err_str = '\n' + '\n'.join(errs) + '\n\tSuggestion: ' + sugg_str - elif len(suggestions): - sugg_str += suggestions[0] - err_str = '\n' + '\n'.join(errs) + '\n\tSuggestion: ' + sugg_str - else: - err_str = '\n' + '\n'.join(errs) - raise NameError(err_str) - if _unused: - if check_level == WARNING_CHECK_LEVEL: - _unused_warn = _unused[0] + f' in {func_signature}.' 
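Both checkers in this hunk escalate the same way: missing parameters always raise a NameError with suggestions, while unused fields are ignored at IGNORE_CHECK_LEVEL (0), warned about at WARNING_CHECK_LEVEL (1), and turned into errors at STRICT_CHECK_LEVEL (2). For a forward that only accepts `words` but whose batch also carries `seq_len`, the outcome per level is roughly::

    # check_level=0 -> silent
    # check_level=1 -> UserWarning mentioning: unused field: ['seq_len']
    # check_level=2 -> NameError listing the unused field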
- warnings.warn(message=_unused_warn) - - - def seq_len_to_mask(seq_len, max_len=None): - r""" - - Convert a 1-d array of sequence lengths into a 2-d mask, with positions beyond each length set to 0. - Turns a 1-d seq_len into a 2-d mask. - - .. code-block:: - - >>> seq_len = torch.arange(2, 16) - >>> mask = seq_len_to_mask(seq_len) - >>> print(mask.size()) - torch.Size([14, 15]) - >>> seq_len = np.arange(2, 16) - >>> mask = seq_len_to_mask(seq_len) - >>> print(mask.shape) - (14, 15) - >>> seq_len = torch.arange(2, 16) - >>> mask = seq_len_to_mask(seq_len, max_len=100) - >>> print(mask.size()) - torch.Size([14, 100]) - - :param np.ndarray,torch.LongTensor seq_len: shape will be (B,) - :param int max_len: pad the mask to this length. By default (None) the longest length in seq_len is used, but under - nn.DataParallel the seq_len on different cards may differ, so a max_len should be passed in to pad every mask to that length. - :return: np.ndarray, torch.Tensor. The shape will be (B, max_length); the element type is bool or torch.uint8. - """ - if isinstance(seq_len, np.ndarray): - assert len(np.shape(seq_len)) == 1, f"seq_len can only have one dimension, got {len(np.shape(seq_len))}." - max_len = int(max_len) if max_len else int(seq_len.max()) - broad_cast_seq_len = np.tile(np.arange(max_len), (len(seq_len), 1)) - mask = broad_cast_seq_len < seq_len.reshape(-1, 1) - - elif isinstance(seq_len, torch.Tensor): - assert seq_len.dim() == 1, f"seq_len can only have one dimension, got {seq_len.dim()}." - batch_size = seq_len.size(0) - max_len = int(max_len) if max_len else seq_len.max().long() - broad_cast_seq_len = torch.arange(max_len).expand(batch_size, -1).to(seq_len) - mask = broad_cast_seq_len.lt(seq_len.unsqueeze(1)) - else: - raise TypeError("Only support 1-d numpy.ndarray or 1-d torch.Tensor.") - - return mask - - - class _pseudo_tqdm: - r""" - Used for printing progress when tqdm cannot be imported, or when use_tqdm is set to False in the Trainer. - """ - - def __init__(self, **kwargs): - self.logger = logger - - def write(self, info): - self.logger.info(info) - - def set_postfix_str(self, info): - self.logger.info(info) - - def __getattr__(self, item): - def pass_func(*args, **kwargs): - pass - - return pass_func - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - del self - - - def iob2(tags: List[str]) -> List[str]: - r""" - Check that the data is in a valid IOB format; IOB1 is automatically converted to IOB2. For the difference between the two, see - https://datascience.stackexchange.com/questions/37824/difference-between-iob-and-iob2-format - - :param tags: the tags to convert; uppercase BIO tags are required. - """ - for i, tag in enumerate(tags): - if tag == "O": - continue - split = tag.split("-") - if len(split) != 2 or split[0] not in ["I", "B"]: - raise TypeError("The encoding schema is not a valid IOB type.") - if split[0] == "B": - continue - elif i == 0 or tags[i - 1] == "O": # conversion IOB1 to IOB2 - tags[i] = "B" + tag[1:] - elif tags[i - 1][1:] == tag[1:]: - continue - else: # conversion IOB1 to IOB2 - tags[i] = "B" + tag[1:] - return tags - - - def iob2bioes(tags: List[str]) -> List[str]: - r""" - Convert IOB tags to the BIOES encoding. - :param tags: List[str].
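Concretely, `iob2` upgrades IOB1 to IOB2 by forcing a `B-` tag at every span start; `iob2bioes`, continued just below, then rewrites span boundaries into BIOES::

    iob2(['I-PER', 'I-PER', 'O', 'I-LOC'])
    # -> ['B-PER', 'I-PER', 'O', 'B-LOC']
    iob2bioes(['B-PER', 'I-PER', 'O', 'B-LOC'])
    # -> ['B-PER', 'E-PER', 'O', 'S-LOC']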
编码需要是大写的。 - :return: - """ - new_tags = [] - for i, tag in enumerate(tags): - if tag == 'O': - new_tags.append(tag) - else: - split = tag.split('-')[0] - if split == 'B': - if i + 1 != len(tags) and tags[i + 1].split('-')[0] == 'I': - new_tags.append(tag) - else: - new_tags.append(tag.replace('B-', 'S-')) - elif split == 'I': - if i + 1 < len(tags) and tags[i + 1].split('-')[0] == 'I': - new_tags.append(tag) - else: - new_tags.append(tag.replace('I-', 'E-')) - else: - raise TypeError("Invalid IOB format.") - return new_tags - - -def _is_iterable(value): - # 检查是否是iterable的, duck typing - try: - iter(value) - return True - except BaseException as e: - return False - - -def get_seq_len(words, pad_value=0): - r""" - 给定batch_size x max_len的words矩阵,返回句子长度 - - :param words: batch_size x max_len - :return: (batch_size,) - """ - mask = words.ne(pad_value) - return mask.sum(dim=-1) - - -def pretty_table_printer(dataset_or_ins) -> PrettyTable: - r""" - :param dataset_or_ins: 传入一个dataSet或者instance - ins = Instance(field_1=[1, 1, 1], field_2=[2, 2, 2], field_3=["a", "b", "c"]) - +-----------+-----------+-----------------+ - | field_1 | field_2 | field_3 | - +-----------+-----------+-----------------+ - | [1, 1, 1] | [2, 2, 2] | ['a', 'b', 'c'] | - +-----------+-----------+-----------------+ - :return: 以 pretty table的形式返回根据terminal大小进行自动截断 - """ - x = PrettyTable() - try: - sz = os.get_terminal_size() - column = sz.columns - row = sz.lines - except OSError: - column = 144 - row = 11 - - if type(dataset_or_ins).__name__ == "DataSet": - x.field_names = list(dataset_or_ins.field_arrays.keys()) - c_size = len(x.field_names) - for ins in dataset_or_ins: - x.add_row([sub_column(ins[k], column, c_size, k) for k in x.field_names]) - row -= 1 - if row < 0: - x.add_row(["..." for _ in range(c_size)]) - break - elif type(dataset_or_ins).__name__ == "Instance": - x.field_names = list(dataset_or_ins.fields.keys()) - c_size = len(x.field_names) - x.add_row([sub_column(dataset_or_ins[k], column, c_size, k) for k in x.field_names]) - - else: - raise Exception("only accept DataSet and Instance") - x.align = "l" - - return x - - -def sub_column(string: str, c: int, c_size: int, title: str) -> str: - r""" - :param string: 要被截断的字符串 - :param c: 命令行列数 - :param c_size: instance或dataset field数 - :param title: 列名 - :return: 对一个过长的列进行截断的结果 - """ - avg = max(int(c / c_size / 2), len(title)) - string = str(string) - res = "" - counter = 0 - for char in string: - if ord(char) > 255: - counter += 2 - else: - counter += 1 - res += char - if counter > avg: - res = res + "..." - break - return res - - -def _is_function_contains_autocast(func): - """ - 检查func是否包含autocast,(1)是否使用了autocast的修饰器或, (2)使用使用with autocast()环境 - - :param func: 待检查的函数 - """ - import re - source = inspect.getsource(func) - lines = source.split('\n') - for line in lines: - line = line.strip() - if re.search(r'@[\w\.]*autocast\(\w*\)', line): - raise RuntimeError("Please do not use `autocast()` decorator, use `with autocast():` instead. 
Please refer to" - " https://pytorch.org/docs/stable/notes/amp_examples.html#dataparallel-in-a-single-process ") - if re.search(r'with [\w\.]*autocast\(\w*\):', line): - return True - return False - - -class DummyGradScaler: - """ - 用于Dummy pytorch的GradScaler对象,防止重复写大量的if判断 - - """ - def __init__(self, *args, **kwargs): - pass - - def get_scale(self): - return 1.0 - - def is_enabled(self): - return False - - def scale(self, outputs): - return outputs - - def step(self, optimizer, *args, **kwargs): - optimizer.step(*args, **kwargs) - - def update(self, new_scale=None): - pass - - def unscale_(self, optimizer): - pass - - def load_state_dict(self, state_dict): - pass - - def state_dict(self): - return {} - - -def _build_fp16_env(dummy=False): - if dummy: - autocast = contextlib.ExitStack - GradScaler = DummyGradScaler - else: - if not torch.cuda.is_available(): - raise RuntimeError("No cuda") - if torch.cuda.get_device_capability(0)[0] < 7: - warnings.warn( - "NOTE: your device does NOT support faster training with fp16, " - "please switch to FP32 which is likely to be faster" - ) - try: - from torch.cuda.amp import autocast, GradScaler - except ImportError: - raise RuntimeError("torch version too low (less than 1.6)") - return autocast, GradScaler - - -def _can_use_fp16(device, model, func): - if parse_version(torch.__version__) < parse_version('1.6'): - raise RuntimeError("Pytorch supports float16 after version 1.6, please upgrade your pytorch version.") - model_device = _get_model_device(model) - if device is None and model_device is not None and model_device.type != 'cuda': - raise RuntimeError("You have to run in cuda device to use fp16.") - if isinstance(device, str): - if device=='cpu': - raise RuntimeError("You have to run in cuda device to use fp16.") - if isinstance(device, torch.device) and device.type=='cpu': - raise RuntimeError("You have to run in cuda device to use fp16.") - - if (_model_contains_inner_module(model) or (isinstance(device, list) and len(device) > 1)): - # 需要提醒用户 - if not _is_function_contains_autocast(func): - raise RuntimeError("When use fp16 in Parallel Training, you have to set autocast() in your forward " - "function as described in " - "https://pytorch.org/docs/stable/notes/amp_examples.html#dataparallel-in-a-single-process") diff --git a/fastNLP/core/vocabulary.py b/fastNLP/core/vocabulary.py deleted file mode 100644 index aef99034..00000000 --- a/fastNLP/core/vocabulary.py +++ /dev/null @@ -1,586 +0,0 @@ -r""" -.. todo:: - doc -""" - -__all__ = [ - "Vocabulary", - "VocabularyOption", -] - -from collections import Counter -from functools import partial -from functools import wraps - -from ._logger import logger -from .dataset import DataSet -from .utils import Option -from .utils import _is_iterable -import io - - -class VocabularyOption(Option): - def __init__(self, - max_size=None, - min_freq=None, - padding='', - unknown=''): - super().__init__( - max_size=max_size, - min_freq=min_freq, - padding=padding, - unknown=unknown - ) - - -def _check_build_vocab(func): - r"""A decorator to make sure the indexing is built before used. - - """ - - @wraps(func) # to solve missing docstring - def _wrapper(self, *args, **kwargs): - if self._word2idx is None or self.rebuild is True: - self.build_vocab() - return func(self, *args, **kwargs) - - return _wrapper - - -def _check_build_status(func): - r"""A decorator to check whether the vocabulary updates after the last build. 
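A consequence of `_build_fp16_env` above is that the training loop can be written once: the dummy pair is API-compatible with the real `autocast`/`GradScaler`, so toggling fp16 changes behavior but not code. A sketch, assuming the model returns a dict with a 'loss' entry::

    autocast, GradScaler = _build_fp16_env(dummy=not fp16)
    scaler = GradScaler()        # real scaler, or an inert DummyGradScaler
    with autocast():             # real autocast, or a no-op ExitStack
        loss = model(**batch)['loss']
    scaler.scale(loss).backward()
    scaler.step(optimizer)
    scaler.update()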
- - """ - - @wraps(func) # to solve missing docstring - def _wrapper(self, *args, **kwargs): - if self.rebuild is False: - self.rebuild = True - if self.max_size is not None and len(self.word_count) >= self.max_size: - logger.info("[Warning] Vocabulary has reached the max size {} when calling {} method. " - "Adding more words may cause unexpected behaviour of Vocabulary. ".format( - self.max_size, func.__name__)) - return func(self, *args, **kwargs) - - return _wrapper - - -class Vocabulary(object): - r""" - 用于构建, 存储和使用 `str` 到 `int` 的一一映射:: - - vocab = Vocabulary() - word_list = "this is a word list".split() - vocab.update(word_list) - vocab["word"] # str to int - vocab.to_word(5) # int to str - """ - - def __init__(self, max_size=None, min_freq=None, padding='', unknown=''): - r""" - - :param int max_size: `Vocabulary` 的最大大小, 即能存储词的最大数量 - 若为 ``None`` , 则不限制大小. Default: ``None`` - :param int min_freq: 能被记录下的词在文本中的最小出现频率, 应大于或等于 1. - 若小于该频率, 词语将被视为 `unknown`. 若为 ``None`` , 所有文本中的词都被记录. Default: ``None`` - :param str optional padding: padding的字符. 如果设置为 ``None`` , - 则vocabulary中不考虑padding, 也不计入词表大小,为 ``None`` 的情况多在为label建立Vocabulary的情况. - Default: '' - :param str optional unknown: unknown的字符,所有未被记录的词在转为 `int` 时将被视为unknown. - 如果设置为 ``None`` ,则vocabulary中不考虑unknow, 也不计入词表大小. - 为 ``None`` 的情况多在为label建立Vocabulary的情况. - Default: '' - """ - self.max_size = max_size - self.min_freq = min_freq - self.word_count = Counter() - self.unknown = unknown - self.padding = padding - self._word2idx = None - self._idx2word = None - self.rebuild = True - # 用于承载不需要单独创建entry的词语,具体见from_dataset()方法 - self._no_create_word = Counter() - - @property - @_check_build_vocab - def word2idx(self): - return self._word2idx - - @word2idx.setter - def word2idx(self, value): - self._word2idx = value - - @property - @_check_build_vocab - def idx2word(self): - return self._idx2word - - @idx2word.setter - def idx2word(self, value): - self._word2idx = value - - @_check_build_status - def update(self, word_lst, no_create_entry=False): - r"""依次增加序列中词在词典中的出现频率 - - :param list word_lst: a list of strings - :param bool no_create_entry: 如果词语来自于非训练集建议设置为True。在使用fastNLP.TokenEmbedding加载预训练模型时,没有从预训练词表中找到这个词的处理方式。 - 如果为True,则不会有这个词语创建一个单独的entry,它将一直被指向unk的表示; 如果为False,则为这个词创建一个单独 - 的entry。如果这个word来自于dev或者test,一般设置为True,如果来自与train一般设置为False。以下两种情况: 如果新 - 加入一个word,且no_create_entry为True,但这个词之前已经在Vocabulary中且并不是no_create_entry的,则还是会为这 - 个词创建一个单独的vector; 如果no_create_entry为False,但这个词之前已经在Vocabulary中且并不是no_create_entry的, - 则这个词将认为是需要创建单独的vector的。 - """ - self._add_no_create_entry(word_lst, no_create_entry) - self.word_count.update(word_lst) - return self - - @_check_build_status - def add(self, word, no_create_entry=False): - r""" - 增加一个新词在词典中的出现频率 - - :param str word: 新词 - :param bool no_create_entry: 如果词语来自于非训练集建议设置为True。在使用fastNLP.TokenEmbedding加载预训练模型时,没有从预训练词表中找到这个词的处理方式。 - 如果为True,则不会有这个词语创建一个单独的entry,它将一直被指向unk的表示; 如果为False,则为这个词创建一个单独 - 的entry。如果这个word来自于dev或者test,一般设置为True,如果来自与train一般设置为False。以下两种情况: 如果新 - 加入一个word,且no_create_entry为True,但这个词之前已经在Vocabulary中且并不是no_create_entry的,则还是会为这 - 个词创建一个单独的vector; 如果no_create_entry为False,但这个词之前已经在Vocabulary中且并不是no_create_entry的, - 则这个词将认为是需要创建单独的vector的。 - """ - self._add_no_create_entry(word, no_create_entry) - self.word_count[word] += 1 - return self - - def _add_no_create_entry(self, word, no_create_entry): - r""" - 在新加入word时,检查_no_create_word的设置。 - - :param str List[str] word: - :param bool no_create_entry: - :return: - """ - if isinstance(word, str) or not _is_iterable(word): - word = [word] - 
for w in word: - if no_create_entry and self.word_count.get(w, 0) == self._no_create_word.get(w, 0): - self._no_create_word[w] += 1 - elif not no_create_entry and w in self._no_create_word: - self._no_create_word.pop(w) - - @_check_build_status - def add_word(self, word, no_create_entry=False): - r""" - 增加一个新词在词典中的出现频率 - - :param str word: 新词 - :param bool no_create_entry: 如果词语来自于非训练集建议设置为True。在使用fastNLP.TokenEmbedding加载预训练模型时,没有从预训练词表中找到这个词的处理方式。 - 如果为True,则不会有这个词语创建一个单独的entry,它将一直被指向unk的表示; 如果为False,则为这个词创建一个单独 - 的entry。如果这个word来自于dev或者test,一般设置为True,如果来自与train一般设置为False。以下两种情况: 如果新 - 加入一个word,且no_create_entry为True,但这个词之前已经在Vocabulary中且并不是no_create_entry的,则还是会为这 - 个词创建一个单独的vector; 如果no_create_entry为False,但这个词之前已经在Vocabulary中且并不是no_create_entry的, - 则这个词将认为是需要创建单独的vector的。 - """ - self.add(word, no_create_entry=no_create_entry) - - @_check_build_status - def add_word_lst(self, word_lst, no_create_entry=False): - r""" - 依次增加序列中词在词典中的出现频率 - - :param list[str] word_lst: 词的序列 - :param bool no_create_entry: 如果词语来自于非训练集建议设置为True。在使用fastNLP.TokenEmbedding加载预训练模型时,没有从预训练词表中找到这个词的处理方式。 - 如果为True,则不会有这个词语创建一个单独的entry,它将一直被指向unk的表示; 如果为False,则为这个词创建一个单独 - 的entry。如果这个word来自于dev或者test,一般设置为True,如果来自与train一般设置为False。以下两种情况: 如果新 - 加入一个word,且no_create_entry为True,但这个词之前已经在Vocabulary中且并不是no_create_entry的,则还是会为这 - 个词创建一个单独的vector; 如果no_create_entry为False,但这个词之前已经在Vocabulary中且并不是no_create_entry的, - 则这个词将认为是需要创建单独的vector的。 - """ - self.update(word_lst, no_create_entry=no_create_entry) - return self - - def build_vocab(self): - r""" - 根据已经出现的词和出现频率构建词典. 注意: 重复构建可能会改变词典的大小, - 但已经记录在词典中的词, 不会改变对应的 `int` - - """ - if self._word2idx is None: - self._word2idx = {} - if self.padding is not None: - self._word2idx[self.padding] = len(self._word2idx) - if (self.unknown is not None) and (self.unknown != self.padding): - self._word2idx[self.unknown] = len(self._word2idx) - - max_size = min(self.max_size, len(self.word_count)) if self.max_size else None - words = self.word_count.most_common(max_size) - if self.min_freq is not None: - words = filter(lambda kv: kv[1] >= self.min_freq, words) - if self._word2idx is not None: - words = filter(lambda kv: kv[0] not in self._word2idx, words) - start_idx = len(self._word2idx) - self._word2idx.update({w: i + start_idx for i, (w, _) in enumerate(words)}) - self.build_reverse_vocab() - self.rebuild = False - return self - - def build_reverse_vocab(self): - r""" - 基于 `word to index` dict, 构建 `index to word` dict. 
- - """ - self._idx2word = {i: w for w, i in self._word2idx.items()} - return self - - @_check_build_vocab - def __len__(self): - return len(self._word2idx) - - @_check_build_vocab - def __contains__(self, item): - r""" - 检查词是否被记录 - - :param item: the word - :return: True or False - """ - return item in self._word2idx - - def has_word(self, w): - r""" - 检查词是否被记录:: - - has_abc = vocab.has_word('abc') - # equals to - has_abc = 'abc' in vocab - - :param item: the word - :return: ``True`` or ``False`` - """ - return self.__contains__(w) - - @_check_build_vocab - def __getitem__(self, w): - r""" - To support usage like:: - - vocab[w] - """ - if w in self._word2idx: - return self._word2idx[w] - if self.unknown is not None: - return self._word2idx[self.unknown] - else: - raise ValueError("word `{}` not in vocabulary".format(w)) - - @_check_build_vocab - def index_dataset(self, *datasets, field_name, new_field_name=None): - r""" - 将DataSet中对应field的词转为数字,Example:: - - # remember to use `field_name` - vocab.index_dataset(train_data, dev_data, test_data, field_name='words') - - :param ~fastNLP.DataSet,List[~fastNLP.DataSet] datasets: 需要转index的一个或多个数据集 - :param list,str field_name: 需要转index的field, 若有多个 DataSet, 每个DataSet都必须有此 field. - 目前支持 ``str`` , ``List[str]`` - :param list,str new_field_name: 保存结果的field_name. 若为 ``None`` , 将覆盖原field. - Default: ``None``. - """ - - def index_instance(field): - r""" - 有几种情况, str, 1d-list, 2d-list - :param ins: - :return: - """ - if isinstance(field, str) or not _is_iterable(field): - return self.to_index(field) - else: - if isinstance(field[0], str) or not _is_iterable(field[0]): - return [self.to_index(w) for w in field] - else: - if not isinstance(field[0][0], str) and _is_iterable(field[0][0]): - raise RuntimeError("Only support field with 2 dimensions.") - return [[self.to_index(c) for c in w] for w in field] - - new_field_name = new_field_name or field_name - - if type(new_field_name) == type(field_name): - if isinstance(new_field_name, list): - assert len(new_field_name) == len(field_name), "new_field_name should have same number elements with " \ - "field_name." - elif isinstance(new_field_name, str): - field_name = [field_name] - new_field_name = [new_field_name] - else: - raise TypeError("field_name and new_field_name can only be str or List[str].") - - for idx, dataset in enumerate(datasets): - if isinstance(dataset, DataSet): - try: - for f_n, n_f_n in zip(field_name, new_field_name): - dataset.apply_field(index_instance, field_name=f_n, new_field_name=n_f_n) - except Exception as e: - logger.info("When processing the `{}` dataset, the following error occurred.".format(idx)) - raise e - else: - raise RuntimeError("Only DataSet type is allowed.") - return self - - @property - def _no_create_word_length(self): - return len(self._no_create_word) - - def from_dataset(self, *datasets, field_name, no_create_entry_dataset=None): - r""" - 使用dataset的对应field中词构建词典:: - - # remember to use `field_name` - vocab.from_dataset(train_data1, train_data2, field_name='words') - - :param ~fastNLP.DataSet,List[~fastNLP.DataSet] datasets: 需要转index的一个或多个数据集 - :param str,List[str] field_name: 可为 ``str`` 或 ``List[str]`` . - 构建词典所使用的 field(s), 支持一个或多个field,若有多个 DataSet, 每个DataSet都必须有这些field. 
目前支持的field结构 - : ``str`` , ``List[str]`` - :param no_create_entry_dataset: 可以传入DataSet, List[DataSet]或者None(默认), 建议直接将非训练数据都传入到这个参数。该选项用在接下来的模型会使用pretrain - 的embedding(包括glove, word2vec, elmo与bert)且会finetune的情况。如果仅使用来自于train的数据建立vocabulary,会导致test与dev - 中的数据无法充分利用到来自于预训练embedding的信息,所以在建立词表的时候将test与dev考虑进来会使得最终的结果更好。 - 如果一个词出现在了train中,但是没在预训练模型中,embedding会为它用unk初始化,但它是单独的一个vector,如果 - finetune embedding的话,这个词在更新之后可能会有更好的表示; 而如果这个词仅出现在了dev或test中,那么就不能为它们单独建立vector, - 而应该让它指向unk这个vector的值。所以只位于no_create_entry_dataset中的token,将首先从预训练的词表中寻找它的表示, - 如果找到了,就使用该表示; 如果没有找到,则认为该词的表示应该为unk的表示。 - :return self: - """ - if isinstance(field_name, str): - field_name = [field_name] - elif not isinstance(field_name, list): - raise TypeError('invalid argument field_name: {}'.format(field_name)) - - def construct_vocab(ins, no_create_entry=False): - for fn in field_name: - field = ins[fn] - if isinstance(field, str) or not _is_iterable(field): - self.add_word(field, no_create_entry=no_create_entry) - else: - if isinstance(field[0], str) or not _is_iterable(field[0]): - for word in field: - self.add_word(word, no_create_entry=no_create_entry) - else: - if not isinstance(field[0][0], str) and _is_iterable(field[0][0]): - raise RuntimeError("Only support field with 2 dimensions.") - for words in field: - for word in words: - self.add_word(word, no_create_entry=no_create_entry) - - for idx, dataset in enumerate(datasets): - if isinstance(dataset, DataSet): - try: - dataset.apply(construct_vocab) - except BaseException as e: - logger.error("When processing the `{}` dataset, the following error occurred:".format(idx)) - raise e - else: - raise TypeError("Only DataSet type is allowed.") - - if no_create_entry_dataset is not None: - partial_construct_vocab = partial(construct_vocab, no_create_entry=True) - if isinstance(no_create_entry_dataset, DataSet): - no_create_entry_dataset.apply(partial_construct_vocab) - elif isinstance(no_create_entry_dataset, list): - for dataset in no_create_entry_dataset: - if not isinstance(dataset, DataSet): - raise TypeError("Only DataSet type is allowed.") - dataset.apply(partial_construct_vocab) - return self - - def _is_word_no_create_entry(self, word): - r""" - 判断当前的word是否是不需要创建entry的,具体参见from_dataset的说明 - :param word: str - :return: bool - """ - return word in self._no_create_word - - def to_index(self, w): - r""" - 将词转为数字. 若词不再词典中被记录, 将视为 unknown, 若 ``unknown=None`` , 将抛出 ``ValueError`` :: - - index = vocab.to_index('abc') - # equals to - index = vocab['abc'] - - :param str w: a word - :return int index: the number - """ - return self.__getitem__(w) - - @property - @_check_build_vocab - def unknown_idx(self): - r""" - unknown 对应的数字. - """ - if self.unknown is None: - return None - return self._word2idx[self.unknown] - - @property - @_check_build_vocab - def padding_idx(self): - r""" - padding 对应的数字 - """ - if self.padding is None: - return None - return self._word2idx[self.padding] - - @_check_build_vocab - def to_word(self, idx): - r""" - 给定一个数字, 将其转为对应的词. - - :param int idx: the index - :return str word: the word - """ - return self._idx2word[idx] - - def clear(self): - r""" - 删除Vocabulary中的词表数据。相当于重新初始化一下。 - - :return: - """ - self.word_count.clear() - self._word2idx = None - self._idx2word = None - self.rebuild = True - self._no_create_word.clear() - return self - - def __getstate__(self): - r"""Use to prepare data for pickle. 
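The division of labour described in the docstring above: only training data creates real vocabulary entries, while dev/test words are registered as no-create-entry, so a pretrained embedding can either reuse their pretrained vectors or tie them to unk. A sketch of the intended workflow, where `train_data`, `dev_data` and `test_data` are hypothetical DataSet objects::

    vocab = Vocabulary()
    vocab.from_dataset(train_data, field_name='words',
                       no_create_entry_dataset=[dev_data, test_data])
    # Words seen only in dev/test get no trainable vector of their own:
    # a pretrained embedding keeps their pretrained row if one exists,
    # otherwise points them at the unk vector.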
- - """ - len(self) # make sure vocab has been built - state = self.__dict__.copy() - # no need to pickle _idx2word as it can be constructed from _word2idx - del state['_idx2word'] - return state - - def __setstate__(self, state): - r"""Use to restore state from pickle. - - """ - self.__dict__.update(state) - self.build_reverse_vocab() - - def __repr__(self): - return "Vocabulary({}...)".format(list(self.word_count.keys())[:5]) - - @_check_build_vocab - def __iter__(self): - # 依次(word1, 0), (word1, 1) - for index in range(len(self._word2idx)): - yield self.to_word(index), index - - def save(self, filepath): - r""" - - :param str,io.StringIO filepath: Vocabulary的储存路径 - :return: - """ - if isinstance(filepath, io.IOBase): - assert filepath.writable() - f = filepath - elif isinstance(filepath, str): - try: - f = open(filepath, 'w', encoding='utf-8') - except Exception as e: - raise e - else: - raise TypeError("Illegal `filepath`.") - - f.write(f'max_size\t{self.max_size}\n') - f.write(f'min_freq\t{self.min_freq}\n') - f.write(f'unknown\t{self.unknown}\n') - f.write(f'padding\t{self.padding}\n') - f.write(f'rebuild\t{self.rebuild}\n') - f.write('\n') - # idx: 如果idx为-2, 说明还没有进行build; 如果idx为-1,说明该词未编入 - # no_create_entry: 如果为1,说明该词是no_create_entry; 0 otherwise - # word \t count \t idx \t no_create_entry \n - idx = -2 - for word, count in self.word_count.items(): - if self._word2idx is not None: - idx = self._word2idx.get(word, -1) - is_no_create_entry = int(self._is_word_no_create_entry(word)) - f.write(f'{word}\t{count}\t{idx}\t{is_no_create_entry}\n') - if isinstance(filepath, str): # 如果是file的话就关闭 - f.close() - - @staticmethod - def load(filepath): - r""" - - :param str,io.StringIO filepath: Vocabulary的读取路径 - :return: Vocabulary - """ - if isinstance(filepath, io.IOBase): - assert filepath.writable() - f = filepath - elif isinstance(filepath, str): - try: - f = open(filepath, 'r', encoding='utf-8') - except Exception as e: - raise e - else: - raise TypeError("Illegal `filepath`.") - - vocab = Vocabulary() - for line in f: - line = line.strip('\n') - if line: - name, value = line.split() - if name in ('max_size', 'min_freq'): - value = int(value) if value!='None' else None - setattr(vocab, name, value) - elif name in ('unknown', 'padding'): - value = value if value!='None' else None - setattr(vocab, name, value) - elif name == 'rebuild': - vocab.rebuild = True if value=='True' else False - else: - break - word_counter = {} - no_create_entry_counter = {} - word2idx = {} - for line in f: - line = line.strip('\n') - if line: - parts = line.split('\t') - word,count,idx,no_create_entry = parts[0], int(parts[1]), int(parts[2]), int(parts[3]) - if idx >= 0: - word2idx[word] = idx - word_counter[word] = count - if no_create_entry: - no_create_entry_counter[word] = count - - word_counter = Counter(word_counter) - no_create_entry_counter = Counter(no_create_entry_counter) - if len(word2idx)>0: - if vocab.padding: - word2idx[vocab.padding] = 0 - if vocab.unknown: - word2idx[vocab.unknown] = 1 if vocab.padding else 0 - idx2word = {value:key for key,value in word2idx.items()} - - vocab.word_count = word_counter - vocab._no_create_word = no_create_entry_counter - if word2idx: - vocab._word2idx = word2idx - vocab._idx2word = idx2word - if isinstance(filepath, str): # 如果是file的话就关闭 - f.close() - return vocab diff --git a/fastNLP/doc_utils.py b/fastNLP/doc_utils.py deleted file mode 100644 index 3f7889e4..00000000 --- a/fastNLP/doc_utils.py +++ /dev/null @@ -1,54 +0,0 @@ -r"""undocumented -用于辅助生成 fastNLP 文档的代码 
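For reference, the plain-text layout that `save` above produces is a tab-separated header, a blank line, then one `word\tcount\tidx\tno_create_entry` row per counted word; `idx` is -2 before `build_vocab` has run and -1 for words filtered out of the index. An illustrative dump of a small built vocabulary::

    max_size    None
    min_freq    None
    unknown     <unk>
    padding     <pad>
    rebuild     False

    the    3    2    0
    dog    1    3    1

Note that `load` asserts `filepath.writable()` on an IO object even though it only reads from it; `readable()` is presumably what was intended.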
-""" - -__all__ = [] - -import inspect -import sys - - -def doc_process(m): - for name, obj in inspect.getmembers(m): - if inspect.isclass(obj) or inspect.isfunction(obj): - if obj.__module__ != m.__name__: - if obj.__doc__ is None: - # print(name, obj.__doc__) - pass - else: - module_name = obj.__module__ - - # 识别并标注类和函数在不同层次中的位置 - - while 1: - defined_m = sys.modules[module_name] - try: - if "undocumented" not in defined_m.__doc__ and name in defined_m.__all__: - obj.__doc__ = r"别名 :class:`" + m.__name__ + "." + name + "`" \ - + " :class:`" + module_name + "." + name + "`\n" + obj.__doc__ - break - module_name = ".".join(module_name.split('.')[:-1]) - if module_name == m.__name__: - # print(name, ": not found defined doc.") - break - except: - print("Warning: Module {} lacks `__doc__`".format(module_name)) - break - - # 识别并标注基类,只有基类也在 fastNLP 中定义才显示 - - if inspect.isclass(obj): - for base in obj.__bases__: - if base.__module__.startswith("fastNLP"): - parts = base.__module__.split(".") + [] - module_name, i = "fastNLP", 1 - for i in range(len(parts) - 1): - defined_m = sys.modules[module_name] - try: - if "undocumented" not in defined_m.__doc__ and name in defined_m.__all__: - obj.__doc__ = r"基类 :class:`" + defined_m.__name__ + "." + base.__name__ + "` \n\n" + obj.__doc__ - break - module_name += "." + parts[i + 1] - except: - print("Warning: Module {} lacks `__doc__`".format(module_name)) - break diff --git a/fastNLP/embeddings/__init__.py b/fastNLP/embeddings/__init__.py deleted file mode 100644 index dae75995..00000000 --- a/fastNLP/embeddings/__init__.py +++ /dev/null @@ -1,46 +0,0 @@ -r""" -embeddings 模块主要用于从各种预训练的模型中获取词语的分布式表示,目前支持的预训练模型包括word2vec, glove, ELMO, BERT等。这里所有 -embedding的forward输入都是形状为 ``(batch_size, max_len)`` 的torch.LongTensor,输出都是 ``(batch_size, max_len, embedding_dim)`` 的 -torch.FloatTensor。所有的embedding都可以使用 `self.num_embedding` 获取最大的输入index范围, 用 `self.embeddig_dim` 或 `self.embed_size` 获取embedding的 -输出维度。 -""" - -__all__ = [ - "Embedding", - "TokenEmbedding", - "StaticEmbedding", - "ElmoEmbedding", - "BertEmbedding", - "BertWordPieceEncoder", - - "RobertaEmbedding", - "RobertaWordPieceEncoder", - - "TransformersEmbedding", - "TransformersWordPieceEncoder", - - "GPT2Embedding", - "GPT2WordPieceEncoder", - - "StackEmbedding", - "LSTMCharEmbedding", - "CNNCharEmbedding", - - "get_embeddings", - "get_sinusoid_encoding_table" -] - -from .embedding import Embedding, TokenEmbedding -from .static_embedding import StaticEmbedding -from .elmo_embedding import ElmoEmbedding -from .bert_embedding import BertEmbedding, BertWordPieceEncoder -from .roberta_embedding import RobertaEmbedding, RobertaWordPieceEncoder -from .transformers_embedding import TransformersEmbedding, TransformersWordPieceEncoder -from .gpt2_embedding import GPT2WordPieceEncoder, GPT2Embedding -from .char_embedding import CNNCharEmbedding, LSTMCharEmbedding -from .stack_embedding import StackEmbedding -from .utils import get_embeddings, get_sinusoid_encoding_table - -import sys -from ..doc_utils import doc_process -doc_process(sys.modules[__name__]) \ No newline at end of file diff --git a/fastNLP/embeddings/bert_embedding.py b/fastNLP/embeddings/bert_embedding.py deleted file mode 100644 index 01e646a7..00000000 --- a/fastNLP/embeddings/bert_embedding.py +++ /dev/null @@ -1,658 +0,0 @@ -r""" -.. 
todo:: - doc -""" - -__all__ = [ - "BertEmbedding", - "BertWordPieceEncoder" -] - -import os -import warnings -from itertools import chain -from functools import partial -import json -import numpy as np -import torch -from torch import nn - -from .contextual_embedding import ContextualEmbedding -from ..core import logger -from ..core.vocabulary import Vocabulary -from ..io.file_utils import PRETRAINED_BERT_MODEL_DIR -from ..modules.encoder.bert import BertModel -from ..modules.tokenizer import BertTokenizer - -# TODO 需要重新修改,使得encoder可以直接读取embedding的权重 -VOCAB_NAME = 'vocab.txt' -BERT_EMBED_HYPER = 'bert_hyper.json' -BERT_EMBED_FOLDER = 'bert' -BERT_ENCODER_HYPER = 'bert_hyper.json' -BERT_ENCODER_FOLDER = 'bert' - - -class BertEmbedding(ContextualEmbedding): - r""" - 使用BERT对words进行编码的Embedding。建议将输入的words长度限制在430以内,而不要使用512(根据预训练模型参数,可能有变化)。这是由于 - 预训练的bert模型长度限制为512个token,而因为输入的word是未进行word piece分割的(word piece的分割有BertEmbedding在输入word - 时切分),在分割之后长度可能会超过最大长度限制。 - - BertEmbedding可以支持自动下载权重,当前支持的模型: - en: base-cased - en-base-uncased: - en-large-cased-wwm: - en-large-cased: - en-large-uncased: - en-large-uncased-wwm - cn: 中文BERT wwm by HIT - cn-base: 中文BERT base-chinese - cn-wwm-ext: 中文BERT wwm by HIT with extra data pretrain. - multi-base-cased: multilingual cased - multi-base-uncased: multilingual uncased - - Example:: - - >>> import torch - >>> from fastNLP import Vocabulary - >>> from fastNLP.embeddings import BertEmbedding - >>> vocab = Vocabulary().add_word_lst("The whether is good .".split()) - >>> embed = BertEmbedding(vocab, model_dir_or_name='en-base-uncased', requires_grad=False, layers='4,-2,-1') - >>> words = torch.LongTensor([[vocab.to_index(word) for word in "The whether is good .".split()]]) - >>> outputs = embed(words) - >>> outputs.size() - >>> # torch.Size([1, 5, 2304]) - """ - - def __init__(self, vocab: Vocabulary, model_dir_or_name: str = 'en-base-uncased', layers: str = '-1', - pool_method: str = 'first', word_dropout=0, dropout=0, include_cls_sep: bool = False, - pooled_cls=True, requires_grad: bool = True, auto_truncate: bool = False, **kwargs): - r""" - - :param ~fastNLP.Vocabulary vocab: 词表 - :param str model_dir_or_name: 模型所在目录或者模型的名称。当传入模型所在目录时,目录中应该包含一个词表文件(以.txt作为后缀名), - 权重文件(以.bin作为文件后缀名), 配置文件(以.json作为后缀名)。 - :param str layers: 输出embedding表示来自于哪些层,不同层的结果按照layers中的顺序在最后一维concat起来。以','隔开层数,层的序号是 - 从0开始,可以以负数去索引倒数几层。 layer=0为embedding层(包括wordpiece embedding, - position embedding和segment embedding) - :param str pool_method: 因为在bert中,每个word会被表示为多个word pieces, 当获取一个word的表示的时候,怎样从它的word pieces - 中计算得到它对应的表示。支持 ``last`` , ``first`` , ``avg`` , ``max``。 - :param float word_dropout: 以多大的概率将一个词替换为unk。这样既可以训练unk也是一定的regularize。 - :param float dropout: 以多大的概率对embedding的表示进行Dropout。0.1即随机将10%的值置为0。 - :param bool include_cls_sep: bool,在bert计算句子的表示的时候,需要在前面加上[CLS]和[SEP], 是否在结果中保留这两个内容。 这样 - 会使得word embedding的结果比输入的结果长两个token。如果该值为True,则在使用 :class::StackEmbedding 可能会与其它类型的 - embedding长度不匹配。 - :param bool pooled_cls: 返回的[CLS]是否使用预训练中的BertPool映射一下,仅在include_cls_sep时有效。如果下游任务只取[CLS]做预测, - 一般该值为True。 - :param bool requires_grad: 是否需要gradient以更新Bert的权重。 - :param bool auto_truncate: 当句子words拆分为word pieces长度超过bert最大允许长度(一般为512), 自动截掉拆分后的超过510个 - word pieces后的内容,并将第512个word piece置为[SEP]。超过长度的部分的encode结果直接全部置零。一般仅有只使用[CLS] - 来进行分类的任务将auto_truncate置为True。 - :param kwargs: - int min_freq: 小于该次数的词会被unk代替, 默认为1 - """ - super(BertEmbedding, self).__init__(vocab, word_dropout=word_dropout, dropout=dropout) - - if word_dropout > 0: - assert vocab.unknown != None, "When word_drop>0, Vocabulary 
must contain the unknown token." - - if model_dir_or_name.lower() in PRETRAINED_BERT_MODEL_DIR: - if 'cn' in model_dir_or_name.lower() and pool_method not in ('first', 'last'): - logger.warning("For Chinese bert, pooled_method should choose from 'first', 'last' in order to achieve" - " faster speed.") - warnings.warn("For Chinese bert, pooled_method should choose from 'first', 'last' in order to achieve" - " faster speed.") - - self._word_sep_index = -100 - if '[SEP]' in vocab: - self._word_sep_index = vocab['[SEP]'] - self._word_cls_index = -100 - if '[CLS]' in vocab: - self._word_cls_index = vocab['[CLS]'] - - min_freq = kwargs.pop('min_freq', 1) - self._min_freq = min_freq - self.model = _BertWordModel(model_dir_or_name=model_dir_or_name, vocab=vocab, layers=layers, - pool_method=pool_method, include_cls_sep=include_cls_sep, - pooled_cls=pooled_cls, min_freq=min_freq, auto_truncate=auto_truncate, - **kwargs) - - self.requires_grad = requires_grad - self._embed_size = len(self.model.layers) * self.model.encoder.hidden_size - - def _delete_model_weights(self): - del self.model - - def forward(self, words): - r""" - 计算words的bert embedding表示。计算之前会在每句话的开始增加[CLS]在结束增加[SEP], 并根据include_cls_sep判断要不要 - 删除这两个token的表示。 - - :param torch.LongTensor words: [batch_size, max_len] - :return: torch.FloatTensor. batch_size x max_len x (768*len(self.layers)) - """ - words = self.drop_word(words) - outputs = self._get_sent_reprs(words) - if outputs is not None: - return self.dropout(outputs) - outputs = self.model(words) - outputs = torch.cat([*outputs], dim=-1) - - return self.dropout(outputs) - - def drop_word(self, words): - r""" - 按照设定随机将words设置为unknown_index。 - - :param torch.LongTensor words: batch_size x max_len - :return: - """ - if self.word_dropout > 0 and self.training: - with torch.no_grad(): - mask = torch.full_like(words, fill_value=self.word_dropout, dtype=torch.float, device=words.device) - mask = torch.bernoulli(mask).eq(1) # dropout_word越大,越多位置为1 - pad_mask = words.ne(self._word_pad_index) - mask = pad_mask.__and__(mask) # pad的位置不为unk - if self._word_sep_index!=-100: - not_sep_mask = words.ne(self._word_sep_index) - mask = mask.__and__(not_sep_mask) - if self._word_cls_index!=-100: - not_cls_mask = words.ne(self._word_cls_index) - mask = mask.__and__(not_cls_mask) - words = words.masked_fill(mask, self._word_unk_index) - return words - - def save(self, folder): - """ - 将embedding保存到folder这个目录下,将会保存三个文件vocab.txt, bert_embed_hyper.txt, bert_embed/, 其中bert_embed下包含 - config.json,pytorch_model.bin,vocab.txt三个文件(该folder下的数据也可以直接被BERTModel读取) - - :param str folder: - :return: - """ - os.makedirs(folder, exist_ok=True) - - self.get_word_vocab().save(os.path.join(folder, VOCAB_NAME)) - - hyper = {} - hyper['min_freq'] = self._min_freq - hyper['layers'] = ','.join(map(str, self.model.layers)) - hyper['pool_method'] = self.model.pool_method - hyper['dropout'] = self.dropout_layer.p - hyper['word_dropout'] = self.word_dropout - hyper['include_cls_sep'] = self.model.include_cls_sep - hyper['pooled_cls'] = self.model.pooled_cls - hyper['auto_truncate'] = self.model.auto_truncate - hyper['requires_grad'] = bool(self.requires_grad) - - with open(os.path.join(folder, BERT_EMBED_HYPER), 'w', encoding='utf-8') as f: - json.dump(hyper, f, indent=2) - - os.makedirs(os.path.join(folder, BERT_EMBED_FOLDER), exist_ok=True) - self.model.save(os.path.join(folder, BERT_EMBED_FOLDER)) - logger.debug(f"BERTEmbedding has been saved in {folder}") - - @classmethod - def load(cls, folder): - """ - 给定一个folder, 
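`drop_word` above implements word-level dropout: during training it samples a Bernoulli mask at probability `word_dropout` and rewrites the selected positions to the unk index, while explicitly protecting padding and the [CLS]/[SEP] positions. A standalone sketch of the same masking logic, with hypothetical index arguments::

    import torch

    def drop_word(words, p, pad_idx, unk_idx, keep_idxs=()):
        # words: LongTensor of shape [batch_size, max_len]
        mask = torch.bernoulli(torch.full_like(words, p, dtype=torch.float)).bool()
        mask &= words.ne(pad_idx)          # never replace padding
        for idx in keep_idxs:              # never replace special tokens, e.g. [CLS]/[SEP]
            mask &= words.ne(idx)
        return words.masked_fill(mask, unk_idx)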
需要包含以下三个内容vocab.txt, bert_embed_hyper.txt, bert_embed/ - - :param str folder: - :return: - """ - for name in [VOCAB_NAME, BERT_EMBED_FOLDER, BERT_EMBED_HYPER]: - assert os.path.exists(os.path.join(folder, name)), f"{name} not found in {folder}." - - vocab = Vocabulary.load(os.path.join(folder, VOCAB_NAME)) - - with open(os.path.join(folder, BERT_EMBED_HYPER), 'r', encoding='utf-8') as f: - hyper = json.load(f) - - model_dir_or_name = os.path.join(os.path.join(folder, BERT_EMBED_FOLDER)) - - bert_embed = cls(vocab=vocab, model_dir_or_name=model_dir_or_name, **hyper) - return bert_embed - - -class BertWordPieceEncoder(nn.Module): - r""" - 读取bert模型,读取之后调用index_dataset方法在dataset中生成word_pieces这一列。 - - BertWordPieceEncoder可以支持自动下载权重,当前支持的模型: - en: base-cased - en-large-cased-wwm: - en-large-cased: - en-large-uncased: - en-large-uncased-wwm - cn: 中文BERT wwm by HIT - cn-base: 中文BERT base-chinese - cn-wwm-ext: 中文BERT wwm by HIT with extra data pretrain. - multi-base-cased: multilingual cased - multi-base-uncased: multilingual uncased - - """ - - def __init__(self, model_dir_or_name: str = 'en-base-uncased', layers: str = '-1', pooled_cls: bool = False, - word_dropout=0, dropout=0, requires_grad: bool = True, **kwargs): - r""" - - :param str model_dir_or_name: 模型所在目录或者模型的名称。默认值为 ``en-base-uncased`` - :param str layers: 最终结果中的表示。以','隔开层数,可以以负数去索引倒数几层。layer=0为embedding层(包括wordpiece embedding, - position embedding和segment embedding) - :param bool pooled_cls: 返回的句子开头的[CLS]是否使用预训练中的BertPool映射一下。如果下游任务取[CLS]做预测,一般该值为True。 - :param float word_dropout: 以多大的概率将一个词替换为unk。这样既可以训练unk也是一定的regularize。 - :param float dropout: 以多大的概率对embedding的表示进行Dropout。0.1即随机将10%的值置为0。 - :param bool requires_grad: 是否需要gradient。 - """ - super().__init__() - - self.model = _BertWordPieceModel(model_dir_or_name=model_dir_or_name, layers=layers, pooled_cls=pooled_cls) - self._sep_index = self.model._sep_index - self._cls_index = self.model._cls_index - self._wordpiece_pad_index = self.model._wordpiece_pad_index - self._wordpiece_unk_index = self.model._wordpiece_unknown_index - self._embed_size = len(self.model.layers) * self.model.encoder.hidden_size - self.requires_grad = requires_grad - self.word_dropout = word_dropout - self.dropout_layer = nn.Dropout(dropout) - - @property - def embed_size(self): - return self._embed_size - - @property - def embedding_dim(self): - return self._embed_size - - @property - def num_embedding(self): - return self.model.encoder.config.vocab_size - - def index_datasets(self, *datasets, field_name, add_cls_sep=True): - r""" - 使用bert的tokenizer新生成word_pieces列加入到datasets中,并将他们设置为input,且将word_pieces这一列的pad value设置为了 - bert的pad value。 - - :param ~fastNLP.DataSet datasets: DataSet对象 - :param str field_name: 基于哪一列的内容生成word_pieces列。这一列中每个数据应该是List[str]的形式。 - :param bool add_cls_sep: 如果首尾不是[CLS]与[SEP]会在首尾额外加入[CLS]与[SEP]。 - :return: - """ - - self.model.index_datasets(*datasets, field_name=field_name, add_cls_sep=add_cls_sep) - - def forward(self, word_pieces, token_type_ids=None): - r""" - 计算words的bert embedding表示。传入的words中应该自行包含[CLS]与[SEP]的tag。 - - :param words: batch_size x max_len - :param token_type_ids: batch_size x max_len, 用于区分前一句和后一句话. 如果不传入,则自动生成(大部分情况,都不需要输入), - 第一个[SEP]及之前为0, 第二个[SEP]及到第一个[SEP]之间为1; 第三个[SEP]及到第二个[SEP]之间为0,依次往后推。 - :return: torch.FloatTensor. 
batch_size x max_len x (768*len(self.layers)) - """ - if token_type_ids is None: - with torch.no_grad(): - sep_mask = word_pieces.eq(self._sep_index) # batch_size x max_len - sep_mask_cumsum = sep_mask.long().flip(dims=[-1]).cumsum(dim=-1).flip(dims=[-1]) - token_type_ids = sep_mask_cumsum.fmod(2) - token_type_ids = token_type_ids[:, :1].__xor__(token_type_ids) # 如果开头是奇数,则需要flip一下结果,因为需要保证开头为0 - - word_pieces = self.drop_word(word_pieces) - outputs = self.model(word_pieces, token_type_ids) - outputs = torch.cat([*outputs], dim=-1) - - return self.dropout_layer(outputs) - - def drop_word(self, words): - r""" - 按照设定随机将words设置为unknown_index。 - - :param torch.LongTensor words: batch_size x max_len - :return: - """ - if self.word_dropout > 0 and self.training: - with torch.no_grad(): - not_sep_mask = words.ne(self._sep_index) - not_cls_mask = words.ne(self._cls_index) - replaceable_mask = not_sep_mask.__and__(not_cls_mask) - mask = torch.full_like(words, fill_value=self.word_dropout, dtype=torch.float, device=words.device) - mask = torch.bernoulli(mask).eq(1) # dropout_word越大,越多位置为1 - pad_mask = words.ne(self._wordpiece_pad_index) - mask = pad_mask.__and__(mask).__and__(replaceable_mask) # pad的位置不为unk - words = words.masked_fill(mask, self._wordpiece_unk_index) - return words - - def save(self, folder): - """ - 会在folder下创建两个文件bert_encoder_hyper.json与bert_encoder/, bert_encoder下包含三个文件config.json, - pytorch_model.bin,vocab.txt三个文件(该folder下的数据也可以直接被BERTModel读取) - - :param str folder: - :return: - """ - os.makedirs(folder, exist_ok=True) - - hyper = {} - hyper['layers'] = ','.join(map(str, self.model.layers)) - hyper['dropout'] = self.dropout_layer.p - hyper['word_dropout'] = self.word_dropout - hyper['pooled_cls'] = self.model.pooled_cls - hyper['requires_grad'] = bool(self.requires_grad) - - with open(os.path.join(folder, BERT_ENCODER_HYPER), 'w', encoding='utf-8') as f: - json.dump(hyper, f, indent=2) - - os.makedirs(os.path.join(folder, BERT_ENCODER_FOLDER), exist_ok=True) - self.model.save(os.path.join(folder, BERT_ENCODER_FOLDER)) - logger.debug(f"BertWordPieceEncoder has been saved in {folder}") - - @classmethod - def load(cls, folder): - """ - 会在folder下创建两个文件bert_encoder_hyper.json与bert_encoder/, bert_encoder下包含三个文件 - - :param folder: - :return: - """ - for name in [BERT_ENCODER_HYPER, BERT_ENCODER_FOLDER]: - assert os.path.exists(os.path.join(folder, name)), f"{name} not found in {folder}." 
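The `token_type_ids` trick above merits a worked example. A reversed cumulative sum over the [SEP] mask counts, for each position, how many [SEP] tokens sit at or after it; `fmod(2)` turns that count into alternating 0/1 segments, and the final xor against the first column flips the whole tensor when the first segment would otherwise start at 1. A minimal numeric sketch, assuming a hypothetical id of 102 for [SEP]::

    import torch

    word_pieces = torch.tensor([[101, 7, 8, 102, 9, 10, 102]])  # [CLS] a b [SEP] c d [SEP]
    sep_mask = word_pieces.eq(102)
    cums = sep_mask.long().flip(dims=[-1]).cumsum(dim=-1).flip(dims=[-1])
    token_type_ids = cums.fmod(2)                              # [[0, 0, 0, 0, 1, 1, 1]]
    token_type_ids = token_type_ids[:, :1] ^ token_type_ids    # first column is 0: unchanged
    # first sentence (through the first [SEP]) -> 0, second sentence -> 1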
- - with open(os.path.join(folder, BERT_ENCODER_HYPER), 'r', encoding='utf-8') as f: - hyper = json.load(f) - - model_dir_or_name = os.path.join(os.path.join(folder, BERT_ENCODER_FOLDER)) - - bert_encoder = cls(model_dir_or_name=model_dir_or_name, **hyper) - return bert_encoder - - -class _BertWordModel(nn.Module): - def __init__(self, model_dir_or_name: str, vocab: Vocabulary, layers: str = '-1', pool_method: str = 'first', - include_cls_sep: bool = False, pooled_cls: bool = False, auto_truncate: bool = False, min_freq=2, - **kwargs): - super().__init__() - - if isinstance(layers, list): - self.layers = [int(l) for l in layers] - elif isinstance(layers, str): - if layers.lower() == 'all': - self.layers = None - else: - self.layers = list(map(int, layers.split(','))) - else: - raise TypeError("`layers` only supports str or list[int]") - - neg_num_output_layer = -16384 - pos_num_output_layer = 0 - if self.layers is None: - neg_num_output_layer = -1 - else: - for layer in self.layers: - if layer < 0: - neg_num_output_layer = max(layer, neg_num_output_layer) - else: - pos_num_output_layer = max(layer, pos_num_output_layer) - - self.tokenizer = BertTokenizer.from_pretrained(model_dir_or_name) - self.encoder = BertModel.from_pretrained(model_dir_or_name, - neg_num_output_layer=neg_num_output_layer, - pos_num_output_layer=pos_num_output_layer, - **kwargs) - self._max_position_embeddings = self.encoder.config.max_position_embeddings - # 检查encoder_layer_number是否合理 - encoder_layer_number = len(self.encoder.encoder.layer) - if self.layers is None: - self.layers = [idx for idx in range(encoder_layer_number + 1)] - logger.info(f'Bert Model will return {len(self.layers)} layers (layer-0 ' - f'is embedding result): {self.layers}') - assert len(self.layers) > 0, "There is no layer selected!" - for layer in self.layers: - if layer < 0: - assert -layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \ - f"a bert model with {encoder_layer_number} layers." - else: - assert layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \ - f"a bert model with {encoder_layer_number} layers." 
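Layer indexing in `_BertWordModel` above treats the embedding output as layer 0, so a 12-layer BERT exposes 13 valid indices (0..12), and negative values count back from the top exactly like Python list indexing. An illustrative resolution of a `layers` string for a hypothetical 12-layer encoder::

    layers = '4,-2,-1'
    encoder_layer_number = 12                  # e.g. BERT-base
    resolved = [l % (encoder_layer_number + 1) for l in map(int, layers.split(','))]
    assert resolved == [4, 11, 12]             # layer 0 would be the embedding output
    # embed size = len(resolved) * hidden_size, e.g. 3 * 768 = 2304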
- - assert pool_method in ('avg', 'max', 'first', 'last') - self.pool_method = pool_method - self.include_cls_sep = include_cls_sep - self.pooled_cls = pooled_cls - self.auto_truncate = auto_truncate - - # 将所有vocab中word的wordpiece计算出来, 需要额外考虑[CLS]和[SEP] - self._has_sep_in_vocab = '[SEP]' in vocab # 用来判断传入的数据是否需要生成token_ids - - word_to_wordpieces = [] - word_pieces_lengths = [] - for word, index in vocab: - if index == vocab.padding_idx: # pad是个特殊的符号 - word = '[PAD]' - elif index == vocab.unknown_idx: - word = '[UNK]' - elif vocab.word_count[word] < min_freq: - word = '[UNK]' - word_pieces = self.tokenizer.wordpiece_tokenizer.tokenize(word) - word_pieces = self.tokenizer.convert_tokens_to_ids(word_pieces) - word_to_wordpieces.append(word_pieces) - word_pieces_lengths.append(len(word_pieces)) - self._cls_index = self.tokenizer.vocab['[CLS]'] - self._sep_index = self.tokenizer.vocab['[SEP]'] - self._word_pad_index = vocab.padding_idx - self._wordpiece_pad_index = self.tokenizer.vocab['[PAD]'] # 需要用于生成word_piece - self.word_to_wordpieces = np.array(word_to_wordpieces, dtype=object) - self.register_buffer('word_pieces_lengths', torch.LongTensor(word_pieces_lengths)) - logger.debug("Successfully generate word pieces.") - - def forward(self, words): - r""" - - :param words: torch.LongTensor, batch_size x max_len - :return: num_layers x batch_size x max_len x hidden_size或者num_layers x batch_size x (max_len+2) x hidden_size - """ - with torch.no_grad(): - batch_size, max_word_len = words.size() - word_mask = words.ne(self._word_pad_index) # 为1的地方有word - seq_len = word_mask.sum(dim=-1) - batch_word_pieces_length = self.word_pieces_lengths[words].masked_fill(word_mask.eq(False), - 0) # batch_size x max_len - word_pieces_lengths = batch_word_pieces_length.sum(dim=-1) # batch_size - max_word_piece_length = batch_word_pieces_length.sum(dim=-1).max().item() # 表示word piece的长度(包括padding) - if max_word_piece_length + 2 > self._max_position_embeddings: - if self.auto_truncate: - word_pieces_lengths = word_pieces_lengths.masked_fill( - word_pieces_lengths + 2 > self._max_position_embeddings, - self._max_position_embeddings - 2) - else: - raise RuntimeError( - "After split words into word pieces, the lengths of word pieces are longer than the " - f"maximum allowed sequence length:{self._max_position_embeddings} of bert. You can set " - f"`auto_truncate=True` for BertEmbedding to automatically truncate overlong input.") - - # +2是由于需要加入[CLS]与[SEP] - word_pieces = words.new_full((batch_size, min(max_word_piece_length + 2, self._max_position_embeddings)), - fill_value=self._wordpiece_pad_index) - attn_masks = torch.zeros_like(word_pieces) - # 1. 
获取words的word_pieces的id,以及对应的span范围 - word_indexes = words.cpu().numpy() - for i in range(batch_size): - word_pieces_i = list(chain(*self.word_to_wordpieces[word_indexes[i, :seq_len[i]]])) - if self.auto_truncate and len(word_pieces_i) > self._max_position_embeddings - 2: - word_pieces_i = word_pieces_i[:self._max_position_embeddings - 2] - word_pieces[i, 1:word_pieces_lengths[i] + 1] = torch.LongTensor(word_pieces_i) - attn_masks[i, :word_pieces_lengths[i] + 2].fill_(1) - # 添加[cls]和[sep] - word_pieces[:, 0].fill_(self._cls_index) - batch_indexes = torch.arange(batch_size).to(words) - word_pieces[batch_indexes, word_pieces_lengths + 1] = self._sep_index - if self._has_sep_in_vocab: # 但[SEP]在vocab中出现应该才会需要token_ids - sep_mask = word_pieces.eq(self._sep_index).long() # batch_size x max_len - sep_mask_cumsum = sep_mask.flip(dims=[-1]).cumsum(dim=-1).flip(dims=[-1]) - token_type_ids = sep_mask_cumsum.fmod(2) - token_type_ids = token_type_ids[:, :1].__xor__(token_type_ids) # 如果开头是奇数,则需要flip一下结果,因为需要保证开头为0 - else: - token_type_ids = torch.zeros_like(word_pieces) - # 2. 获取hidden的结果,根据word_pieces进行对应的pool计算 - # all_outputs: [batch_size x max_len x hidden_size, batch_size x max_len x hidden_size, ...] - bert_outputs, pooled_cls = self.encoder(word_pieces, token_type_ids=token_type_ids, - attention_mask=attn_masks, - output_all_encoded_layers=True) - # output_layers = [self.layers] # len(self.layers) x batch_size x real_word_piece_length x hidden_size - - if self.include_cls_sep: - s_shift = 1 - outputs = bert_outputs[-1].new_zeros(len(self.layers), batch_size, max_word_len + 2, - bert_outputs[-1].size(-1)) - - else: - s_shift = 0 - outputs = bert_outputs[-1].new_zeros(len(self.layers), batch_size, max_word_len, - bert_outputs[-1].size(-1)) - batch_word_pieces_cum_length = batch_word_pieces_length.new_zeros(batch_size, max_word_len + 1) - batch_word_pieces_cum_length[:, 1:] = batch_word_pieces_length.cumsum(dim=-1) # batch_size x max_len - - if self.pool_method == 'first': - batch_word_pieces_cum_length = batch_word_pieces_cum_length[:, :seq_len.max()] - batch_word_pieces_cum_length.masked_fill_(batch_word_pieces_cum_length.ge(max_word_piece_length), 0) - _batch_indexes = batch_indexes[:, None].expand((batch_size, batch_word_pieces_cum_length.size(1))) - elif self.pool_method == 'last': - batch_word_pieces_cum_length = batch_word_pieces_cum_length[:, 1:seq_len.max()+1] - 1 - batch_word_pieces_cum_length.masked_fill_(batch_word_pieces_cum_length.ge(max_word_piece_length), 0) - _batch_indexes = batch_indexes[:, None].expand((batch_size, batch_word_pieces_cum_length.size(1))) - - for l_index, l in enumerate(self.layers): - output_layer = bert_outputs[l] - real_word_piece_length = output_layer.size(1) - 2 - if max_word_piece_length > real_word_piece_length: # 如果实际上是截取出来的 - paddings = output_layer.new_zeros(batch_size, - max_word_piece_length - real_word_piece_length, - output_layer.size(2)) - output_layer = torch.cat((output_layer, paddings), dim=1).contiguous() - # 从word_piece collapse到word的表示 - truncate_output_layer = output_layer[:, 1:-1] # 删除[CLS]与[SEP] batch_size x len x hidden_size - if self.pool_method == 'first': - tmp = truncate_output_layer[_batch_indexes, batch_word_pieces_cum_length] - tmp = tmp.masked_fill(word_mask[:, :batch_word_pieces_cum_length.size(1), None].eq(False), 0) - outputs[l_index, :, s_shift:batch_word_pieces_cum_length.size(1)+s_shift] = tmp - - elif self.pool_method == 'last': - tmp = truncate_output_layer[_batch_indexes, batch_word_pieces_cum_length] - tmp = 
tmp.masked_fill(word_mask[:, :batch_word_pieces_cum_length.size(1), None].eq(False), 0) - outputs[l_index, :, s_shift:batch_word_pieces_cum_length.size(1)+s_shift] = tmp - elif self.pool_method == 'max': - for i in range(batch_size): - for j in range(seq_len[i]): - start, end = batch_word_pieces_cum_length[i, j], batch_word_pieces_cum_length[i, j + 1] - outputs[l_index, i, j + s_shift], _ = torch.max(truncate_output_layer[i, start:end], dim=-2) - else: - for i in range(batch_size): - for j in range(seq_len[i]): - start, end = batch_word_pieces_cum_length[i, j], batch_word_pieces_cum_length[i, j + 1] - outputs[l_index, i, j + s_shift] = torch.mean(truncate_output_layer[i, start:end], dim=-2) - if self.include_cls_sep: - if l in (len(bert_outputs) - 1, -1) and self.pooled_cls: - outputs[l_index, :, 0] = pooled_cls - else: - outputs[l_index, :, 0] = output_layer[:, 0] - outputs[l_index, batch_indexes, seq_len + s_shift] = output_layer[batch_indexes, word_pieces_lengths + s_shift] - - # 3. 最终的embedding结果 - return outputs - - def save(self, folder): - """ - 给定一个folder保存pytorch_model.bin, config.json, vocab.txt - - :param str folder: - :return: - """ - self.tokenizer.save_pretrained(folder) - self.encoder.save_pretrained(folder) - - -class _BertWordPieceModel(nn.Module): - r""" - 这个模块用于直接计算word_piece的结果. - - """ - - def __init__(self, model_dir_or_name: str, layers: str = '-1', pooled_cls: bool=False): - super().__init__() - - self.tokenizer = BertTokenizer.from_pretrained(model_dir_or_name) - self.encoder = BertModel.from_pretrained(model_dir_or_name) - # 检查encoder_layer_number是否合理 - encoder_layer_number = len(self.encoder.encoder.layer) - - if isinstance(layers, list): - self.layers = [int(l) for l in layers] - elif isinstance(layers, str): - self.layers = list(map(int, layers.split(','))) - else: - raise TypeError("`layers` only supports str or list[int]") - - for layer in self.layers: - if layer < 0: - assert -layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \ - f"a bert model with {encoder_layer_number} layers." - else: - assert layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \ - f"a bert model with {encoder_layer_number} layers." 
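The four pool methods used above collapse each word's span of word-piece vectors into one word vector. Reduced to a single word, the four variants are simply::

    import torch

    span = torch.randn(3, 768)       # one word that was split into 3 word pieces

    first = span[0]                  # pool_method='first'
    last = span[-1]                  # pool_method='last'
    mx, _ = span.max(dim=0)          # pool_method='max'
    avg = span.mean(dim=0)           # pool_method='avg'

'first' and 'last' are cheap batched gathers (hence the warning above that they are the fast choices for Chinese BERT), while 'max' and 'avg' fall back to per-word Python loops.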
- - self._cls_index = self.tokenizer.cls_index - self._sep_index = self.tokenizer.sep_index - self._wordpiece_unknown_index = self.tokenizer.unk_index - self._wordpiece_pad_index = self.tokenizer.pad_index # 需要用于生成word_piece - self.pooled_cls = pooled_cls - - def index_datasets(self, *datasets, field_name, add_cls_sep=True): - r""" - 使用bert的tokenizer新生成word_pieces列加入到datasets中,并将他们设置为input。如果首尾不是 - [CLS]与[SEP]会在首尾额外加入[CLS]与[SEP], 且将word_pieces这一列的pad value设置为了bert的pad value。 - - :param datasets: DataSet对象 - :param field_name: 基于哪一列index - :return: - """ - - encode_func = partial(self.tokenizer.encode, add_special_tokens=add_cls_sep) - - for index, dataset in enumerate(datasets): - try: - dataset.apply_field(encode_func, field_name=field_name, new_field_name='word_pieces', - is_input=True) - dataset.set_pad_val('word_pieces', self._wordpiece_pad_index) - except Exception as e: - logger.error(f"Exception happens when processing the {index} dataset.") - raise e - - def forward(self, word_pieces, token_type_ids=None): - r""" - - :param word_pieces: torch.LongTensor, batch_size x max_len - :param token_type_ids: torch.LongTensor, batch_size x max_len - :return: num_layers x batch_size x max_len x hidden_size或者num_layers x batch_size x (max_len+2) x hidden_size - """ - batch_size, max_len = word_pieces.size() - - attn_masks = word_pieces.ne(self._wordpiece_pad_index) - bert_outputs, pooled_cls = self.encoder(word_pieces, token_type_ids=token_type_ids, attention_mask=attn_masks, - output_all_encoded_layers=True) - # output_layers = [self.layers] # len(self.layers) x batch_size x max_word_piece_length x hidden_size - outputs = bert_outputs[0].new_zeros((len(self.layers), batch_size, max_len, bert_outputs[0].size(-1))) - for l_index, l in enumerate(self.layers): - bert_output = bert_outputs[l] - if l in (len(bert_outputs)-1, -1) and self.pooled_cls: - bert_output[:, 0] = pooled_cls - outputs[l_index] = bert_output - return outputs - - def save(self, folder): - """ - 给定一个folder保存pytorch_model.bin, config.json, vocab.txt - - :param folder: - :return: - """ - self.tokenizer.save_pretrained(folder) - self.encoder.save_pretrained(folder) diff --git a/fastNLP/embeddings/char_embedding.py b/fastNLP/embeddings/char_embedding.py deleted file mode 100644 index a2996ae2..00000000 --- a/fastNLP/embeddings/char_embedding.py +++ /dev/null @@ -1,284 +0,0 @@ -r""" -该文件中主要包含的是character的Embedding,包括基于CNN与LSTM的character Embedding。与其它Embedding一样,这里的Embedding输入也是 -词的index而不需要使用词语中的char的index来获取表达。 -""" - -__all__ = [ - "CNNCharEmbedding", - "LSTMCharEmbedding" -] - -from typing import List - -import torch -import torch.nn as nn -import torch.nn.functional as F - -from .embedding import TokenEmbedding -from .static_embedding import StaticEmbedding -from .utils import _construct_char_vocab_from_vocab -from .utils import get_embeddings -from ..core import logger -from ..core.vocabulary import Vocabulary -from ..modules.encoder.lstm import LSTM - - -class CNNCharEmbedding(TokenEmbedding): - r""" - 使用CNN生成character embedding。CNN的结构为, embed(x) -> Dropout(x) -> CNN(x) -> activation(x) -> pool -> fc -> Dropout. 
- 不同的kernel大小的fitler结果是concat起来然后通过一层fully connected layer, 然后输出word的表示。 - - Example:: - - >>> import torch - >>> from fastNLP import Vocabulary - >>> from fastNLP.embeddings import CNNCharEmbedding - >>> vocab = Vocabulary().add_word_lst("The whether is good .".split()) - >>> embed = CNNCharEmbedding(vocab, embed_size=50) - >>> words = torch.LongTensor([[vocab.to_index(word) for word in "The whether is good .".split()]]) - >>> outputs = embed(words) - >>> outputs.size() - >>> # torch.Size([1, 5,50]) - - """ - - def __init__(self, vocab: Vocabulary, embed_size: int = 50, char_emb_size: int = 50, word_dropout: float = 0, - dropout: float = 0, filter_nums: List[int] = (40, 30, 20), kernel_sizes: List[int] = (5, 3, 1), - pool_method: str = 'max', activation='relu', min_char_freq: int = 2, pre_train_char_embed: str = None, - requires_grad:bool=True, include_word_start_end:bool=True): - r""" - - :param vocab: 词表 - :param embed_size: 该CNNCharEmbedding的输出维度大小,默认值为50. - :param char_emb_size: character的embed的维度。character是从vocab中生成的。默认值为50. - :param float word_dropout: 以多大的概率将一个词替换为unk。这样既可以训练unk也是一定的regularize。 - :param float dropout: 以多大的概率drop分布式表示与char embedding的输出。 - :param filter_nums: filter的数量. 长度需要和kernels一致。默认值为[40, 30, 20]. - :param kernel_sizes: kernel的大小. 默认值为[5, 3, 1]. - :param pool_method: character的表示在合成一个表示时所使用的pool方法,支持'avg', 'max'. - :param activation: CNN之后使用的激活方法,支持'relu', 'sigmoid', 'tanh' 或者自定义函数. - :param min_char_freq: character的最少出现次数。默认值为2. - :param pre_train_char_embed: 可以有两种方式调用预训练好的character embedding:第一种是传入embedding文件夹 - (文件夹下应该只有一个以.txt作为后缀的文件)或文件路径;第二种是传入embedding的名称,第二种情况将自动查看缓存中是否存在该模型, - 没有的话将自动下载。如果输入为None则使用embedding_dim的维度随机初始化一个embedding. - :param requires_grad: 是否更新权重 - :param include_word_start_end: 是否在每个word开始的character前和结束的character增加特殊标示符号; - """ - super(CNNCharEmbedding, self).__init__(vocab, word_dropout=word_dropout, dropout=dropout) - - for kernel in kernel_sizes: - assert kernel % 2 == 1, "Only odd kernel is allowed." 
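The odd-kernel assertion above exists because each Conv1d is later constructed with `padding=kernel_size // 2`, which preserves the character-sequence length only when the kernel size is odd. A quick shape check with illustrative sizes::

    import torch
    import torch.nn as nn

    conv = nn.Conv1d(in_channels=50, out_channels=40, kernel_size=5, padding=5 // 2)
    x = torch.randn(8, 50, 12)            # (batch * max_len, char_emb_size, max_word_len)
    assert conv(x).shape == (8, 40, 12)   # length 12 is preserved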
- - assert pool_method in ('max', 'avg') - self.pool_method = pool_method - # activation function - if isinstance(activation, str): - if activation.lower() == 'relu': - self.activation = F.relu - elif activation.lower() == 'sigmoid': - self.activation = F.sigmoid - elif activation.lower() == 'tanh': - self.activation = F.tanh - elif activation is None: - self.activation = lambda x: x - elif callable(activation): - self.activation = activation - else: - raise Exception( - "Undefined activation function: choose from: [relu, tanh, sigmoid, or a callable function]") - - logger.info("Start constructing character vocabulary.") - # 建立char的词表 - self.char_vocab = _construct_char_vocab_from_vocab(vocab, min_freq=min_char_freq, - include_word_start_end=include_word_start_end) - self.char_pad_index = self.char_vocab.padding_idx - logger.info(f"In total, there are {len(self.char_vocab)} distinct characters.") - # 对vocab进行index - max_word_len = max(map(lambda x: len(x[0]), vocab)) - if include_word_start_end: - max_word_len += 2 - self.register_buffer('words_to_chars_embedding', torch.full((len(vocab), max_word_len), - fill_value=self.char_pad_index, dtype=torch.long)) - self.register_buffer('word_lengths', torch.zeros(len(vocab)).long()) - for word, index in vocab: - # if index!=vocab.padding_idx: # 如果是pad的话,直接就为pad_value了。修改为不区分pad, 这样所有的也是同一个embed - if include_word_start_end: - word = [''] + list(word) + [''] - self.words_to_chars_embedding[index, :len(word)] = \ - torch.LongTensor([self.char_vocab.to_index(c) for c in word]) - self.word_lengths[index] = len(word) - # self.char_embedding = nn.Embedding(len(self.char_vocab), char_emb_size) - if pre_train_char_embed: - self.char_embedding = StaticEmbedding(self.char_vocab, model_dir_or_name=pre_train_char_embed) - else: - self.char_embedding = get_embeddings((len(self.char_vocab), char_emb_size)) - - self.convs = nn.ModuleList([nn.Conv1d( - self.char_embedding.embedding_dim, filter_nums[i], kernel_size=kernel_sizes[i], bias=True, - padding=kernel_sizes[i] // 2) - for i in range(len(kernel_sizes))]) - self._embed_size = embed_size - self.fc = nn.Linear(sum(filter_nums), embed_size) - self.requires_grad = requires_grad - - def forward(self, words): - r""" - 输入words的index后,生成对应的words的表示。 - - :param words: [batch_size, max_len] - :return: [batch_size, max_len, embed_size] - """ - words = self.drop_word(words) - batch_size, max_len = words.size() - chars = self.words_to_chars_embedding[words] # batch_size x max_len x max_word_len - word_lengths = self.word_lengths[words] # batch_size x max_len - max_word_len = word_lengths.max() - chars = chars[:, :, :max_word_len] - # 为1的地方为mask - chars_masks = chars.eq(self.char_pad_index) # batch_size x max_len x max_word_len 如果为0, 说明是padding的位置了 - chars = self.char_embedding(chars) # batch_size x max_len x max_word_len x embed_size - chars = self.dropout(chars) - reshaped_chars = chars.reshape(batch_size * max_len, max_word_len, -1) - reshaped_chars = reshaped_chars.transpose(1, 2) # B' x E x M - conv_chars = [conv(reshaped_chars).transpose(1, 2).reshape(batch_size, max_len, max_word_len, -1) - for conv in self.convs] - conv_chars = torch.cat(conv_chars, dim=-1).contiguous() # B x max_len x max_word_len x sum(filters) - conv_chars = self.activation(conv_chars) - if self.pool_method == 'max': - conv_chars = conv_chars.masked_fill(chars_masks.unsqueeze(-1), float('-inf')) - chars, _ = torch.max(conv_chars, dim=-2) # batch_size x max_len x sum(filters) - else: - conv_chars = 
conv_chars.masked_fill(chars_masks.unsqueeze(-1), 0) - chars = torch.sum(conv_chars, dim=-2) / chars_masks.eq(False).sum(dim=-1, keepdim=True).float() - chars = self.fc(chars) - return self.dropout(chars) - - -class LSTMCharEmbedding(TokenEmbedding): - r""" - 使用LSTM的方式对character进行encode. embed(x) -> Dropout(x) -> LSTM(x) -> activation(x) -> pool -> Dropout - - Example:: - - >>> import torch - >>> from fastNLP import Vocabulary - >>> from fastNLP.embeddings import LSTMCharEmbedding - >>> vocab = Vocabulary().add_word_lst("The whether is good .".split()) - >>> embed = LSTMCharEmbedding(vocab, embed_size=50) - >>> words = torch.LongTensor([[vocab.to_index(word) for word in "The whether is good .".split()]]) - >>> outputs = embed(words) - >>> outputs.size() - >>> # torch.Size([1, 5,50]) - - """ - - def __init__(self, vocab: Vocabulary, embed_size: int = 50, char_emb_size: int = 50, word_dropout: float = 0, - dropout: float = 0, hidden_size=50, pool_method: str = 'max', activation='relu', - min_char_freq: int = 2, bidirectional=True, pre_train_char_embed: str = None, - requires_grad:bool=True, include_word_start_end:bool=True): - r""" - - :param vocab: 词表 - :param embed_size: LSTMCharEmbedding的输出维度。默认值为50. - :param char_emb_size: character的embedding的维度。默认值为50. - :param float word_dropout: 以多大的概率将一个词替换为unk。这样既可以训练unk也是一定的regularize。 - :param dropout: 以多大概率drop character embedding的输出以及最终的word的输出。 - :param hidden_size: LSTM的中间hidden的大小,如果为bidirectional的,hidden会除二,默认为50. - :param pool_method: 支持'max', 'avg'。 - :param activation: 激活函数,支持'relu', 'sigmoid', 'tanh', 或者自定义函数. - :param min_char_freq: character的最小出现次数。默认值为2. - :param bidirectional: 是否使用双向的LSTM进行encode。默认值为True。 - :param pre_train_char_embed: 可以有两种方式调用预训练好的character embedding:第一种是传入embedding文件夹 - (文件夹下应该只有一个以.txt作为后缀的文件)或文件路径;第二种是传入embedding的名称,第二种情况将自动查看缓存中是否存在该模型, - 没有的话将自动下载。如果输入为None则使用embedding_dim的维度随机初始化一个embedding. - :param requires_grad: 是否更新权重 - :param include_word_start_end: 是否在每个word开始的character前和结束的character增加特殊标示符号; - """ - super(LSTMCharEmbedding, self).__init__(vocab, word_dropout=word_dropout, dropout=dropout) - - assert hidden_size % 2 == 0, "Only even kernel is allowed." - - assert pool_method in ('max', 'avg') - self.pool_method = pool_method - # activation function - if isinstance(activation, str): - if activation.lower() == 'relu': - self.activation = F.relu - elif activation.lower() == 'sigmoid': - self.activation = F.sigmoid - elif activation.lower() == 'tanh': - self.activation = F.tanh - elif activation is None: - self.activation = lambda x: x - elif callable(activation): - self.activation = activation - else: - raise Exception( - "Undefined activation function: choose from: [relu, tanh, sigmoid, or a callable function]") - - logger.info("Start constructing character vocabulary.") - # 建立char的词表 - self.char_vocab = _construct_char_vocab_from_vocab(vocab, min_freq=min_char_freq, - include_word_start_end=include_word_start_end) - self.char_pad_index = self.char_vocab.padding_idx - logger.info(f"In total, there are {len(self.char_vocab)} distinct characters.") - # 对vocab进行index - max_word_len = max(map(lambda x: len(x[0]), vocab)) - if include_word_start_end: - max_word_len += 2 - self.register_buffer('words_to_chars_embedding', torch.full((len(vocab), max_word_len), - fill_value=self.char_pad_index, dtype=torch.long)) - self.register_buffer('word_lengths', torch.zeros(len(vocab)).long()) - for word, index in vocab: - # if index!=vocab.padding_idx: # 如果是pad的话,直接就为pad_value了. 
修改为不区分pad与否 - if include_word_start_end: - word = [''] + list(word) + [''] - self.words_to_chars_embedding[index, :len(word)] = \ - torch.LongTensor([self.char_vocab.to_index(c) for c in word]) - self.word_lengths[index] = len(word) - if pre_train_char_embed: - self.char_embedding = StaticEmbedding(self.char_vocab, pre_train_char_embed) - else: - self.char_embedding = get_embeddings((len(self.char_vocab), char_emb_size)) - - self.fc = nn.Linear(hidden_size, embed_size) - hidden_size = hidden_size // 2 if bidirectional else hidden_size - - self.lstm = LSTM(self.char_embedding.embedding_dim, hidden_size, bidirectional=bidirectional, batch_first=True) - self._embed_size = embed_size - self.bidirectional = bidirectional - self.requires_grad = requires_grad - - def forward(self, words): - r""" - 输入words的index后,生成对应的words的表示。 - - :param words: [batch_size, max_len] - :return: [batch_size, max_len, embed_size] - """ - words = self.drop_word(words) - batch_size, max_len = words.size() - chars = self.words_to_chars_embedding[words] # batch_size x max_len x max_word_len - word_lengths = self.word_lengths[words] # batch_size x max_len - max_word_len = word_lengths.max() - chars = chars[:, :, :max_word_len] - # 为mask的地方为1 - chars_masks = chars.eq(self.char_pad_index) # batch_size x max_len x max_word_len 如果为0, 说明是padding的位置了 - chars = self.char_embedding(chars) # batch_size x max_len x max_word_len x embed_size - chars = self.dropout(chars) - reshaped_chars = chars.reshape(batch_size * max_len, max_word_len, -1) - char_seq_len = chars_masks.eq(False).sum(dim=-1).reshape(batch_size * max_len) - lstm_chars = self.lstm(reshaped_chars, char_seq_len)[0].reshape(batch_size, max_len, max_word_len, -1) - # B x M x M x H - - lstm_chars = self.activation(lstm_chars) - if self.pool_method == 'max': - lstm_chars = lstm_chars.masked_fill(chars_masks.unsqueeze(-1), float('-inf')) - chars, _ = torch.max(lstm_chars, dim=-2) # batch_size x max_len x H - else: - lstm_chars = lstm_chars.masked_fill(chars_masks.unsqueeze(-1), 0) - chars = torch.sum(lstm_chars, dim=-2) / chars_masks.eq(False).sum(dim=-1, keepdim=True).float() - - chars = self.fc(chars) - - return self.dropout(chars) diff --git a/fastNLP/embeddings/contextual_embedding.py b/fastNLP/embeddings/contextual_embedding.py deleted file mode 100644 index d3ae6b4e..00000000 --- a/fastNLP/embeddings/contextual_embedding.py +++ /dev/null @@ -1,113 +0,0 @@ -r""" -.. todo:: - doc -""" - -__all__ = [ - "ContextualEmbedding" -] - -from abc import abstractmethod - -import torch - -from .embedding import TokenEmbedding -from ..core import logger -from ..core.batch import DataSetIter -from ..core.dataset import DataSet -from ..core.sampler import SequentialSampler -from ..core.utils import _move_model_to_device, _get_model_device -from ..core.vocabulary import Vocabulary - - -class ContextualEmbedding(TokenEmbedding): - r""" - ContextualEmbedding组件. 
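Both character encoders share the masked pooling idiom seen above: fill padded character slots with -inf before a max, or with 0 before a sum that is divided by the true lengths. (Incidentally, the empty strings in `word = [''] + list(word) + ['']` look like extraction damage; in the fastNLP source these are presumably the `<bow>`/`<eow>` markers referred to by `include_word_start_end`.) A minimal sketch of the pooling::

    import torch

    h = torch.randn(2, 4, 6, 16)              # batch x max_len x max_word_len x hidden
    pad_mask = torch.zeros(2, 4, 6, dtype=torch.bool)
    pad_mask[..., 4:] = True                  # pretend the last two char slots are padding

    mx, _ = h.masked_fill(pad_mask.unsqueeze(-1), float('-inf')).max(dim=-2)
    avg = (h.masked_fill(pad_mask.unsqueeze(-1), 0).sum(dim=-2)
           / (~pad_mask).sum(dim=-1, keepdim=True).float())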
BertEmbedding与ElmoEmbedding的基类 - """ - def __init__(self, vocab: Vocabulary, word_dropout: float = 0.0, dropout: float = 0.0): - super(ContextualEmbedding, self).__init__(vocab, word_dropout=word_dropout, dropout=dropout) - - def add_sentence_cache(self, *datasets, batch_size=32, device='cpu', delete_weights: bool = True): - r""" - 由于动态embedding生成比较耗时,所以可以把每句话embedding缓存下来,这样就不需要每次都运行生成过程。 - - :param datasets: DataSet对象 - :param batch_size: int, 生成cache的sentence表示时使用的batch的大小 - :param device: 参考 :class::fastNLP.Trainer 的device - :param delete_weights: 似乎在生成了cache之后删除权重,在不需要finetune动态模型的情况下,删除权重会大量减少内存占用。 - :return: - """ - for index, dataset in enumerate(datasets): - try: - assert isinstance(dataset, DataSet), "Only fastNLP.DataSet object is allowed." - assert 'words' in dataset.get_input_name(), "`words` field has to be set as input." - except Exception as e: - logger.error(f"Exception happens at {index} dataset.") - raise e - - sent_embeds = {} - _move_model_to_device(self, device=device) - device = _get_model_device(self) - pad_index = self._word_vocab.padding_idx - logger.info("Start to calculate sentence representations.") - with torch.no_grad(): - for index, dataset in enumerate(datasets): - try: - batch = DataSetIter(dataset, batch_size=batch_size, sampler=SequentialSampler()) - for batch_x, batch_y in batch: - words = batch_x['words'].to(device) - words_list = words.tolist() - seq_len = words.ne(pad_index).sum(dim=-1) - max_len = words.size(1) - # 因为有些情况可能包含CLS, SEP, 从后面往前计算比较安全。 - seq_len_from_behind = (max_len - seq_len).tolist() - word_embeds = self(words).detach().cpu().numpy() - for b in range(words.size(0)): - length = seq_len_from_behind[b] - if length == 0: - sent_embeds[tuple(words_list[b][:seq_len[b]])] = word_embeds[b] - else: - sent_embeds[tuple(words_list[b][:seq_len[b]])] = word_embeds[b, :-length] - except Exception as e: - logger.error(f"Exception happens at {index} dataset.") - raise e - logger.info("Finish calculating sentence representations.") - self.sent_embeds = sent_embeds - if delete_weights: - self._delete_model_weights() - - def _get_sent_reprs(self, words): - r""" - 获取sentence的表示,如果有缓存,则返回缓存的值; 没有缓存则返回None - - :param words: torch.LongTensor - :return: - """ - if hasattr(self, 'sent_embeds'): - words_list = words.tolist() - seq_len = words.ne(self._word_pad_index).sum(dim=-1) - _embeds = [] - for b in range(len(words)): - words_i = tuple(words_list[b][:seq_len[b]]) - embed = self.sent_embeds[words_i] - _embeds.append(embed) - max_sent_len = max(map(len, _embeds)) - embeds = words.new_zeros(len(_embeds), max_sent_len, self.embed_size, dtype=torch.float, - device=words.device) - for i, embed in enumerate(_embeds): - embeds[i, :len(embed)] = torch.FloatTensor(embed).to(words.device) - return embeds - return None - - @abstractmethod - def _delete_model_weights(self): - r"""删除计算表示的模型以节省资源""" - raise NotImplementedError - - def remove_sentence_cache(self): - r""" - 删除缓存的句子表示. 删除之后如果模型权重没有被删除,将开始使用动态计算权重。 - - :return: - """ - del self.sent_embeds diff --git a/fastNLP/embeddings/elmo_embedding.py b/fastNLP/embeddings/elmo_embedding.py deleted file mode 100644 index 39cd4b30..00000000 --- a/fastNLP/embeddings/elmo_embedding.py +++ /dev/null @@ -1,335 +0,0 @@ -r""" -.. 
todo:: - doc -""" - -__all__ = [ - "ElmoEmbedding" -] - -import codecs -import json -import os - -import torch -import torch.nn as nn -import torch.nn.functional as F - -from .contextual_embedding import ContextualEmbedding -from ..core import logger -from ..core.vocabulary import Vocabulary -from ..io.file_utils import cached_path, _get_embedding_url, PRETRAINED_ELMO_MODEL_DIR -from ..modules.encoder._elmo import ElmobiLm, ConvTokenEmbedder - - -class ElmoEmbedding(ContextualEmbedding): - r""" - 使用ELMo的embedding。初始化之后,只需要传入words就可以得到对应的embedding。 - 当前支持的使用名称初始化的模型: - - .. code:: - - en: 即en-medium hidden_size 1024; output_size 12 - en-medium: hidden_size 2048; output_size 256 - en-origial: hidden_size 4096; output_size 512 - en-original-5.5b: hidden_size 4096; output_size 512 - en-small: hidden_size 1024; output_size 128 - - Example:: - - >>> import torch - >>> from fastNLP import Vocabulary - >>> from fastNLP.embeddings import ElmoEmbedding - >>> vocab = Vocabulary().add_word_lst("The whether is good .".split()) - >>> # 使用不同层的concat的结果 - >>> embed = ElmoEmbedding(vocab, model_dir_or_name='en', layers='1,2', requires_grad=False) - >>> words = torch.LongTensor([[vocab.to_index(word) for word in "The whether is good .".split()]]) - >>> outputs = embed(words) - >>> outputs.size() - >>> # torch.Size([1, 5, 2048]) - - >>> # 使用不同层的weighted sum。 - >>> embed = ElmoEmbedding(vocab, model_dir_or_name='en', layers='mix', requires_grad=False) - >>> embed.set_mix_weights_requires_grad() # 使得weighted的权重是可以学习的,但ELMO的LSTM部分是不更新 - - """ - - def __init__(self, vocab: Vocabulary, model_dir_or_name: str = 'en', layers: str = '2', requires_grad: bool = True, - word_dropout=0.0, dropout=0.0, cache_word_reprs: bool = False): - r""" - - :param vocab: 词表 - :param model_dir_or_name: 可以有两种方式调用预训练好的ELMo embedding:第一种是传入ELMo所在文件夹,该文件夹下面应该有两个文件, - 其中一个是以json为后缀的配置文件,另一个是以pkl为后缀的权重文件;第二种是传入ELMo版本的名称,将自动查看缓存中是否存在该模型, - 没有的话将自动下载并缓存。 - :param layers: str, 指定返回的层数(从0开始), 以,隔开不同的层。如果要返回第二层的结果'2', 返回后两层的结果'1,2'。不同的层的结果 - 按照这个顺序concat起来,默认为'2'。'mix'会使用可学习的权重结合不同层的表示(权重是否可训练与requires_grad保持一致, - 初始化权重对三层结果进行mean-pooling, 可以通过ElmoEmbedding.set_mix_weights_requires_grad()方法只将mix weights设置为可学习。) - :param requires_grad: bool, 该层是否需要gradient, 默认为False. 
- :param float word_dropout: 以多大的概率将一个词替换为unk。这样既可以训练unk也是一定的regularize。 - :param float dropout: 以多大的概率对embedding的表示进行Dropout。0.1即随机将10%的值置为0。 - :param cache_word_reprs: 可以选择对word的表示进行cache; 设置为True的话,将在初始化的时候为每个word生成对应的embedding, - 并删除character encoder,之后将直接使用cache的embedding。默认为False。 - """ - super(ElmoEmbedding, self).__init__(vocab, word_dropout=word_dropout, dropout=dropout) - - # 根据model_dir_or_name检查是否存在并下载 - if model_dir_or_name.lower() in PRETRAINED_ELMO_MODEL_DIR: - model_url = _get_embedding_url('elmo', model_dir_or_name.lower()) - model_dir = cached_path(model_url, name='embedding') - # 检查是否存在 - elif os.path.isdir(os.path.abspath(os.path.expanduser(model_dir_or_name))): - model_dir = model_dir_or_name - else: - raise ValueError(f"Cannot recognize {model_dir_or_name}.") - self.model = _ElmoModel(model_dir, vocab, cache_word_reprs=cache_word_reprs) - num_layers = self.model.encoder.num_layers - - if layers == 'mix': - self.layer_weights = nn.Parameter(torch.zeros(self.model.config['lstm']['n_layers'] + 1), - requires_grad=requires_grad) - self.gamma = nn.Parameter(torch.ones(1), requires_grad=requires_grad) - self._get_outputs = self._get_mixed_outputs - self._embed_size = self.model.config['lstm']['projection_dim'] * 2 - else: - layers = list(map(int, layers.split(','))) - assert len(layers) > 0, "Must choose at least one output, but got None." - for layer in layers: - assert 0 <= layer <= num_layers, f"Layer index should be in range [0, {num_layers}], but got {layer}." - self.layers = layers - self._get_outputs = self._get_layer_outputs - self._embed_size = len(self.layers) * self.model.config['lstm']['projection_dim'] * 2 - - self.requires_grad = requires_grad - - def _get_mixed_outputs(self, outputs): - # outputs: num_layers x batch_size x max_len x hidden_size - # return: batch_size x max_len x hidden_size - weights = F.softmax(self.layer_weights + 1 / len(outputs), dim=0).to(outputs) - outputs = torch.einsum('l,lbij->bij', weights, outputs) - return self.gamma.to(outputs) * outputs - - def set_mix_weights_requires_grad(self, flag=True): - r""" - 当初始化ElmoEmbedding时layers被设置为mix时,可以通过调用该方法设置mix weights是否可训练。如果layers不是mix,调用 - 该方法没有用。 - - :param bool flag: 混合不同层表示的结果是否可以训练。 - :return: - """ - if hasattr(self, 'layer_weights'): - self.layer_weights.requires_grad = flag - self.gamma.requires_grad = flag - - def _get_layer_outputs(self, outputs): - if len(self.layers) == 1: - outputs = outputs[self.layers[0]] - else: - outputs = torch.cat(tuple([*outputs[self.layers]]), dim=-1) - - return outputs - - def forward(self, words: torch.LongTensor): - r""" - 计算words的elmo embedding表示。根据elmo文章中介绍的ELMO实际上是有2L+1层结果,但是为了让结果比较容易拆分,token的 - 被重复了一次,使得实际上layer=0的结果是[token_embedding;token_embedding], 而layer=1的结果是[forward_hiddens; - backward_hiddens]. - - :param words: batch_size x max_len - :return: torch.FloatTensor. batch_size x max_len x (512*len(self.layers)) - """ - words = self.drop_word(words) - outputs = self._get_sent_reprs(words) - if outputs is not None: - return self.dropout(outputs) - outputs = self.model(words) - outputs = self._get_outputs(outputs) - return self.dropout(outputs) - - def _delete_model_weights(self): - for name in ['layers', 'model', 'layer_weights', 'gamma']: - if hasattr(self, name): - delattr(self, name) - - -class _ElmoModel(nn.Module): - r""" - 该Module是ElmoEmbedding中进行所有的heavy lifting的地方。做的工作,包括 - (1) 根据配置,加载模型; - (2) 根据vocab,对模型中的embedding进行调整. 
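`_get_mixed_outputs` above is the standard ELMo scalar mix: softmax-normalized per-layer weights, an einsum over the layer axis, and a trainable scalar `gamma`. Note that adding the constant `1 / len(outputs)` to every logit does not change the softmax, so it acts only as a (redundant) initialization nudge. A minimal sketch::

    import torch
    import torch.nn.functional as F

    outputs = torch.randn(3, 2, 5, 1024)          # layers x batch x max_len x hidden
    layer_weights = torch.zeros(3, requires_grad=True)
    gamma = torch.ones(1, requires_grad=True)

    w = F.softmax(layer_weights + 1 / len(outputs), dim=0)
    mixed = gamma * torch.einsum('l,lbij->bij', w, outputs)   # batch x max_len x hidden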
-class _ElmoModel(nn.Module):
-    r"""
-    This module is where all the heavy lifting of ElmoEmbedding happens. Its responsibilities include:
-    (1) loading the model according to the configuration;
-    (2) adjusting the embeddings in the model according to the vocab, and initializing them correctly;
-    (3) keeping a word-to-chars mapping so that words are converted automatically on lookup;
-    (4) providing a token-embedding store that allows word representations to be cached.
-
-    """
-
-    def __init__(self, model_dir: str, vocab: Vocabulary = None, cache_word_reprs: bool = False):
-        super(_ElmoModel, self).__init__()
-        self.model_dir = model_dir
-        dir = os.walk(self.model_dir)
-        config_file = None
-        weight_file = None
-        config_count = 0
-        weight_count = 0
-        for path, dir_list, file_list in dir:
-            for file_name in file_list:
-                if file_name.__contains__(".json"):
-                    config_file = file_name
-                    config_count += 1
-                elif file_name.__contains__(".pkl"):
-                    weight_file = file_name
-                    weight_count += 1
-        if config_count > 1 or weight_count > 1:
-            raise Exception(f"Multiple config files(*.json) or weight files(*.pkl) detected in {model_dir}.")
-        elif config_count == 0 or weight_count == 0:
-            raise Exception(f"No config file or weight file found in {model_dir}")
-        with open(os.path.join(model_dir, config_file), 'r') as config_f:
-            config = json.load(config_f)
-        self.weight_file = os.path.join(model_dir, weight_file)
-        self.config = config
-
-        OOV_TAG = '<oov>'
-        PAD_TAG = '<pad>'
-        BOS_TAG = '<bos>'
-        EOS_TAG = '<eos>'
-        BOW_TAG = '<bow>'
-        EOW_TAG = '<eow>'
-
-        # For the model trained with character-based word encoder.
-        char_lexicon = {}
-        with codecs.open(os.path.join(model_dir, 'char.dic'), 'r', encoding='utf-8') as fpi:
-            for line in fpi:
-                tokens = line.strip().split('\t')
-                if len(tokens) == 1:
-                    tokens.insert(0, '\u3000')
-                token, i = tokens
-                char_lexicon[token] = int(i)
-
-        # a few sanity checks
-        for special_word in [PAD_TAG, OOV_TAG, BOW_TAG, EOW_TAG]:
-            assert special_word in char_lexicon, f"{special_word} not found in char.dic."
-
-        # build char_vocab from vocab
-        char_vocab = Vocabulary(unknown=OOV_TAG, padding=PAD_TAG)
-        # the special tags must be present
-        char_vocab.add_word_lst([BOW_TAG, EOW_TAG, BOS_TAG, EOS_TAG])
-
-        for word, index in vocab:
-            char_vocab.add_word_lst(list(word))
-
-        self.bos_index, self.eos_index, self._pad_index = len(vocab), len(vocab) + 1, vocab.padding_idx
-        # sized according to char_lexicon, with one extra slot reserved for word padding (that position's
-        # char representation is all zeros)
-        char_emb_layer = nn.Embedding(len(char_vocab) + 1, int(config['char_cnn']['embedding']['dim']),
-                                      padding_idx=len(char_vocab))
-
-        # load the pretrained weights; elmo_model here holds the state_dicts of char_cnn and lstm
-        elmo_model = torch.load(os.path.join(self.model_dir, weight_file), map_location='cpu')
-
-        char_embed_weights = elmo_model["char_cnn"]['char_emb_layer.weight']
-
-        found_char_count = 0
-        for char, index in char_vocab:  # adjust the character embedding
-            if char in char_lexicon:
-                index_in_pre = char_lexicon.get(char)
-                found_char_count += 1
-            else:
-                index_in_pre = char_lexicon[OOV_TAG]
-            char_emb_layer.weight.data[index] = char_embed_weights[index_in_pre]
-
-        logger.info(f"{found_char_count} out of {len(char_vocab)} characters were found in pretrained elmo embedding.")
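The mapping registered next lays every word out as character ids framed by <bow>/<eow> and right-padded with <pad> up to max_characters_per_token. A small illustration of that layout, using a made-up character vocabulary rather than the real one:

    # toy illustration: how a word becomes a fixed-length char-id row (ids are made up)
    char_ids = {'<bow>': 0, '<eow>': 1, '<pad>': 2, 'g': 3, 'o': 4, 'd': 5}
    max_chars = 8

    word = "good"
    ids = [char_ids['<bow>']] + [char_ids[c] for c in word] + [char_ids['<eow>']]
    ids += [char_ids['<pad>']] * (max_chars - len(ids))
    assert ids == [0, 3, 4, 4, 5, 1, 2, 2]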
-        # build the mapping from words to chars
-        max_chars = config['char_cnn']['max_characters_per_token']
-        self.register_buffer('words_to_chars_embedding', torch.full((len(vocab) + 2, max_chars),
-                                                                    fill_value=len(char_vocab),
-                                                                    dtype=torch.long))
-        for word, index in list(iter(vocab)) + [(BOS_TAG, len(vocab)), (EOS_TAG, len(vocab) + 1)]:
-            if len(word) + 2 > max_chars:
-                word = word[:max_chars - 2]
-            if index == self._pad_index:
-                continue
-            elif word == BOS_TAG or word == EOS_TAG:
-                char_ids = [char_vocab.to_index(BOW_TAG)] + [char_vocab.to_index(word)] + [
-                    char_vocab.to_index(EOW_TAG)]
-                char_ids += [char_vocab.to_index(PAD_TAG)] * (max_chars - len(char_ids))
-            else:
-                char_ids = [char_vocab.to_index(BOW_TAG)] + [char_vocab.to_index(c) for c in word] + [
-                    char_vocab.to_index(EOW_TAG)]
-                char_ids += [char_vocab.to_index(PAD_TAG)] * (max_chars - len(char_ids))
-            self.words_to_chars_embedding[index] = torch.LongTensor(char_ids)
-
-        self.char_vocab = char_vocab
-
-        self.token_embedder = ConvTokenEmbedder(
-            config, self.weight_file, None, char_emb_layer)
-        elmo_model["char_cnn"]['char_emb_layer.weight'] = char_emb_layer.weight
-        self.token_embedder.load_state_dict(elmo_model["char_cnn"])
-
-        self.output_dim = config['lstm']['projection_dim']
-
-        # lstm encoder
-        self.encoder = ElmobiLm(config)
-        self.encoder.load_state_dict(elmo_model["lstm"])
-
-        if cache_word_reprs:
-            if config['char_cnn']['embedding']['dim'] > 0:  # only useful when chars are used
-                logger.info("Start to generate cache word representations.")
-                batch_size = 320
-                # <bos> <eos>
-                word_size = self.words_to_chars_embedding.size(0)
-                num_batches = word_size // batch_size + \
-                              int(word_size % batch_size != 0)
-
-                self.cached_word_embedding = nn.Embedding(word_size,
-                                                          config['lstm']['projection_dim'])
-                with torch.no_grad():
-                    for i in range(num_batches):
-                        words = torch.arange(i * batch_size,
-                                             min((i + 1) * batch_size, word_size)).long()
-                        chars = self.words_to_chars_embedding[words].unsqueeze(1)  # batch_size x 1 x max_chars
-                        word_reprs = self.token_embedder(words.unsqueeze(1),
-                                                         chars).detach()  # batch_size x 1 x config['encoder']['projection_dim']
-                        self.cached_word_embedding.weight.data[words] = word_reprs.squeeze(1)
-
-                logger.info("Finish generating cached word representations. Going to delete the character encoder.")
-                del self.token_embedder, self.words_to_chars_embedding
-            else:
-                logger.info("There is no need to cache word representations, since no character information is used.")
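forward() below frames each sentence with <bos>/<eos> before running the encoder, and the index arithmetic is the subtle part. A standalone sketch of the same expansion, assuming (as the real code does) that the pad index is 0, with made-up ids for <bos>/<eos>:

    import torch

    pad_index, bos_index, eos_index = 0, 100, 101   # made-up ids
    words = torch.tensor([[7, 8, 9], [7, 8, 0]])    # batch of 2, second row padded

    batch_size, max_len = words.size()
    seq_len = words.ne(pad_index).sum(dim=-1)       # tensor([3, 2])
    expanded = words.new_zeros(batch_size, max_len + 2)  # pad is 0, so new_zeros pads
    expanded[:, 1:-1] = words
    expanded[:, 0].fill_(bos_index)
    expanded[torch.arange(batch_size), seq_len + 1] = eos_index
    # expanded: [[100, 7, 8, 9, 101], [100, 7, 8, 101, 0]]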
-    def forward(self, words):
-        r"""
-
-        :param words: batch_size x max_len
-        :return: num_layers x batch_size x max_len x hidden_size
-        """
-        # add <bos>, <eos>
-        batch_size, max_len = words.size()
-        expanded_words = words.new_zeros(batch_size, max_len + 2)  # relies on pad always being 0
-        seq_len = words.ne(self._pad_index).sum(dim=-1)
-        expanded_words[:, 1:-1] = words
-        expanded_words[:, 0].fill_(self.bos_index)
-        expanded_words[torch.arange(batch_size).to(words), seq_len + 1] = self.eos_index
-        seq_len = seq_len + 2
-        zero_tensor = expanded_words.new_zeros(expanded_words.shape)
-        mask = (expanded_words == zero_tensor).unsqueeze(-1)
-        if hasattr(self, 'cached_word_embedding'):
-            token_embedding = self.cached_word_embedding(expanded_words)
-        else:
-            if hasattr(self, 'words_to_chars_embedding'):
-                chars = self.words_to_chars_embedding[expanded_words]
-            else:
-                chars = None
-            token_embedding = self.token_embedder(expanded_words, chars)  # batch_size x max_len x embed_dim
-
-        encoder_output = self.encoder(token_embedding, seq_len)
-        if encoder_output.size(2) < max_len + 2:
-            num_layers, _, output_len, hidden_size = encoder_output.size()
-            dummy_tensor = encoder_output.new_zeros(num_layers, batch_size,
-                                                    max_len + 2 - output_len, hidden_size)
-            encoder_output = torch.cat((encoder_output, dummy_tensor), 2)
-        sz = encoder_output.size()  # 2, batch_size, max_len, hidden_size
-        token_embedding = token_embedding.masked_fill(mask, 0)
-        token_embedding = torch.cat((token_embedding, token_embedding), dim=2).view(1, sz[1], sz[2], sz[3])
-        encoder_output = torch.cat((token_embedding, encoder_output), dim=0)
-
-        # remove <bos>, <eos>. The removal here is not exact, but it should not affect the final result.
-        encoder_output = encoder_output[:, :, 1:-1]
-        return encoder_output
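To make the pseudo-layer layout above concrete: for a 2-layer ELMo the model returns three stacked layers, where layer 0 duplicates the token embedding and each LSTM layer concatenates forward and backward states along the hidden dimension, as the forward() docstring describes. A sketch of splitting them apart, with a random tensor standing in for the real model output:

    import torch

    projection_dim, batch_size, max_len = 512, 2, 5
    # stand-in for _ElmoModel(words): (n_lstm_layers + 1) x batch x len x (2 * projection_dim)
    outputs = torch.randn(3, batch_size, max_len, 2 * projection_dim)

    layer0 = outputs[0]                                   # [token_embedding; token_embedding]
    forward_h, backward_h = outputs[1].chunk(2, dim=-1)   # first LSTM layer, split into directions
    top = outputs[2]                                      # top LSTM layer, what layers='2' selects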
diff --git a/fastNLP/embeddings/embedding.py b/fastNLP/embeddings/embedding.py
deleted file mode 100644
index 9b6a1a7f..00000000
--- a/fastNLP/embeddings/embedding.py
+++ /dev/null
@@ -1,212 +0,0 @@
-r"""
-The Embedding in this module is mainly for randomly initialized embeddings (:class:`fastNLP.embeddings.StaticEmbedding`
-is recommended instead), or for initializing an Embedding from pretrained weights.
-
-"""
-
-__all__ = [
-    "Embedding",
-    "TokenEmbedding"
-]
-
-from abc import abstractmethod
-
-import torch
-import torch.nn as nn
-
-from .utils import get_embeddings
-
-
-class Embedding(nn.Module):
-    r"""
-    Word embedding that supports several kinds of initialization input. The vocabulary size is available as
-    self.num_embeddings and the embedding dimension as self.embedding_dim.
-
-    Example::
-
-        >>> import numpy as np
-        >>> from fastNLP.embeddings import Embedding
-        >>> init_embed = (2000, 100)
-        >>> embed = Embedding(init_embed)  # randomly initialize an embedding with 2000 words of 100 dims each
-        >>> init_embed = np.zeros((2000, 100))
-        >>> embed = Embedding(init_embed)  # initialize an Embedding from the values of a numpy.ndarray
-
-    """
-
-    def __init__(self, init_embed, word_dropout=0, dropout=0.0, unk_index=None):
-        r"""
-
-        :param tuple(int,int),torch.FloatTensor,nn.Embedding,numpy.ndarray init_embed: either the size of the
-            Embedding (a tuple(int, int) whose first int is the vocab_size and whose second int is the embed_dim),
-            or a Tensor, Embedding or numpy.ndarray whose values directly initialize the Embedding;
-        :param float word_dropout: the probability of replacing a word with unk_index, so that the unk token gets
-            sufficient training and the network is somewhat regularized. When set, unk_index must also be set.
-        :param float dropout: dropout applied to the output of the Embedding.
-        :param int unk_index: the index a dropped word is replaced with. fastNLP's Vocabulary uses 1 as its default
-            unk_index.
-        """
-        super(Embedding, self).__init__()
-
-        self.embed = get_embeddings(init_embed)
-
-        self.dropout = nn.Dropout(dropout)
-        if not isinstance(self.embed, TokenEmbedding):
-            if hasattr(self.embed, 'embed_size'):
-                self._embed_size = self.embed.embed_size
-            elif hasattr(self.embed, 'embedding_dim'):
-                self._embed_size = self.embed.embedding_dim
-            else:
-                self._embed_size = self.embed.weight.size(1)
-            if word_dropout > 0 and not isinstance(unk_index, int):
-                raise ValueError("When drop word is set, you need to pass in the unk_index.")
-        else:
-            self._embed_size = self.embed.embed_size
-            unk_index = self.embed.get_word_vocab().unknown_idx
-        self.unk_index = unk_index
-        self.word_dropout = word_dropout
-
-    def forward(self, words):
-        r"""
-        :param torch.LongTensor words: [batch, seq_len]
-        :return: torch.Tensor : [batch, seq_len, embed_dim]
-        """
-        if self.word_dropout > 0 and self.training:
-            mask = torch.ones_like(words).float() * self.word_dropout
-            mask = torch.bernoulli(mask).eq(1)  # the larger word_dropout is, the more positions are 1
-            words = words.masked_fill(mask, self.unk_index)
-        words = self.embed(words)
-        return self.dropout(words)
-
-    @property
-    def num_embedding(self) -> int:
-        if isinstance(self.embed, nn.Embedding):
-            return self.embed.weight.size(0)
-        else:
-            return self.embed.num_embeddings
-
-    def __len__(self):
-        return len(self.embed)
-
-    @property
-    def embed_size(self) -> int:
-        return self._embed_size
-
-    @property
-    def embedding_dim(self) -> int:
-        return self._embed_size
-
-    @property
-    def requires_grad(self):
-        r"""
-        Whether the Embedding's parameters are optimized. True: all parameters are optimized; False: none are;
-        None: some are and some are not.
-        :return:
-        """
-        if not isinstance(self.embed, TokenEmbedding):
-            return self.embed.weight.requires_grad
-        else:
-            return self.embed.requires_grad
-
-    @requires_grad.setter
-    def requires_grad(self, value):
-        if not isinstance(self.embed, TokenEmbedding):
-            self.embed.weight.requires_grad = value
-        else:
-            self.embed.requires_grad = value
-
-    @property
-    def size(self):
-        if isinstance(self.embed, TokenEmbedding):
-            return self.embed.size
-        else:
-            return self.embed.weight.size()
-
-
-class TokenEmbedding(nn.Module):
-    r"""
-    The base class of the various Embeddings in fastNLP.
-
-    """
-    def __init__(self, vocab, word_dropout=0.0, dropout=0.0):
-        super(TokenEmbedding, self).__init__()
-        if vocab.rebuild:
-            vocab.build_vocab()
-        assert vocab.padding is not None, "Vocabulary must have a padding entry."
-        self._word_vocab = vocab
-        self._word_pad_index = vocab.padding_idx
-        if word_dropout > 0:
-            assert vocab.unknown is not None, "Vocabulary must have unknown entry when you want to drop a word."
-        self.word_dropout = word_dropout
-        self._word_unk_index = vocab.unknown_idx
-        self.dropout_layer = nn.Dropout(dropout)
-
-    def drop_word(self, words):
-        r"""
-        Randomly set words to unknown_index with the configured probability.
-
-        :param torch.LongTensor words: batch_size x max_len
-        :return:
-        """
-        if self.word_dropout > 0 and self.training:
-            mask = torch.full_like(words, fill_value=self.word_dropout, dtype=torch.float, device=words.device)
-            mask = torch.bernoulli(mask).eq(1)  # the larger word_dropout is, the more positions are 1
-            pad_mask = words.ne(self._word_pad_index)
-            mask = mask.__and__(pad_mask)
-            words = words.masked_fill(mask, self._word_unk_index)
-        return words
-
-    def dropout(self, words):
-        r"""
-        Apply dropout to the word representations after embedding.
-
-        :param torch.FloatTensor words: batch_size x max_len x embed_size
-        :return:
-        """
-        return self.dropout_layer(words)
-
-    @property
-    def requires_grad(self):
-        r"""
-        Whether the Embedding's parameters are optimized. True: all parameters are optimized; False: none are;
-        None: some are and some are not.
-        :return:
-        """
-        requires_grads = set([param.requires_grad for param in self.parameters()])
-        if len(requires_grads) == 1:
-            return requires_grads.pop()
-        else:
-            return None
-
-    @requires_grad.setter
-    def requires_grad(self, value):
-        for param in self.parameters():
-            param.requires_grad = value
-
-    def __len__(self):
-        return len(self._word_vocab)
-
-    @property
-    def embed_size(self) -> int:
-        return self._embed_size
-
-    @property
-    def embedding_dim(self) -> int:
-        return self._embed_size
-
-    @property
-    def num_embeddings(self) -> int:
-        r"""
-        This value may be larger than the actual size of the embedding matrix.
-        :return:
-        """
-        return len(self._word_vocab)
-
-    def get_word_vocab(self):
-        r"""
-        Return the embedding's vocabulary.
-
-        :return: Vocabulary
-        """
-        return self._word_vocab
-
-    @property
-    def size(self):
-        return torch.Size((self.num_embeddings, self._embed_size))
-
-    @abstractmethod
-    def forward(self, words):
-        raise NotImplementedError
diff --git a/fastNLP/embeddings/gpt2_embedding.py b/fastNLP/embeddings/gpt2_embedding.py
deleted file mode 100644
index a9ce3202..00000000
--- a/fastNLP/embeddings/gpt2_embedding.py
+++ /dev/null
@@ -1,656 +0,0 @@
-"""
-..
todo:: - doc -""" - -__all__ = [ - "GPT2Embedding", - "GPT2WordPieceEncoder" -] - -import warnings -from functools import partial -from itertools import chain -from collections import OrderedDict - -import torch -from torch import nn -import numpy as np - -from .contextual_embedding import ContextualEmbedding -from ..core import logger -from ..core.utils import _get_model_device -from ..core.vocabulary import Vocabulary -from ..io.file_utils import PRETRAINED_BERT_MODEL_DIR -from ..modules.tokenizer import GPT2Tokenizer -from ..modules.encoder.gpt2 import GPT2LMHeadModel, GPT2Model - - -class GPT2Embedding(ContextualEmbedding): - """ - 使用GPT2对words进行编码的Embedding。 - - GPT2Embedding可以支持自动下载权重,当前支持的模型: - en: gpt2 - en-medium: gpt2-medium - - Example:: - - >>> import torch - >>> from fastNLP import Vocabulary - >>> from fastNLP.embeddings import BertEmbedding - >>> vocab = Vocabulary().add_word_lst("The whether is good .".split()) - >>> embed = GPT2Embedding(vocab, model_dir_or_name='en-small', requires_grad=False, layers='4,-2,-1') - >>> words = torch.LongTensor([[vocab.to_index(word) for word in "The whether is good .".split()]]) - >>> outputs = embed(words) - >>> outputs.size() - >>> # torch.Size([1, 5, 3096]) - """ - - def __init__(self, vocab: Vocabulary, model_dir_or_name: str = 'en', layers: str = '-1', - pool_method: str = 'first', dropout=0, requires_grad: bool = True, - auto_truncate: bool = False, language_model: bool = False, **kwargs): - """ - - :param ~fastNLP.Vocabulary vocab: 词表 - :param str model_dir_or_name: 模型所在目录或者模型的名称。当传入模型所在目录时,目录中应该包含一个词表文件(以.txt作为后缀名), - 权重文件(以.bin作为文件后缀名), 配置文件(以.json作为后缀名)。 - :param str layers: 输出embedding表示来自于哪些层,不同层的结果按照layers中的顺序在最后一维concat起来。以','隔开层数,层的序号是 - 从0开始,可以以负数去索引倒数几层。 - :param str pool_method: 因为在bert中,每个word会被表示为多个word pieces, 当获取一个word的表示的时候,怎样从它的word pieces - 中计算得到它对应的表示。支持 ``last`` , ``first`` , ``avg`` , ``max``。 - :param float dropout: 以多大的概率对embedding的表示进行Dropout。0.1即随机将10%的值置为0。 - :param bool requires_grad: 是否需要gradient以更新Bert的权重。 - :param bool auto_truncate: 当句子words拆分为word pieces长度超过bert最大允许长度(一般为512), 自动截掉拆分后的超过510个 - word pieces后的内容,并将第512个word piece置为[SEP]。超过长度的部分的encode结果直接全部置零。一般仅有只使用[CLS] - 来进行分类的任务将auto_truncate置为True。 - :param bool language_model: 是否计算gpt2的lm loss,可以通过get_loss()获取,输入一个batch之后的get_loss调用即为batch的language - model的loss - :param **kwargs: - bool only_use_pretrain_bpe: 仅使用出现在pretrain词表中的bpe,如果该词没法tokenize则使用unk。如果embedding不需要更新 - 建议设置为True。 - int min_freq: 仅在only_use_pretrain_bpe为False有效,大于等于该次数的词会被新加入GPT2的BPE词表中 - bool truncate_embed: 是否仅保留用到的bpe(这样会减内存占用和加快速度) - """ - super().__init__(vocab, word_dropout=0, dropout=dropout) - - if model_dir_or_name.lower() in PRETRAINED_BERT_MODEL_DIR: - if 'cn' in model_dir_or_name.lower() and pool_method not in ('first', 'last'): - logger.warning("For Chinese GPT, pooled_method should choose from 'first', 'last' in order to achieve" - " faster speed.") - warnings.warn("For Chinese GPT, pooled_method should choose from 'first', 'last' in order to achieve" - " faster speed.") - - only_use_pretrain_bpe = kwargs.get('only_use_pretrain_bpe', False) - truncate_embed = kwargs.get('truncate_embed', True) - min_freq = kwargs.get('min_freq', 1) - - self.lm_loss =language_model - self.model = _GPT2Model(model_dir_or_name=model_dir_or_name, vocab=vocab, layers=layers, - pool_method=pool_method, auto_truncate=auto_truncate, language_model=language_model, - only_use_pretrain_bpe=only_use_pretrain_bpe, truncate_embed=truncate_embed, - min_freq=min_freq) - - self.requires_grad = 
requires_grad - self._embed_size = len(self.model.layers) * self.model.encoder.config.n_embd - - def _delete_model_weights(self): - del self.model - - def forward(self, words): - """ - 计算words的bert embedding表示。计算之前会在每句话的开始增加[CLS]在结束增加[SEP], 并根据include_cls_sep判断要不要 - 删除这两个token的表示。 - - :param torch.LongTensor words: [batch_size, max_len] - :return: torch.FloatTensor. batch_size x max_len x (768*len(self.layers)) - """ - outputs = self._get_sent_reprs(words) - if outputs is not None: - return self.dropout(outputs) - outputs = self.model(words) - outputs = torch.cat([*outputs], dim=-1) - - return self.dropout(outputs) - - def drop_word(self, words): - """ - :param torch.LongTensor words: batch_size x max_len - :return: - """ - if self.word_dropout > 0 and self.training: - with torch.no_grad(): - mask = torch.full_like(words, fill_value=self.word_dropout, dtype=torch.float, device=words.device) - mask = torch.bernoulli(mask).eq(1) # dropout_word越大,越多位置为1 - words = words.masked_fill(mask, self._word_unk_index) - return words - - def get_lm_loss(self, release=True): - """ - 当language_model=True时,可以通过该接口获取当前batch的language model loss的大小 - - :param bool release: 如果为True,获取了lm_loss后在下一次forward完成之前都无法获取lm_loss了 - :return: torch.FloatTensor([]) - """ - if hasattr(self.model, '_lm_loss_value'): - lm_loss_value = self.model._lm_loss_value - if release: - delattr(self.model, '_lm_loss_value') - return lm_loss_value - elif self.lm_loss: - raise RuntimeError("Make sure you have passed a batch into GPT2Embdding before accessing loss.") - else: - raise RuntimeError("Initialize your GPT2Embedding with language_model=True.") - - -class GPT2WordPieceEncoder(nn.Module): - """ - GPT2模型,使用时先使用本模型对应的Tokenizer对数据进行tokenize - GPT2WordPieceEncoder可以支持自动下载权重,当前支持的模型: - en: gpt2 - en-medium: gpt2-medium - - """ - - def __init__(self, model_dir_or_name: str = 'en', layers: str = '-1', - word_dropout=0, dropout=0, requires_grad: bool = True, language_model:bool=False): - """ - - :param str model_dir_or_name: 模型所在目录或者模型的名称。 - :param str,list layers: 最终结果中的表示。以','隔开层数,可以以负数去索引倒数几层 - :param float word_dropout: 多大概率将word piece置为<|endoftext|> - :param float dropout: 以多大的概率对embedding的表示进行Dropout。0.1即随机将10%的值置为0。 - :param bool language_model: 是否使用language model - :param bool requires_grad: 是否需要gradient。 - """ - super().__init__() - - self.model = _GPT2WordPieceModel(model_dir_or_name=model_dir_or_name, layers=layers, language_model=language_model) - self._wordpiece_pad_index = self.model._wordpiece_pad_index - self._embed_size = len(self.model.layers) * self.model.encoder.config.n_embd - self.requires_grad = requires_grad - self.dropout_layer = nn.Dropout(dropout) - self._wordpiece_endoftext_index = self.model._endoftext_index - self.word_dropout = word_dropout - self.language_model = language_model - - @property - def embed_size(self): - return self._embed_size - - @property - def embedding_dim(self): - return self._embed_size - - @property - def num_embedding(self): - return self.model.encoder.config.vocab_size - - def index_datasets(self, *datasets, field_name, add_endoftext=False, add_prefix_space=True): - """ - 使用bert的tokenizer新生成word_pieces列加入到datasets中,并将他们设置为input,且将word_pieces这一列的pad value设置为了 - bert的pad value。 - - :param ~fastNLP.DataSet datasets: DataSet对象 - :param list[str] field_name: 基于哪一列的内容生成word_pieces列。这一列中每个数据应该是List[str]的形式。 - :param bool add_endoftext: 在句子开头加入<|endofline|>。 - :param bool add_prefix_space: 是否在句首增加空格 - :return: - """ - self.model.index_datasets(*datasets, field_name=field_name, 
add_endoftext=add_endoftext, - add_prefix_space=add_prefix_space) - - def forward(self, word_pieces, token_type_ids=None): - """ - 计算words的bert embedding表示。传入的words中应该在开头包含<|endofline|>。 - - :param word_pieces: batch_size x max_len - :param token_type_ids: batch_size x max_len, - :return: torch.FloatTensor. - """ - - outputs = self.model(word_pieces) - outputs = torch.cat([*outputs], dim=-1) - - return self.dropout_layer(outputs) - - def drop_word(self, words): - """ - - :param torch.LongTensor words: batch_size x max_len - :return: - """ - if self.word_dropout > 0 and self.training: - with torch.no_grad(): - mask = torch.full_like(words, fill_value=self.word_dropout, dtype=torch.float, device=words.device) - mask = torch.bernoulli(mask).eq(1) # dropout_word越大,越多位置为1 - endoftext_mask = words.ne(self._wordpiece_endoftext_index) - mask = endoftext_mask.__and__(mask) # pad的位置不为unk - words = words.masked_fill(mask, self._wordpiece_unk_index) - return words - - def generate_from_str(self, text='', max_len=40, do_sample=True, num_beams=1, temperature=1, top_k=50, top_p=1.0, - repetition_penalty=1.0, length_penalty=1.0): - """ - - :param str text: 故事的开头 - :param int max_len: 生成多长的句子 - :param bool do_sample: 是否使用采样的方式生成,如果使用采样,相同的参数可能出现不同的句子。 - :param int num_beams: 使用多大的beam size - :param float temperature: 用以调节采样分布的 - :param int top_k: 只保留此表中top_k个词进行生成。范围1-infinity - :param float top_p: 保留概率累积为top_p的词汇,范围0-1. - :param float repetition_penalty: 对重复token的惩罚 - :param float length_penalty: 惩罚过长的句子 - :return: list[str] - """ - if len(text)==0: - word_pieces = torch.LongTensor([[self.model.tokenizer.bos_index]]) - start_idx = 1 - else: - assert isinstance(text, str), "Only string input allowed." - assert self.language_model, "You must set `language_model=True`." - word_pieces = self.model.convert_words_to_word_pieces(text, add_prefix_space=True) - word_pieces = torch.LongTensor([word_pieces]) - start_idx = 0 - device = _get_model_device(self) - word_pieces = word_pieces.to(device) - outputs = self.model.encoder.generate(input_ids=word_pieces, - max_length=max_len, - do_sample=do_sample, - num_beams=num_beams, - temperature=temperature, - top_k=top_k, - top_p=top_p, - repetition_penalty=repetition_penalty, - bos_token_id=self.model.tokenizer.bos_index, - pad_token_id=self.model.tokenizer.eos_index, # 使用<|endoftext|>代替pad - eos_token_ids=self.model.tokenizer.eos_index, - length_penalty=length_penalty).squeeze(0) - - output_strs = [] - if outputs.dim()==1: - outputs = outputs[None] - outputs = outputs[:, start_idx:] - for i in range(len(outputs)): - str_ = self.model.tokenizer.convert_tokens_to_string(self.model.tokenizer.convert_ids_to_tokens(outputs[i].tolist())) - output_strs.append(str_) - - return output_strs - - def generate(self, word_pieces=None, max_len=40, do_sample=True, num_beams=1, temperature=1, top_k=50, top_p=1.0, - repetition_penalty=1.0, length_penalty=1.0): - """ - - :param torch.LongTensor,None word_pieces: 如果传入tensor,shape应该为batch_size x start_len; 如果传入None,会随机生成。 - :param int max_len: 生成多长的句子 - :param bool do_sample: 是否使用采样的方式生成,如果使用采样,相同的参数可能出现不同的句子。 - :param int num_beams: 使用多大的beam size - :param float temperature: 用以调节采样分布的 - :param int top_k: 只保留此表中top_k个词进行生成。范围1-infinity - :param float top_p: 保留概率累积为top_p的词汇,范围0-1. 
- :param float repetition_penalty: 对重复token的惩罚 - :param float length_penalty: 惩罚过长的句子 - :return: - """ - raise NotImplemented - - def get_lm_loss(self, release=True): - """ - 当language_model=True时,可以通过该接口获取当前batch的language model loss的大小 - - :param bool release: 如果为True,获取了lm_loss后在下一次forward完成之前都无法获取lm_loss了 - :return: torch.FloatTensor([]) - """ - if hasattr(self.model, '_lm_loss_value'): - lm_loss_value = self.model._lm_loss_value - if release: - delattr(self.model, '_lm_loss_value') - return lm_loss_value - elif self.lm_loss: - raise RuntimeError("Make sure you have passed a batch into GPT2Embdding before accessing loss.") - else: - raise RuntimeError("Initialize your GPT2Embedding with language_model=True.") - - -class _GPT2Model(nn.Module): - def __init__(self, model_dir_or_name, vocab, layers, pool_method='first', auto_truncate=True, language_model=False, - only_use_pretrain_bpe=False, min_freq=1, truncate_embed=False): - super().__init__() - - self.tokenzier = GPT2Tokenizer.from_pretrained(model_dir_or_name) - if language_model: - self.encoder = GPT2LMHeadModel.from_pretrained(model_dir_or_name) - else: - self.encoder = GPT2Model.from_pretrained(model_dir_or_name) - - self.lm_loss = language_model - self._max_position_embeddings = self.encoder.config.max_position_embeddings - # 检查encoder_layer_number是否合理 - encoder_layer_number = self.encoder.config.n_layer - if isinstance(layers, list): - self.layers = [int(l) for l in layers] - elif isinstance(layers, str): - self.layers = list(map(int, layers.split(','))) - else: - raise TypeError("`layers` only supports str or list[int]") - for layer in self.layers: - if layer < 0: - assert -layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \ - f"a GPT2 model with {encoder_layer_number} layers." - else: - assert layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \ - f"a GPT2 model with {encoder_layer_number} layers." - - assert pool_method in ('avg', 'max', 'first', 'last') - self.pool_method = pool_method - self.auto_truncate = auto_truncate - - # 将所有vocab中word的wordpiece计算出来, 需要额外考虑 - logger.info("Start to generate word pieces for word.") - # 第一步统计出需要的word_piece, 然后创建新的embed和word_piece_vocab, 然后填入值 - word_piece_dict = {'<|endoftext|>': 1} # 用到的word_piece以及新增的 - found_count = 0 - new_add_to_bpe_vocab = 0 - unsegment_count = 0 - - for word, index in vocab: - if index == vocab.padding_idx: # pad是个特殊的符号 - word = '<|endoftext|>' - elif index == vocab.unknown_idx: - word = '<|endoftext|>' - # _words = self.tokenzier.basic_tokenizer._tokenize_chinese_chars(word).split() # 这里暂时不考虑中文内容 - word_pieces = [] - word_pieces.extend(self.tokenzier.tokenize(word, add_prefix_space=True)) - if len(word_pieces) == 1: - if not vocab._is_word_no_create_entry(word): # 如果是train中的值, 但是却没有找到 - if index not in (vocab.unknown_idx, vocab.padding_idx) and word_pieces[0] == '<|endoftext|>': # 说明这个词不在原始的word里面 - if vocab.word_count[word] >= min_freq and not vocab._is_word_no_create_entry( - word) and not only_use_pretrain_bpe: # 出现次数大于这个次数才新增 - word_piece_dict[word] = 1 # 新增一个值 - new_add_to_bpe_vocab += 1 - unsegment_count += 1 - continue - for word_piece in word_pieces: - word_piece_dict[word_piece] = 1 - found_count += 1 - - if unsegment_count>0: - if only_use_pretrain_bpe or new_add_to_bpe_vocab==0: - logger.info(f"{unsegment_count} words are unsegmented.") - else: - logger.info(f"{unsegment_count} words are unsegmented. 
Among them, {new_add_to_bpe_vocab} added to the BPE vocab.") - - original_embed = self.encoder.get_input_embeddings().weight - # 特殊词汇要特殊处理 - if not truncate_embed: # 如果不删除的话需要将已有的加上 - word_piece_dict.update(self.tokenzier.encoder) - - embed = nn.Embedding(len(word_piece_dict), original_embed.size(1)) # 新的embed - new_word_piece_vocab = OrderedDict() - - for index, token in enumerate(['<|endoftext|>']): - index = word_piece_dict.pop(token, None) - if index is not None: - new_word_piece_vocab[token] = len(new_word_piece_vocab) - embed.weight.data[new_word_piece_vocab[token]] = original_embed[self.tokenzier.encoder[token]] - - for token in word_piece_dict.keys(): - if token not in new_word_piece_vocab: - new_word_piece_vocab[token] = len(new_word_piece_vocab) - index = new_word_piece_vocab[token] - if token in self.tokenzier.encoder: - embed.weight.data[index] = original_embed[self.tokenzier.encoder[token]] - else: - embed.weight.data[index] = original_embed[self.tokenzier.encoder['<|endoftext|>']] - - self.tokenzier._reinit_on_new_vocab(new_word_piece_vocab) - self.encoder.set_input_embeddings(embed) - self.encoder.tie_weights() - self.encoder.config.vocab_size = len(new_word_piece_vocab) - - word_to_wordpieces = [] - word_pieces_lengths = [] - for word, index in vocab: - if index == vocab.padding_idx: # pad是个特殊的符号 - word = '<|endoftext|>' - elif index == vocab.unknown_idx: - word = '<|endoftext|>' - word_pieces = self.tokenzier.tokenize(word) - word_pieces = self.tokenzier.convert_tokens_to_ids(word_pieces) - word_to_wordpieces.append(word_pieces) - word_pieces_lengths.append(len(word_pieces)) - self._word_pad_index = vocab.padding_idx - self._endoftext_index = self.tokenzier.encoder.get('<|endoftext|>') - self._wordpiece_pad_index = self.tokenzier.encoder.get('<|endoftext|>') # 需要用于生成word_piece - self.word_to_wordpieces = np.array(word_to_wordpieces, dtype=object) - self.register_buffer('word_pieces_lengths', torch.LongTensor(word_pieces_lengths)) - logger.debug("Successfully generate word pieces.") - - def forward(self, words): - """ - - :param words: torch.LongTensor, batch_size x max_len - :return: num_layers x batch_size x max_len x hidden_size或者num_layers x batch_size x (max_len+2) x hidden_size - """ - with torch.no_grad(): - batch_size, max_word_len = words.size() - word_mask = words.ne(self._word_pad_index) # 为1的地方有word - seq_len = word_mask.sum(dim=-1) - batch_word_pieces_length = self.word_pieces_lengths[words].masked_fill(word_mask.eq(False), - 0) # batch_size x max_len - word_pieces_lengths = batch_word_pieces_length.sum(dim=-1) # batch_size - max_word_piece_length = batch_word_pieces_length.sum(dim=-1).max().item() # 表示word piece的长度(包括padding) - if max_word_piece_length > self._max_position_embeddings: - if self.auto_truncate: - word_pieces_lengths = word_pieces_lengths.masked_fill( - word_pieces_lengths > self._max_position_embeddings, - self._max_position_embeddings) - else: - raise RuntimeError( - "After split words into word pieces, the lengths of word pieces are longer than the " - f"maximum allowed sequence length:{self._max_position_embeddings} of GPT2. You can set " - f"`auto_truncate=True` for BertEmbedding to automatically truncate overlong input.") - - word_pieces = words.new_full((batch_size, min(max_word_piece_length, self._max_position_embeddings)), - fill_value=self._wordpiece_pad_index) - word_labels = word_pieces.clone() - attn_masks = torch.zeros_like(word_pieces) - # 1. 
获取words的word_pieces的id,以及对应的span范围 - word_indexes = words.cpu().numpy() - for i in range(batch_size): - word_pieces_i = list(chain(*self.word_to_wordpieces[word_indexes[i, :seq_len[i]]])) - if self.auto_truncate and len(word_pieces_i) > self._max_position_embeddings: - word_pieces_i = word_pieces_i[:self._max_position_embeddings] - word_pieces[i, :word_pieces_lengths[i]] = torch.LongTensor(word_pieces_i) - word_labels[i, word_pieces_lengths[i]:].fill_(-100) # 计算lm_loss用的 - attn_masks[i, :word_pieces_lengths[i]].fill_(1) - # 添加<|endoftext|>, 默认不添加了 - # word_pieces[:, 0].fill_(self._endoftext_index) - batch_indexes = torch.arange(batch_size).to(words) - # 2. 获取hidden的结果,根据word_pieces进行对应的pool计算 - # all_outputs: [batch_size x max_len x hidden_size, batch_size x max_len x hidden_size, ...] - if self.lm_loss: - gpt2_outputs = self.encoder(word_pieces, token_type_ids=None, attention_mask=attn_masks, labels=word_labels, - output_attentions=False) - gpt2_outputs, self._lm_loss_value = gpt2_outputs[-1], gpt2_outputs[0] # n_layers x batch_size x max_len x hidden_size - else: - gpt2_outputs = self.encoder(word_pieces, token_type_ids=None, attention_mask=attn_masks, - output_attentions=False)[-1] - outputs = gpt2_outputs[-1].new_zeros(len(self.layers), batch_size, max_word_len, - gpt2_outputs[-1].size(-1)) - - batch_word_pieces_cum_length = batch_word_pieces_length.new_zeros(batch_size, max_word_len+1) - batch_word_pieces_cum_length[:, 1:] = batch_word_pieces_length.cumsum(dim=-1) # batch_size x max_len - - if self.pool_method == 'first': - batch_word_pieces_cum_length = batch_word_pieces_cum_length[:, :seq_len.max()] - batch_word_pieces_cum_length.masked_fill_(batch_word_pieces_cum_length.ge(max_word_piece_length), 0) - _batch_indexes = batch_indexes[:, None].expand((batch_size, batch_word_pieces_cum_length.size(1))) - elif self.pool_method == 'last': - batch_word_pieces_cum_length = batch_word_pieces_cum_length[:, :seq_len.max()] - 1 - batch_word_pieces_cum_length.masked_fill_(batch_word_pieces_cum_length.ge(max_word_piece_length), 0) - _batch_indexes = batch_indexes[:, None].expand((batch_size, batch_word_pieces_cum_length.size(1))) - - for l_index, l in enumerate(self.layers): - output_layer = gpt2_outputs[l] - real_word_piece_length = output_layer.size(1) - if max_word_piece_length > real_word_piece_length: # 如果实际上是截取出来的 - paddings = output_layer.new_zeros(batch_size, - max_word_piece_length - real_word_piece_length, - output_layer.size(2)) - output_layer = torch.cat((output_layer, paddings), dim=1).contiguous() - # 从word_piece collapse到word的表示 - # truncate_output_layer = output_layer # 删除endoftext batch_size x len x hidden_size - if self.pool_method == 'first': - tmp = output_layer[_batch_indexes, batch_word_pieces_cum_length] - tmp = tmp.masked_fill(word_mask[:, :batch_word_pieces_cum_length.size(1), None].eq(False), 0) - outputs[l_index, :, :batch_word_pieces_cum_length.size(1)] = tmp - elif self.pool_method == 'last': - tmp = output_layer[_batch_indexes, batch_word_pieces_cum_length] - tmp = tmp.masked_fill(word_mask[:, :batch_word_pieces_cum_length.size(1), None].eq(False), 0) - outputs[l_index, :, :batch_word_pieces_cum_length.size(1)] = tmp - elif self.pool_method == 'max': - for i in range(batch_size): - for j in range(seq_len[i]): - start, end = batch_word_pieces_cum_length[i, j], batch_word_pieces_cum_length[i, j + 1] - outputs[l_index, i, j], _ = torch.max(output_layer[i, start:end], dim=-2) - else: - for i in range(batch_size): - for j in range(seq_len[i]): - start, end = 
batch_word_pieces_cum_length[i, j], batch_word_pieces_cum_length[i, j + 1] - outputs[l_index, i, j] = torch.mean(output_layer[i, start:end], dim=-2) - - # 3. 最终的embedding结果 - return outputs - - def get_lm_loss(self): - """ - 当language_model为True时,通过该接口可以获取最近传入的一个batch的lanuage model loss - - :return: - """ - return self._lm_loss_value - - -class _GPT2WordPieceModel(nn.Module): - """ - 这个模块用于直接计算word_piece的结果. - - """ - - def __init__(self, model_dir_or_name: str, layers: str = '-1', language_model: bool=False): - super().__init__() - - self.tokenizer = GPT2Tokenizer.from_pretrained(model_dir_or_name) - if language_model: - self.encoder = GPT2LMHeadModel.from_pretrained(model_dir_or_name) - else: - self.encoder = GPT2Model.from_pretrained(model_dir_or_name) - - self.lm_loss = language_model - - # 检查encoder_layer_number是否合理 - encoder_layer_number = self.encoder.config.n_layer - - if isinstance(layers, list): - self.layers = [int(l) for l in layers] - elif isinstance(layers, str): - self.layers = list(map(int, layers.split(','))) - else: - raise TypeError("`layers` only supports str or list[int]") - - for layer in self.layers: - if layer < 0: - assert -layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \ - f"a gpt2 model with {encoder_layer_number} layers." - else: - assert layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \ - f"a gpt2 model with {encoder_layer_number} layers." - - self._endoftext_index = self.tokenizer.encoder.get('<|endoftext|>') - self._wordpiece_pad_index = self.tokenizer.encoder.get('<|endoftext|>') # 原来并没有pad,使用这个值替代一下。这个pad值并不重要,因为是从左到右计算的 - self._max_position_embeddings = self.encoder.config.max_position_embeddings - - def index_datasets(self, *datasets, field_name, add_endoftext=False, add_prefix_space=True): - """ - 使用gpt2的tokenizer新生成word_pieces列加入到datasets中,并将他们设置为input。如果开头不是<|endoftext|>, 且将 - word_pieces这一列的pad value设置为了bert的pad value。 - - :param datasets: DataSet对象 - :param field_name: 基于哪一列index - :param bool add_prefix_space: 是否添加句首的空格 - :return: - """ - convert_words_to_word_pieces = partial(self.convert_words_to_word_pieces, add_endoftext=add_endoftext, - add_prefix_space=add_prefix_space) - for index, dataset in enumerate(datasets): - try: - dataset.apply_field(convert_words_to_word_pieces, field_name=field_name, new_field_name='word_pieces', - is_input=True) - dataset.set_pad_val('word_pieces', self._wordpiece_pad_index) - except Exception as e: - logger.error(f"Exception happens when processing the {index} dataset.") - raise e - - def convert_words_to_word_pieces(self, words, add_endoftext=False, add_prefix_space=True): - """ - - :param list[str],str words: 将str数据转换为index - :param bool add_endoftext: 是否在句首增加endoftext - :param bool add_prefix_space: 是否添加句首的空格 - :return: - """ - word_pieces = [] - if isinstance(words, str): - words = self.tokenizer.tokenize(words, add_prefix_space=add_prefix_space) - word_piece_ids = self.tokenizer.convert_tokens_to_ids(words) - word_pieces.extend(word_piece_ids) - else: - for word in words: - tokens = self.tokenizer.tokenize(word, add_prefix_space=add_prefix_space) - word_piece_ids = self.tokenizer.convert_tokens_to_ids(tokens) - word_pieces.extend(word_piece_ids) - if add_endoftext: - if word_pieces[0] != self._endoftext_index: - word_pieces.insert(0, self._endoftext_index) - if len(word_pieces) > self._max_position_embeddings: - word_pieces[self._max_position_embeddings - 1] = word_pieces[-1] - word_pieces = word_pieces[:self._max_position_embeddings] - return 
word_pieces - - def forward(self, word_pieces, token_type_ids=None): - """ - - :param word_pieces: torch.LongTensor, batch_size x max_len - :param token_type_ids: torch.LongTensor, batch_size x max_len - :return: num_layers x batch_size x max_len x hidden_size或者num_layers x batch_size x (max_len+2) x hidden_size - """ - batch_size, max_len = word_pieces.size() - - attn_masks = word_pieces.ne(self._wordpiece_pad_index) # 可能会错误导致开头的词被mask掉 - word_pieces = word_pieces.masked_fill(attn_masks.eq(0), self._endoftext_index) # 替换pad的值 - if self.lm_loss: - labels = word_pieces.clone() - labels = labels.masked_fill(labels.eq(self._wordpiece_pad_index), -100) - gpt_outputs = self.encoder(word_pieces, token_type_ids=token_type_ids, attention_mask=attn_masks, - output_attentions=False, labels=labels) - gpt_outputs, self._lm_loss_value = gpt_outputs[-1], gpt_outputs[0] # n_layers x batch_size x max_len x hidden_size - else: - gpt_outputs = self.encoder(word_pieces, token_type_ids=token_type_ids, attention_mask=attn_masks, - output_attentions=False) - gpt_outputs = gpt_outputs[-1] - # output_layers = [self.layers] # len(self.layers) x batch_size x max_word_piece_length x hidden_size - outputs = gpt_outputs[0].new_zeros((len(self.layers), batch_size, max_len, gpt_outputs[0].size(-1))) - for l_index, l in enumerate(self.layers): - outputs[l_index] = gpt_outputs[l] # 删除开头 - return outputs - - def get_lm_loss(self): - """ - 当language_model为True时,通过该接口可以获取最近传入的一个batch的lanuage model loss - - :return: - """ - return self._lm_loss_value - diff --git a/fastNLP/embeddings/roberta_embedding.py b/fastNLP/embeddings/roberta_embedding.py deleted file mode 100644 index 4b7040c0..00000000 --- a/fastNLP/embeddings/roberta_embedding.py +++ /dev/null @@ -1,589 +0,0 @@ -r""" -.. todo:: - doc -""" - -__all__ = [ - "RobertaEmbedding", - "RobertaWordPieceEncoder" -] - - -from functools import partial -import os -import json -from itertools import chain - -import numpy as np -import torch -import torch.nn as nn - -from .contextual_embedding import ContextualEmbedding -from ..core import logger, Vocabulary -from ..modules.encoder.roberta import RobertaModel -from ..modules.tokenizer import RobertaTokenizer - - -VOCAB_NAME = 'vocab.txt' -ROBERTA_EMBED_HYPER = 'roberta_hyper.json' -ROBERTA_ENCODER_HYPER = 'roberta_hyper.json' -ROBERTA_EMBED_FOLDER = 'roberta' -ROBERTA_ENCODER_FOLDER = 'roberta' - - -class RobertaEmbedding(ContextualEmbedding): - r""" - 使用RoBERTa对words进行编码的Embedding。建议将输入的words长度限制在430以内,而不要使用512(根据预训练模型参数,可能有变化)。这是由于 - 预训练的bert模型长度限制为512个token,而因为输入的word是未进行word piece分割的(word piece的分割有RobertaEmbedding在输入word - 时切分),在分割之后长度可能会超过最大长度限制。 - - RobertaEmbedding可以支持自动下载权重,当前支持的模型: - en: roberta-base - en-large: roberta-large - - Example:: - - >>> import torch - >>> from fastNLP import Vocabulary - >>> from fastNLP.embeddings import RobertaEmbedding - >>> vocab = Vocabulary().add_word_lst("The whether is good .".split()) - >>> embed = RobertaEmbedding(vocab, model_dir_or_name='en', requires_grad=False, layers='4,-2,-1') - >>> words = torch.LongTensor([[vocab.to_index(word) for word in "The whether is good .".split()]]) - >>> outputs = embed(words) - >>> outputs.size() - >>> # torch.Size([1, 5, 2304]) - """ - - def __init__(self, vocab: Vocabulary, model_dir_or_name: str = 'en', layers: str = '-1', - pool_method: str = 'first', word_dropout=0, dropout=0, include_cls_sep: bool = False, - pooled_cls=True, requires_grad: bool = True, auto_truncate: bool = False, **kwargs): - r""" - - :param ~fastNLP.Vocabulary vocab: 词表 - 
:param str model_dir_or_name: 模型所在目录或者模型的名称。当传入模型所在目录时,目录中应该包含一个词表文件 - (以vocab.json作为后缀名), 权重文件(以.bin作为文件后缀名), 配置文件(以config.json作为后缀名)。 - :param str,list layers: 输出embedding表示来自于哪些层,不同层的结果按照layers中的顺序在最后一维concat起来。以','隔开层数,层的序号是 - 从0开始,可以以负数去索引倒数几层。layer=0为embedding层(包括wordpiece embedding, position embedding) - :param str pool_method: 因为在bert中,每个word会被表示为多个word pieces, 当获取一个word的表示的时候,怎样从它的word pieces - 中计算得到它对应的表示。支持 ``last`` , ``first`` , ``avg`` , ``max``。 - :param float word_dropout: 以多大的概率将一个词替换为unk。这样既可以训练unk也是一定的regularize。 - :param float dropout: 以多大的概率对embedding的表示进行Dropout。0.1即随机将10%的值置为0。 - :param bool include_cls_sep: bool,在bert计算句子的表示的时候,需要在前面加上[CLS]和[SEP], 是否在结果中保留这两个内容。 这样 - 会使得word embedding的结果比输入的结果长两个token。如果该值为True,则在使用 :class::StackEmbedding 可能会与其它类型的 - embedding长度不匹配。 - :param bool pooled_cls: 返回的是否使用预训练中的BertPool映射一下,仅在include_cls_sep时有效。如果下游任务只取做预测, - 一般该值为True。 - :param bool requires_grad: 是否需要gradient以更新Bert的权重。 - :param bool auto_truncate: 当句子words拆分为word pieces长度超过bert最大允许长度(一般为512), 自动截掉拆分后的超过510个 - word pieces后的内容,并将第512个word piece置为。超过长度的部分的encode结果直接全部置零。一般仅有只使用 - 来进行分类的任务将auto_truncate置为True。 - :param kwargs: - int min_freq: 小于该次数的词会被unk代替, 默认为1 - """ - super().__init__(vocab, word_dropout=word_dropout, dropout=dropout) - - if word_dropout > 0: - assert vocab.unknown is not None, "When word_drop > 0, Vocabulary must contain the unknown token." - - self._word_sep_index = -100 - if '' in vocab: - self._word_sep_index = vocab[''] - - self._word_cls_index = -100 - if '' in vocab: - self._word_cls_index = vocab[''] - - min_freq = kwargs.pop('min_freq', 1) - self._min_freq = min_freq - - self.model = _RobertaWordModel(model_dir_or_name=model_dir_or_name, vocab=vocab, layers=layers, - pool_method=pool_method, include_cls_sep=include_cls_sep, - pooled_cls=pooled_cls, auto_truncate=auto_truncate, min_freq=min_freq, - **kwargs) - self.requires_grad = requires_grad - self._embed_size = len(self.model.layers) * self.model.encoder.hidden_size - - def _delete_model_weights(self): - del self.model - - def forward(self, words): - r""" - 计算words的roberta embedding表示。计算之前会在每句话的开始增加在结束增加, 并根据include_cls_sep判断要不要 - 删除这两个token的表示。 - - :param torch.LongTensor words: [batch_size, max_len] - :return: torch.FloatTensor. 
batch_size x max_len x (768*len(self.layers)) - """ - words = self.drop_word(words) - outputs = self._get_sent_reprs(words) - if outputs is not None: - return self.dropout(outputs) - outputs = self.model(words) - outputs = torch.cat([*outputs], dim=-1) - - return self.dropout(outputs) - - def drop_word(self, words): - r""" - 按照设定随机将words设置为unknown_index。 - - :param torch.LongTensor words: batch_size x max_len - :return: - """ - if self.word_dropout > 0 and self.training: - with torch.no_grad(): - mask = torch.full_like(words, fill_value=self.word_dropout, dtype=torch.float, device=words.device) - mask = torch.bernoulli(mask).eq(1) # dropout_word越大,越多位置为1 - pad_mask = words.ne(self._word_pad_index) - mask = pad_mask.__and__(mask) # pad的位置不为unk - if self._word_sep_index!=-100: - not_sep_mask = words.ne(self._word_sep_index) - mask = mask.__and__(not_sep_mask) - if self._word_cls_index!=-100: - not_cls_mask = words.ne(self._word_cls_index) - mask = mask.__and__(not_cls_mask) - words = words.masked_fill(mask, self._word_unk_index) - return words - - def save(self, folder): - """ - 将roberta embedding保存到folder,保存之后包含三个文件vocab.txt, roberta_embed_hyper.txt, roberta_embed/, - - :param str folder: 保存地址 - :return: - """ - os.makedirs(folder, exist_ok=True) - self.get_word_vocab().save(os.path.join(folder, VOCAB_NAME)) - - hyper = {} - hyper['min_freq'] = self._min_freq - hyper['layers'] = ','.join(map(str, self.model.layers)) - hyper['pool_method'] = self.model.pool_method - hyper['dropout'] = self.dropout_layer.p - hyper['word_dropout'] = self.word_dropout - hyper['include_cls_sep'] = self.model.include_cls_sep - hyper['pooled_cls'] = self.model.pooled_cls - hyper['auto_truncate'] = self.model.auto_truncate - hyper['requires_grad'] = bool(self.requires_grad) - - with open(os.path.join(folder, ROBERTA_EMBED_HYPER), 'w', encoding='utf-8') as f: - json.dump(hyper, f, indent=2) - - os.makedirs(os.path.join(folder, ROBERTA_EMBED_FOLDER), exist_ok=True) - self.model.save(os.path.join(folder, ROBERTA_EMBED_FOLDER)) - - @classmethod - def load(cls, folder): - """ - 从folder中读取数据初始化RobertaEmbedding - - :param folder: - :return: - """ - for name in [VOCAB_NAME, ROBERTA_EMBED_HYPER, ROBERTA_EMBED_FOLDER]: - assert os.path.exists(os.path.join(folder, name)), f"{name} not found in {folder}." 
- - vocab = Vocabulary.load(os.path.join(folder, VOCAB_NAME)) - with open(os.path.join(folder, ROBERTA_EMBED_HYPER), 'r', encoding='utf-8') as f: - hyper = json.load(f) - model_name_or_path = os.path.join(folder, ROBERTA_EMBED_FOLDER) - - roberta = cls(vocab=vocab, model_dir_or_name=model_name_or_path, **hyper) - return roberta - - -class _RobertaWordModel(nn.Module): - def __init__(self, model_dir_or_name: str, vocab: Vocabulary, layers: str = '-1', pool_method: str = 'first', - include_cls_sep: bool = False, pooled_cls: bool = False, auto_truncate: bool = False, min_freq=2, - **kwargs): - super().__init__() - - if isinstance(layers, list): - self.layers = [int(l) for l in layers] - elif isinstance(layers, str): - if layers.lower() == 'all': - self.layers = None - else: - self.layers = list(map(int, layers.split(','))) - else: - raise TypeError("`layers` only supports str or list[int]") - - neg_num_output_layer = -16384 - pos_num_output_layer = 0 - if self.layers is None: - neg_num_output_layer = -1 - else: - for layer in self.layers: - if layer < 0: - neg_num_output_layer = max(layer, neg_num_output_layer) - else: - pos_num_output_layer = max(layer, pos_num_output_layer) - - self.tokenizer = RobertaTokenizer.from_pretrained(model_dir_or_name) - self.encoder = RobertaModel.from_pretrained(model_dir_or_name, - neg_num_output_layer=neg_num_output_layer, - pos_num_output_layer=pos_num_output_layer, - **kwargs) - # 由于RobertaEmbedding中设置了padding_idx为1, 且使用了非常神奇的position计算方式,所以-2 - self._max_position_embeddings = self.encoder.config.max_position_embeddings - 2 - # 检查encoder_layer_number是否合理 - encoder_layer_number = len(self.encoder.encoder.layer) - if self.layers is None: - self.layers = [idx for idx in range(encoder_layer_number + 1)] - logger.info(f'RoBERTa Model will return {len(self.layers)} layers (layer-0 ' - f'is embedding result): {self.layers}') - assert len(self.layers) > 0, "There is no layer selected!" - for layer in self.layers: - if layer < 0: - assert -layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \ - f"a roberta model with {encoder_layer_number} layers." - else: - assert layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \ - f"a roberta model with {encoder_layer_number} layers." 
-
-        assert pool_method in ('avg', 'max', 'first', 'last')
-        self.pool_method = pool_method
-        self.include_cls_sep = include_cls_sep
-        self.pooled_cls = pooled_cls
-        self.auto_truncate = auto_truncate
-
-        word_to_wordpieces = []
-        word_pieces_lengths = []
-        for word, index in vocab:
-            if index == vocab.padding_idx:  # pad is a special token
-                word = '<pad>'
-            elif index == vocab.unknown_idx:
-                word = '<unk>'
-            elif vocab.word_count[word] < min_freq:
-                word = '<unk>'
-            word_pieces = self.tokenizer.tokenize(word)
-            word_pieces = self.tokenizer.convert_tokens_to_ids(word_pieces)
-            word_to_wordpieces.append(word_pieces)
-            word_pieces_lengths.append(len(word_pieces))
-        self._cls_index = self.tokenizer.encoder['<s>']
-        self._sep_index = self.tokenizer.encoder['</s>']
-        self._word_pad_index = vocab.padding_idx
-        self._wordpiece_pad_index = self.tokenizer.encoder['<pad>']  # needed when generating word_pieces
-        self.word_to_wordpieces = np.array(word_to_wordpieces, dtype=object)
-        self.register_buffer('word_pieces_lengths', torch.LongTensor(word_pieces_lengths))
-        logger.debug("Successfully generate word pieces.")
-
-    def forward(self, words):
-        r"""
-
-        :param words: torch.LongTensor, batch_size x max_len
-        :return: num_layers x batch_size x max_len x hidden_size or num_layers x batch_size x (max_len+2) x hidden_size
-        """
-        with torch.no_grad():
-            batch_size, max_word_len = words.size()
-            word_mask = words.ne(self._word_pad_index)  # positions equal to 1 hold words
-            seq_len = word_mask.sum(dim=-1)
-            batch_word_pieces_length = self.word_pieces_lengths[words].masked_fill(word_mask.eq(False),
-                                                                                   0)  # batch_size x max_len
-            word_pieces_lengths = batch_word_pieces_length.sum(dim=-1)  # batch_size
-            max_word_piece_length = batch_word_pieces_length.sum(dim=-1).max().item()  # word-piece length (including padding)
-            if max_word_piece_length + 2 > self._max_position_embeddings:
-                if self.auto_truncate:
-                    word_pieces_lengths = word_pieces_lengths.masked_fill(
-                        word_pieces_lengths + 2 > self._max_position_embeddings,
-                        self._max_position_embeddings - 2)
-                else:
-                    raise RuntimeError(
-                        "After split words into word pieces, the lengths of word pieces are longer than the "
-                        f"maximum allowed sequence length:{self._max_position_embeddings} of roberta. You can set "
-                        f"`auto_truncate=True` for RobertaEmbedding to automatically truncate overlong input.")
-
-            # +2 because <s> and </s> need to be added
-            word_pieces = words.new_full((batch_size, min(max_word_piece_length + 2, self._max_position_embeddings)),
-                                         fill_value=self._wordpiece_pad_index)
-            attn_masks = torch.zeros_like(word_pieces)
-            # 1. get the word_piece ids of words, along with the corresponding span ranges
-            word_indexes = words.cpu().numpy()
-            for i in range(batch_size):
-                word_pieces_i = list(chain(*self.word_to_wordpieces[word_indexes[i, :seq_len[i]]]))
-                if self.auto_truncate and len(word_pieces_i) > self._max_position_embeddings - 2:
-                    word_pieces_i = word_pieces_i[:self._max_position_embeddings - 2]
-                word_pieces[i, 1:word_pieces_lengths[i] + 1] = torch.LongTensor(word_pieces_i)
-                attn_masks[i, :word_pieces_lengths[i] + 2].fill_(1)
-            # add <s> and </s>
-            word_pieces[:, 0].fill_(self._cls_index)
-            batch_indexes = torch.arange(batch_size).to(words)
-            word_pieces[batch_indexes, word_pieces_lengths + 1] = self._sep_index
-            token_type_ids = torch.zeros_like(word_pieces)
-            # 2. get the hidden results and pool them according to word_pieces
-            # all_outputs: [batch_size x max_len x hidden_size, batch_size x max_len x hidden_size, ...]
- bert_outputs, pooled_cls = self.encoder(word_pieces, token_type_ids=token_type_ids, - attention_mask=attn_masks, - output_all_encoded_layers=True) - # output_layers = [self.layers] # len(self.layers) x batch_size x real_word_piece_length x hidden_size - - if self.include_cls_sep: - s_shift = 1 - outputs = bert_outputs[-1].new_zeros(len(self.layers), batch_size, max_word_len + 2, - bert_outputs[-1].size(-1)) - - else: - s_shift = 0 - outputs = bert_outputs[-1].new_zeros(len(self.layers), batch_size, max_word_len, - bert_outputs[-1].size(-1)) - batch_word_pieces_cum_length = batch_word_pieces_length.new_zeros(batch_size, max_word_len + 1) - batch_word_pieces_cum_length[:, 1:] = batch_word_pieces_length.cumsum(dim=-1) # batch_size x max_len - - if self.pool_method == 'first': - batch_word_pieces_cum_length = batch_word_pieces_cum_length[:, :seq_len.max()] - batch_word_pieces_cum_length.masked_fill_(batch_word_pieces_cum_length.ge(max_word_piece_length), 0) - _batch_indexes = batch_indexes[:, None].expand((batch_size, batch_word_pieces_cum_length.size(1))) - elif self.pool_method == 'last': - batch_word_pieces_cum_length = batch_word_pieces_cum_length[:, 1:seq_len.max() + 1] - 1 - batch_word_pieces_cum_length.masked_fill_(batch_word_pieces_cum_length.ge(max_word_piece_length), 0) - _batch_indexes = batch_indexes[:, None].expand((batch_size, batch_word_pieces_cum_length.size(1))) - - for l_index, l in enumerate(self.layers): - output_layer = bert_outputs[l] - real_word_piece_length = output_layer.size(1) - 2 - if max_word_piece_length > real_word_piece_length: # 如果实际上是截取出来的 - paddings = output_layer.new_zeros(batch_size, - max_word_piece_length - real_word_piece_length, - output_layer.size(2)) - output_layer = torch.cat((output_layer, paddings), dim=1).contiguous() - # 从word_piece collapse到word的表示 - truncate_output_layer = output_layer[:, 1:-1] # 删除 batch_size x len x hidden_size - if self.pool_method == 'first': - tmp = truncate_output_layer[_batch_indexes, batch_word_pieces_cum_length] - tmp = tmp.masked_fill(word_mask[:, :batch_word_pieces_cum_length.size(1), None].eq(False), 0) - outputs[l_index, :, s_shift:batch_word_pieces_cum_length.size(1) + s_shift] = tmp - - elif self.pool_method == 'last': - tmp = truncate_output_layer[_batch_indexes, batch_word_pieces_cum_length] - tmp = tmp.masked_fill(word_mask[:, :batch_word_pieces_cum_length.size(1), None].eq(False), 0) - outputs[l_index, :, s_shift:batch_word_pieces_cum_length.size(1) + s_shift] = tmp - elif self.pool_method == 'max': - for i in range(batch_size): - for j in range(seq_len[i]): - start, end = batch_word_pieces_cum_length[i, j], batch_word_pieces_cum_length[i, j + 1] - outputs[l_index, i, j + s_shift], _ = torch.max(truncate_output_layer[i, start:end], dim=-2) - else: - for i in range(batch_size): - for j in range(seq_len[i]): - start, end = batch_word_pieces_cum_length[i, j], batch_word_pieces_cum_length[i, j + 1] - outputs[l_index, i, j + s_shift] = torch.mean(truncate_output_layer[i, start:end], dim=-2) - if self.include_cls_sep: - if l in (len(bert_outputs) - 1, -1) and self.pooled_cls: - outputs[l_index, :, 0] = pooled_cls - else: - outputs[l_index, :, 0] = output_layer[:, 0] - outputs[l_index, batch_indexes, seq_len + s_shift] = output_layer[batch_indexes, word_pieces_lengths + s_shift] - - # 3. 
最终的embedding结果 - return outputs - - def save(self, folder): - """ - 给定一个folder保存pytorch_model.bin, config.json, vocab.txt - - :param str folder: - :return: - """ - self.tokenizer.save_pretrained(folder) - self.encoder.save_pretrained(folder) - - -class RobertaWordPieceEncoder(nn.Module): - r""" - 读取roberta模型,读取之后调用index_dataset方法在dataset中生成word_pieces这一列。 - - RobertaWordPieceEncoder可以支持自动下载权重,当前支持的模型: - en: roberta-base - en-large: roberta-large - - """ - def __init__(self, model_dir_or_name: str = 'en', layers: str = '-1', pooled_cls: bool = False, - word_dropout=0, dropout=0, requires_grad: bool = True, **kwargs): - r""" - - :param str model_dir_or_name: 模型所在目录或者模型的名称。默认值为 ``en-base-uncased`` - :param str layers: 最终结果中的表示。以','隔开层数,可以以负数去索引倒数几层。layer=0为embedding层(包括wordpiece embedding, - position embedding) - :param bool pooled_cls: 返回的句子开头的是否使用预训练中的BertPool映射一下。如果下游任务取做预测,一般该值为True。 - :param float word_dropout: 以多大的概率将一个词替换为unk。这样既可以训练unk也是一定的regularize。 - :param float dropout: 以多大的概率对embedding的表示进行Dropout。0.1即随机将10%的值置为0。 - :param bool requires_grad: 是否需要gradient。 - """ - super().__init__() - - self.model = _WordPieceRobertaModel(model_dir_or_name=model_dir_or_name, layers=layers, pooled_cls=pooled_cls) - self._sep_index = self.model._sep_index - self._cls_index = self.model._cls_index - self._wordpiece_pad_index = self.model._wordpiece_pad_index - self._wordpiece_unk_index = self.model._wordpiece_unknown_index - self._embed_size = len(self.model.layers) * self.model.encoder.hidden_size - self.requires_grad = requires_grad - self.word_dropout = word_dropout - self.dropout_layer = nn.Dropout(dropout) - - @property - def embed_size(self): - return self._embed_size - - @property - def embedding_dim(self): - return self._embed_size - - @property - def num_embedding(self): - return self.model.encoder.config.vocab_size - - def index_datasets(self, *datasets, field_name, add_cls_sep=True, add_prefix_space=True): - r""" - 使用bert的tokenizer新生成word_pieces列加入到datasets中,并将他们设置为input,且将word_pieces这一列的pad value设置为了 - bert的pad value。 - - :param ~fastNLP.DataSet datasets: DataSet对象 - :param str field_name: 基于哪一列的内容生成word_pieces列。这一列中每个数据应该是List[str]的形式。 - :param bool add_cls_sep: 如果首尾不是会在首尾额外加入。 - :param bool add_prefix_spance: 是否在句首添加额外的空格,RoBERTa预训练时该值为True - :return: - """ - self.model.index_datasets(*datasets, field_name=field_name, add_cls_sep=add_cls_sep, add_prefix_space=add_prefix_space) - - def forward(self, word_pieces, token_type_ids=None): - r""" - 计算words的bert embedding表示。传入的words中应该自行包含>的tag。 - - :param words: batch_size x max_len - :param token_type_ids: batch_size x max_len, 用于区分前一句和后一句话. 如果不传入,则自动生成(大部分情况,都不需要输入)。 - :return: torch.FloatTensor. 
-    def forward(self, word_pieces, token_type_ids=None):
-        r"""
-        Computes the RoBERTa embedding representation of words. The input word_pieces should already contain the
-        <s> and </s> tags.
-
-        :param words: batch_size x max_len
-        :param token_type_ids: batch_size x max_len, distinguishes the first sentence from the second. If not given,
-            it is generated automatically (in most cases it does not need to be passed in).
-        :return: torch.FloatTensor. batch_size x max_len x (768*len(self.layers))
-        """
-        word_pieces = self.drop_word(word_pieces)
-        outputs = self.model(word_pieces)
-        outputs = torch.cat([*outputs], dim=-1)
-
-        return self.dropout_layer(outputs)
-
-    def drop_word(self, words):
-        r"""
-        Randomly replaces words with unknown_index at the configured rate.
-
-        :param torch.LongTensor words: batch_size x max_len
-        :return:
-        """
-        if self.word_dropout > 0 and self.training:
-            with torch.no_grad():
-                not_sep_mask = words.ne(self._sep_index)
-                not_cls_mask = words.ne(self._cls_index)
-                replaceable_mask = not_sep_mask.__and__(not_cls_mask)
-                mask = torch.full_like(words, fill_value=self.word_dropout, dtype=torch.float, device=words.device)
-                mask = torch.bernoulli(mask).eq(1)  # the larger word_dropout is, the more positions are 1
-                pad_mask = words.ne(self._wordpiece_pad_index)
-                mask = pad_mask.__and__(mask).__and__(replaceable_mask)  # padding positions are never set to unk
-                words = words.masked_fill(mask, self._wordpiece_unk_index)
-        return words
-
-    def save(self, folder):
-        os.makedirs(folder, exist_ok=True)
-
-        hyper = {}
-        hyper['layers'] = ','.join(map(str, self.model.layers))
-        hyper['dropout'] = self.dropout_layer.p
-        hyper['word_dropout'] = self.word_dropout
-        hyper['pooled_cls'] = self.model.pooled_cls
-        hyper['requires_grad'] = bool(self.requires_grad)
-
-        with open(os.path.join(folder, ROBERTA_ENCODER_HYPER), 'w', encoding='utf-8') as f:
-            json.dump(hyper, f, indent=2)
-
-        os.makedirs(os.path.join(folder, ROBERTA_ENCODER_FOLDER), exist_ok=True)
-        self.model.save(os.path.join(folder, ROBERTA_ENCODER_FOLDER))
-        logger.debug(f"RobertaWordPieceEncoder has been saved in {folder}")
-
-    @classmethod
-    def load(cls, folder):
-        for name in [ROBERTA_ENCODER_HYPER, ROBERTA_ENCODER_FOLDER]:
-            assert os.path.exists(os.path.join(folder, name)), f"{name} not found in {folder}."
-
-        with open(os.path.join(folder, ROBERTA_ENCODER_HYPER), 'r', encoding='utf-8') as f:
-            hyper = json.load(f)
-
-        model_dir_or_name = os.path.join(folder, ROBERTA_ENCODER_FOLDER)
-
-        bert_encoder = cls(model_dir_or_name=model_dir_or_name, **hyper)
-        return bert_encoder
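The word-dropout logic in drop_word amounts to sampling a Bernoulli mask and then protecting the special and padding positions. A standalone sketch of the same idea, with made-up token ids:

    >>> import torch
    >>> words = torch.tensor([[0, 5, 8, 2, 1]])            # hypothetical ids: 0=<s>, 2=</s>, 1=<pad>
    >>> p, unk = 0.5, 3
    >>> mask = torch.bernoulli(torch.full_like(words, p, dtype=torch.float)).eq(1)
    >>> protect = words.eq(0) | words.eq(2) | words.eq(1)  # never drop <s>, </s> or <pad>
    >>> words.masked_fill(mask & ~protect, unk)            # only replaceable positions become <unk>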
-
-
-class _WordPieceRobertaModel(nn.Module):
-    def __init__(self, model_dir_or_name: str, layers: str = '-1', pooled_cls: bool=False):
-        super().__init__()
-
-        self.tokenizer = RobertaTokenizer.from_pretrained(model_dir_or_name)
-        self.encoder = RobertaModel.from_pretrained(model_dir_or_name)
-        # sanity-check encoder_layer_number
-        encoder_layer_number = len(self.encoder.encoder.layer)
-
-        if isinstance(layers, list):
-            self.layers = [int(l) for l in layers]
-        elif isinstance(layers, str):
-            self.layers = list(map(int, layers.split(',')))
-        else:
-            raise TypeError("`layers` only supports str or list[int]")
-
-        for layer in self.layers:
-            if layer < 0:
-                assert -layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \
-                    f"a RoBERTa model with {encoder_layer_number} layers."
-            else:
-                assert layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \
-                    f"a RoBERTa model with {encoder_layer_number} layers."
-
-        self._cls_index = self.tokenizer.encoder['<s>']
-        self._sep_index = self.tokenizer.encoder['</s>']
-        self._wordpiece_pad_index = self.tokenizer.encoder['<pad>']  # needed when generating word_pieces
-        self._wordpiece_unknown_index = self.tokenizer.encoder['<unk>']
-        self.pooled_cls = pooled_cls
-
-    def index_datasets(self, *datasets, field_name, add_cls_sep=True, add_prefix_space=True):
-        r"""
-        Uses the RoBERTa tokenizer to generate a new word_pieces field in the datasets and sets it as input. If the
-        sequences do not already start with <s> and end with </s>, those tokens are added, and the pad value of the
-        word_pieces field is set to RoBERTa's pad value.
-
-        :param datasets: DataSet objects
-        :param field_name: which field to index; this field usually holds the raw string
-        :param bool add_cls_sep: whether to add the cls and sep indices at the start and end of each sentence
-        :param bool add_prefix_space: whether to prepend a space to the sentence; RoBERTa was pretrained with True
-        :return:
-        """
-
-        encode_func = partial(self.tokenizer.encode, add_special_tokens=add_cls_sep, add_prefix_space=add_prefix_space)
-
-        for index, dataset in enumerate(datasets):
-            try:
-                dataset.apply_field(encode_func, field_name=field_name, new_field_name='word_pieces',
-                                    is_input=True)
-                dataset.set_pad_val('word_pieces', self._wordpiece_pad_index)
-            except Exception as e:
-                logger.error(f"Exception happens when processing the {index} dataset.")
-                raise e
-
-    def forward(self, word_pieces):
-        r"""
-
-        :param word_pieces: torch.LongTensor, batch_size x max_len
-        :return: num_layers x batch_size x max_len x hidden_size, or num_layers x batch_size x (max_len+2) x hidden_size
-        """
-        batch_size, max_len = word_pieces.size()
-
-        attn_masks = word_pieces.ne(self._wordpiece_pad_index)
-        roberta_outputs, pooled_cls = self.encoder(word_pieces, token_type_ids=torch.zeros_like(word_pieces),
-                                                   attention_mask=attn_masks,
-                                                   output_all_encoded_layers=True)
-        # output_layers = [self.layers]  # len(self.layers) x batch_size x max_word_piece_length x hidden_size
-        outputs = roberta_outputs[0].new_zeros((len(self.layers), batch_size, max_len, roberta_outputs[0].size(-1)))
-        for l_index, l in enumerate(self.layers):
-            roberta_output = roberta_outputs[l]
-            if l in (len(roberta_outputs) - 1, -1) and self.pooled_cls:
-                roberta_output[:, 0] = pooled_cls
-            outputs[l_index] = roberta_output
-        return outputs
-
-    def save(self, folder):
-        self.tokenizer.save_pretrained(folder)
-        self.encoder.save_pretrained(folder)
\ No newline at end of file
diff --git a/fastNLP/embeddings/stack_embedding.py b/fastNLP/embeddings/stack_embedding.py
deleted file mode 100644
index 7ef4736b..00000000
--- a/fastNLP/embeddings/stack_embedding.py
+++ /dev/null
@@ -1,99 +0,0 @@
-r"""
-.. todo::
-    doc
-"""
-
-__all__ = [
-    "StackEmbedding",
-]
-
-from typing import List
-
-import torch
-from torch import nn as nn
-
-from .embedding import TokenEmbedding
-from .utils import _check_vocab_has_same_index
-
-
-class StackEmbedding(TokenEmbedding):
-    r"""
-    Combines several embeddings into a single embedding.
-
-    Example::
-
-        >>> from fastNLP import Vocabulary
-        >>> from fastNLP.embeddings import StaticEmbedding, StackEmbedding
-        >>> vocab = Vocabulary().add_word_lst("The weather is good .".split())
-        >>> embed_1 = StaticEmbedding(vocab, model_dir_or_name='en-glove-6b-50d', requires_grad=True)
-        >>> embed_2 = StaticEmbedding(vocab, model_dir_or_name='en-word2vec-300', requires_grad=True)
-        >>> embed = StackEmbedding([embed_1, embed_2])
-
-    """
-
-    def __init__(self, embeds: List[TokenEmbedding], word_dropout=0, dropout=0):
-        r"""
-
-        :param embeds: a list of TokenEmbedding objects; every TokenEmbedding must share the same vocabulary
-        :param float word_dropout: probability of replacing a word with unk; this both trains unk and regularizes.
-            The different embeddings are set to unknown at the same positions. If dropout is set here, the component
-            embeddings should not set dropout again.
-        :param float dropout: dropout probability applied to the embedding representation; 0.1 randomly zeroes 10% of the values.
-        """
-        vocabs = []
-        for embed in embeds:
-            if hasattr(embed, 'get_word_vocab'):
-                vocabs.append(embed.get_word_vocab())
-        _vocab = vocabs[0]
-        for vocab in vocabs[1:]:
-            if _vocab != vocab:
-                _check_vocab_has_same_index(_vocab, vocab)
-
-        super(StackEmbedding, self).__init__(_vocab, word_dropout=word_dropout, dropout=dropout)
-        assert isinstance(embeds, list)
-        for embed in embeds:
-            assert isinstance(embed, TokenEmbedding), "Only TokenEmbedding type is supported."
-        self.embeds = nn.ModuleList(embeds)
-        self._embed_size = sum([embed.embed_size for embed in self.embeds])
-
-    def append(self, embed: TokenEmbedding):
-        r"""
-        Appends an embedding at the end.
-        :param embed:
-        :return:
-        """
-        assert isinstance(embed, TokenEmbedding)
-        _check_vocab_has_same_index(self.get_word_vocab(), embed.get_word_vocab())
-        self._embed_size += embed.embed_size
-        self.embeds.append(embed)
-        return self
-
-    def pop(self):
-        r"""
-        Pops the last embedding.
-        :return:
-        """
-        embed = self.embeds.pop()
-        self._embed_size -= embed.embed_size
-        return embed
-
-    @property
-    def embed_size(self):
-        r"""
-        Size of the last dimension of the vectors output by this embedding.
-        :return:
-        """
-        return self._embed_size
-
-    def forward(self, words):
-        r"""
-        Computes each embedding's output and concatenates the results in order.
-
-        :param words: batch_size x max_len
-        :return: the returned shape depends on which embeddings make up this stack embedding
-        """
-        outputs = []
-        words = self.drop_word(words)
-        for embed in self.embeds:
-            outputs.append(embed(words))
-        outputs = self.dropout(torch.cat(outputs, dim=-1))
-        return outputs
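Because embed_size is just the sum of the component sizes, append and pop adjust it incrementally. A small sketch, continuing the docstring example above (embed_3 is a hypothetical 100d TokenEmbedding over the same vocab):

    >>> embed.embed_size        # 50 + 300 for the two embeddings above
    350
    >>> _ = embed.append(embed_3)
    >>> embed.embed_size
    450
    >>> _ = embed.pop()
    >>> embed.embed_size
    350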
diff --git a/fastNLP/embeddings/static_embedding.py b/fastNLP/embeddings/static_embedding.py
deleted file mode 100644
index 09c44d6c..00000000
--- a/fastNLP/embeddings/static_embedding.py
+++ /dev/null
@@ -1,405 +0,0 @@
-r"""
-.. todo::
-    doc
-"""
-
-__all__ = [
-    "StaticEmbedding"
-]
-import os
-import warnings
-from collections import defaultdict
-from copy import deepcopy
-import json
-from typing import Union
-
-import numpy as np
-import torch
-import torch.nn as nn
-
-from .embedding import TokenEmbedding
-from ..core import logger
-from ..core.vocabulary import Vocabulary
-from ..io.file_utils import PRETRAIN_STATIC_FILES, _get_embedding_url, cached_path
-from ..io.file_utils import _get_file_name_base_on_postfix
-
-
-VOCAB_FILENAME = 'vocab.txt'
-STATIC_HYPER_FILENAME = 'static_hyper.json'
-STATIC_EMBED_FILENAME = 'static.txt'
-
-
-class StaticEmbedding(TokenEmbedding):
-    r"""
-    The StaticEmbedding component. Given the name or path of a pretrained embedding, extracts the corresponding
-    vectors according to vocab (only words present in vocab are extracted; a word that is not found is randomly
-    initialized, except that a word marked as no_create_entry does not get its own vector and is instead pointed
-    at the unk index). The pretrained vectors currently supported for automatic download are:
-
-    .. code::
-
-        en: actually en-glove-840b-300d (commonly used)
-        en-glove-6b-50d: official GloVe 50d vectors
-        en-glove-6b-100d: official GloVe 100d vectors
-        en-glove-6b-200d: official GloVe 200d vectors
-        en-glove-6b-300d: official GloVe 300d vectors
-        en-glove-42b-300d: official GloVe version trained on 42B tokens
-        en-glove-840b-300d:
-        en-glove-twitter-27b-25d:
-        en-glove-twitter-27b-50d:
-        en-glove-twitter-27b-100d:
-        en-glove-twitter-27b-200d:
-        en-word2vec-300d: official word2vec 300d vectors
-        en-fasttext-crawl: official fastText 300d English pretrained vectors
-        cn-char-fastnlp-100d: 100d character embedding trained by fastNLP
-        cn-bi-fastnlp-100d: 100d bigram embedding trained by fastNLP
-        cn-tri-fastnlp-100d: 100d trigram embedding trained by fastNLP
-        cn-fasttext: official fastText 300d Chinese pretrained embedding
-
-    Example::
-
-        >>> from fastNLP import Vocabulary
-        >>> from fastNLP.embeddings import StaticEmbedding
-        >>> vocab = Vocabulary().add_word_lst("The weather is good .".split())
-        >>> embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-50d')
-
-        >>> vocab = Vocabulary().add_word_lst(["The", 'the', "THE"])
-        >>> embed = StaticEmbedding(vocab, model_dir_or_name="en-glove-50d", lower=True)
-        >>> # "the", "The" and "THE" share one vector, and "the" is used to look up their
-        >>> # initial representation in the pretrained vocabulary.
-
-        >>> vocab = Vocabulary().add_word_lst(["The", "the", "THE"])
-        >>> embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=5, lower=True)
-        >>> words = torch.LongTensor([[vocab.to_index(word) for word in ["The", "the", "THE"]]])
-        >>> embed(words)
-        >>> tensor([[[ 0.5773,  0.7251, -0.3104,  0.0777,  0.4849],
-                     [ 0.5773,  0.7251, -0.3104,  0.0777,  0.4849],
-                     [ 0.5773,  0.7251, -0.3104,  0.0777,  0.4849]]],
-                   grad_fn=<EmbeddingBackward>)  # every casing of the word gets the same output
-
-    """
-
-    def __init__(self, vocab: Vocabulary, model_dir_or_name: Union[str, None] = 'en', embedding_dim=-1, requires_grad: bool = True,
-                 init_method=None, lower=False, dropout=0, word_dropout=0, normalize=False, min_freq=1, **kwargs):
-        r"""
-
-        :param Vocabulary vocab: the vocabulary. StaticEmbedding only loads vectors for words contained in it;
-            words not found in the pretrained vectors are randomly initialized.
-        :param model_dir_or_name: a pretrained static embedding can be specified in two ways: pass an embedding folder
-            (the folder should contain exactly one file with a .txt suffix) or a file path; or pass the name of an
-            embedding, in which case the cache is checked for the model and it is downloaded automatically if missing.
-            If None, an embedding is randomly initialized with dimension embedding_dim.
-        :param int embedding_dim: dimension of the randomly initialized embedding; when this value is greater than 0,
-            model_dir_or_name is ignored.
-        :param bool requires_grad: whether gradients are required. Defaults to True.
-        :param callable init_method: how to initialize vectors that were not found. Any of the torch.nn.init.* methods
-            may be used; the callable should accept a tensor and modify its values in place.
-        :param bool lower: whether to lowercase the words in vocab before matching against the pretrained vocabulary.
-            If your vocabulary contains uppercase words, or uppercase words need their own vector representation,
-            set lower to False.
-        :param float dropout: dropout probability applied to the embedding representation; 0.1 randomly zeroes 10% of the values.
-        :param float word_dropout: probability of replacing a word with unk; this both trains unk and regularizes.
-        :param bool normalize: whether to normalize each vector to norm 1.
-        :param int min_freq: words whose frequency in the Vocabulary is below this value are pointed at unk.
-        :param dict kwargs:
-                bool only_train_min_freq: apply the min_freq filter only to words from the training set;
-                bool only_norm_found_vector: default False; whether to normalize only the vectors found in the pretrained file;
-                bool only_use_pretrain_word: default False; use only words that appear in the pretrained vocabulary,
-                mapping a word to unk if it is absent there. Recommended to set True if the embedding will not be updated.
-        """
-        super(StaticEmbedding, self).__init__(vocab, word_dropout=word_dropout, dropout=dropout)
-        if embedding_dim > 0:
-            if model_dir_or_name:
-                logger.info(f"StaticEmbedding will ignore `model_dir_or_name`, and randomly initialize embedding with"
-                            f" dimension {embedding_dim}. If you want to use pre-trained embedding, "
-                            f"set `embedding_dim` to 0.")
-            model_dir_or_name = None
-
-        # resolve the cache path
-        if model_dir_or_name is None:
-            assert embedding_dim >= 1, "The dimension of embedding should be larger than 1."
-            embedding_dim = int(embedding_dim)
-            model_path = None
-        elif model_dir_or_name.lower() in PRETRAIN_STATIC_FILES:
-            model_url = _get_embedding_url('static', model_dir_or_name.lower())
-            model_path = cached_path(model_url, name='embedding')
-            # check whether the path exists
-        elif os.path.isfile(os.path.abspath(os.path.expanduser(model_dir_or_name))):
-            model_path = os.path.abspath(os.path.expanduser(model_dir_or_name))
-        elif os.path.isdir(os.path.abspath(os.path.expanduser(model_dir_or_name))):
-            model_path = _get_file_name_base_on_postfix(os.path.abspath(os.path.expanduser(model_dir_or_name)), '.txt')
-        else:
-            raise ValueError(f"Cannot recognize {model_dir_or_name}.")
-
-        kwargs['min_freq'] = min_freq
-        kwargs['lower'] = lower
-        # shrink the vocab according to min_freq
-        truncate_vocab = (vocab.min_freq is None and min_freq > 1) or (vocab.min_freq and vocab.min_freq < min_freq)
-        if truncate_vocab:
-            truncated_vocab = deepcopy(vocab)
-            truncated_vocab.min_freq = min_freq
-            truncated_vocab.word2idx = None
-            if lower:  # if lower is set, the frequencies of different casings must be counted together
-                lowered_word_count = defaultdict(int)
-                for word, count in truncated_vocab.word_count.items():
-                    lowered_word_count[word.lower()] += count
-                for word in truncated_vocab.word_count.keys():
-                    word_count = truncated_vocab.word_count[word]
-                    if lowered_word_count[word.lower()] >= min_freq and word_count < min_freq:
-                        truncated_vocab.add_word_lst([word] * (min_freq - word_count),
-                                                     no_create_entry=truncated_vocab._is_word_no_create_entry(word))
-
-            # apply the min_freq filter only to words from the training set
-            if kwargs.get('only_train_min_freq', False) and model_dir_or_name is not None:
-                for word in truncated_vocab.word_count.keys():
-                    if truncated_vocab._is_word_no_create_entry(word) and truncated_vocab.word_count[word] < min_freq:
-                        truncated_vocab.add_word_lst([word] * (min_freq - truncated_vocab.word_count[word]),
-                                                     no_create_entry=True)
-            truncated_vocab.build_vocab()
-            truncated_words_to_words = torch.arange(len(vocab)).long()
-            for word, index in vocab:
-                truncated_words_to_words[index] = truncated_vocab.to_index(word)
-            logger.info(f"{len(vocab) - len(truncated_vocab)} words have frequency less than {min_freq}.")
-            vocab = truncated_vocab
-
-        self.only_use_pretrain_word = kwargs.get('only_use_pretrain_word', False)
-        self.only_norm_found_vector = kwargs.get('only_norm_found_vector', False)
-        # load the embedding
-        if lower:
-            lowered_vocab = Vocabulary(padding=vocab.padding, unknown=vocab.unknown)
-            for word, index in vocab:
-                if vocab._is_word_no_create_entry(word):
-                    lowered_vocab.add_word(word.lower(), no_create_entry=True)
-                else:
-                    lowered_vocab.add_word(word.lower())  # add the words that need their own entry first
-            logger.info(f"All words in the vocab have been lowered. There are {len(vocab)} words, {len(lowered_vocab)} "
-                        f"unique lowered words.")
-            if model_path:
-                embedding = self._load_with_vocab(model_path, vocab=lowered_vocab, init_method=init_method)
-            else:
-                embedding = self._randomly_init_embed(len(lowered_vocab), embedding_dim, init_method)
-                self.register_buffer('words_to_words', torch.arange(len(vocab)).long())
-            if lowered_vocab.unknown:
-                unknown_idx = lowered_vocab.unknown_idx
-            else:
-                unknown_idx = embedding.size(0) - 1  # otherwise the last row serves as unknown
-                self.register_buffer('words_to_words', torch.arange(len(vocab)).long())
-            words_to_words = torch.full((len(vocab),), fill_value=unknown_idx, dtype=torch.long).long()
-            for word, index in vocab:
-                if word not in lowered_vocab:
-                    word = word.lower()
-                    if word not in lowered_vocab and lowered_vocab._is_word_no_create_entry(word):
-                        continue  # no entry needed; it already defaults to unknown
-                words_to_words[index] = self.words_to_words[lowered_vocab.to_index(word)]
-            self.register_buffer('words_to_words', words_to_words)
-            self._word_unk_index = lowered_vocab.unknown_idx  # replace the unknown index
-        else:
-            if model_path:
-                embedding = self._load_with_vocab(model_path, vocab=vocab, init_method=init_method)
-            else:
-                embedding = self._randomly_init_embed(len(vocab), embedding_dim, init_method)
-                self.register_buffer('words_to_words', torch.arange(len(vocab)).long())
-        if not self.only_norm_found_vector and normalize:
-            embedding /= (torch.norm(embedding, dim=1, keepdim=True) + 1e-12)
-
-        if truncate_vocab:
-            for i in range(len(truncated_words_to_words)):
-                index_in_truncated_vocab = truncated_words_to_words[i]
-                truncated_words_to_words[i] = self.words_to_words[index_in_truncated_vocab]
-            del self.words_to_words
-            self.register_buffer('words_to_words', truncated_words_to_words)
-        self.embedding = nn.Embedding(num_embeddings=embedding.shape[0], embedding_dim=embedding.shape[1],
-                                      padding_idx=vocab.padding_idx,
-                                      max_norm=None, norm_type=2, scale_grad_by_freq=False,
-                                      sparse=False, _weight=embedding)
-        self._embed_size = self.embedding.weight.size(1)
-        self.requires_grad = requires_grad
-        self.kwargs = kwargs
-
-    @property
-    def weight(self):
-        return self.embedding.weight
-
-    def _randomly_init_embed(self, num_embedding, embedding_dim, init_embed=None):
-        r"""
-
-        :param int num_embedding: number of entries in the embedding
-        :param int embedding_dim: dimensionality of the embedding
-        :param callable init_embed: initialization method
-        :return: torch.FloatTensor
-        """
-        embed = torch.zeros(num_embedding, embedding_dim)
-
-        if init_embed is None:
-            nn.init.uniform_(embed, -np.sqrt(3 / embedding_dim), np.sqrt(3 / embedding_dim))
-        else:
-            init_embed(embed)
-
-        return embed
-
-    def _load_with_vocab(self, embed_filepath, vocab, dtype=np.float32, padding='<pad>', unknown='<unk>',
-                         error='ignore', init_method=None):
-        r"""
-        Extracts the embeddings of the words in vocab from the pretrained vectors at embed_filepath. The loader
-        automatically detects whether embed_filepath is in word2vec format (the first line has only two elements)
-        or GloVe format.
-
-        :param str embed_filepath: path to the pretrained embedding.
-        :param vocab: a :class:`~fastNLP.Vocabulary`; embeddings are read for the words that appear in it.
-            Words that do not appear in the pretrained file are sampled from a normal distribution fit to the found
-            embeddings, so that the whole embedding is identically distributed.
-        :param dtype: dtype of the loaded embedding
-        :param str padding: the padding token of the vocabulary
-        :param str unknown: the unknown token of the vocabulary
-        :param str error: `ignore` or `strict`; with `ignore`, errors are skipped automatically; with `strict`,
-            errors are raised. The typical error sources here are empty lines in the file or inconsistent dimensions.
-        :param init_method: how to initialize vectors that were not found. Any of the torch.nn.init.* methods may be
-            used. Defaults to torch.nn.init.zeros_.
-        :return torch.tensor: shape [len(vocab), dimension]; the dimension is determined by the pretrained embedding.
-        """
-        assert isinstance(vocab, Vocabulary), "Only fastNLP.Vocabulary is supported."
-        if not os.path.exists(embed_filepath):
-            raise FileNotFoundError("`{}` does not exist.".format(embed_filepath))
-        with open(embed_filepath, 'r', encoding='utf-8') as f:
-            line = f.readline().strip()
-            parts = line.split()
-            start_idx = 0
-            if len(parts) == 2:
-                dim = int(parts[1])
-                start_idx += 1
-            else:
-                dim = len(parts) - 1
-                f.seek(0)
-            matrix = {}  # the key is a word's index in vocab; the value is its vector, or None if the word was not found in the pretrained file
-            if vocab.padding:
-                matrix[vocab.padding_idx] = torch.zeros(dim)
-            if vocab.unknown:
-                matrix[vocab.unknown_idx] = torch.zeros(dim)
-            found_count = 0
-            found_unknown = False
-            for idx, line in enumerate(f, start_idx):
-                try:
-                    parts = line.strip().split()
-                    word = ''.join(parts[:-dim])
-                    nums = parts[-dim:]
-                    # align unk and pad
-                    if word == padding and vocab.padding is not None:
-                        word = vocab.padding
-                    elif word == unknown and vocab.unknown is not None:
-                        word = vocab.unknown
-                        found_unknown = True
-                    if word in vocab:
-                        index = vocab.to_index(word)
-                        if index in matrix:
-                            warnings.warn(f"Word has more than one vector in embedding file. Set logger level to "
-                                          f"DEBUG for detail.")
-                            logger.debug(f"Word:{word} occurs again in line:{idx}(starts from 0)")
-                        matrix[index] = torch.from_numpy(np.fromstring(' '.join(nums), sep=' ', dtype=dtype, count=dim))
-                        if self.only_norm_found_vector:
-                            matrix[index] = matrix[index] / np.linalg.norm(matrix[index])
-                        found_count += 1
-                except Exception as e:
-                    if error == 'ignore':
-                        warnings.warn("Error occurred at the {} line.".format(idx))
-                    else:
-                        logger.error("Error occurred at the {} line.".format(idx))
-                        raise e
-            logger.info("Found {} out of {} words in the pre-training embedding.".format(found_count, len(vocab)))
-            if not self.only_use_pretrain_word:  # if only pretrained words are used, no entries are created for missing words
-                for word, index in vocab:
-                    if index not in matrix and not vocab._is_word_no_create_entry(word):
-                        if found_unknown:  # if an unknown vector was found, initialize with it
-                            matrix[index] = matrix[vocab.unknown_idx]
-                        else:
-                            matrix[index] = None
-            # the words remaining in matrix are those that need an entry
-            vectors = self._randomly_init_embed(len(matrix), dim, init_method)
-
-            if vocab.unknown is None:  # create a dedicated unknown entry
-                unknown_idx = len(matrix)
-                vectors = torch.cat((vectors, torch.zeros(1, dim)), dim=0).contiguous()
-            else:
-                unknown_idx = vocab.unknown_idx
-            self.register_buffer('words_to_words', torch.full((len(vocab), ), fill_value=unknown_idx, dtype=torch.long).long())
-            index = 0
-            for word, index_in_vocab in vocab:
-                if index_in_vocab in matrix:
-                    vec = matrix.get(index_in_vocab)
-                    if vec is not None:  # use the found vector; None means it still needs to be trained
-                        vectors[index] = vec
-                    self.words_to_words[index_in_vocab] = index
-                    index += 1
-
-            return vectors
-
-    def forward(self, words):
-        r"""
-        Takes word indices as input.
-
-        :param words: torch.LongTensor, [batch_size, max_len]
-        :return: torch.FloatTensor, [batch_size, max_len, embed_size]
-        """
-        if hasattr(self, 'words_to_words'):
-            words = self.words_to_words[words]
-        words = self.drop_word(words)
-        words = self.embedding(words)
-        words = self.dropout(words)
-        return words
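The format detection in _load_with_vocab keys off the first line only: a word2vec-style file opens with a `count dim` header, while a GloVe-style file starts directly with a vector row. Illustrative 3d files (made-up values):

    word2vec style:             GloVe style:
        2 3                         the 0.1 0.2 0.3
        the 0.1 0.2 0.3             cat 0.4 0.5 0.6
        cat 0.4 0.5 0.6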
-    def save(self, folder):
-        """
-        Stores the embedding into folder; it can be read back later with the load method.
-
-        :param str folder: three files are generated in this folder: vocab.txt, static.txt and static_hyper.json.
-            vocab.txt can be read back with Vocabulary.load; static.txt is stored in word2vec style with elements
-            separated by spaces (the first line holds just two elements, and each following line starts with the word
-            followed by the values of each dimension); static_hyper.json stores the hyperparameters of the StaticEmbedding.
-        :return:
-        """
-        os.makedirs(folder, exist_ok=True)
-
-        vocab = self.get_word_vocab()
-        vocab_fp = os.path.join(folder, VOCAB_FILENAME)
-        vocab.save(vocab_fp)
-        kwargs = self.kwargs.copy()
-        kwargs['dropout'] = self.dropout_layer.p
-        kwargs['word_dropout'] = self.word_dropout
-        kwargs['requires_grad'] = self.requires_grad
-        kwargs['only_norm_found_vector'] = False
-        kwargs['only_use_pretrain_word'] = True
-
-        with open(os.path.join(folder, STATIC_HYPER_FILENAME), 'w', encoding='utf-8') as f:
-            json.dump(kwargs, f, indent=2)
-
-        with open(os.path.join(folder, STATIC_EMBED_FILENAME), 'w', encoding='utf-8') as f:
-            f.write('{}\n'.format(' '*30))  # leave blank space; the header is filled in afterwards
-            word_count = 0
-            saved_word = {}
-            valid_word_count = 0
-            for i in range(len(self.words_to_words)):
-                word = vocab.to_word(i)
-                if not vocab._is_word_no_create_entry(word):
-                    word_count += 1
-                    if kwargs['lower']:
-                        word = word.lower()
-                    if word in saved_word:
-                        continue
-                    saved_word[word] = 1
-                    vec_i = self.words_to_words[i]
-                    if vec_i == vocab.unknown_idx and i != vocab.unknown_idx:
-                        continue
-                    vec = self.embedding.weight.data[vec_i].tolist()
-                    vec_str = ' '.join(map(str, vec))
-                    f.write(f'{word} {vec_str}\n')
-                    valid_word_count += 1
-            f.seek(0)
-            f.write('{} {}'.format(valid_word_count, self.embedding_dim))
-        logger.debug(f"StaticEmbedding has been saved to {folder}.")
-
-    @classmethod
-    def load(cls, folder):
-        """
-
-        :param str folder: the folder should contain the three files vocab.txt, static.txt and static_hyper.json
-        :return:
-        """
-        for name in [VOCAB_FILENAME, STATIC_EMBED_FILENAME, STATIC_HYPER_FILENAME]:
-            assert os.path.exists(os.path.join(folder, name)), f"{name} not found in {folder}."
-
-        vocab = Vocabulary.load(os.path.join(folder, VOCAB_FILENAME))
-        with open(os.path.join(folder, STATIC_HYPER_FILENAME), 'r', encoding='utf-8') as f:
-            hyper = json.load(f)
-
-        logger.info(f"Load StaticEmbedding from {folder}.")
-        embed = cls(vocab=vocab, model_dir_or_name=os.path.join(folder, STATIC_EMBED_FILENAME), **hyper)
-        return embed
-
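The save/load pair round-trips through the three files named by the module constants. A minimal sketch (hypothetical path; note that save forces only_use_pretrain_word=True, so words that had no saved vector map to unk after reloading):

    >>> embed.save('/tmp/static_demo')               # writes vocab.txt, static.txt, static_hyper.json
    >>> embed2 = StaticEmbedding.load('/tmp/static_demo')
    >>> embed2.embed_size == embed.embed_size
    True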
diff --git a/fastNLP/embeddings/transformers_embedding.py b/fastNLP/embeddings/transformers_embedding.py
deleted file mode 100644
index 4b15ea37..00000000
--- a/fastNLP/embeddings/transformers_embedding.py
+++ /dev/null
@@ -1,502 +0,0 @@
-r"""
-Wraps models from the transformers package as fastNLP embedding objects.
-
-"""
-import os
-from itertools import chain
-from functools import partial
-
-from torch import nn
-import numpy as np
-import torch
-
-from .contextual_embedding import ContextualEmbedding
-from ..core import logger
-from ..core.vocabulary import Vocabulary
-
-
-__all__ = ['TransformersEmbedding', 'TransformersWordPieceEncoder']
-
-
-class TransformersEmbedding(ContextualEmbedding):
-    r"""
-    An embedding that encodes words with a model from the transformers package. It is advisable to limit the input
-    to about 430 words rather than 512 (the exact number can vary with the pretrained model's parameters): the
-    pretrained BERT model is limited to 512 tokens, and since the input words are not yet word-piece segmented
-    (the word-piece split is performed by TransformersEmbedding when the words are fed in), the length after
-    splitting may exceed the maximum.
-
-    Example::
-
-        >>> import torch
-        >>> from fastNLP import Vocabulary
-        >>> from fastNLP.embeddings import TransformersEmbedding
-        >>> from transformers import ElectraModel, ElectraTokenizer
-        >>> vocab = Vocabulary().add_word_lst("The weather is good .".split())
-        >>> model = ElectraModel.from_pretrained("google/electra-small-generator")
-        >>> tokenizer = ElectraTokenizer.from_pretrained("google/electra-small-generator")
-        >>> embed = TransformersEmbedding(vocab, model=model, tokenizer=tokenizer, requires_grad=False, layers='4,-2,-1')
-        >>> words = torch.LongTensor([[vocab.to_index(word) for word in "The weather is good .".split()]])
-        >>> outputs = embed(words)
-        >>> outputs.size()
-        >>> # torch.Size([1, 5, 2304])
-
-    """
-    def __init__(self, vocab, model, tokenizer, layers='-1',
-                 pool_method: str = 'first', word_dropout=0, dropout=0, requires_grad=True,
-                 include_cls_sep: bool = False, auto_truncate=True, **kwargs):
-        r"""
-
-        :param ~fastNLP.Vocabulary vocab: the vocabulary
-        :param model: a PreTrainedModel object from the transformers package
-        :param tokenizer: a PreTrainedTokenizer object from the transformers package
-        :param str,list layers: which layers the output representation comes from; the results of the different
-            layers are concatenated along the last dimension in the given order. Layer indices are separated by ',',
-            counted from 0, and negative numbers index from the last layer. Layer 0 is the embedding layer
-            (word-piece embedding plus position embedding).
-        :param str pool_method: each word is represented by several word pieces, so this controls how a word's
-            representation is computed from its word pieces. Supports ``last``, ``first``, ``avg`` and ``max``.
-        :param float word_dropout: probability of replacing a word with unk; this both trains unk and regularizes.
-        :param float dropout: dropout probability applied to the embedding representation; 0.1 randomly zeroes 10% of the values.
-        :param bool include_cls_sep: when computing the sentence representation, [CLS] and [SEP] are prepended and
-            appended; this flag controls whether those two tokens are kept in the result. If True, the word embedding
-            result is two tokens longer than the input, which may cause a length mismatch with other embedding types
-            inside a :class::StackEmbedding.
-        :param bool requires_grad: whether gradients are required, i.e. whether the transformer weights are updated.
-        :param bool auto_truncate: when the words of a sentence split into more word pieces than the model's maximum
-            allowed length (usually 512), automatically cut everything beyond the first 510 word pieces and set the
-            512th word piece to [SEP]; the encoded results for the part beyond the limit are set to zero. Usually
-            only tasks that classify from [CLS] alone set auto_truncate to True.
-        :param kwargs:
-                int min_freq: words with frequency below this are replaced by unk; default 1
-                dict tokenizer_kwargs: extra arguments used when the tokenizer's tokenize() method is called, e.g.
-                RoBERTaTokenizer needs {'add_prefix_space': True}
-        """
-        super().__init__(vocab, word_dropout=word_dropout, dropout=dropout)
-
-        if word_dropout > 0:
-            assert vocab.unknown is not None, "When word_drop > 0, Vocabulary must contain the unknown token."
-
-        self._word_sep_index = -100
-        if tokenizer.sep_token in vocab:
-            self._word_sep_index = vocab[tokenizer.sep_token]
-
-        self._word_cls_index = -100
-        if tokenizer.cls_token in vocab:
-            self._word_cls_index = vocab[tokenizer.cls_token]
-
-        min_freq = kwargs.get('min_freq', 1)
-        self._min_freq = min_freq
-
-        tokenizer_kwargs = kwargs.get('tokenizer_kwargs', {})
-        self.model = _TransformersWordModel(tokenizer=tokenizer, model=model, vocab=vocab, layers=layers,
-                                            pool_method=pool_method, include_cls_sep=include_cls_sep,
-                                            auto_truncate=auto_truncate, min_freq=min_freq, tokenizer_kwargs=tokenizer_kwargs)
-
-        self.requires_grad = requires_grad
-        self._embed_size = len(self.model.layers) * model.config.hidden_size
-
-    def forward(self, words):
-        r"""
-        Computes the transformer embedding representation of words. Before computation, [CLS] is added at the start
-        of each sentence and [SEP] at the end; include_cls_sep determines whether the representations of these two
-        tokens are kept or removed.
-
-        :param torch.LongTensor words: [batch_size, max_len]
-        :return: torch.FloatTensor. batch_size x max_len x (768*len(self.layers))
-        """
-        words = self.drop_word(words)
-        outputs = self._get_sent_reprs(words)
-        if outputs is not None:
-            return self.dropout(outputs)
-        outputs = self.model(words)
-        outputs = torch.cat([*outputs], dim=-1)
-
-        return self.dropout(outputs)
-
-    def drop_word(self, words):
-        r"""
-        Randomly replaces words with unknown_index at the configured rate.
-
-        :param torch.LongTensor words: batch_size x max_len
-        :return:
-        """
-        if self.word_dropout > 0 and self.training:
-            with torch.no_grad():
-                mask = torch.full_like(words, fill_value=self.word_dropout, dtype=torch.float, device=words.device)
-                mask = torch.bernoulli(mask).eq(1)  # the larger word_dropout is, the more positions are 1
-                pad_mask = words.ne(self._word_pad_index)
-                mask = pad_mask.__and__(mask)  # padding positions are never set to unk
-                if self._word_sep_index != -100:
-                    not_sep_mask = words.ne(self._word_sep_index)
-                    mask = mask.__and__(not_sep_mask)
-                if self._word_cls_index != -100:
-                    not_cls_mask = words.ne(self._word_cls_index)
-                    mask = mask.__and__(not_cls_mask)
-                words = words.masked_fill(mask, self._word_unk_index)
-        return words
-
-    def save(self, folder):
-        """
-        Saves the tokenizer and model into folder. The model is stored under `folder/{model_name}` and the tokenizer
-        under `folder/{tokenizer_name}`.
-        :param str folder: destination folder
-        :return:
-        """
-        os.makedirs(folder, exist_ok=True)
-        self.model.save(folder)
-
-
-class TransformersWordPieceEncoder(nn.Module):
-    r"""
-    Loads a transformers model; after loading, call the index_datasets method to generate the word_pieces field in a
-    dataset.
-
-    """
-    def __init__(self, model, tokenizer, layers: str = '-1',
-                 word_dropout=0, dropout=0, requires_grad: bool = True, **kwargs):
-        r"""
-
-        :param model: a model from transformers
-        :param tokenizer: a tokenizer from transformers
-        :param str layers: which layers form the final representation. Layer indices are separated by ',' and negative
-            numbers index from the last layer. Layer 0 is the embedding layer (word-piece embedding plus position embedding).
-        :param float word_dropout: probability of replacing a word with unk; this both trains unk and regularizes.
-        :param float dropout: dropout probability applied to the embedding representation; 0.1 randomly zeroes 10% of the values.
-        :param bool requires_grad: whether gradients are required.
-        """
-        super().__init__()
-
-        self.model = _WordPieceTransformersModel(model=model, tokenizer=tokenizer, layers=layers)
-        self._sep_index = self.model._sep_index
-        self._cls_index = self.model._cls_index
-        self._wordpiece_pad_index = self.model._wordpiece_pad_index
-        self._wordpiece_unk_index = self.model._wordpiece_unknown_index
-        self._embed_size = len(self.model.layers) * self.model.config.hidden_size
-        self.requires_grad = requires_grad
-        self.word_dropout = word_dropout
-        self.dropout_layer = nn.Dropout(dropout)
-
-    @property
-    def embed_size(self):
-        return self._embed_size
-
-    @property
-    def embedding_dim(self):
-        return self._embed_size
-
-    @property
-    def num_embedding(self):
-        return self.model.encoder.config.vocab_size
-
-    def index_datasets(self, *datasets, field_name, **kwargs):
-        r"""
-        Uses the model's tokenizer to generate a new word_pieces field in the datasets, sets it as input, and sets
-        the pad value of the word_pieces field to the tokenizer's pad value.
-
-        :param ~fastNLP.DataSet datasets: DataSet objects
-        :param str field_name: which field the word_pieces field is generated from. Each entry in that field should be a raw string.
-        :param kwargs: extra arguments passed to the tokenizer
-        :return:
-        """
-        self.model.index_datasets(*datasets, field_name=field_name, **kwargs)
-
-    def forward(self, word_pieces, token_type_ids=None):
-        r"""
-        Computes the embedding representation of words. The input words should already contain the [CLS] and [SEP] tags.
-
-        :param words: batch_size x max_len
-        :param token_type_ids: batch_size x max_len, distinguishes the first sentence from the second. If not given,
-            it is generated automatically (in most cases it does not need to be passed in): positions up to and
-            including the first [SEP] are 0, positions from there to the second [SEP] are 1, then 0 again up to the
-            third [SEP], and so on.
-        :return: torch.FloatTensor. batch_size x max_len x (768*len(self.layers))
-        """
-        word_pieces = self.drop_word(word_pieces)
-        outputs = self.model(word_pieces)
-        outputs = torch.cat([*outputs], dim=-1)
-
-        return self.dropout_layer(outputs)
-
-    def drop_word(self, words):
-        r"""
-        Randomly replaces words with unknown_index at the configured rate.
-
-        :param torch.LongTensor words: batch_size x max_len
-        :return:
-        """
-        if self.word_dropout > 0 and self.training:
-            with torch.no_grad():
-                not_sep_mask = words.ne(self._sep_index)
-                not_cls_mask = words.ne(self._cls_index)
-                replaceable_mask = not_sep_mask.__and__(not_cls_mask)
-                mask = torch.full_like(words, fill_value=self.word_dropout, dtype=torch.float, device=words.device)
-                mask = torch.bernoulli(mask).eq(1)  # the larger word_dropout is, the more positions are 1
-                pad_mask = words.ne(self._wordpiece_pad_index)
-                mask = pad_mask.__and__(mask).__and__(replaceable_mask)  # padding positions are never set to unk
-                words = words.masked_fill(mask, self._wordpiece_unk_index)
-        return words
-
-    def save(self, folder):
-        os.makedirs(folder, exist_ok=True)
-        self.model.save(folder)
-        logger.debug(f"TransformersWordPieceEncoder has been saved in {folder}")
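Because the underlying wrapper asserts output_hidden_states whenever any layer other than the last is requested, a working setup looks roughly like the following (the model name is only an example; layers is passed as a list of ints here, which the wrapper also accepts):

    >>> from transformers import BertModel, BertTokenizer
    >>> model = BertModel.from_pretrained('bert-base-uncased', output_hidden_states=True)
    >>> tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
    >>> encoder = TransformersWordPieceEncoder(model=model, tokenizer=tokenizer, layers=[0, -1])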
-
-
-class _TransformersWordModel(nn.Module):
-    def __init__(self, tokenizer, model, vocab: Vocabulary, layers: str = '-1', pool_method: str = 'first',
-                 include_cls_sep: bool = False, auto_truncate: bool = False, min_freq=2, tokenizer_kwargs={}):
-        super().__init__()
-
-        self.tokenizer = tokenizer
-        self.encoder = model
-        self.config = model.config
-        self.only_last_layer = True
-        if not (isinstance(layers, str) and (layers=='-1' or int(layers)==self.encoder.config.num_hidden_layers)):
-            assert self.encoder.config.output_hidden_states == True, \
-                f"You have to output all hidden states if you want to" \
-                f" access the middle output of `{model.__class__.__name__}` "
-            self.only_last_layer = False
-
-        self._max_position_embeddings = self.encoder.config.max_position_embeddings - 2
-        # sanity-check encoder_layer_number
-        encoder_layer_number = len(self.encoder.encoder.layer)
-        self.encoder_layer_number = encoder_layer_number
-        if isinstance(layers, list):
-            self.layers = [int(l) for l in layers]
-        elif isinstance(layers, str):
-            self.layers = list(map(int, layers.split(',')))
-        else:
-            raise TypeError("`layers` only supports str or list[int]")
-
-        for layer in self.layers:
-            if layer < 0:
-                assert -layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \
-                    f"a {model.__class__.__name__} model with {encoder_layer_number} layers."
-            else:
-                assert layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \
-                    f"a {model.__class__.__name__} model with {encoder_layer_number} layers."
-
-        assert pool_method in ('avg', 'max', 'first', 'last')
-        self.pool_method = pool_method
-        self.include_cls_sep = include_cls_sep
-        self.auto_truncate = auto_truncate
-
-        word_to_wordpieces = []
-        word_pieces_lengths = []
-        for word, index in vocab:
-            if index == vocab.padding_idx:  # pad is a special symbol
-                word = tokenizer.pad_token
-            elif index == vocab.unknown_idx:
-                word = tokenizer.unk_token
-            elif vocab.word_count[word] < min_freq:
-                word = tokenizer.unk_token
-            word_pieces = self.tokenizer.tokenize(word, **tokenizer_kwargs)
-            word_pieces = self.tokenizer.convert_tokens_to_ids(word_pieces)
-            word_to_wordpieces.append(word_pieces)
-            word_pieces_lengths.append(len(word_pieces))
-        self._cls_index = self.tokenizer.cls_token_id
-        self._sep_index = self.tokenizer.sep_token_id
-        self._word_pad_index = vocab.padding_idx
-        self._wordpiece_pad_index = self.tokenizer.pad_token_id  # needed when generating word_pieces
-        self.word_to_wordpieces = np.array(word_to_wordpieces)
-        self.register_buffer('word_pieces_lengths', torch.LongTensor(word_pieces_lengths))
-        logger.debug("Successfully generate word pieces.")
-
-    def forward(self, words):
-        r"""
-
-        :param words: torch.LongTensor, batch_size x max_len
-        :return: num_layers x batch_size x max_len x hidden_size, or num_layers x batch_size x (max_len+2) x hidden_size
-        """
-        with torch.no_grad():
-            batch_size, max_word_len = words.size()
-            word_mask = words.ne(self._word_pad_index)
-            seq_len = word_mask.sum(dim=-1)
-            batch_word_pieces_length = self.word_pieces_lengths[words].masked_fill(word_mask.eq(False), 0)  # batch_size x max_len
-            word_pieces_lengths = batch_word_pieces_length.sum(dim=-1)  # batch_size
-            max_word_piece_length = batch_word_pieces_length.sum(dim=-1).max().item()
-            if max_word_piece_length + 2 > self._max_position_embeddings:
-                if self.auto_truncate:
-                    word_pieces_lengths = word_pieces_lengths.masked_fill(
-                        word_pieces_lengths + 2 > self._max_position_embeddings, self._max_position_embeddings - 2)
-                else:
-                    raise RuntimeError(
-                        "After split words into word pieces, the lengths of word pieces are longer than the "
-                        f"maximum allowed sequence length:{self._max_position_embeddings} of bert. You can set "
-                        f"`auto_truncate=True` for BertEmbedding to automatically truncate overlong input.")
-
-            # +2 because [CLS] and [SEP] need to be added
-            word_pieces = words.new_full((batch_size, min(max_word_piece_length + 2, self._max_position_embeddings)),
-                                         fill_value=self._wordpiece_pad_index)
-            attn_masks = torch.zeros_like(word_pieces)
-            # 1. get the word_piece ids of words, and the corresponding span ranges
-            word_indexes = words.cpu().numpy()
-            for i in range(batch_size):
-                word_pieces_i = list(chain(*self.word_to_wordpieces[word_indexes[i, :seq_len[i]]]))
-                if self.auto_truncate and len(word_pieces_i) > self._max_position_embeddings - 2:
-                    word_pieces_i = word_pieces_i[:self._max_position_embeddings - 2]
-                word_pieces[i, 1:word_pieces_lengths[i] + 1] = torch.LongTensor(word_pieces_i)
-                attn_masks[i, :word_pieces_lengths[i] + 2].fill_(1)
-            word_pieces[:, 0].fill_(self._cls_index)
-            batch_indexes = torch.arange(batch_size).to(words)
-            word_pieces[batch_indexes, word_pieces_lengths + 1] = self._sep_index
-            token_type_ids = torch.zeros_like(word_pieces)
-        # 2. get the hidden states and pool the word_pieces into word representations accordingly
-        # all_outputs: [batch_size x max_len x hidden_size, batch_size x max_len x hidden_size, ...]
-        all_outputs = self.encoder(input_ids=word_pieces, token_type_ids=token_type_ids,
-                                   attention_mask=attn_masks)
-        if not self.only_last_layer:
-            for _ in all_outputs:
-                if isinstance(_, (tuple, list)) and len(_) == self.encoder_layer_number:
-                    bert_outputs = _
-                    break
-        else:
-            bert_outputs = all_outputs[:1]
-        # output_layers = [self.layers]  # len(self.layers) x batch_size x real_word_piece_length x hidden_size
-
-        if self.include_cls_sep:
-            s_shift = 1
-            outputs = bert_outputs[-1].new_zeros(len(self.layers), batch_size, max_word_len + 2,
-                                                 bert_outputs[-1].size(-1))
-
-        else:
-            s_shift = 0
-            outputs = bert_outputs[-1].new_zeros(len(self.layers), batch_size, max_word_len,
-                                                 bert_outputs[-1].size(-1))
-        batch_word_pieces_cum_length = batch_word_pieces_length.new_zeros(batch_size, max_word_len + 1)
-        batch_word_pieces_cum_length[:, 1:] = batch_word_pieces_length.cumsum(dim=-1)  # batch_size x max_len
-
-        if self.pool_method == 'first':
-            batch_word_pieces_cum_length = batch_word_pieces_cum_length[:, :seq_len.max()]
-            batch_word_pieces_cum_length.masked_fill_(batch_word_pieces_cum_length.ge(max_word_piece_length), 0)
-            _batch_indexes = batch_indexes[:, None].expand((batch_size, batch_word_pieces_cum_length.size(1)))
-        elif self.pool_method == 'last':
-            batch_word_pieces_cum_length = batch_word_pieces_cum_length[:, 1:seq_len.max() + 1] - 1
-            batch_word_pieces_cum_length.masked_fill_(batch_word_pieces_cum_length.ge(max_word_piece_length), 0)
-            _batch_indexes = batch_indexes[:, None].expand((batch_size, batch_word_pieces_cum_length.size(1)))
-
-        for l_index, l in enumerate(self.layers):
-            output_layer = bert_outputs[l]
-            real_word_piece_length = output_layer.size(1) - 2
-            if max_word_piece_length > real_word_piece_length:  # if the output was actually truncated
-                paddings = output_layer.new_zeros(batch_size,
-                                                  max_word_piece_length - real_word_piece_length,
-                                                  output_layer.size(2))
-                output_layer = torch.cat((output_layer, paddings), dim=1).contiguous()
-            # collapse the word_piece representations into word representations
-            truncate_output_layer = output_layer[:, 1:-1]  # drop the [CLS]/[SEP] positions; batch_size x len x hidden_size
-            if self.pool_method == 'first':
-                tmp = truncate_output_layer[_batch_indexes, batch_word_pieces_cum_length]
-                tmp = tmp.masked_fill(word_mask[:, :batch_word_pieces_cum_length.size(1), None].eq(False), 0)
-                outputs[l_index, :, s_shift:batch_word_pieces_cum_length.size(1) + s_shift] = tmp
-
-            elif self.pool_method == 'last':
-                tmp = truncate_output_layer[_batch_indexes, batch_word_pieces_cum_length]
-                tmp = tmp.masked_fill(word_mask[:, :batch_word_pieces_cum_length.size(1), None].eq(False), 0)
-                outputs[l_index, :, s_shift:batch_word_pieces_cum_length.size(1) + s_shift] = tmp
-            elif self.pool_method == 'max':
-                for i in range(batch_size):
-                    for j in range(seq_len[i]):
-                        start, end = batch_word_pieces_cum_length[i, j], batch_word_pieces_cum_length[i, j + 1]
-                        outputs[l_index, i, j + s_shift], _ = torch.max(truncate_output_layer[i, start:end], dim=-2)
-            else:
-                for i in range(batch_size):
-                    for j in range(seq_len[i]):
-                        start, end = batch_word_pieces_cum_length[i, j], batch_word_pieces_cum_length[i, j + 1]
-                        outputs[l_index, i, j + s_shift] = torch.mean(truncate_output_layer[i, start:end], dim=-2)
-            if self.include_cls_sep:
-                outputs[l_index, :, 0] = output_layer[:, 0]
-                outputs[l_index, batch_indexes, seq_len + s_shift] = output_layer[batch_indexes, word_pieces_lengths + s_shift]
-
-        # 3. return the final embedding result
-        return outputs
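The 'first'/'last' pooling above works off the cumulative word-piece lengths: for word j, batch_word_pieces_cum_length[:, j] is the index of its first word piece and batch_word_pieces_cum_length[:, j+1] - 1 that of its last. A small worked example with made-up per-word piece counts:

    >>> import torch
    >>> lengths = torch.tensor([[1, 3, 2]])    # word-piece counts for 3 words
    >>> cum = torch.zeros(1, 4, dtype=torch.long)
    >>> cum[:, 1:] = lengths.cumsum(dim=-1)    # cum is now tensor([[0, 1, 4, 6]])
    >>> cum[:, :-1]                            # 'first' indices
    tensor([[0, 1, 4]])
    >>> cum[:, 1:] - 1                         # 'last' indices
    tensor([[0, 3, 5]])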
-
-    def save(self, folder):
-        self.tokenizer.save_pretrained(folder)
-        self.encoder.save_pretrained(folder)
-
-
-class _WordPieceTransformersModel(nn.Module):
-    def __init__(self, model, tokenizer, layers: str = '-1'):
-        super().__init__()
-
-        self.tokenizer = tokenizer
-        self.encoder = model
-        self.config = self.encoder.config
-        # sanity-check encoder_layer_number
-        encoder_layer_number = len(self.encoder.encoder.layer)
-        self.encoder_layer_number = encoder_layer_number
-        self.only_last_layer = True
-        if not (isinstance(layers, str) and (layers=='-1' or int(layers)==self.encoder.config.num_hidden_layers)):
-            assert self.encoder.config.output_hidden_states == True, \
-                f"You have to output all hidden states if you want to" \
-                f" access the middle output of `{model.__class__.__name__}` "
-            self.only_last_layer = False
-
-        if isinstance(layers, list):
-            self.layers = [int(l) for l in layers]
-        elif isinstance(layers, str):
-            self.layers = list(map(int, layers.split(',')))
-        else:
-            raise TypeError("`layers` only supports str or list[int]")
-
-        for layer in self.layers:
-            if layer < 0:
-                assert -layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \
-                    f"a {model.__class__.__name__} model with {encoder_layer_number} layers."
-            else:
-                assert layer <= encoder_layer_number, f"The layer index:{layer} is out of scope for " \
-                    f"a {model.__class__.__name__} model with {encoder_layer_number} layers."
-
-        self._cls_index = self.tokenizer.cls_token_id
-        self._sep_index = self.tokenizer.sep_token_id
-        self._wordpiece_pad_index = self.tokenizer.pad_token_id  # needed when generating word_pieces
-        self._wordpiece_unknown_index = self.tokenizer.unk_token_id
-
-    def index_datasets(self, *datasets, field_name, **kwargs):
-        r"""
-        Uses the model's tokenizer to generate a new word_pieces field in the datasets and sets it as input. If the
-        sequences do not already start and end with [CLS] and [SEP], those tokens are added, and the pad value of the
-        word_pieces field is set to the tokenizer's pad value.
-
-        :param datasets: DataSet objects
-        :param field_name: which field to index
-        :param kwargs: extra arguments passed to the tokenizer
-        :return:
-        """
-        kwargs['add_special_tokens'] = kwargs.get('add_special_tokens', True)
-        kwargs['add_prefix_space'] = kwargs.get('add_prefix_space', True)
-
-        encode_func = partial(self.tokenizer.encode, **kwargs)
-
-        for index, dataset in enumerate(datasets):
-            try:
-                dataset.apply_field(encode_func, field_name=field_name, new_field_name='word_pieces',
-                                    is_input=True)
-                dataset.set_pad_val('word_pieces', self._wordpiece_pad_index)
-            except Exception as e:
-                logger.error(f"Exception happens when processing the {index} dataset.")
-                raise e
-
-    def forward(self, word_pieces):
-        r"""
-
-        :param word_pieces: torch.LongTensor, batch_size x max_len
-        :return: num_layers x batch_size x max_len x hidden_size, or num_layers x batch_size x (max_len+2) x hidden_size
-        """
-        batch_size, max_len = word_pieces.size()
-
-        attn_masks = word_pieces.ne(self._wordpiece_pad_index)
-        all_outputs = self.encoder(word_pieces, token_type_ids=torch.zeros_like(word_pieces),
-                                   attention_mask=attn_masks)
-        if not self.only_last_layer:
-            for _ in all_outputs:
-                if isinstance(_, (tuple, list)) and len(_) == self.encoder_layer_number:
-                    roberta_outputs = _
-                    break
-        else:
-            roberta_outputs = all_outputs[:1]
-        # output_layers = [self.layers]  # len(self.layers) x batch_size x max_word_piece_length x hidden_size
-        outputs = roberta_outputs[0].new_zeros((len(self.layers), batch_size, max_len, roberta_outputs[0].size(-1)))
-        for l_index, l in enumerate(self.layers):
-            roberta_output = roberta_outputs[l]
-            outputs[l_index] = roberta_output
-        return outputs
-
-    def save(self, folder):
-        self.tokenizer.save_pretrained(folder)
-        self.encoder.save_pretrained(folder)
diff --git a/fastNLP/embeddings/utils.py b/fastNLP/embeddings/utils.py
deleted file mode 100644
index 9a18bfe3..00000000
--- a/fastNLP/embeddings/utils.py
+++ /dev/null
@@ -1,104 +0,0 @@
-r"""
-.. todo::
-    doc
-"""
-import numpy as np
-import torch
-from torch import nn as nn
-
-from ..core.vocabulary import Vocabulary
-
-__all__ = [
-    'get_embeddings',
-    'get_sinusoid_encoding_table'
-]
-
-
-def _construct_char_vocab_from_vocab(vocab: Vocabulary, min_freq: int = 1, include_word_start_end=True):
-    r"""
-    Builds a character vocabulary from a word vocabulary.
-
-    :param vocab: the word vocabulary to build from
-    :param min_freq:
-    :param include_word_start_end: whether to include the special <bow> and <eow> tokens
-    :return:
-    """
-    char_vocab = Vocabulary(min_freq=min_freq)
-    for word, index in vocab:
-        if not vocab._is_word_no_create_entry(word):
-            char_vocab.add_word_lst(list(word))
-    if include_word_start_end:
-        char_vocab.add_word_lst(['<bow>', '<eow>'])
-    return char_vocab
-
-
-def get_embeddings(init_embed, padding_idx=None):
-    r"""
-    Returns an Embedding object built from init_embed. If the input is a tuple, an nn.Embedding is randomly
-    initialized; if it is a numpy.ndarray, the nn.Embedding is initialized with the ndarray's values; if it is a
-    torch.Tensor, the nn.Embedding is initialized with that value; if it is a fastNLP embedding, the object is
-    returned unchanged.
-
-    :param init_embed: can be a tuple (num_embeddings, embedding_dim), i.e. the size of the embedding and the
-        dimension per word; an nn.Embedding object, which is then used as the embedding directly; an np.ndarray or
-        a torch.Tensor, whose values are used to initialize the embedding.
-    :param padding_idx: only effective when a tuple is passed
-    :return nn.Embedding: embeddings
-    """
-    if isinstance(init_embed, tuple):
-        res = nn.Embedding(
-            num_embeddings=init_embed[0], embedding_dim=init_embed[1], padding_idx=padding_idx)
-        nn.init.uniform_(res.weight.data, a=-np.sqrt(3 / res.weight.data.size(1)),
-                         b=np.sqrt(3 / res.weight.data.size(1)))
-    elif isinstance(init_embed, nn.Module):
-        res = init_embed
-    elif isinstance(init_embed, torch.Tensor):
-        res = nn.Embedding.from_pretrained(init_embed, freeze=False)
-    elif isinstance(init_embed, np.ndarray):
-        init_embed = torch.tensor(init_embed, dtype=torch.float32)
-        res = nn.Embedding.from_pretrained(init_embed, freeze=False)
-    else:
-        raise TypeError(
-            'invalid init_embed type: {}'.format((type(init_embed))))
-    return res
-
-
-def get_sinusoid_encoding_table(n_position, d_hid, padding_idx=None):
-    """
-    Sinusoidal embedding: within each position's representation, even dimensions (0, 2, 4, ...) use sin and odd
-    dimensions (1, 3, 5, ...) use cos.
-
-    :param int n_position: total number of positions
-    :param int d_hid: number of dimensions; must be even
-    :param padding_idx:
-    :return: torch.FloatTensor of shape n_position x d_hid
-    """
-
-    def cal_angle(position, hid_idx):
-        return position / np.power(10000, 2 * (hid_idx // 2) / d_hid)
-
-    def get_posi_angle_vec(position):
-        return [cal_angle(position, hid_j) for hid_j in range(d_hid)]
-
-    sinusoid_table = np.array([get_posi_angle_vec(pos_i) for pos_i in range(n_position)])
-
-    sinusoid_table[:, 0::2] = np.sin(sinusoid_table[:, 0::2])  # dim 2i
-    sinusoid_table[:, 1::2] = np.cos(sinusoid_table[:, 1::2])  # dim 2i+1
-
-    if padding_idx is not None:
-        # zero vector for padding dimension
-        sinusoid_table[padding_idx] = 0.
-
-    return torch.FloatTensor(sinusoid_table)
-
-
-def _check_vocab_has_same_index(vocab, other_vocab):
-    """
-    Checks that the two vocabularies assign the same index to each word.
-
-    :param Vocabulary vocab:
-    :param Vocabulary other_vocab:
-    :return:
-    """
-    if other_vocab != vocab:
-        for word, word_ix in vocab:
-            other_word_idx = other_vocab.to_index(word)
-            assert other_word_idx == word_ix, f"Word {word} has different index in vocabs, {word_ix} Vs. {other_word_idx}."
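A quick sketch of the dispatch in get_embeddings and the shape of the sinusoid table (all shapes illustrative):

    >>> import numpy as np
    >>> get_embeddings((100, 50)).weight.shape            # tuple -> randomly initialized
    torch.Size([100, 50])
    >>> get_embeddings(np.zeros((4, 8))).weight.shape     # ndarray -> values copied in
    torch.Size([4, 8])
    >>> get_sinusoid_encoding_table(6, 4).shape           # positions x dimensions
    torch.Size([6, 4])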
\ No newline at end of file
diff --git a/fastNLP/io/__init__.py b/fastNLP/io/__init__.py
deleted file mode 100644
index 9d6dab7b..00000000
--- a/fastNLP/io/__init__.py
+++ /dev/null
@@ -1,126 +0,0 @@
-r"""
-Modules used for IO, specifically including:
-
-1. the :mod:`EmbedLoader` class for reading embeddings,
-
-2. :mod:`Loader` classes for reading data in different formats,
-
-3. :mod:`Pipe` classes for processing the loaded data,
-
-4. classes for saving and loading models; see the :mod:`model_io` module.
-
-These classes are used as follows:
-"""
-__all__ = [
-    'DataBundle',
-
-    'EmbedLoader',
-
-    'Loader',
-
-    'CLSBaseLoader',
-    'AGsNewsLoader',
-    'DBPediaLoader',
-    'YelpFullLoader',
-    'YelpPolarityLoader',
-    'IMDBLoader',
-    'SSTLoader',
-    'SST2Loader',
-    "ChnSentiCorpLoader",
-    "THUCNewsLoader",
-    "WeiboSenti100kLoader",
-
-    'ConllLoader',
-    'Conll2003Loader',
-    'Conll2003NERLoader',
-    'OntoNotesNERLoader',
-    'CTBLoader',
-    "MsraNERLoader",
-    "WeiboNERLoader",
-    "PeopleDailyNERLoader",
-
-    'CSVLoader',
-    'JsonLoader',
-
-    'CWSLoader',
-
-    'MNLILoader',
-    "QuoraLoader",
-    "SNLILoader",
-    "QNLILoader",
-    "RTELoader",
-    "CNXNLILoader",
-    "BQCorpusLoader",
-    "LCQMCLoader",
-
-    "CMRC2018Loader",
-
-    "Pipe",
-
-    "CLSBasePipe",
-    "AGsNewsPipe",
-    "DBPediaPipe",
-    "YelpFullPipe",
-    "YelpPolarityPipe",
-    "SSTPipe",
-    "SST2Pipe",
-    "IMDBPipe",
-    "ChnSentiCorpPipe",
-    "THUCNewsPipe",
-    "WeiboSenti100kPipe",
-
-    "Conll2003Pipe",
-    "Conll2003NERPipe",
-    "OntoNotesNERPipe",
-    "MsraNERPipe",
-    "PeopleDailyPipe",
-    "WeiboNERPipe",
-
-    "CWSPipe",
-
-    "Conll2003NERPipe",
-    "OntoNotesNERPipe",
-    "MsraNERPipe",
-    "WeiboNERPipe",
-    "PeopleDailyPipe",
-    "Conll2003Pipe",
-
-    "MatchingBertPipe",
-    "RTEBertPipe",
-    "SNLIBertPipe",
-    "QuoraBertPipe",
-    "QNLIBertPipe",
-    "MNLIBertPipe",
-    "CNXNLIBertPipe",
-    "BQCorpusBertPipe",
-    "LCQMCBertPipe",
-    "MatchingPipe",
-    "RTEPipe",
-    "SNLIPipe",
-    "QuoraPipe",
-    "QNLIPipe",
-    "MNLIPipe",
-    "LCQMCPipe",
-    "CNXNLIPipe",
-    "BQCorpusPipe",
-    "RenamePipe",
-    "GranularizePipe",
-    "MachingTruncatePipe",
-
-    "CMRC2018BertPipe",
-
-    'ModelLoader',
-    'ModelSaver',
-
-]
-
-import sys
-
-from .data_bundle import DataBundle
-from .embed_loader import EmbedLoader
-from .loader import *
-from .model_io import ModelLoader, ModelSaver
-from .pipe import *
-from ..doc_utils import doc_process
-
-doc_process(sys.modules[__name__])
\ No newline at end of file
diff --git a/fastNLP/io/data_bundle.py b/fastNLP/io/data_bundle.py
deleted file mode 100644
index cfce4de4..00000000
--- a/fastNLP/io/data_bundle.py
+++ /dev/null
@@ -1,470 +0,0 @@
-r"""
-.. todo::
-    doc
-"""
-__all__ = [
-    'DataBundle',
-]
-
-from typing import Union, List
-
-from ..core.dataset import DataSet
-from ..core.vocabulary import Vocabulary
-from ..core._logger import logger
-
-
-class DataBundle:
-    r"""
-    Holds processed data information: a collection of datasets (e.g. separate train, dev and test sets) together
-    with the vocabulary for each field. This object is usually produced by the load function of the various Loaders
-    in fastNLP; its contents can be accessed as follows
-
-    Example::
-
-        data_bundle = YelpLoader().load({'train':'/path/to/train', 'dev': '/path/to/dev'})
-        train_vocabs = data_bundle.vocabs['train']
-        train_data = data_bundle.datasets['train']
-        dev_data = data_bundle.datasets['dev']
-
-    """
-
-    def __init__(self, vocabs: dict = None, datasets: dict = None):
-        r"""
-
-        :param vocabs: a dict from name (string) to :class:`~fastNLP.Vocabulary`
-        :param datasets: a dict from name (string) to :class:`~fastNLP.DataSet`. Avoid passing the same DataSet
-            object more than once; doing so may cause problems when processing the data with a Pipe. If several
-            datasets really must be identical, deepcopy them manually before passing them in.
-        """
-        self.vocabs = vocabs or {}
-        self.datasets = datasets or {}
-
-    def set_vocab(self, vocab, field_name):
-        r"""
-        Adds a vocab to the DataBundle.
-
-        :param ~fastNLP.Vocabulary vocab: the vocabulary
-        :param str field_name: the field name this vocab corresponds to
-        :return: self
-        """
-        assert isinstance(vocab, Vocabulary), "Only fastNLP.Vocabulary supports."
-        self.vocabs[field_name] = vocab
-        return self
-
-    def set_dataset(self, dataset, name: str):
-        r"""
-
-        :param ~fastNLP.DataSet dataset: the DataSet handed to the DataBundle
-        :param str name: the dataset's name
-        :return: self
-        """
-        assert isinstance(dataset, DataSet), "Only fastNLP.DataSet supports."
-        self.datasets[name] = dataset
-        return self
-
-    def get_dataset(self, name: str) -> DataSet:
-        r"""
-        Gets the dataset named name.
-
-        :param str name: the dataset's name, usually 'train', 'dev' or 'test'
-        :return: DataSet
-        """
-        if name in self.datasets.keys():
-            return self.datasets[name]
-        else:
-            error_msg = f'DataBundle does NOT have DataSet named {name}. ' \
-                        f'It should be one of {self.datasets.keys()}.'
-            logger.error(error_msg)
-            raise KeyError(error_msg)
-
-    def delete_dataset(self, name: str):
-        r"""
-        Deletes the DataSet named name.
-
-        :param str name:
-        :return: self
-        """
-        self.datasets.pop(name, None)
-        return self
-    def get_vocab(self, field_name: str) -> Vocabulary:
-        r"""
-        Gets the vocab corresponding to the field named field_name.
-
-        :param str field_name: the field name
-        :return: Vocabulary
-        """
-        if field_name in self.vocabs.keys():
-            return self.vocabs[field_name]
-        else:
-            error_msg = f'DataBundle does NOT have Vocabulary named {field_name}. ' \
-                        f'It should be one of {self.vocabs.keys()}.'
-            logger.error(error_msg)
-            raise KeyError(error_msg)
-
-    def delete_vocab(self, field_name: str):
-        r"""
-        Deletes a vocab.
-        :param str field_name:
-        :return: self
-        """
-        self.vocabs.pop(field_name, None)
-        return self
-
-    @property
-    def num_dataset(self):
-        return len(self.datasets)
-
-    @property
-    def num_vocab(self):
-        return len(self.vocabs)
-
-    def set_input(self, *field_names, flag=True, use_1st_ins_infer_dim_type=True, ignore_miss_dataset=True):
-        r"""
-        Sets the fields in field_names as input, applying the operation to every dataset in the data_bundle::
-
-            data_bundle.set_input('words', 'seq_len')  # set the input attribute of the words and seq_len fields to True
-            data_bundle.set_input('words', flag=False)  # set the input attribute of the words field to False
-
-        :param str field_names: the field names
-        :param bool flag: set the input state of field_name to flag
-        :param bool use_1st_ins_infer_dim_type: if True, it is not checked that all entries of the column share the
-            same dimension and type; the column's type and dimension are inferred directly from the first row.
-        :param bool ignore_miss_dataset: when a field name does not exist in some dataset, if True that DataSet is
-            simply skipped; if False an error is raised
-        :return: self
-        """
-        for field_name in field_names:
-            for name, dataset in self.datasets.items():
-                if not ignore_miss_dataset and not dataset.has_field(field_name):
-                    raise KeyError(f"Field:{field_name} was not found in DataSet:{name}")
-                if not dataset.has_field(field_name):
-                    continue
-                else:
-                    dataset.set_input(field_name, flag=flag, use_1st_ins_infer_dim_type=use_1st_ins_infer_dim_type)
-        return self
-
-    def set_target(self, *field_names, flag=True, use_1st_ins_infer_dim_type=True, ignore_miss_dataset=True):
-        r"""
-        Sets the fields in field_names as target, applying the operation to every dataset in the data_bundle::
-
-            data_bundle.set_target('target', 'seq_len')  # set the target attribute of the target and seq_len fields to True
-            data_bundle.set_target('target', flag=False)  # set the target attribute of the target field to False
-
-        :param str field_names: the field names
-        :param bool flag: set the target state of field_name to flag
-        :param bool use_1st_ins_infer_dim_type: if True, it is not checked that all entries of the column share the
-            same dimension and type; the column's type and dimension are inferred directly from the first row.
-        :param bool ignore_miss_dataset: when a field name does not exist in some dataset, if True that DataSet is
-            simply skipped; if False an error is raised
-        :return: self
-        """
-        for field_name in field_names:
-            for name, dataset in self.datasets.items():
-                if not ignore_miss_dataset and not dataset.has_field(field_name):
-                    raise KeyError(f"Field:{field_name} was not found in DataSet:{name}")
-                if not dataset.has_field(field_name):
-                    continue
-                else:
-                    dataset.set_target(field_name, flag=flag, use_1st_ins_infer_dim_type=use_1st_ins_infer_dim_type)
-        return self
-    def set_pad_val(self, field_name, pad_val, ignore_miss_dataset=True):
-        r"""
-        Sets the padding value of the Field named field_name to pad_val in every DataSet of the DataBundle.
-
-        :param str field_name:
-        :param int pad_val:
-        :param bool ignore_miss_dataset: when a field name does not exist in some dataset, if True that DataSet is
-            simply skipped; if False an error is raised
-        :return: self
-        """
-        for name, dataset in self.datasets.items():
-            if dataset.has_field(field_name=field_name):
-                dataset.set_pad_val(field_name=field_name, pad_val=pad_val)
-            elif not ignore_miss_dataset:
-                raise KeyError(f"{field_name} not found in DataSet:{name}.")
-        return self
-
-    def set_ignore_type(self, *field_names, flag=True, ignore_miss_dataset=True):
-        r"""
-        Sets ignore_type to flag for the Fields named in field_names in every DataSet of the DataBundle.
-
-        :param str field_names:
-        :param bool flag:
-        :param bool ignore_miss_dataset: when a field name does not exist in some dataset, if True that DataSet is
-            simply skipped; if False an error is raised
-        :return: self
-        """
-        for name, dataset in self.datasets.items():
-            for field_name in field_names:
-                if dataset.has_field(field_name=field_name):
-                    dataset.set_ignore_type(field_name, flag=flag)
-                elif not ignore_miss_dataset:
-                    raise KeyError(f"{field_name} not found in DataSet:{name}.")
-        return self
-
-    def copy_field(self, field_name, new_field_name, ignore_miss_dataset=True):
-        r"""
-        Copies the Field named field_name in every DataSet of the DataBundle and names the copy new_field_name.
-
-        :param str field_name:
-        :param str new_field_name:
-        :param bool ignore_miss_dataset: when a field name does not exist in some dataset, if True that DataSet is
-            simply skipped; if False an error is raised
-        :return: self
-        """
-        for name, dataset in self.datasets.items():
-            if dataset.has_field(field_name=field_name):
-                dataset.copy_field(field_name=field_name, new_field_name=new_field_name)
-            elif not ignore_miss_dataset:
-                raise KeyError(f"{field_name} not found in DataSet:{name}.")
-        return self
-
-    def rename_field(self, field_name, new_field_name, ignore_miss_dataset=True, rename_vocab=True):
-        r"""
-        Renames the field named field_name to new_field_name in every DataSet of the DataBundle.
-
-        :param str field_name:
-        :param str new_field_name:
-        :param bool ignore_miss_dataset: when a field name does not exist in some dataset, if True that DataSet is
-            simply skipped; if False an error is raised
-        :param bool rename_vocab: if the field also exists in vocabs, rename it there accordingly
-        :return: self
-        """
-        for name, dataset in self.datasets.items():
-            if dataset.has_field(field_name=field_name):
-                dataset.rename_field(field_name=field_name, new_field_name=new_field_name)
-            elif not ignore_miss_dataset:
-                raise KeyError(f"{field_name} not found in DataSet:{name}.")
-        if rename_vocab:
-            if field_name in self.vocabs:
-                self.vocabs[new_field_name] = self.vocabs.pop(field_name)
-
-        return self
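Since every mutator above returns self, the calls chain naturally. A small sketch with hypothetical field names:

    >>> data_bundle.rename_field('words', 'tokens') \
    ...            .set_input('tokens', 'seq_len') \
    ...            .set_target('target') \
    ...            .set_pad_val('tokens', 0)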
- - :param str field_name: 需要删除的field的名称 - :param bool ignore_miss_dataset: 当某个field名称在某个dataset不存在时,如果为True,则直接忽略该DataSet; - 如果为False,则报错 - :param bool delete_vocab: 如果该field也在vocabs中存在,将该值也一并删除 - :return: self - """ - for name, dataset in self.datasets.items(): - if dataset.has_field(field_name=field_name): - dataset.delete_field(field_name=field_name) - elif not ignore_miss_dataset: - raise KeyError(f"{field_name} not found DataSet:{name}.") - if delete_vocab: - if field_name in self.vocabs: - self.vocabs.pop(field_name) - return self - - def iter_datasets(self) -> Union[str, DataSet]: - r""" - 迭代data_bundle中的DataSet - - Example:: - - for name, dataset in data_bundle.iter_datasets(): - pass - - :return: - """ - for name, dataset in self.datasets.items(): - yield name, dataset - - def get_dataset_names(self) -> List[str]: - r""" - 返回DataBundle中DataSet的名称 - - :return: - """ - return list(self.datasets.keys()) - - def get_vocab_names(self) -> List[str]: - r""" - 返回DataBundle中Vocabulary的名称 - - :return: - """ - return list(self.vocabs.keys()) - - def iter_vocabs(self) -> Union[str, Vocabulary]: - r""" - 迭代data_bundle中的Vocabulary - - Example:: - - for field_name, vocab in data_bundle.iter_vocabs(): - pass - - :return: - """ - for field_name, vocab in self.vocabs.items(): - yield field_name, vocab - - def apply_field(self, func, field_name: str, new_field_name: str, ignore_miss_dataset=True, **kwargs): - r""" - 对 :class:`~fastNLP.io.DataBundle` 中所有的dataset使用 :meth:`~fastNLP.DataSet.apply_field` 方法 - - :param callable func: input是instance中名为 `field_name` 的field的内容。 - :param str field_name: 传入func的是哪个field。 - :param str new_field_name: 将func返回的内容放入到 `new_field_name` 这个field中,如果名称与已有的field相同,则覆 - 盖之前的field。如果为None则不创建新的field。 - :param bool ignore_miss_dataset: 当某个field名称在某个dataset不存在时,如果为True,则直接忽略该DataSet; - 如果为False,则报错 - :param optional kwargs: 支持输入is_input,is_target,ignore_type - - 1. is_input: bool, 如果为True则将名为 `new_field_name` 的field设置为input - - 2. is_target: bool, 如果为True则将名为 `new_field_name` 的field设置为target - - 3. ignore_type: bool, 如果为True则将名为 `new_field_name` 的field的ignore_type设置为true, 忽略其类型 - - 4. use_tqdm: bool, 是否显示tqdm进度条 - - 5. tqdm_desc: str, 当use_tqdm为True时,可以显示当前tqdm正在处理的名称 - """ - tqdm_desc = kwargs.get('tqdm_desc', '') - for name, dataset in self.datasets.items(): - if tqdm_desc != '': - kwargs['tqdm_desc'] = tqdm_desc + f' for `{name}`' - if dataset.has_field(field_name=field_name): - dataset.apply_field(func=func, field_name=field_name, new_field_name=new_field_name, **kwargs) - elif not ignore_miss_dataset: - raise KeyError(f"{field_name} not found DataSet:{name}.") - return self - - def apply_field_more(self, func, field_name, modify_fields=True, ignore_miss_dataset=True, **kwargs): - r""" - 对 :class:`~fastNLP.io.DataBundle` 中所有的 dataset 使用 :meth:`~fastNLP.DataSet.apply_field_more` 方法 - - .. note:: - ``apply_field_more`` 与 ``apply_field`` 的区别参考 :meth:`fastNLP.DataSet.apply_more` 中关于 ``apply_more`` 与 - ``apply`` 区别的介绍。 - - :param callable func: 参数是 ``DataSet`` 中的 ``Instance`` ,返回值是一个字典,key 是field 的名字,value 是对应的结果 - :param str field_name: 传入func的是哪个field。 - :param bool modify_fields: 是否用结果修改 `DataSet` 中的 `Field`, 默认为 True - :param bool ignore_miss_dataset: 当某个field名称在某个dataset不存在时,如果为True,则直接忽略该DataSet; - 如果为False,则报错 - :param optional kwargs: 支持输入is_input, is_target, ignore_type - - 1. is_input: bool, 如果为True则将被修改的field设置为input - - 2. is_target: bool, 如果为True则将被修改的field设置为target - - 3. ignore_type: bool, 如果为True则将被修改的field的ignore_type设置为true, 忽略其类型 - - 4. use_tqdm: bool, 是否显示tqdm进度条 - - 5.
tqdm_desc: str, 当use_tqdm为True时,可以显示当前tqdm正在处理的名称 - - :return Dict[str:Dict[str:Field]]: 返回一个字典套字典,第一层的 key 是 dataset 的名字,第二层的 key 是 field 的名字 - """ - res = {} - tqdm_desc = kwargs.get('tqdm_desc', '') - for name, dataset in self.datasets.items(): - if tqdm_desc != '': - kwargs['tqdm_desc'] = tqdm_desc + f' for `{name}`' - if dataset.has_field(field_name=field_name): - res[name] = dataset.apply_field_more(func=func, field_name=field_name, modify_fields=modify_fields, **kwargs) - elif not ignore_miss_dataset: - raise KeyError(f"{field_name} not found DataSet:{name} .") - return res - - def apply(self, func, new_field_name: str, **kwargs): - r""" - 对 :class:`~fastNLP.io.DataBundle` 中所有的 dataset 使用 :meth:`~fastNLP.DataSet.apply` 方法 - - 对DataBundle中所有的dataset使用apply方法 - - :param callable func: input是instance中名为 `field_name` 的field的内容。 - :param str new_field_name: 将func返回的内容放入到 `new_field_name` 这个field中,如果名称与已有的field相同,则覆 - 盖之前的field。如果为None则不创建新的field。 - :param optional kwargs: 支持输入is_input,is_target,ignore_type - - 1. is_input: bool, 如果为True则将名为 `new_field_name` 的field设置为input - - 2. is_target: bool, 如果为True则将名为 `new_field_name` 的field设置为target - - 3. ignore_type: bool, 如果为True则将名为 `new_field_name` 的field的ignore_type设置为true, 忽略其类型 - - 4. use_tqdm: bool, 是否显示tqdm进度条 - - 5. tqdm_desc: str, 当use_tqdm为True时,可以显示当前tqdm正在处理的名称 - - """ - tqdm_desc = kwargs.get('tqdm_desc', '') - for name, dataset in self.datasets.items(): - if tqdm_desc != '': - kwargs['tqdm_desc'] = tqdm_desc + f' for `{name}`' - dataset.apply(func, new_field_name=new_field_name, **kwargs) - return self - - def apply_more(self, func, modify_fields=True, **kwargs): - r""" - 对 :class:`~fastNLP.io.DataBundle` 中所有的 dataset 使用 :meth:`~fastNLP.DataSet.apply_more` 方法 - - .. note:: - ``apply_more`` 与 ``apply`` 的区别参考 :meth:`fastNLP.DataSet.apply_more` 中关于 ``apply_more`` 与 - ``apply`` 区别的介绍。 - - :param callable func: 参数是 ``DataSet`` 中的 ``Instance`` ,返回值是一个字典,key 是field 的名字,value 是对应的结果 - :param bool modify_fields: 是否用结果修改 ``DataSet`` 中的 ``Field`` , 默认为 True - :param optional kwargs: 支持输入is_input,is_target,ignore_type - - 1. is_input: bool, 如果为True则将被修改的的field设置为input - - 2. is_target: bool, 如果为True则将被修改的的field设置为target - - 3. ignore_type: bool, 如果为True则将被修改的的field的ignore_type设置为true, 忽略其类型 - - 4. use_tqdm: bool, 是否显示tqdm进度条 - - 5. tqdm_desc: str, 当use_tqdm为True时,可以显示当前tqdm正在处理的名称 - - :return Dict[str:Dict[str:Field]]: 返回一个字典套字典,第一层的 key 是 dataset 的名字,第二层的 key 是 field 的名字 - """ - res = {} - tqdm_desc = kwargs.get('tqdm_desc', '') - for name, dataset in self.datasets.items(): - if tqdm_desc!='': - kwargs['tqdm_desc'] = tqdm_desc + f' for `{name}`' - res[name] = dataset.apply_more(func, modify_fields=modify_fields, **kwargs) - return res - - def add_collate_fn(self, fn, name=None): - r""" - 向所有DataSet增加collate_fn, collate_fn详见 :class:`~fastNLP.DataSet` 中相关说明. 
- - :param callable fn: - :param name: - :return: - """ - for _, dataset in self.datasets.items(): - dataset.add_collate_fn(fn=fn, name=name) - - def delete_collate_fn(self, name=None): - r""" - 删除DataSet中的collate_fn - - :param name: - :return: - """ - for _, dataset in self.datasets.items(): - dataset.delete_collate_fn(name=name) - - def __repr__(self): - _str = '' - if len(self.datasets): - _str += 'In total {} datasets:\n'.format(self.num_dataset) - for name, dataset in self.datasets.items(): - _str += '\t{} has {} instances.\n'.format(name, len(dataset)) - if len(self.vocabs): - _str += 'In total {} vocabs:\n'.format(self.num_vocab) - for name, vocab in self.vocabs.items(): - _str += '\t{} has {} entries.\n'.format(name, len(vocab)) - return _str diff --git a/fastNLP/io/embed_loader.py b/fastNLP/io/embed_loader.py deleted file mode 100644 index 60bf4623..00000000 --- a/fastNLP/io/embed_loader.py +++ /dev/null @@ -1,188 +0,0 @@ -r""" -.. todo:: - doc -""" -__all__ = [ - "EmbedLoader", - "EmbeddingOption", -] - -import logging -import os -import warnings - -import numpy as np - -from ..core.utils import Option -from ..core.vocabulary import Vocabulary - - -class EmbeddingOption(Option): - def __init__(self, - embed_filepath=None, - dtype=np.float32, - normalize=True, - error='ignore'): - super().__init__( - embed_filepath=embed_filepath, - dtype=dtype, - normalize=normalize, - error=error - ) - - -class EmbedLoader: - r""" - 用于读取预训练的embedding, 读取结果可直接载入为模型参数。 - """ - - def __init__(self): - super(EmbedLoader, self).__init__() - - @staticmethod - def load_with_vocab(embed_filepath, vocab, dtype=np.float32, padding='', unknown='', normalize=True, - error='ignore', init_method=None): - r""" - 从embed_filepath这个预训练的词向量中抽取出vocab这个词表的词的embedding。EmbedLoader将自动判断embed_filepath是 - word2vec(第一行只有两个元素)还是glove格式的数据。 - - :param str embed_filepath: 预训练的embedding的路径。 - :param vocab: 词表 :class:`~fastNLP.Vocabulary` 类型,读取出现在vocab中的词的embedding。 - 没有出现在vocab中的词的embedding将通过找到的词的embedding的正态分布采样出来,以使得整个Embedding是同分布的。 - :param dtype: 读出的embedding的类型 - :param str padding: 词表中padding的token - :param str unknown: 词表中unknown的token - :param bool normalize: 是否将每个vector归一化到norm为1 - :param str error: `ignore` , `strict` ; 如果 `ignore` ,错误将自动跳过; 如果 `strict` , 错误将抛出。 - 这里主要可能出错的地方在于词表有空行或者词表出现了维度不一致。 - :param callable init_method: 传入numpy.ndarray, 返回numpy.ndarray, 用以初始化embedding - :return numpy.ndarray: shape为 [len(vocab), dimension], dimension由pretrain的embedding决定。 - """ - assert isinstance(vocab, Vocabulary), "Only fastNLP.Vocabulary is supported." 
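# A minimal usage sketch for load_with_vocab (the file path and words below are
# hypothetical; any GloVe- or word2vec-format text file works, since the format
# is auto-detected from its first line, as implemented right after this point):
#
#     from fastNLP import Vocabulary
#     from fastNLP.io import EmbedLoader
#
#     vocab = Vocabulary()
#     vocab.add_word_lst(["the", "cat", "sat"])
#     matrix = EmbedLoader.load_with_vocab("glove.6B.50d.txt", vocab=vocab)
#     assert matrix.shape == (len(vocab), 50)  # one embedding row per vocab entry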
- if not os.path.exists(embed_filepath): - raise FileNotFoundError("`{}` does not exist.".format(embed_filepath)) - with open(embed_filepath, 'r', encoding='utf-8') as f: - hit_flags = np.zeros(len(vocab), dtype=bool) - line = f.readline().strip() - parts = line.split() - start_idx = 0 - if len(parts) == 2: - dim = int(parts[1]) - start_idx += 1 - else: - dim = len(parts) - 1 - f.seek(0) - matrix = np.random.randn(len(vocab), dim).astype(dtype) - if init_method: - matrix = init_method(matrix) - for idx, line in enumerate(f, start_idx): - try: - parts = line.strip().split() - word = ''.join(parts[:-dim]) - nums = parts[-dim:] - # 对齐unk与pad - if word == padding and vocab.padding is not None: - word = vocab.padding - elif word == unknown and vocab.unknown is not None: - word = vocab.unknown - if word in vocab: - index = vocab.to_index(word) - matrix[index] = np.fromstring(' '.join(nums), sep=' ', dtype=dtype, count=dim) - hit_flags[index] = True - except Exception as e: - if error == 'ignore': - warnings.warn("Error occurred at the {} line.".format(idx)) - else: - logging.error("Error occurred at the {} line.".format(idx)) - raise e - total_hits = sum(hit_flags) - logging.info("Found {} out of {} words in the pre-training embedding.".format(total_hits, len(vocab))) - if init_method is None: - found_vectors = matrix[hit_flags] - if len(found_vectors) != 0: - mean = np.mean(found_vectors, axis=0, keepdims=True) - std = np.std(found_vectors, axis=0, keepdims=True) - unfound_vec_num = len(vocab) - total_hits - r_vecs = np.random.randn(unfound_vec_num, dim).astype(dtype) * std + mean - matrix[hit_flags == False] = r_vecs - - if normalize: - matrix /= np.linalg.norm(matrix, axis=1, keepdims=True) - - return matrix - - @staticmethod - def load_without_vocab(embed_filepath, dtype=np.float32, padding='', unknown='', normalize=True, - error='ignore'): - r""" - 从embed_filepath中读取预训练的word vector。根据预训练的词表读取embedding并生成一个对应的Vocabulary。 - - :param str embed_filepath: 预训练的embedding的路径。 - :param dtype: 读出的embedding的类型 - :param str padding: 词表中的padding的token. 并以此用做vocab的padding。 - :param str unknown: 词表中的unknown的token. 
并以此用做vocab的unknown。 - :param bool normalize: 是否将每个vector归一化到norm为1 - :param str error: `ignore` , `strict` ; 如果 `ignore` ,错误将自动跳过; 如果 `strict` , 错误将抛出。这里主要可能出错的地 - 方在于词表有空行或者词表出现了维度不一致。 - :return (numpy.ndarray, Vocabulary): Embedding的shape是[词表大小+x, 词表维度], "词表大小+x"是由于最终的大小还取决与 - 是否使用padding, 以及unknown有没有在词表中找到对应的词。 Vocabulary中的词的顺序与Embedding的顺序是一一对应的。 - - """ - vocab = Vocabulary(padding=padding, unknown=unknown) - vec_dict = {} - found_unknown = False - found_pad = False - - with open(embed_filepath, 'r', encoding='utf-8') as f: - line = f.readline() - start = 1 - dim = -1 - if len(line.strip().split()) != 2: - f.seek(0) - start = 0 - for idx, line in enumerate(f, start=start): - try: - parts = line.strip().split() - if dim == -1: - dim = len(parts) - 1 - word = ''.join(parts[:-dim]) - nums = parts[-dim:] - vec = np.fromstring(' '.join(nums), sep=' ', dtype=dtype, count=dim) - vec_dict[word] = vec - vocab.add_word(word) - if unknown is not None and unknown == word: - found_unknown = True - if padding is not None and padding == word: - found_pad = True - except Exception as e: - if error == 'ignore': - warnings.warn("Error occurred at the {} line.".format(idx)) - pass - else: - logging.error("Error occurred at the {} line.".format(idx)) - raise e - if dim == -1: - raise RuntimeError("{} is an empty file.".format(embed_filepath)) - matrix = np.random.randn(len(vocab), dim).astype(dtype) - for key, vec in vec_dict.items(): - index = vocab.to_index(key) - matrix[index] = vec - - if ((unknown is not None) and (not found_unknown)) or ((padding is not None) and (not found_pad)): - start_idx = 0 - if padding is not None: - start_idx += 1 - if unknown is not None: - start_idx += 1 - - mean = np.mean(matrix[start_idx:], axis=0, keepdims=True) - std = np.std(matrix[start_idx:], axis=0, keepdims=True) - if (unknown is not None) and (not found_unknown): - matrix[start_idx - 1] = np.random.randn(1, dim).astype(dtype) * std + mean - if (padding is not None) and (not found_pad): - matrix[0] = np.random.randn(1, dim).astype(dtype) * std + mean - - if normalize: - matrix /= np.linalg.norm(matrix, axis=1, keepdims=True) - - return matrix, vocab diff --git a/fastNLP/io/file_reader.py b/fastNLP/io/file_reader.py deleted file mode 100644 index e70440de..00000000 --- a/fastNLP/io/file_reader.py +++ /dev/null @@ -1,136 +0,0 @@ -r"""undocumented -此模块用于给其它模块提供读取文件的函数,没有为用户提供 API -""" - -__all__ = [] - -import json -import csv - -from ..core import logger - - -def _read_csv(path, encoding='utf-8', headers=None, sep=',', dropna=True): - r""" - Construct a generator to read csv items. - - :param path: file path - :param encoding: file's encoding, default: utf-8 - :param headers: file's headers, if None, make file's first line as headers. default: None - :param sep: separator for each column. default: ',' - :param dropna: weather to ignore and drop invalid data, - :if False, raise ValueError when reading invalid data. default: True - :return: generator, every time yield (line number, csv item) - """ - with open(path, 'r', encoding=encoding) as csv_file: - f = csv.reader(csv_file, delimiter=sep) - start_idx = 0 - if headers is None: - headers = next(f) - start_idx += 1 - elif not isinstance(headers, (list, tuple)): - raise TypeError("headers should be list or tuple, not {}." 
\ - .format(type(headers))) - for line_idx, line in enumerate(f, start_idx): - contents = line - if len(contents) != len(headers): - if dropna: - continue - else: - if "" in headers: - raise ValueError(("Line {} has {} parts, while header has {} parts.\n" + - "Please check the empty parts or unnecessary '{}'s in header.") - .format(line_idx, len(contents), len(headers), sep)) - else: - raise ValueError("Line {} has {} parts, while header has {} parts." \ - .format(line_idx, len(contents), len(headers))) - _dict = {} - for header, content in zip(headers, contents): - _dict[header] = content - yield line_idx, _dict - - -def _read_json(path, encoding='utf-8', fields=None, dropna=True): - r""" - Construct a generator to read json items. - - :param path: file path - :param encoding: file's encoding, default: utf-8 - :param fields: json object's fields that needed, if None, all fields are needed. default: None - :param dropna: weather to ignore and drop invalid data, - :if False, raise ValueError when reading invalid data. default: True - :return: generator, every time yield (line number, json item) - """ - if fields: - fields = set(fields) - with open(path, 'r', encoding=encoding) as f: - for line_idx, line in enumerate(f): - data = json.loads(line) - if fields is None: - yield line_idx, data - continue - _res = {} - for k, v in data.items(): - if k in fields: - _res[k] = v - if len(_res) < len(fields): - if dropna: - continue - else: - raise ValueError('invalid instance at line: {}'.format(line_idx)) - yield line_idx, _res - - -def _read_conll(path, encoding='utf-8',sep=None, indexes=None, dropna=True): - r""" - Construct a generator to read conll items. - - :param path: file path - :param encoding: file's encoding, default: utf-8 - :param sep: seperator - :param indexes: conll object's column indexes that needed, if None, all columns are needed. default: None - :param dropna: weather to ignore and drop invalid data, - :if False, raise ValueError when reading invalid data. default: True - :return: generator, every time yield (line number, conll item) - """ - - def parse_conll(sample): - sample = list(map(list, zip(*sample))) - sample = [sample[i] for i in indexes] - for f in sample: - if len(f) <= 0: - raise ValueError('empty field') - return sample - - with open(path, 'r', encoding=encoding) as f: - sample = [] - start = next(f).strip() - if start != '': - sample.append(start.split(sep)) if sep else sample.append(start.split()) - for line_idx, line in enumerate(f, 1): - line = line.strip() - if line == '': - if len(sample): - try: - res = parse_conll(sample) - sample = [] - yield line_idx, res - except Exception as e: - if dropna: - logger.warning('Invalid instance which ends at line: {} has been dropped.'.format(line_idx)) - sample = [] - continue - raise ValueError('Invalid instance which ends at line: {}'.format(line_idx)) - elif line.startswith('#'): - continue - else: - sample.append(line.split(sep)) if sep else sample.append(line.split()) - if len(sample) > 0: - try: - res = parse_conll(sample) - yield line_idx, res - except Exception as e: - if dropna: - return - logger.error('invalid instance ends at line: {}'.format(line_idx)) - raise e diff --git a/fastNLP/io/file_utils.py b/fastNLP/io/file_utils.py deleted file mode 100644 index bbf3de1e..00000000 --- a/fastNLP/io/file_utils.py +++ /dev/null @@ -1,567 +0,0 @@ -r""" -.. 
todo:: - doc -""" - -__all__ = [ - "cached_path", - "get_filepath", - "get_cache_path", - "split_filename_suffix", - "get_from_cache", -] - -import os -import re -import shutil -import tempfile -from pathlib import Path -from urllib.parse import urlparse - -import requests -from requests import HTTPError -from tqdm import tqdm - -from ..core import logger - -PRETRAINED_BERT_MODEL_DIR = { - 'en': 'bert-base-cased.zip', - 'en-large-cased-wwm': 'bert-large-cased-wwm.zip', - 'en-large-uncased-wwm': 'bert-large-uncased-wwm.zip', - - 'en-large-uncased': 'bert-large-uncased.zip', - 'en-large-cased': 'bert-large-cased.zip', - - 'en-base-uncased': 'bert-base-uncased.zip', - 'en-base-cased': 'bert-base-cased.zip', - - 'en-base-cased-mrpc': 'bert-base-cased-finetuned-mrpc.zip', - - 'en-distilbert-base-uncased': 'distilbert-base-uncased.zip', - - 'multi-base-cased': 'bert-base-multilingual-cased.zip', - 'multi-base-uncased': 'bert-base-multilingual-uncased.zip', - - 'cn': 'bert-chinese-wwm.zip', - 'cn-base': 'bert-base-chinese.zip', - 'cn-wwm': 'bert-chinese-wwm.zip', - 'cn-wwm-ext': "bert-chinese-wwm-ext.zip" -} - -PRETRAINED_GPT2_MODEL_DIR = { - 'en': 'gpt2.zip', - 'en-medium': 'gpt2-medium.zip', - 'en-large': 'gpt2-large.zip', - 'en-xl': 'gpt2-xl.zip' -} - -PRETRAINED_ROBERTA_MODEL_DIR = { - 'en': 'roberta-base.zip', - 'en-large': 'roberta-large.zip' -} - -PRETRAINED_ELMO_MODEL_DIR = { - 'en': 'elmo_en_Medium.zip', - 'en-small': "elmo_en_Small.zip", - 'en-original-5.5b': 'elmo_en_Original_5.5B.zip', - 'en-original': 'elmo_en_Original.zip', - 'en-medium': 'elmo_en_Medium.zip' -} - -PRETRAIN_STATIC_FILES = { - 'en': 'glove.840B.300d.zip', - - 'en-glove-6b-50d': 'glove.6B.50d.zip', - 'en-glove-6b-100d': 'glove.6B.100d.zip', - 'en-glove-6b-200d': 'glove.6B.200d.zip', - 'en-glove-6b-300d': 'glove.6B.300d.zip', - 'en-glove-42b-300d': 'glove.42B.300d.zip', - 'en-glove-840b-300d': 'glove.840B.300d.zip', - 'en-glove-twitter-27b-25d': 'glove.twitter.27B.25d.zip', - 'en-glove-twitter-27b-50d': 'glove.twitter.27B.50d.zip', - 'en-glove-twitter-27b-100d': 'glove.twitter.27B.100d.zip', - 'en-glove-twitter-27b-200d': 'glove.twitter.27B.200d.zip', - - 'en-word2vec-300d': "GoogleNews-vectors-negative300.txt.gz", - - 'en-fasttext-wiki': "wiki-news-300d-1M.vec.zip", - 'en-fasttext-crawl': "crawl-300d-2M.vec.zip", - - 'cn': "tencent_cn.zip", - 'cn-tencent': "tencent_cn.zip", - 'cn-fasttext': "cc.zh.300.vec.gz", - 'cn-sgns-literature-word': 'sgns.literature.word.txt.zip', - 'cn-char-fastnlp-100d': "cn_char_fastnlp_100d.zip", - 'cn-bi-fastnlp-100d': "cn_bi_fastnlp_100d.zip", - "cn-tri-fastnlp-100d": "cn_tri_fastnlp_100d.zip" -} - -DATASET_DIR = { - # Classification, English - 'aclImdb': "imdb.zip", - "yelp-review-full": "yelp_review_full.tar.gz", - "yelp-review-polarity": "yelp_review_polarity.tar.gz", - "sst-2": "SST-2.zip", - "sst": "SST.zip", - 'mr': 'mr.zip', - "R8": "R8.zip", - "R52": "R52.zip", - "20ng": "20ng.zip", - "ohsumed": "ohsumed.zip", - - # Classification, Chinese - "chn-senti-corp": "chn_senti_corp.zip", - "weibo-senti-100k": "WeiboSenti100k.zip", - "thuc-news": "THUCNews.zip", - - # Matching, English - "mnli": "MNLI.zip", - "snli": "SNLI.zip", - "qnli": "QNLI.zip", - "rte": "RTE.zip", - - # Matching, Chinese - "cn-xnli": "XNLI.zip", - - # Sequence Labeling, Chinese - "msra-ner": "MSRA_NER.zip", - "peopledaily": "peopledaily.zip", - "weibo-ner": "weibo_NER.zip", - - # Chinese Word Segmentation - "cws-pku": 'cws_pku.zip', - "cws-cityu": "cws_cityu.zip", - "cws-as": 'cws_as.zip', - "cws-msra": 
'cws_msra.zip', - - # Summarization, English - "ext-cnndm": "ext-cnndm.zip", - - # Question & answer, Chinese - "cmrc2018": "cmrc2018.zip" - -} - -PRETRAIN_MAP = {'elmo': PRETRAINED_ELMO_MODEL_DIR, - "bert": PRETRAINED_BERT_MODEL_DIR, - "static": PRETRAIN_STATIC_FILES, - 'gpt2': PRETRAINED_GPT2_MODEL_DIR, - 'roberta': PRETRAINED_ROBERTA_MODEL_DIR} - -# 用于扩展fastNLP的下载 -FASTNLP_EXTEND_DATASET_URL = 'fastnlp_dataset_url.txt' -FASTNLP_EXTEND_EMBEDDING_URL = {'elmo': 'fastnlp_elmo_url.txt', - 'bert':'fastnlp_bert_url.txt', - 'static': 'fastnlp_static_url.txt', - 'gpt2': 'fastnlp_gpt2_url.txt', - 'roberta': 'fastnlp_roberta_url.txt' - } - - -def cached_path(url_or_filename: str, cache_dir: str = None, name=None) -> Path: - r""" - 给定一个url,尝试通过url中的解析出来的文件名字filename到{cache_dir}/{name}/{filename}下寻找这个文件, - - 1. 如果cache_dir=None, 则cache_dir=~/.fastNLP/; 否则cache_dir=cache_dir - 2. 如果name=None, 则没有中间的{name}这一层结构;否者中间结构就为{name} - - 如果有该文件,就直接返回路径 - - 如果没有该文件,则尝试用传入的url下载 - - 或者文件名(可以是具体的文件名,也可以是文件夹),先在cache_dir下寻找该文件是否存在,如果不存在则去下载, 并 - 将文件放入到cache_dir中. - - :param str url_or_filename: 文件的下载url或者文件名称。 - :param str cache_dir: 文件的缓存文件夹。如果为None,将使用"~/.fastNLP"这个默认路径 - :param str name: 中间一层的名称。如embedding, dataset - :return: - """ - if cache_dir is None: - data_cache = Path(get_cache_path()) - else: - data_cache = cache_dir - - if name: - data_cache = os.path.join(data_cache, name) - - parsed = urlparse(url_or_filename) - - if parsed.scheme in ("http", "https"): - # URL, so get it from the cache (downloading if necessary) - return get_from_cache(url_or_filename, Path(data_cache)) - elif parsed.scheme == "" and Path(os.path.join(data_cache, url_or_filename)).exists(): - # File, and it exists. - return Path(os.path.join(data_cache, url_or_filename)) - elif parsed.scheme == "": - # File, but it doesn't exist. 
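# A usage sketch for cached_path (assuming the download mirror referenced above
# is reachable; the URL is the dataset base url plus a filename from DATASET_DIR):
#
#     from fastNLP.io.file_utils import cached_path
#
#     # the first call downloads into {cache_dir}/dataset/ and returns the local
#     # path; later calls just return the cached copy
#     path = cached_path("http://download.fastnlp.top/dataset/SST-2.zip", name="dataset")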
- raise FileNotFoundError("file {} not found in {}.".format(url_or_filename, data_cache)) - else: - # Something unknown - raise ValueError( - "unable to parse {} as a URL or as a local path".format(url_or_filename) - ) - - -def get_filepath(filepath): - r""" - 如果filepath为文件夹, - - 如果内含多个文件, 返回filepath - - 如果只有一个文件, 返回filepath + filename - - 如果filepath为文件 - - 返回filepath - - :param str filepath: 路径 - :return: - """ - if os.path.isdir(filepath): - files = os.listdir(filepath) - if len(files) == 1: - return os.path.join(filepath, files[0]) - else: - return filepath - elif os.path.isfile(filepath): - return filepath - else: - raise FileNotFoundError(f"{filepath} is not a valid file or directory.") - - -def get_cache_path(): - r""" - 获取fastNLP默认cache的存放路径, 如果将FASTNLP_CACHE_PATH设置在了环境变量中,将使用环境变量的值,使得不用每个用户都去下载。 - - :return str: 存放路径 - """ - if 'FASTNLP_CACHE_DIR' in os.environ: - fastnlp_cache_dir = os.environ.get('FASTNLP_CACHE_DIR') - if os.path.isdir(fastnlp_cache_dir): - return fastnlp_cache_dir - else: - raise NotADirectoryError(f"{os.environ['FASTNLP_CACHE_DIR']} is not a directory.") - fastnlp_cache_dir = os.path.expanduser(os.path.join("~", ".fastNLP")) - return fastnlp_cache_dir - - -def _get_base_url(name): - r""" - 根据name返回下载的url地址。 - - :param str name: 支持dataset和embedding两种 - :return: - """ - # 返回的URL结尾必须是/ - environ_name = "FASTNLP_{}_URL".format(name.upper()) - - if environ_name in os.environ: - url = os.environ[environ_name] - if url.endswith('/'): - return url - else: - return url + '/' - else: - URLS = { - 'embedding': "http://download.fastnlp.top/embedding/", - "dataset": "http://download.fastnlp.top/dataset/" - } - if name.lower() not in URLS: - raise KeyError(f"{name} is not recognized.") - return URLS[name.lower()] - - -def _get_embedding_url(embed_type, name): - r""" - 给定embedding类似和名称,返回下载url - - :param str embed_type: 支持static, bert, elmo。即embedding的类型 - :param str name: embedding的名称, 例如en, cn, based等 - :return: str, 下载的url地址 - """ - # 从扩展中寻找下载的url - _filename = FASTNLP_EXTEND_EMBEDDING_URL.get(embed_type, None) - if _filename: - url = _read_extend_url_file(_filename, name) - if url: - return url - embed_map = PRETRAIN_MAP.get(embed_type, None) - if embed_map: - filename = embed_map.get(name, None) - if filename: - url = _get_base_url('embedding') + filename - return url - raise KeyError("There is no {}. Only supports {}.".format(name, list(embed_map.keys()))) - else: - raise KeyError(f"There is no {embed_type}. 
Only supports bert, elmo, static, gpt2, roberta") - -def _read_extend_url_file(filename, name)->str: - r""" - filename中的内容使用制表符隔开,第一列是名称,第二列是下载的url地址 - - :param str filename: 在默认的路径下寻找file这个文件 - :param str name: 需要寻找的资源的名称 - :return: str,None - """ - cache_dir = get_cache_path() - filepath = os.path.join(cache_dir, filename) - if os.path.exists(filepath): - with open(filepath, 'r', encoding='utf-8') as f: - for line in f: - line = line.strip() - if line: - parts = line.split('\t') - if len(parts) == 2: - if name == parts[0]: - return parts[1] - return None - - -def _get_dataset_url(name, dataset_dir: dict = None): - r""" - 给定dataset的名称,返回下载url - - :param str name: 给定dataset的名称,比如imdb, sst-2等 - :return: str - """ - # 从扩展中寻找下载的url - url = _read_extend_url_file(FASTNLP_EXTEND_DATASET_URL, name) - if url: - return url - - dataset_dir = DATASET_DIR if dataset_dir is None else dataset_dir - filename = dataset_dir.get(name, None) - if filename: - url = _get_base_url('dataset') + filename - return url - else: - raise KeyError(f"There is no {name}.") - - -def split_filename_suffix(filepath): - r""" - 给定filepath 返回对应的name和suffix. 如果后缀是多个点,仅支持.tar.gz类型 - - :param filepath: 文件路径 - :return: filename, suffix - """ - filename = os.path.basename(filepath) - if filename.endswith('.tar.gz'): - return filename[:-7], '.tar.gz' - return os.path.splitext(filename) - - -def get_from_cache(url: str, cache_dir: Path = None) -> Path: - r""" - 尝试在cache_dir中寻找url定义的资源; 如果没有找到; 则从url下载并将结果放在cache_dir下,缓存的名称由url的结果推断而来。会将下载的 - 文件解压,将解压后的文件全部放在cache_dir文件夹中。 - - 如果从url中下载的资源解压后有多个文件,则返回目录的路径; 如果只有一个资源文件,则返回具体的路径。 - - :param url: 资源的 url - :param cache_dir: cache 目录 - :return: 路径 - """ - cache_dir.mkdir(parents=True, exist_ok=True) - - filename = re.sub(r".+/", "", url) - dir_name, suffix = split_filename_suffix(filename) - - # 寻找与它名字匹配的内容, 而不关心后缀 - match_dir_name = match_file(dir_name, cache_dir) - if match_dir_name: - dir_name = match_dir_name - cache_path = cache_dir / dir_name - - # get cache path to put the file - if cache_path.exists(): - return get_filepath(cache_path) - - # make HEAD request to check ETag TODO ETag可以用来判断资源是否已经更新了,之后需要加上 - # response = requests.head(url, headers={"User-Agent": "fastNLP"}) - # if response.status_code != 200: - # raise IOError( - # f"HEAD request failed for url {url} with status code {response.status_code}." - # ) - - # add ETag to filename if it exists - # etag = response.headers.get("ETag") - - if not cache_path.exists(): - # Download to temporary file, then copy to cache dir once finished. - # Otherwise you get corrupt cache entries if the download gets interrupted. 
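# To make the cache layout concrete, the helpers above behave as follows (the
# return values follow directly from split_filename_suffix / get_from_cache):
#
#     split_filename_suffix("SST-2.zip")       # -> ("SST-2", ".zip")
#     split_filename_suffix("model.tar.gz")    # -> ("model", ".tar.gz")
#     # so a url ending in SST-2.zip is uncompressed and cached under
#     # {cache_dir}/SST-2/, and later lookups hit that directory via match_file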
- # GET file object - req = requests.get(url, stream=True, headers={"User-Agent": "fastNLP"}) - if req.status_code == 200: - success = False - fd, temp_filename = tempfile.mkstemp() - uncompress_temp_dir = None - try: - content_length = req.headers.get("Content-Length") - total = int(content_length) if content_length is not None else None - progress = tqdm(unit="B", total=total, unit_scale=1) - logger.info("%s not found in cache, downloading to %s" % (url, temp_filename)) - - with open(temp_filename, "wb") as temp_file: - for chunk in req.iter_content(chunk_size=1024 * 16): - if chunk: # filter out keep-alive new chunks - progress.update(len(chunk)) - temp_file.write(chunk) - progress.close() - logger.info(f"Finish download from {url}") - - # 开始解压 - if suffix in ('.zip', '.tar.gz', '.gz'): - uncompress_temp_dir = tempfile.mkdtemp() - logger.debug(f"Start to uncompress file to {uncompress_temp_dir}") - if suffix == '.zip': - unzip_file(Path(temp_filename), Path(uncompress_temp_dir)) - elif suffix == '.gz': - ungzip_file(temp_filename, uncompress_temp_dir, dir_name) - else: - untar_gz_file(Path(temp_filename), Path(uncompress_temp_dir)) - filenames = os.listdir(uncompress_temp_dir) - if len(filenames) == 1: - if os.path.isdir(os.path.join(uncompress_temp_dir, filenames[0])): - uncompress_temp_dir = os.path.join(uncompress_temp_dir, filenames[0]) - - cache_path.mkdir(parents=True, exist_ok=True) - logger.debug("Finish un-compressing file.") - else: - uncompress_temp_dir = temp_filename - cache_path = str(cache_path) + suffix - - # 复制到指定的位置 - logger.info(f"Copy file to {cache_path}") - if os.path.isdir(uncompress_temp_dir): - for filename in os.listdir(uncompress_temp_dir): - if os.path.isdir(os.path.join(uncompress_temp_dir, filename)): - shutil.copytree(os.path.join(uncompress_temp_dir, filename), cache_path / filename) - else: - shutil.copyfile(os.path.join(uncompress_temp_dir, filename), cache_path / filename) - else: - shutil.copyfile(uncompress_temp_dir, cache_path) - success = True - except Exception as e: - logger.error(e) - raise e - finally: - if not success: - if cache_path.exists(): - if cache_path.is_file(): - os.remove(cache_path) - else: - shutil.rmtree(cache_path) - os.close(fd) - os.remove(temp_filename) - if uncompress_temp_dir is None: - pass - elif os.path.isdir(uncompress_temp_dir): - shutil.rmtree(uncompress_temp_dir) - elif os.path.isfile(uncompress_temp_dir): - os.remove(uncompress_temp_dir) - return get_filepath(cache_path) - else: - raise HTTPError(f"Status code:{req.status_code}. Fail to download from {url}.") - - -def unzip_file(file: Path, to: Path): - # unpack and write out in CoNLL column-like format - from zipfile import ZipFile - - with ZipFile(file, "r") as zipObj: - # Extract all the contents of zip file in current directory - zipObj.extractall(to) - - -def untar_gz_file(file: Path, to: Path): - import tarfile - - with tarfile.open(file, 'r:gz') as tar: - tar.extractall(to) - - -def ungzip_file(file: str, to: str, filename:str): - import gzip - - g_file = gzip.GzipFile(file) - with open(os.path.join(to, filename), 'wb+') as f: - f.write(g_file.read()) - g_file.close() - - -def match_file(dir_name: str, cache_dir: Path) -> str: - r""" - 匹配的原则是: 在cache_dir下的文件与dir_name完全一致, 或除了后缀以外和dir_name完全一致。 - 如果找到了两个匹配的结果将报错. 
如果找到了则返回匹配的文件的名称; 没有找到返回空字符串 - - :param dir_name: 需要匹配的名称 - :param cache_dir: 在该目录下找匹配dir_name是否存在 - :return str: 做为匹配结果的字符串 - """ - files = os.listdir(cache_dir) - matched_filenames = [] - for file_name in files: - if re.match(dir_name + '$', file_name) or re.match(dir_name + '\\..*', file_name): - matched_filenames.append(file_name) - if len(matched_filenames) == 0: - return '' - elif len(matched_filenames) == 1: - return matched_filenames[-1] - else: - raise RuntimeError(f"Duplicate matched files:{matched_filenames}, this should be caused by a bug.") - - -def _get_bert_dir(model_dir_or_name: str = 'en-base-uncased'): - if model_dir_or_name.lower() in PRETRAINED_BERT_MODEL_DIR: - model_url = _get_embedding_url('bert', model_dir_or_name.lower()) - model_dir = cached_path(model_url, name='embedding') - # 检查是否存在 - elif os.path.isdir(os.path.abspath(os.path.expanduser(model_dir_or_name))): - model_dir = os.path.abspath(os.path.expanduser(model_dir_or_name)) - else: - logger.error(f"Cannot recognize BERT dir or name ``{model_dir_or_name}``.") - raise ValueError(f"Cannot recognize BERT dir or name ``{model_dir_or_name}``.") - return str(model_dir) - - -def _get_gpt2_dir(model_dir_or_name: str = 'en'): - if model_dir_or_name.lower() in PRETRAINED_GPT2_MODEL_DIR: - model_url = _get_embedding_url('gpt2', model_dir_or_name.lower()) - model_dir = cached_path(model_url, name='embedding') - # 检查是否存在 - elif os.path.isdir(os.path.abspath(os.path.expanduser(model_dir_or_name))): - model_dir = os.path.abspath(os.path.expanduser(model_dir_or_name)) - else: - logger.error(f"Cannot recognize GPT2 dir or name ``{model_dir_or_name}``.") - raise ValueError(f"Cannot recognize GPT2 dir or name ``{model_dir_or_name}``.") - return str(model_dir) - - -def _get_roberta_dir(model_dir_or_name: str = 'en'): - if model_dir_or_name.lower() in PRETRAINED_ROBERTA_MODEL_DIR: - model_url = _get_embedding_url('roberta', model_dir_or_name.lower()) - model_dir = cached_path(model_url, name='embedding') - # 检查是否存在 - elif os.path.isdir(os.path.abspath(os.path.expanduser(model_dir_or_name))): - model_dir = os.path.abspath(os.path.expanduser(model_dir_or_name)) - else: - logger.error(f"Cannot recognize RoBERTa dir or name ``{model_dir_or_name}``.") - raise ValueError(f"Cannot recognize RoBERTa dir or name ``{model_dir_or_name}``.") - return str(model_dir) - - -def _get_file_name_base_on_postfix(dir_path, postfix): - r""" - 在dir_path中寻找后缀为postfix的文件. - :param dir_path: str, 文件夹 - :param postfix: 形如".bin", ".json"等 - :return: str,文件的路径 - """ - files = list(filter(lambda filename: filename.endswith(postfix), os.listdir(os.path.join(dir_path)))) - if len(files) == 0: - raise FileNotFoundError(f"There is no file endswith {postfix} file in {dir_path}") - elif len(files) > 1: - raise FileExistsError(f"There are multiple *{postfix} files in {dir_path}") - return os.path.join(dir_path, files[0]) diff --git a/fastNLP/io/loader/__init__.py b/fastNLP/io/loader/__init__.py deleted file mode 100644 index b547ce37..00000000 --- a/fastNLP/io/loader/__init__.py +++ /dev/null @@ -1,105 +0,0 @@ -r""" -Loader用于读取数据,并将内容读取到 :class:`~fastNLP.DataSet` 或者 :class:`~fastNLP.io.DataBundle` 中。所有的Loader都支持以下的 -三个方法: ``__init__`` , ``_load`` , ``loads`` . 
其中 ``__init__(...)`` 用于申明读取参数,以及说明该Loader支持的数据格式, -读取后 :class:`~fastNLP.DataSet` 中的 `field` ; ``_load(path)`` 方法传入文件路径读取单个文件,并返回 :class:`~fastNLP.DataSet` ; -``load(paths)`` 用于读取文件夹下的文件,并返回 :class:`~fastNLP.io.DataBundle` 类型的对象 , load()方法支持以下几种类型的参数: - -0.传入None - 将尝试自动下载数据集并缓存。但不是所有的数据都可以直接下载。 - -1.传入一个文件的 path - 返回的 `data_bundle` 包含一个名为 `train` 的 dataset ,可以通过 ``data_bundle.get_dataset('train')`` 获取 - -2.传入一个文件夹目录 - 将读取的是这个文件夹下文件名中包含 `train` , `test` , `dev` 的文件,其它文件会被忽略。假设某个目录下的文件为:: - - | - +-train.txt - +-dev.txt - +-test.txt - +-other.txt - - 在 Loader().load('/path/to/dir') 返回的 `data_bundle` 中可以用 ``data_bundle.get_dataset('train')`` , - ``data_bundle.get_dataset('dev')`` , - ``data_bundle.get_dataset('test')`` 获取对应的 `dataset` ,其中 `other.txt` 的内容会被忽略。假设某个目录下的文件为:: - - | - +-train.txt - +-dev.txt - - 在 Loader().load('/path/to/dir') 返回的 `data_bundle` 中可以用 ``data_bundle.get_dataset('train')`` , - ``data_bundle.get_dataset('dev')`` 获取对应的 dataset。 - -3.传入一个字典 - 字典的的 key 为 `dataset` 的名称,value 是该 `dataset` 的文件路径:: - - paths = {'train':'/path/to/train', 'dev': '/path/to/dev', 'test':'/path/to/test'} - - 在 Loader().load(paths) 返回的 `data_bundle` 中可以用 ``data_bundle.get_dataset('train')`` , ``data_bundle.get_dataset('dev')`` , - ``data_bundle.get_dataset('test')`` 来获取对应的 `dataset` - -fastNLP 目前提供了如下的 Loader - - - -""" - -__all__ = [ - 'Loader', - - 'CLSBaseLoader', - 'YelpFullLoader', - 'YelpPolarityLoader', - 'AGsNewsLoader', - 'DBPediaLoader', - 'IMDBLoader', - 'SSTLoader', - 'SST2Loader', - "ChnSentiCorpLoader", - "THUCNewsLoader", - "WeiboSenti100kLoader", - "MRLoader", - "R8Loader", "R52Loader", "OhsumedLoader", "NG20Loader", - - 'ConllLoader', - 'Conll2003Loader', - 'Conll2003NERLoader', - 'OntoNotesNERLoader', - 'CTBLoader', - "MsraNERLoader", - "PeopleDailyNERLoader", - "WeiboNERLoader", - - 'CSVLoader', - 'JsonLoader', - - 'CWSLoader', - - 'MNLILoader', - "QuoraLoader", - "SNLILoader", - "QNLILoader", - "RTELoader", - "CNXNLILoader", - "BQCorpusLoader", - "LCQMCLoader", - - "CoReferenceLoader", - - "CMRC2018Loader" -] -from .classification import CLSBaseLoader, YelpFullLoader, YelpPolarityLoader, AGsNewsLoader, IMDBLoader, \ - SSTLoader, SST2Loader, DBPediaLoader, \ - ChnSentiCorpLoader, THUCNewsLoader, WeiboSenti100kLoader,\ - MRLoader, R8Loader, R52Loader, OhsumedLoader, NG20Loader -from .conll import ConllLoader, Conll2003Loader, Conll2003NERLoader, OntoNotesNERLoader, CTBLoader -from .conll import MsraNERLoader, PeopleDailyNERLoader, WeiboNERLoader -from .coreference import CoReferenceLoader -from .csv import CSVLoader -from .cws import CWSLoader -from .json import JsonLoader -from .loader import Loader -from .matching import MNLILoader, QuoraLoader, SNLILoader, QNLILoader, RTELoader, CNXNLILoader, BQCorpusLoader, \ - LCQMCLoader -from .qa import CMRC2018Loader - diff --git a/fastNLP/io/loader/classification.py b/fastNLP/io/loader/classification.py deleted file mode 100644 index 7f7a2667..00000000 --- a/fastNLP/io/loader/classification.py +++ /dev/null @@ -1,640 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "CLSBaseLoader", - "YelpFullLoader", - "YelpPolarityLoader", - "AGsNewsLoader", - "DBPediaLoader", - "IMDBLoader", - "SSTLoader", - "SST2Loader", - "ChnSentiCorpLoader", - "THUCNewsLoader", - "WeiboSenti100kLoader", - - "MRLoader", - "R8Loader", - "R52Loader", - "OhsumedLoader", - "NG20Loader", -] - - -import glob -import os -import random -import shutil -import time -import warnings - -from .loader import Loader -from ...core.dataset import DataSet -from ...core.instance import 
Instance -from ...core._logger import logger - - -class CLSBaseLoader(Loader): - r""" - 文本分类Loader的一个基类 - - 原始数据中内容应该为, 每一行为一个sample,第一个逗号之前为target,第一个逗号之后为文本内容。 - - Example:: - - "1","I got 'new' tires from the..." - "1","Don't waste your time..." - - 读取的DataSet将具备以下的数据结构 - - .. csv-table:: - :header: "raw_words", "target" - - "I got 'new' tires from them and... ", "1" - "Don't waste your time. We had two...", "1" - "...", "..." - - """ - - def __init__(self, sep=',', has_header=False): - super().__init__() - self.sep = sep - self.has_header = has_header - - def _load(self, path: str): - ds = DataSet() - try: - with open(path, 'r', encoding='utf-8') as f: - read_header = self.has_header - for line in f: - if read_header: - read_header = False - continue - line = line.strip() - sep_index = line.index(self.sep) - target = line[:sep_index] - raw_words = line[sep_index + 1:] - if target.startswith("\""): - target = target[1:] - if target.endswith("\""): - target = target[:-1] - if raw_words.endswith("\""): - raw_words = raw_words[:-1] - if raw_words.startswith('"'): - raw_words = raw_words[1:] - raw_words = raw_words.replace('""', '"') # 替换双引号 - if raw_words: - ds.append(Instance(raw_words=raw_words, target=target)) - except Exception as e: - logger.error(f'Load file `{path}` failed for `{e}`') - return ds - - -def _split_dev(dataset_name, data_dir, dev_ratio=0.0, re_download=False, suffix='csv'): - if dev_ratio == 0.0: - return data_dir - modify_time = 0 - for filepath in glob.glob(os.path.join(data_dir, '*')): - modify_time = os.stat(filepath).st_mtime - break - if time.time() - modify_time > 1 and re_download: # 通过这种比较丑陋的方式判断一下文件是否是才下载的 - shutil.rmtree(data_dir) - data_dir = Loader()._get_dataset_path(dataset_name=dataset_name) - - if not os.path.exists(os.path.join(data_dir, f'dev.{suffix}')): - if dev_ratio > 0: - assert 0 < dev_ratio < 1, "dev_ratio should be in range (0,1)." - try: - with open(os.path.join(data_dir, f'train.{suffix}'), 'r', encoding='utf-8') as f, \ - open(os.path.join(data_dir, f'middle_file.{suffix}'), 'w', encoding='utf-8') as f1, \ - open(os.path.join(data_dir, f'dev.{suffix}'), 'w', encoding='utf-8') as f2: - for line in f: - if random.random() < dev_ratio: - f2.write(line) - else: - f1.write(line) - os.remove(os.path.join(data_dir, f'train.{suffix}')) - os.renames(os.path.join(data_dir, f'middle_file.{suffix}'), os.path.join(data_dir, f'train.{suffix}')) - finally: - if os.path.exists(os.path.join(data_dir, f'middle_file.{suffix}')): - os.remove(os.path.join(data_dir, f'middle_file.{suffix}')) - - return data_dir - - -class AGsNewsLoader(CLSBaseLoader): - def download(self): - r""" - 自动下载数据集,如果你使用了这个数据集,请引用以下的文章 - - Xiang Zhang, Junbo Zhao, Yann LeCun. Character-level Convolutional Networks for Text Classification. Advances - in Neural Information Processing Systems 28 (NIPS 2015) - - :return: str, 数据集的目录地址 - """ - return self._get_dataset_path(dataset_name='ag-news') - - -class DBPediaLoader(CLSBaseLoader): - def download(self, dev_ratio: float = 0.0, re_download: bool = False): - r""" - 自动下载数据集,如果你使用了这个数据集,请引用以下的文章 - - Xiang Zhang, Junbo Zhao, Yann LeCun. Character-level Convolutional Networks for Text Classification. Advances - in Neural Information Processing Systems 28 (NIPS 2015) - - 如果dev_ratio不等于0,则根据dev_ratio的值随机将train中的数据取出一部分作为dev数据。 - 下载完成后在output_dir中有train.csv, test.csv, dev.csv三个文件。否则只有train.csv和test.csv - - :param float dev_ratio: 如果路径中没有dev集,从train划分多少作为dev的数据. 
如果为0,则不划分dev。 - :param bool re_download: 是否重新下载数据,以重新切分数据。 - :return: str, 数据集的目录地址 - """ - dataset_name = 'dbpedia' - data_dir = self._get_dataset_path(dataset_name=dataset_name) - data_dir = _split_dev(dataset_name=dataset_name, - data_dir=data_dir, - dev_ratio=dev_ratio, - re_download=re_download, - suffix='csv') - return data_dir - - -class IMDBLoader(CLSBaseLoader): - r""" - 原始数据中内容应该为, 每一行为一个sample,制表符之前为target,制表符之后为文本内容。 - - Example:: - - neg Alan Rickman & Emma... - neg I have seen this... - - IMDBLoader读取后的数据将具有以下两列内容: raw_words: str, 需要分类的文本; target: str, 文本的标签 - 读取的DataSet具备以下的结构: - - .. csv-table:: - :header: "raw_words", "target" - - "Alan Rickman & Emma... ", "neg" - "I have seen this... ", "neg" - "...", "..." - - """ - def __init__(self): - super().__init__(sep='\t') - - def download(self, dev_ratio: float = 0.0, re_download=False): - r""" - 自动下载数据集,如果你使用了这个数据集,请引用以下的文章 - - http://www.aclweb.org/anthology/P11-1015 - - 根据dev_ratio的值随机将train中的数据取出一部分作为dev数据。下载完成后不从train中切分dev - - :param float dev_ratio: 如果路径中没有dev.txt。从train划分多少作为dev的数据. 如果为0,则不划分dev - :param bool re_download: 是否重新下载数据,以重新切分数据。 - :return: str, 数据集的目录地址 - """ - dataset_name = 'aclImdb' - data_dir = self._get_dataset_path(dataset_name=dataset_name) - data_dir = _split_dev(dataset_name=dataset_name, - data_dir=data_dir, - dev_ratio=dev_ratio, - re_download=re_download, - suffix='txt') - return data_dir - - -class SSTLoader(Loader): - r""" - 原始数据中内容应该为: - - Example:: - - (2 (3 (3 Effective) (2 but)) (1 (1 too-tepid)... - (3 (3 (2 If) (3 (2 you) (3 (2 sometimes)... - - 读取之后的DataSet具有以下的结构 - - .. csv-table:: 下面是使用SSTLoader读取的DataSet所具备的field - :header: "raw_words" - - "(2 (3 (3 Effective) (2 but)) (1 (1 too-tepid)..." - "(3 (3 (2 If) (3 (2 you) (3 (2 sometimes) ..." - "..." - - raw_words列是str。 - - """ - - def __init__(self): - super().__init__() - - def _load(self, path: str): - r""" - 从path读取SST文件 - - :param str path: 文件路径 - :return: DataSet - """ - ds = DataSet() - with open(path, 'r', encoding='utf-8') as f: - for line in f: - line = line.strip() - if line: - ds.append(Instance(raw_words=line)) - return ds - - def download(self): - r""" - 自动下载数据集,如果你使用了这个数据集,请引用以下的文章 - - https://nlp.stanford.edu/~socherr/EMNLP2013_RNTN.pdf - - :return: str, 数据集的目录地址 - """ - output_dir = self._get_dataset_path(dataset_name='sst') - return output_dir - - -class YelpFullLoader(CLSBaseLoader): - def download(self, dev_ratio: float = 0.0, re_download: bool = False): - r""" - 自动下载数据集,如果你使用了这个数据集,请引用以下的文章 - - Xiang Zhang, Junbo Zhao, Yann LeCun. Character-level Convolutional Networks for Text Classification. Advances - in Neural Information Processing Systems 28 (NIPS 2015) - - 如果dev_ratio不等于0,则根据dev_ratio的值随机将train中的数据取出一部分作为dev数据。 - 下载完成后在output_dir中有train.csv, test.csv, dev.csv三个文件。否则只有train.csv和test.csv - - :param float dev_ratio: 如果路径中没有dev集,从train划分多少作为dev的数据. 如果为0,则不划分dev。 - :param bool re_download: 是否重新下载数据,以重新切分数据。 - :return: str, 数据集的目录地址 - """ - dataset_name = 'yelp-review-full' - data_dir = self._get_dataset_path(dataset_name=dataset_name) - data_dir = _split_dev(dataset_name=dataset_name, - data_dir=data_dir, - dev_ratio=dev_ratio, - re_download=re_download, - suffix='csv') - return data_dir - - -class YelpPolarityLoader(CLSBaseLoader): - def download(self, dev_ratio: float = 0.0, re_download: bool = False): - r""" - 自动下载数据集,如果你使用了这个数据集,请引用以下的文章 - - Xiang Zhang, Junbo Zhao, Yann LeCun. Character-level Convolutional Networks for Text Classification. 
Advances - in Neural Information Processing Systems 28 (NIPS 2015) - - 如果dev_ratio不等于0,则根据dev_ratio的值随机将train中的数据取出一部分作为dev数据。 - 下载完成后在output_dir中有train.csv, test.csv, dev.csv三个文件。否则只有train.csv和test.csv - - :param float dev_ratio: 如果路径中没有dev集,从train划分多少作为dev的数据. 如果为0,则不划分dev。 - :param bool re_download: 是否重新下载数据,以重新切分数据。 - :return: str, 数据集的目录地址 - """ - dataset_name = 'yelp-review-polarity' - data_dir = self._get_dataset_path(dataset_name=dataset_name) - data_dir = _split_dev(dataset_name=dataset_name, - data_dir=data_dir, - dev_ratio=dev_ratio, - re_download=re_download, - suffix='csv') - return data_dir - - -class SST2Loader(Loader): - r""" - 原始数据中内容为:第一行为标题(具体内容会被忽略),之后一行为一个sample,第一个制表符之前被认为是句子,第一个制表符之后认为是label - - Example:: - - sentence label - it 's a charming and often affecting journey . 1 - unflinchingly bleak and desperate 0 - - 读取之后DataSet将如下所示 - - .. csv-table:: - :header: "raw_words", "target" - - "it 's a charming and often affecting journey .", "1" - "unflinchingly bleak and desperate", "0" - "..." - - test的DataSet没有target列。 - """ - - def __init__(self): - super().__init__() - - def _load(self, path: str): - r"""从path读取SST2文件 - - :param str path: 数据路径 - :return: DataSet - """ - ds = DataSet() - - with open(path, 'r', encoding='utf-8') as f: - f.readline() # 跳过header - if 'test' in os.path.split(path)[1]: - warnings.warn("SST2's test file has no target.") - for line in f: - line = line.strip() - if line: - sep_index = line.index('\t') - raw_words = line[sep_index + 1:] - index = int(line[: sep_index]) - if raw_words: - ds.append(Instance(raw_words=raw_words, index=index)) - else: - for line in f: - line = line.strip() - if line: - raw_words = line[:-2] - target = line[-1] - if raw_words: - ds.append(Instance(raw_words=raw_words, target=target)) - return ds - - def download(self): - r""" - 自动下载数据集,如果你使用了该数据集,请引用以下的文章 - https://nlp.stanford.edu/pubs/SocherBauerManningNg_ACL2013.pdf - :return: - """ - output_dir = self._get_dataset_path(dataset_name='sst-2') - return output_dir - - -class ChnSentiCorpLoader(Loader): - r""" - 支持读取的数据的格式为,第一行为标题(具体内容会被忽略),之后一行为一个sample,第一个制表符之前被认为是label,第 - 一个制表符之后认为是句子 - - Example:: - - label text_a - 1 基金痛所有投资项目一样,必须先要有所了解... - 1 系统很好装,LED屏是不错,就是16比9的比例... - - 读取后的DataSet具有以下的field - - .. csv-table:: - :header: "raw_chars", "target" - - "基金痛所有投资项目一样,必须先要有所了解...", "1" - "系统很好装,LED屏是不错,就是16比9的比例...", "1" - "..." - - """ - def __init__(self): - super().__init__() - - def _load(self, path: str): - r""" - 从path中读取数据 - - :param path: - :return: - """ - ds = DataSet() - with open(path, 'r', encoding='utf-8') as f: - f.readline() - for line in f: - line = line.strip() - tab_index = line.index('\t') - if tab_index != -1: - target = line[:tab_index] - raw_chars = line[tab_index + 1:] - if raw_chars: - ds.append(Instance(raw_chars=raw_chars, target=target)) - return ds - - def download(self) -> str: - r""" - 自动下载数据,该数据取自https://github.com/pengming617/bert_classification/tree/master/data,在 - https://arxiv.org/pdf/1904.09223.pdf与https://arxiv.org/pdf/1906.08101.pdf有使用 - - :return: - """ - output_dir = self._get_dataset_path('chn-senti-corp') - return output_dir - - -class THUCNewsLoader(Loader): - r""" - 数据集简介:document-level分类任务,新闻10分类 - 原始数据内容为:每行一个sample,第一个 "\\t" 之前为target,第一个 "\\t" 之后为raw_words - - Example:: - - 体育 调查-您如何评价热火客场胜绿军总分3-1夺赛点?... - - 读取后的Dataset将具有以下数据结构: - - .. csv-table:: - :header: "raw_words", "target" - - "调查-您如何评价热火客场胜绿军总分3-1夺赛点?...", "体育" - "...", "..." 
- - """ - - def __init__(self): - super(THUCNewsLoader, self).__init__() - - def _load(self, path: str = None): - ds = DataSet() - with open(path, 'r', encoding='utf-8') as f: - for line in f: - line = line.strip() - sep_index = line.index('\t') - raw_chars = line[sep_index + 1:] - target = line[:sep_index] - if raw_chars: - ds.append(Instance(raw_chars=raw_chars, target=target)) - return ds - - def download(self) -> str: - r""" - 自动下载数据,该数据取自 - - http://thuctc.thunlp.org/#%E4%B8%AD%E6%96%87%E6%96%87%E6%9C%AC%E5%88%86%E7%B1%BB%E6%95%B0%E6%8D%AE%E9%9B%86THUCNews - - :return: - """ - output_dir = self._get_dataset_path('thuc-news') - return output_dir - - -class WeiboSenti100kLoader(Loader): - r""" - 别名: - 数据集简介:微博sentiment classification,二分类 - - Example:: - - label text - 1 多谢小莲,好运满满[爱你] - 1 能在他乡遇老友真不赖,哈哈,珠儿,我也要用... - - 读取后的Dataset将具有以下数据结构: - - .. csv-table:: - :header: "raw_chars", "target" - - "多谢小莲,好运满满[爱你]", "1" - "能在他乡遇老友真不赖,哈哈,珠儿,我也要用...", "1" - "...", "..." - - """ - - def __init__(self): - super(WeiboSenti100kLoader, self).__init__() - - def _load(self, path: str = None): - ds = DataSet() - with open(path, 'r', encoding='utf-8') as f: - next(f) - for line in f: - line = line.strip() - target = line[0] - raw_chars = line[1:] - if raw_chars: - ds.append(Instance(raw_chars=raw_chars, target=target)) - return ds - - def download(self) -> str: - r""" - 自动下载数据,该数据取自 https://github.com/SophonPlus/ChineseNlpCorpus/ - 在 https://arxiv.org/abs/1906.08101 有使用 - :return: - """ - output_dir = self._get_dataset_path('weibo-senti-100k') - return output_dir - -class MRLoader(CLSBaseLoader): - def __init__(self): - super(MRLoader, self).__init__() - - def download(self, dev_ratio: float = 0.0, re_download: bool = False) -> str: - r""" - 自动下载数据集 - - 如果dev_ratio不等于0,则根据dev_ratio的值随机将train中的数据取出一部分作为dev数据。 - 下载完成后在output_dir中有train.csv, test.csv, dev.csv三个文件。否则只有train.csv和test.csv - - :param float dev_ratio: 如果路径中没有dev集,从train划分多少作为dev的数据. 如果为0,则不划分dev。 - :param bool re_download: 是否重新下载数据,以重新切分数据。 - :return: str, 数据集的目录地址 - """ - dataset_name = r'mr' - data_dir = self._get_dataset_path(dataset_name=dataset_name) - data_dir = _split_dev(dataset_name=dataset_name, - data_dir=data_dir, - dev_ratio=dev_ratio, - re_download=re_download, - suffix='csv') - return data_dir - -class R8Loader(CLSBaseLoader): - def __init__(self): - super(R8Loader, self).__init__() - - def download(self, dev_ratio: float = 0.0, re_download: bool = False) -> str: - r""" - 自动下载数据集 - - 如果dev_ratio不等于0,则根据dev_ratio的值随机将train中的数据取出一部分作为dev数据。 - 下载完成后在output_dir中有train.csv, test.csv, dev.csv三个文件。否则只有train.csv和test.csv - - :param float dev_ratio: 如果路径中没有dev集,从train划分多少作为dev的数据. 如果为0,则不划分dev。 - :param bool re_download: 是否重新下载数据,以重新切分数据。 - :return: str, 数据集的目录地址 - """ - dataset_name = r'R8' - data_dir = self._get_dataset_path(dataset_name=dataset_name) - data_dir = _split_dev(dataset_name=dataset_name, - data_dir=data_dir, - dev_ratio=dev_ratio, - re_download=re_download, - suffix='csv') - return data_dir - -class R52Loader(CLSBaseLoader): - def __init__(self): - super(R52Loader, self).__init__() - - def download(self, dev_ratio: float = 0.0, re_download: bool = False) -> str: - r""" - 自动下载数据集 - - 如果dev_ratio不等于0,则根据dev_ratio的值随机将train中的数据取出一部分作为dev数据。 - 下载完成后在output_dir中有train.csv, test.csv, dev.csv三个文件。否则只有train.csv和test.csv - - :param float dev_ratio: 如果路径中没有dev集,从train划分多少作为dev的数据. 
如果为0,则不划分dev。 - :param bool re_download: 是否重新下载数据,以重新切分数据。 - :return: str, 数据集的目录地址 - """ - dataset_name = r'R52' - data_dir = self._get_dataset_path(dataset_name=dataset_name) - data_dir = _split_dev(dataset_name=dataset_name, - data_dir=data_dir, - dev_ratio=dev_ratio, - re_download=re_download, - suffix='csv') - return data_dir - -class NG20Loader(CLSBaseLoader): - def __init__(self): - super(NG20Loader, self).__init__() - - def download(self, dev_ratio: float = 0.0, re_download: bool = False) -> str: - r""" - 自动下载数据集 - - 如果dev_ratio不等于0,则根据dev_ratio的值随机将train中的数据取出一部分作为dev数据。 - 下载完成后在output_dir中有train.csv, test.csv, dev.csv三个文件。否则只有train.csv和test.csv - - :param float dev_ratio: 如果路径中没有dev集,从train划分多少作为dev的数据. 如果为0,则不划分dev。 - :param bool re_download: 是否重新下载数据,以重新切分数据。 - :return: str, 数据集的目录地址 - """ - dataset_name = r'20ng' - data_dir = self._get_dataset_path(dataset_name=dataset_name) - data_dir = _split_dev(dataset_name=dataset_name, - data_dir=data_dir, - dev_ratio=dev_ratio, - re_download=re_download, - suffix='csv') - return data_dir - -class OhsumedLoader(CLSBaseLoader): - def __init__(self): - super(OhsumedLoader, self).__init__() - - def download(self, dev_ratio: float = 0.0, re_download: bool = False) -> str: - r""" - 自动下载数据集 - - 如果dev_ratio不等于0,则根据dev_ratio的值随机将train中的数据取出一部分作为dev数据。 - 下载完成后在output_dir中有train.csv, test.csv, dev.csv三个文件。否则只有train.csv和test.csv - - :param float dev_ratio: 如果路径中没有dev集,从train划分多少作为dev的数据. 如果为0,则不划分dev。 - :param bool re_download: 是否重新下载数据,以重新切分数据。 - :return: str, 数据集的目录地址 - """ - dataset_name = r'ohsumed' - data_dir = self._get_dataset_path(dataset_name=dataset_name) - data_dir = _split_dev(dataset_name=dataset_name, - data_dir=data_dir, - dev_ratio=dev_ratio, - re_download=re_download, - suffix='csv') - return data_dir \ No newline at end of file diff --git a/fastNLP/io/loader/conll.py b/fastNLP/io/loader/conll.py deleted file mode 100644 index 36289db8..00000000 --- a/fastNLP/io/loader/conll.py +++ /dev/null @@ -1,543 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "ConllLoader", - "Conll2003Loader", - "Conll2003NERLoader", - "OntoNotesNERLoader", - "CTBLoader", - "CNNERLoader", - "MsraNERLoader", - "WeiboNERLoader", - "PeopleDailyNERLoader" -] - -import glob -import os -import random -import shutil -import time - -from .loader import Loader -from ..file_reader import _read_conll -from ...core.const import Const -from ...core.dataset import DataSet -from ...core.instance import Instance - - -class ConllLoader(Loader): - r""" - ConllLoader支持读取的数据格式: 以空行隔开两个sample,除了分割行,每一行用空格或者制表符隔开不同的元素。如下例所示: - - Example:: - - # 文件中的内容 - Nadim NNP B-NP B-PER - Ladki NNP I-NP I-PER - - AL-AIN NNP B-NP B-LOC - United NNP B-NP B-LOC - Arab NNP I-NP I-LOC - Emirates NNPS I-NP I-LOC - 1996-12-06 CD I-NP O - ... 
- - # 如果用以下的参数读取,返回的DataSet将包含raw_words和pos两个field, 这两个field的值分别取自于第0列与第1列 - dataset = ConllLoader(headers=['raw_words', 'pos'], indexes=[0, 1])._load('/path/to/train.conll') - # 如果用以下的参数读取,返回的DataSet将包含raw_words和ner两个field, 这两个field的值分别取自于第0列与第2列 - dataset = ConllLoader(headers=['raw_words', 'ner'], indexes=[0, 3])._load('/path/to/train.conll') - # 如果用以下的参数读取,返回的DataSet将包含raw_words, pos和ner三个field - dataset = ConllLoader(headers=['raw_words', 'pos', 'ner'], indexes=[0, 1, 3])._load('/path/to/train.conll') - - ConllLoader返回的DataSet的field由传入的headers确定。 - - 数据中以"-DOCSTART-"开头的行将被忽略,因为该符号在conll 2003中被用为文档分割符。 - - """ - - def __init__(self, headers, sep=None, indexes=None, dropna=True): - r""" - - :param list headers: 每一列数据的名称,需为List or Tuple of str。``header`` 与 ``indexes`` 一一对应 - :param list sep: 指定分隔符,默认为制表符 - :param list indexes: 需要保留的数据列下标,从0开始。若为 ``None`` ,则所有列都保留。Default: ``None`` - :param bool dropna: 是否忽略非法数据,若 ``False`` ,遇到非法数据时抛出 ``ValueError`` 。Default: ``True`` - """ - super(ConllLoader, self).__init__() - if not isinstance(headers, (list, tuple)): - raise TypeError( - 'invalid headers: {}, should be list of strings'.format(headers)) - self.headers = headers - self.dropna = dropna - self.sep=sep - if indexes is None: - self.indexes = list(range(len(self.headers))) - else: - if len(indexes) != len(headers): - raise ValueError - self.indexes = indexes - - def _load(self, path): - r""" - 传入的一个文件路径,将该文件读入DataSet中,field由ConllLoader初始化时指定的headers决定。 - - :param str path: 文件的路径 - :return: DataSet - """ - ds = DataSet() - for idx, data in _read_conll(path,sep=self.sep, indexes=self.indexes, dropna=self.dropna): - ins = {h: data[i] for i, h in enumerate(self.headers)} - ds.append(Instance(**ins)) - return ds - - -class Conll2003Loader(ConllLoader): - r""" - 用于读取conll2003任务的数据。数据的内容应该类似与以下的内容, 第一列为raw_words, 第二列为pos, 第三列为chunking,第四列为ner。 - - Example:: - - Nadim NNP B-NP B-PER - Ladki NNP I-NP I-PER - - AL-AIN NNP B-NP B-LOC - United NNP B-NP B-LOC - Arab NNP I-NP I-LOC - Emirates NNPS I-NP I-LOC - 1996-12-06 CD I-NP O - ... - - 返回的DataSet的内容为 - - .. csv-table:: 下面是Conll2003Loader加载后数据具备的结构。 - :header: "raw_words", "pos", "chunk", "ner" - - "[Nadim, Ladki]", "[NNP, NNP]", "[B-NP, I-NP]", "[B-PER, I-PER]" - "[AL-AIN, United, Arab, ...]", "[NNP, NNP, NNP, ...]", "[B-NP, B-NP, I-NP, ...]", "[B-LOC, B-LOC, I-LOC, ...]" - "[...]", "[...]", "[...]", "[...]" - - """ - - def __init__(self): - headers = [ - 'raw_words', 'pos', 'chunk', 'ner', - ] - super(Conll2003Loader, self).__init__(headers=headers) - - def _load(self, path): - r""" - 传入的一个文件路径,将该文件读入DataSet中,field由ConllLoader初始化时指定的headers决定。 - - :param str path: 文件的路径 - :return: DataSet - """ - ds = DataSet() - for idx, data in _read_conll(path, indexes=self.indexes, dropna=self.dropna): - doc_start = False - for i, h in enumerate(self.headers): - field = data[i] - if str(field[0]).startswith('-DOCSTART-'): - doc_start = True - break - if doc_start: - continue - ins = {h: data[i] for i, h in enumerate(self.headers)} - ds.append(Instance(**ins)) - return ds - - def download(self, output_dir=None): - raise RuntimeError("conll2003 cannot be downloaded automatically.") - - -class Conll2003NERLoader(ConllLoader): - r""" - 用于读取conll2003任务的NER数据。每一行有4列内容,空行意味着隔开两个句子 - - 支持读取的内容如下 - Example:: - - Nadim NNP B-NP B-PER - Ladki NNP I-NP I-PER - - AL-AIN NNP B-NP B-LOC - United NNP B-NP B-LOC - Arab NNP I-NP I-LOC - Emirates NNPS I-NP I-LOC - 1996-12-06 CD I-NP O - ... - - 返回的DataSet的内容为 - - .. 
csv-table:: 下面是Conll2003Loader加载后数据具备的结构, target是BIO2编码 - :header: "raw_words", "target" - - "[Nadim, Ladki]", "[B-PER, I-PER]" - "[AL-AIN, United, Arab, ...]", "[B-LOC, B-LOC, I-LOC, ...]" - "[...]", "[...]" - - """ - - def __init__(self): - headers = [ - 'raw_words', 'target', - ] - super().__init__(headers=headers, indexes=[0, 3]) - - def _load(self, path): - r""" - 传入的一个文件路径,将该文件读入DataSet中,field由ConllLoader初始化时指定的headers决定。 - - :param str path: 文件的路径 - :return: DataSet - """ - ds = DataSet() - for idx, data in _read_conll(path, indexes=self.indexes, dropna=self.dropna): - doc_start = False - for i, h in enumerate(self.headers): - field = data[i] - if str(field[0]).startswith('-DOCSTART-'): - doc_start = True - break - if doc_start: - continue - ins = {h: data[i] for i, h in enumerate(self.headers)} - ds.append(Instance(**ins)) - if len(ds) == 0: - raise RuntimeError("No data found {}.".format(path)) - return ds - - def download(self): - raise RuntimeError("conll2003 cannot be downloaded automatically.") - - -class OntoNotesNERLoader(ConllLoader): - r""" - 用以读取OntoNotes的NER数据,同时也是Conll2012的NER任务数据。将OntoNote数据处理为conll格式的过程可以参考 - https://github.com/yhcc/OntoNotes-5.0-NER。OntoNoteNERLoader将取第4列和第11列的内容。 - - 读取的数据格式为: - - Example:: - - bc/msnbc/00/msnbc_0000 0 0 Hi UH (TOP(FRAG(INTJ*) - - - Dan_Abrams * - - bc/msnbc/00/msnbc_0000 0 1 everyone NN (NP*) - - - Dan_Abrams * - - ... - - 返回的DataSet的内容为 - - .. csv-table:: - :header: "raw_words", "target" - - "['Hi', 'everyone', '.']", "['O', 'O', 'O']" - "['first', 'up', 'on', 'the', 'docket']", "['O', 'O', 'O', 'O', 'O']" - "[...]", "[...]" - - """ - - def __init__(self): - super().__init__(headers=[Const.RAW_WORD, Const.TARGET], indexes=[3, 10]) - - def _load(self, path: str): - dataset = super()._load(path) - - def convert_to_bio(tags): - bio_tags = [] - flag = None - for tag in tags: - label = tag.strip("()*") - if '(' in tag: - bio_label = 'B-' + label - flag = label - elif flag: - bio_label = 'I-' + flag - else: - bio_label = 'O' - if ')' in tag: - flag = None - bio_tags.append(bio_label) - return bio_tags - - def convert_word(words): - converted_words = [] - for word in words: - word = word.replace('/.', '.') # 有些结尾的.是/.形式的 - if not word.startswith('-'): - converted_words.append(word) - continue - # 以下是由于这些符号被转义了,再转回来 - tfrs = {'-LRB-': '(', - '-RRB-': ')', - '-LSB-': '[', - '-RSB-': ']', - '-LCB-': '{', - '-RCB-': '}' - } - if word in tfrs: - converted_words.append(tfrs[word]) - else: - converted_words.append(word) - return converted_words - - dataset.apply_field(convert_word, field_name=Const.RAW_WORD, new_field_name=Const.RAW_WORD) - dataset.apply_field(convert_to_bio, field_name=Const.TARGET, new_field_name=Const.TARGET) - - return dataset - - def download(self): - raise RuntimeError("Ontonotes cannot be downloaded automatically, you can refer " - "https://github.com/yhcc/OntoNotes-5.0-NER to download and preprocess.") - - -class CTBLoader(Loader): - r""" - 支持加载的数据应该具备以下格式, 其中第二列为词语,第四列为pos tag,第七列为依赖树的head,第八列为依赖树的label - - Example:: - - 1 印度 _ NR NR _ 3 nn _ _ - 2 海军 _ NN NN _ 3 nn _ _ - 3 参谋长 _ NN NN _ 5 nsubjpass _ _ - 4 被 _ SB SB _ 5 pass _ _ - 5 解职 _ VV VV _ 0 root _ _ - - 1 新华社 _ NR NR _ 7 dep _ _ - 2 新德里 _ NR NR _ 7 dep _ _ - 3 12月 _ NT NT _ 7 dep _ _ - ... - - 读取之后DataSet具备的格式为 - - .. 
csv-table:: - :header: "raw_words", "pos", "dep_head", "dep_label" - - "[印度, 海军, ...]", "[NR, NN, SB, ...]", "[3, 3, ...]", "[nn, nn, ...]" - "[新华社, 新德里, ...]", "[NR, NR, NT, ...]", "[7, 7, 7, ...]", "[dep, dep, dep, ...]" - "[...]", "[...]", "[...]", "[...]" - - """ - def __init__(self): - super().__init__() - headers = [ - 'raw_words', 'pos', 'dep_head', 'dep_label', - ] - indexes = [ - 1, 3, 6, 7, - ] - self.loader = ConllLoader(headers=headers, indexes=indexes) - - def _load(self, path: str): - dataset = self.loader._load(path) - return dataset - - def download(self): - r""" - 由于版权限制,不能提供自动下载功能。可参考 - - https://catalog.ldc.upenn.edu/LDC2013T21 - - :return: - """ - raise RuntimeError("CTB cannot be downloaded automatically.") - - -class CNNERLoader(Loader): - def _load(self, path: str): - r""" - 支持加载形如以下格式的内容,一行两列,以空行隔开两个sample - - Example:: - - 我 O - 们 O - 变 O - 而 O - 以 O - 书 O - 会 O - ... - - :param str path: 文件路径 - :return: DataSet,包含raw_words列和target列 - """ - ds = DataSet() - with open(path, 'r', encoding='utf-8') as f: - raw_chars = [] - target = [] - for line in f: - line = line.strip() - if line: - parts = line.split() - if len(parts) == 1: # 网上下载的数据中有一些行缺少tag,默认补充O - parts.append('O') - raw_chars.append(parts[0]) - target.append(parts[1]) - else: - if raw_chars: - ds.append(Instance(raw_chars=raw_chars, target=target)) - raw_chars = [] - target = [] - return ds - - -class MsraNERLoader(CNNERLoader): - r""" - 读取MSRA-NER数据,数据中的格式应该类似于下列的内容 - - Example:: - - 把 O - 欧 B-LOC - - 美 B-LOC - 、 O - - 港 B-LOC - 台 B-LOC - - 流 O - 行 O - - 的 O - - 食 O - - ... - - 读取后的DataSet包含以下的field - - .. csv-table:: - :header: "raw_chars", "target" - - "['把', '欧']", "['O', 'B-LOC']" - "['美', '、']", "['B-LOC', 'O']" - "[...]", "[...]" - - """ - - def __init__(self): - super().__init__() - - def download(self, dev_ratio: float = 0.1, re_download: bool = False) -> str: - r""" - 自动下载MSRA-NER的数据,如果你使用该数据,请引用 Gina-Anne Levow, 2006, The Third International Chinese Language - Processing Bakeoff: Word Segmentation and Named Entity Recognition. - - 根据dev_ratio的值随机将train中的数据取出一部分作为dev数据。下载完成后在output_dir中有train.conll, test.conll, - dev.conll三个文件。 - - :param float dev_ratio: 如果路径中没有dev集,从train划分多少作为dev的数据。如果为0,则不划分dev。 - :param bool re_download: 是否重新下载数据,以重新切分数据。 - :return: str, 数据集的目录地址 - """ - dataset_name = 'msra-ner' - data_dir = self._get_dataset_path(dataset_name=dataset_name) - modify_time = 0 - for filepath in glob.glob(os.path.join(data_dir, '*')): - modify_time = os.stat(filepath).st_mtime - break - if time.time() - modify_time > 1 and re_download: # 通过文件的修改时间粗略判断数据是否是刚刚下载的,以决定是否删除并重新下载 - shutil.rmtree(data_dir) - data_dir = self._get_dataset_path(dataset_name=dataset_name) - - if not os.path.exists(os.path.join(data_dir, 'dev.conll')): - if dev_ratio > 0: - assert 0 < dev_ratio < 1, "dev_ratio should be in range (0,1)."
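The try block below performs this split inline. As a minimal, self-contained sketch of the same sample-wise split (a sample is a block of non-empty lines, with a blank line as separator; the helper name, file paths and seed argument here are illustrative, not part of the loader's API)::

    import random

    def split_conll_samples(train_path, out_train_path, dev_path, dev_ratio=0.1, seed=None):
        rng = random.Random(seed)
        with open(train_path, 'r', encoding='utf-8') as f, \
                open(out_train_path, 'w', encoding='utf-8') as f_train, \
                open(dev_path, 'w', encoding='utf-8') as f_dev:
            lines = []
            for line in f:
                line = line.strip()
                if line:
                    lines.append(line)
                    continue
                if lines:  # one complete sample collected
                    out = f_dev if rng.random() < dev_ratio else f_train
                    out.write('\n'.join(lines) + '\n\n')
                    lines.clear()
            if lines:  # the file may not end with a blank line
                out = f_dev if rng.random() < dev_ratio else f_train
                out.write('\n'.join(lines) + '\n\n')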
- try: - with open(os.path.join(data_dir, 'train.conll'), 'r', encoding='utf-8') as f, \ - open(os.path.join(data_dir, 'middle_file.conll'), 'w', encoding='utf-8') as f1, \ - open(os.path.join(data_dir, 'dev.conll'), 'w', encoding='utf-8') as f2: - lines = [] # 一个sample包含很多行 - for line in f: - line = line.strip() - if line: - lines.append(line) - else: - if random.random() < dev_ratio: - f2.write('\n'.join(lines) + '\n\n') - else: - f1.write('\n'.join(lines) + '\n\n') - lines.clear() - os.remove(os.path.join(data_dir, 'train.conll')) - os.renames(os.path.join(data_dir, 'middle_file.conll'), os.path.join(data_dir, 'train.conll')) - finally: - if os.path.exists(os.path.join(data_dir, 'middle_file.conll')): - os.remove(os.path.join(data_dir, 'middle_file.conll')) - - return data_dir - - -class WeiboNERLoader(CNNERLoader): - r""" - 读取WeiboNER数据,数据中的格式应该类似与下列的内容 - - Example:: - - 老 B-PER.NOM - 百 I-PER.NOM - 姓 I-PER.NOM - - 心 O - - ... - - 读取后的DataSet包含以下的field - - .. csv-table:: - - :header: "raw_chars", "target" - - "['老', '百', '姓']", "['B-PER.NOM', 'I-PER.NOM', 'I-PER.NOM']" - "['心']", "['O']" - "[...]", "[...]" - - """ - def __init__(self): - super().__init__() - - def download(self) -> str: - r""" - 自动下载Weibo-NER的数据,如果你使用了该数据,请引用 Nanyun Peng and Mark Dredze, 2015, Named Entity Recognition for - Chinese Social Media with Jointly Trained Embeddings. - - :return: str - """ - dataset_name = 'weibo-ner' - data_dir = self._get_dataset_path(dataset_name=dataset_name) - - return data_dir - - -class PeopleDailyNERLoader(CNNERLoader): - r""" - 支持加载的数据格式如下 - - Example:: - - 中 B-ORG - 共 I-ORG - 中 I-ORG - 央 I-ORG - - 致 O - 中 B-ORG - ... - - 读取后的DataSet包含以下的field - - .. csv-table:: target列是基于BIO的编码方式 - :header: "raw_chars", "target" - - "['中', '共', '中', '央']", "['B-ORG', 'I-ORG', 'I-ORG', 'I-ORG']" - "[...]", "[...]" - - """ - - def __init__(self): - super().__init__() - - def download(self) -> str: - dataset_name = 'peopledaily' - data_dir = self._get_dataset_path(dataset_name=dataset_name) - - return data_dir diff --git a/fastNLP/io/loader/coreference.py b/fastNLP/io/loader/coreference.py deleted file mode 100644 index ad882102..00000000 --- a/fastNLP/io/loader/coreference.py +++ /dev/null @@ -1,64 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "CoReferenceLoader", -] - -from ...core.dataset import DataSet -from ..file_reader import _read_json -from ...core.instance import Instance -from ...core.const import Const -from .json import JsonLoader - - -class CoReferenceLoader(JsonLoader): - r""" - 原始数据中内容应该为, 每一行为一个json对象,其中doc_key包含文章的种类信息,speakers包含每句话的说话者信息,cluster是指向现实中同一个事物的聚集,sentences是文本信息内容。 - - Example:: - - {"doc_key": "bc/cctv/00/cctv_0000_0", - "speakers": [["Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1"], ["Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1"], ["Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1"]], - 
"clusters": [[[70, 70], [485, 486], [500, 500], [73, 73], [55, 55], [153, 154], [366, 366]]], - "sentences": [["In", "the", "summer", "of", "2005", ",", "a", "picture", "that", "people", "have", "long", "been", "looking", "forward", "to", "started", "emerging", "with", "frequency", "in", "various", "major", "Hong", "Kong", "media", "."], ["With", "their", "unique", "charm", ",", "these", "well", "-", "known", "cartoon", "images", "once", "again", "caused", "Hong", "Kong", "to", "be", "a", "focus", "of", "worldwide", "attention", "."]] - } - - 读取预处理好的Conll2012数据,数据结构如下: - - .. csv-table:: - :header: "raw_words1", "raw_words2", "raw_words3", "raw_words4" - - "bc/cctv/00/cctv_0000_0", "[['Speaker#1', 'Speaker#1', 'Speaker#1...", "[[[70, 70], [485, 486], [500, 500], [7...", "[['In', 'the', 'summer', 'of', '2005',..." - "...", "...", "...", "..." - - """ - def __init__(self, fields=None, dropna=False): - super().__init__(fields, dropna) - self.fields = {"doc_key": Const.RAW_WORDS(0), "speakers": Const.RAW_WORDS(1), "clusters": Const.RAW_WORDS(2), - "sentences": Const.RAW_WORDS(3)} - - def _load(self, path): - r""" - 加载数据 - :param path: 数据文件路径,文件为json - - :return: - """ - dataset = DataSet() - for idx, d in _read_json(path, fields=self.fields_list, dropna=self.dropna): - if self.fields: - ins = {self.fields[k]: v for k, v in d.items()} - else: - ins = d - dataset.append(Instance(**ins)) - return dataset - - def download(self): - r""" - 由于版权限制,不能提供自动下载功能。可参考 - - https://www.aclweb.org/anthology/W12-4501 - - :return: - """ - raise RuntimeError("CoReference cannot be downloaded automatically.") diff --git a/fastNLP/io/loader/csv.py b/fastNLP/io/loader/csv.py deleted file mode 100644 index 93ef48a8..00000000 --- a/fastNLP/io/loader/csv.py +++ /dev/null @@ -1,39 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "CSVLoader", -] - -from .loader import Loader -from ..file_reader import _read_csv -from ...core.dataset import DataSet -from ...core.instance import Instance - - -class CSVLoader(Loader): - r""" - 读取CSV格式的数据集, 返回 ``DataSet`` 。 - - """ - - def __init__(self, headers=None, sep=",", dropna=False): - r""" - - :param List[str] headers: CSV文件的文件头.定义每一列的属性名称,即返回的DataSet中`field`的名称 - 若为 ``None`` ,则将读入文件的第一行视作 ``headers`` . Default: ``None`` - :param str sep: CSV文件中列与列之间的分隔符. Default: "," - :param bool dropna: 是否忽略非法数据,若 ``True`` 则忽略,若 ``False`` ,在遇到非法数据时,抛出 ``ValueError`` . - Default: ``False`` - """ - super().__init__() - self.headers = headers - self.sep = sep - self.dropna = dropna - - def _load(self, path): - ds = DataSet() - for idx, data in _read_csv(path, headers=self.headers, - sep=self.sep, dropna=self.dropna): - ds.append(Instance(**data)) - return ds - diff --git a/fastNLP/io/loader/cws.py b/fastNLP/io/loader/cws.py deleted file mode 100644 index dbfe4e33..00000000 --- a/fastNLP/io/loader/cws.py +++ /dev/null @@ -1,97 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "CWSLoader" -] - -import glob -import os -import random -import shutil -import time - -from .loader import Loader -from ...core.dataset import DataSet -from ...core.instance import Instance - - -class CWSLoader(Loader): - r""" - CWSLoader支持的数据格式为,一行一句话,不同词之间用空格隔开, 例如: - - Example:: - - 上海 浦东 开发 与 法制 建设 同步 - 新华社 上海 二月 十日 电 ( 记者 谢金虎 、 张持坚 ) - ... - - 该Loader读取后的DataSet具有如下的结构 - - .. csv-table:: - :header: "raw_words" - - "上海 浦东 开发 与 法制 建设 同步" - "新华社 上海 二月 十日 电 ( 记者 谢金虎 、 张持坚 )" - "..." 
- - """ - def __init__(self, dataset_name:str=None): - r""" - - :param str dataset_name: data的名称,支持pku, msra, cityu(繁体), as(繁体), None - """ - super().__init__() - datanames = {'pku': 'cws-pku', 'msra':'cws-msra', 'as':'cws-as', 'cityu':'cws-cityu'} - if dataset_name in datanames: - self.dataset_name = datanames[dataset_name] - else: - self.dataset_name = None - - def _load(self, path:str): - ds = DataSet() - with open(path, 'r', encoding='utf-8') as f: - for line in f: - line = line.strip() - if line: - ds.append(Instance(raw_words=line)) - return ds - - def download(self, dev_ratio=0.1, re_download=False)->str: - r""" - 如果你使用了该数据集,请引用以下的文章:Thomas Emerson, The Second International Chinese Word Segmentation Bakeoff, - 2005. 更多信息可以在http://sighan.cs.uchicago.edu/bakeoff2005/查看 - - :param float dev_ratio: 如果路径中没有dev集,从train划分多少作为dev的数据. 如果为0,则不划分dev。 - :param bool re_download: 是否重新下载数据,以重新切分数据。 - :return: str - """ - if self.dataset_name is None: - return None - data_dir = self._get_dataset_path(dataset_name=self.dataset_name) - modify_time = 0 - for filepath in glob.glob(os.path.join(data_dir, '*')): - modify_time = os.stat(filepath).st_mtime - break - if time.time() - modify_time > 1 and re_download: # 通过这种比较丑陋的方式判断一下文件是否是才下载的 - shutil.rmtree(data_dir) - data_dir = self._get_dataset_path(dataset_name=self.dataset_name) - - if not os.path.exists(os.path.join(data_dir, 'dev.txt')): - if dev_ratio > 0: - assert 0 < dev_ratio < 1, "dev_ratio should be in range (0,1)." - try: - with open(os.path.join(data_dir, 'train.txt'), 'r', encoding='utf-8') as f, \ - open(os.path.join(data_dir, 'middle_file.txt'), 'w', encoding='utf-8') as f1, \ - open(os.path.join(data_dir, 'dev.txt'), 'w', encoding='utf-8') as f2: - for line in f: - if random.random() < dev_ratio: - f2.write(line) - else: - f1.write(line) - os.remove(os.path.join(data_dir, 'train.txt')) - os.renames(os.path.join(data_dir, 'middle_file.txt'), os.path.join(data_dir, 'train.txt')) - finally: - if os.path.exists(os.path.join(data_dir, 'middle_file.txt')): - os.remove(os.path.join(data_dir, 'middle_file.txt')) - - return data_dir diff --git a/fastNLP/io/loader/json.py b/fastNLP/io/loader/json.py deleted file mode 100644 index 76e2dbfb..00000000 --- a/fastNLP/io/loader/json.py +++ /dev/null @@ -1,46 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "JsonLoader" -] - -from .loader import Loader -from ..file_reader import _read_json -from ...core.dataset import DataSet -from ...core.instance import Instance - - -class JsonLoader(Loader): - r""" - 别名::class:`fastNLP.io.JsonLoader` :class:`fastNLP.io.loader.JsonLoader` - - 读取json格式数据.数据必须按行存储,每行是一个包含各类属性的json对象 - - :param dict fields: 需要读入的json属性名称, 和读入后在DataSet中存储的field_name - ``fields`` 的 `key` 必须是json对象的属性名. ``fields`` 的 `value` 为读入后在DataSet存储的 `field_name` , - `value` 也可为 ``None`` , 这时读入后的 `field_name` 与json对象对应属性同名 - ``fields`` 可为 ``None`` , 这时,json对象所有属性都保存在DataSet中. Default: ``None`` - :param bool dropna: 是否忽略非法数据,若 ``True`` 则忽略,若 ``False`` ,在遇到非法数据时,抛出 ``ValueError`` . 
- Default: ``False`` - """ - - def __init__(self, fields=None, dropna=False): - super(JsonLoader, self).__init__() - self.dropna = dropna - self.fields = None - self.fields_list = None - if fields: - self.fields = {} - for k, v in fields.items(): - self.fields[k] = k if v is None else v - self.fields_list = list(self.fields.keys()) - - def _load(self, path): - ds = DataSet() - for idx, d in _read_json(path, fields=self.fields_list, dropna=self.dropna): - if self.fields: - ins = {self.fields[k]: v for k, v in d.items()} - else: - ins = d - ds.append(Instance(**ins)) - return ds diff --git a/fastNLP/io/loader/loader.py b/fastNLP/io/loader/loader.py deleted file mode 100644 index cfcae497..00000000 --- a/fastNLP/io/loader/loader.py +++ /dev/null @@ -1,94 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "Loader" -] - -from typing import Union, Dict - -from .. import DataBundle -from ..file_utils import _get_dataset_url, get_cache_path, cached_path -from ..utils import check_loader_paths -from ...core.dataset import DataSet - - -class Loader: - r""" - 各种数据 Loader 的基类,提供了 API 的参考. - Loader支持以下的三个函数 - - - download() 函数:自动将该数据集下载到缓存地址,默认缓存地址为~/.fastNLP/datasets/。由于版权等原因,不是所有的Loader都实现了该方法。该方法会返回下载后文件所处的缓存地址。 - - _load() 函数:从一个数据文件中读取数据,返回一个 :class:`~fastNLP.DataSet` 。返回的DataSet的内容可以通过每个Loader的文档判断出。 - - load() 函数:将文件分别读取为DataSet,然后将多个DataSet放入到一个DataBundle中并返回 - - """ - - def __init__(self): - pass - - def _load(self, path: str) -> DataSet: - r""" - 给定一个路径,返回读取的DataSet。 - - :param str path: 路径 - :return: DataSet - """ - raise NotImplementedError - - def load(self, paths: Union[str, Dict[str, str]] = None) -> DataBundle: - r""" - 从指定一个或多个路径中的文件中读取数据,返回 :class:`~fastNLP.io.DataBundle` 。 - - :param Union[str, Dict[str, str]] paths: 支持以下的几种输入方式: - - 0.如果为None,则先查看本地是否有缓存,如果没有则自动下载并缓存。 - - 1.传入一个目录, 该目录下名称包含train的被认为是train,包含test的被认为是test,包含dev的被认为是dev,如果检测到多个文件名包含'train'、 'dev'、 'test'则会报错:: - - data_bundle = xxxLoader().load('/path/to/dir') # 返回的DataBundle中datasets根据目录下是否检测到train - # dev、 test等有所变化,可以通过以下的方式取出DataSet - tr_data = data_bundle.get_dataset('train') - te_data = data_bundle.get_dataset('test') # 如果目录下有文件包含test这个字段 - - 2.传入一个dict,比如train,dev,test不在同一个目录下,或者名称中不包含train, dev, test:: - - paths = {'train':"/path/to/tr.conll", 'dev':"/to/validate.conll", "test":"/to/te.conll"} - data_bundle = xxxLoader().load(paths) # 返回的DataBundle中的dataset中包含"train", "dev", "test" - dev_data = data_bundle.get_dataset('dev') - - 3.传入文件路径:: - - data_bundle = xxxLoader().load("/path/to/a/train.conll") # 返回DataBundle对象, datasets中仅包含'train' - tr_data = data_bundle.get_dataset('train') # 取出DataSet - - :return: 返回的 :class:`~fastNLP.io.DataBundle` - """ - if paths is None: - paths = self.download() - paths = check_loader_paths(paths) - datasets = {name: self._load(path) for name, path in paths.items()} - data_bundle = DataBundle(datasets=datasets) - return data_bundle - - def download(self) -> str: - r""" - 自动下载该数据集 - - :return: 下载后解压目录 - """ - raise NotImplementedError(f"{self.__class__} cannot download data automatically.") - - @staticmethod - def _get_dataset_path(dataset_name): - r""" - 传入dataset的名称,获取读取数据的目录。如果数据不存在,会尝试自动下载并缓存(如果支持的话) - - :param str dataset_name: 数据集的名称 - :return: str, 数据集的目录地址。直接到该目录下读取相应的数据即可。 - """ - - default_cache_path = get_cache_path() - url = _get_dataset_url(dataset_name) - output_dir = cached_path(url_or_filename=url, cache_dir=default_cache_path, name='dataset') - - return output_dir diff --git a/fastNLP/io/loader/matching.py b/fastNLP/io/loader/matching.py deleted file mode 100644 index 
6d90f750..00000000 --- a/fastNLP/io/loader/matching.py +++ /dev/null @@ -1,578 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "MNLILoader", - "SNLILoader", - "QNLILoader", - "RTELoader", - "QuoraLoader", - "BQCorpusLoader", - "CNXNLILoader", - "LCQMCLoader" -] - -import os -import warnings -from typing import Union, Dict - -from .csv import CSVLoader -from .json import JsonLoader -from .loader import Loader -from .. import DataBundle -from ..utils import check_loader_paths -from ...core.const import Const -from ...core.dataset import DataSet -from ...core.instance import Instance - - -class MNLILoader(Loader): - r""" - 读取的数据格式为: - - Example:: - - index promptID pairID genre sentence1_binary_parse sentence2_binary_parse sentence1_parse sentence2_parse sentence1 sentence2 label1 gold_label - 0 31193 31193n government ( ( Conceptually ( cream skimming ) ) ... - 1 101457 101457e telephone ( you ( ( know ( during ( ( ( the season ) and ) ( i guess ) ) )... - ... - - 读取MNLI任务的数据,读取之后的DataSet中包含以下的内容,words0是sentence1, words1是sentence2, target是gold_label, 测试集中没 - 有target列。 - - .. csv-table:: - :header: "raw_words1", "raw_words2", "target" - - "Conceptually cream ...", "Product and geography...", "neutral" - "you know during the ...", "You lose the things to the...", "entailment" - "...", "...", "..." - - """ - - def __init__(self): - super().__init__() - - def _load(self, path: str): - ds = DataSet() - with open(path, 'r', encoding='utf-8') as f: - f.readline() # 跳过header - if path.endswith("test_matched.tsv") or path.endswith('test_mismatched.tsv'): - warnings.warn("MNLI's test file has no target.") - for line in f: - line = line.strip() - if line: - parts = line.split('\t') - raw_words1 = parts[8] - raw_words2 = parts[9] - idx = int(parts[0]) - if raw_words1 and raw_words2: - ds.append(Instance(raw_words1=raw_words1, raw_words2=raw_words2, index=idx)) - else: - for line in f: - line = line.strip() - if line: - parts = line.split('\t') - raw_words1 = parts[8] - raw_words2 = parts[9] - target = parts[-1] - idx = int(parts[0]) - if raw_words1 and raw_words2 and target: - ds.append(Instance(raw_words1=raw_words1, raw_words2=raw_words2, target=target, index=idx)) - return ds - - def load(self, paths: str = None): - r""" - - :param str paths: 传入数据所在目录,会在该目录下寻找dev_matched.tsv, dev_mismatched.tsv, test_matched.tsv, - test_mismatched.tsv, train.tsv文件夹 - :return: DataBundle - """ - if paths: - paths = os.path.abspath(os.path.expanduser(paths)) - else: - paths = self.download() - if not os.path.isdir(paths): - raise NotADirectoryError(f"{paths} is not a valid directory.") - - files = {'dev_matched': "dev_matched.tsv", - "dev_mismatched": "dev_mismatched.tsv", - "test_matched": "test_matched.tsv", - "test_mismatched": "test_mismatched.tsv", - "train": 'train.tsv'} - - datasets = {} - for name, filename in files.items(): - filepath = os.path.join(paths, filename) - if not os.path.isfile(filepath): - if 'test' not in name: - raise FileNotFoundError(f"{name} not found in directory {filepath}.") - datasets[name] = self._load(filepath) - - data_bundle = DataBundle(datasets=datasets) - - return data_bundle - - def download(self): - r""" - 如果你使用了这个数据,请引用 - - https://www.nyu.edu/projects/bowman/multinli/paper.pdf - :return: - """ - output_dir = self._get_dataset_path('mnli') - return output_dir - - -class SNLILoader(JsonLoader): - r""" - 文件每一行是一个sample,每一行都为一个json对象,其数据格式为: - - Example:: - - {"annotator_labels": ["neutral", "entailment", "neutral", "neutral", "neutral"], "captionID": "4705552913.jpg#2", - 
"gold_label": "neutral", "pairID": "4705552913.jpg#2r1n", - "sentence1": "Two women are embracing while holding to go packages.", - "sentence1_binary_parse": "( ( Two women ) ( ( are ( embracing ( while ( holding ( to ( go packages ) ) ) ) ) ) . ) )", - "sentence1_parse": "(ROOT (S (NP (CD Two) (NNS women)) (VP (VBP are) (VP (VBG embracing) (SBAR (IN while) (S (NP (VBG holding)) (VP (TO to) (VP (VB go) (NP (NNS packages)))))))) (. .)))", - "sentence2": "The sisters are hugging goodbye while holding to go packages after just eating lunch.", - "sentence2_binary_parse": "( ( The sisters ) ( ( are ( ( hugging goodbye ) ( while ( holding ( to ( ( go packages ) ( after ( just ( eating lunch ) ) ) ) ) ) ) ) ) . ) )", - "sentence2_parse": "(ROOT (S (NP (DT The) (NNS sisters)) (VP (VBP are) (VP (VBG hugging) (NP (UH goodbye)) (PP (IN while) (S (VP (VBG holding) (S (VP (TO to) (VP (VB go) (NP (NNS packages)) (PP (IN after) (S (ADVP (RB just)) (VP (VBG eating) (NP (NN lunch))))))))))))) (. .)))" - } - - 读取之后的DataSet中的field情况为 - - .. csv-table:: 下面是使用SNLILoader加载的DataSet所具备的field - :header: "target", "raw_words1", "raw_words2", - - "neutral ", "Two women are embracing while holding..", "The sisters are hugging goodbye..." - "entailment", "Two women are embracing while holding...", "Two woman are holding packages." - "...", "...", "..." - - """ - - def __init__(self): - super().__init__(fields={ - 'sentence1': Const.RAW_WORDS(0), - 'sentence2': Const.RAW_WORDS(1), - 'gold_label': Const.TARGET, - }) - - def load(self, paths: Union[str, Dict[str, str]] = None) -> DataBundle: - r""" - 从指定一个或多个路径中的文件中读取数据,返回 :class:`~fastNLP.io.DataBundle` 。 - - 读取的field根据Loader初始化时传入的field决定。 - - :param str paths: 传入一个目录, 将在该目录下寻找snli_1.0_train.jsonl, snli_1.0_dev.jsonl - 和snli_1.0_test.jsonl三个文件。 - - :return: 返回的 :class:`~fastNLP.io.DataBundle` - """ - _paths = {} - if paths is None: - paths = self.download() - if paths: - if os.path.isdir(paths): - if not os.path.isfile(os.path.join(paths, 'snli_1.0_train.jsonl')): - raise FileNotFoundError(f"snli_1.0_train.jsonl is not found in {paths}") - _paths['train'] = os.path.join(paths, 'snli_1.0_train.jsonl') - for filename in ['snli_1.0_dev.jsonl', 'snli_1.0_test.jsonl']: - filepath = os.path.join(paths, filename) - _paths[filename.split('_')[-1].split('.')[0]] = filepath - paths = _paths - else: - raise NotADirectoryError(f"{paths} is not a valid directory.") - - datasets = {name: self._load(path) for name, path in paths.items()} - data_bundle = DataBundle(datasets=datasets) - return data_bundle - - def download(self): - r""" - 如果您的文章使用了这份数据,请引用 - - http://nlp.stanford.edu/pubs/snli_paper.pdf - - :return: str - """ - return self._get_dataset_path('snli') - - -class QNLILoader(JsonLoader): - r""" - 第一行为标题(具体内容会被忽略),之后每一行是一个sample,由index、问题、句子和标签构成(以制表符分割),数据结构如下: - - Example:: - - index question sentence label - 0 What came into force after the new constitution was herald? As of that day, the new constitution heralding the Second Republic came into force. entailment - - QNLI数据集的Loader, - 加载的DataSet将具备以下的field, raw_words1是question, raw_words2是sentence, target是label - - .. csv-table:: - :header: "raw_words1", "raw_words2", "target" - - "What came into force after the new...", "As of that day...", "entailment" - "...","." 
- - test数据集没有target列 - - """ - - def __init__(self): - super().__init__() - - def _load(self, path): - ds = DataSet() - - with open(path, 'r', encoding='utf-8') as f: - f.readline() # 跳过header - if path.endswith("test.tsv"): - warnings.warn("QNLI's test file has no target.") - for line in f: - line = line.strip() - if line: - parts = line.split('\t') - raw_words1 = parts[1] - raw_words2 = parts[2] - if raw_words1 and raw_words2: - ds.append(Instance(raw_words1=raw_words1, raw_words2=raw_words2)) - else: - for line in f: - line = line.strip() - if line: - parts = line.split('\t') - raw_words1 = parts[1] - raw_words2 = parts[2] - target = parts[-1] - if raw_words1 and raw_words2 and target: - ds.append(Instance(raw_words1=raw_words1, raw_words2=raw_words2, target=target)) - return ds - - def download(self): - r""" - 如果您的实验使用到了该数据,请引用 - - https://arxiv.org/pdf/1809.05053.pdf - - :return: - """ - return self._get_dataset_path('qnli') - - -class RTELoader(Loader): - r""" - 第一行为标题(具体内容会被忽略),之后每一行是一个sample,由index、句子1、句子2和标签构成(以制表符分割),数据结构如下: - - Example:: - - index sentence1 sentence2 label - 0 Dana Reeve, the widow of the actor Christopher Reeve, has died of lung cancer at age 44, according to the Christopher Reeve Foundation. Christopher Reeve had an accident. not_entailment - - RTE数据的loader - 加载的DataSet将具备以下的field, raw_words1是sentence0,raw_words2是sentence1, target是label - - .. csv-table:: - :header: "raw_words1", "raw_words2", "target" - - "Dana Reeve, the widow of the actor...", "Christopher Reeve had an...", "not_entailment" - "...","..." - - test数据集没有target列 - """ - - def __init__(self): - super().__init__() - - def _load(self, path: str): - ds = DataSet() - - with open(path, 'r', encoding='utf-8') as f: - f.readline() # 跳过header - if path.endswith("test.tsv"): - warnings.warn("RTE's test file has no target.") - for line in f: - line = line.strip() - if line: - parts = line.split('\t') - raw_words1 = parts[1] - raw_words2 = parts[2] - if raw_words1 and raw_words2: - ds.append(Instance(raw_words1=raw_words1, raw_words2=raw_words2)) - else: - for line in f: - line = line.strip() - if line: - parts = line.split('\t') - raw_words1 = parts[1] - raw_words2 = parts[2] - target = parts[-1] - if raw_words1 and raw_words2 and target: - ds.append(Instance(raw_words1=raw_words1, raw_words2=raw_words2, target=target)) - return ds - - def download(self): - r""" - 如果您的实验使用到了该数据,请引用GLUE Benchmark - - https://openreview.net/pdf?id=rJ4km2R5t7 - - :return: - """ - return self._get_dataset_path('rte') - - -class QuoraLoader(Loader): - r""" - Quora matching任务的数据集Loader - - 支持读取的文件中的内容,应该有以下的形式, 以制表符分隔,且前三列的内容必须是:第一列是label,第二列和第三列是句子 - - Example:: - - 1 How do I get funding for my web based startup idea ? How do I get seed funding pre product ? 327970 - 0 Is honey a viable alternative to sugar for diabetics ? How would you compare the United States ' euthanasia laws to Denmark ? 90348 - ... - - 加载的DataSet将具备以下的field - - .. csv-table:: - :header: "raw_words1", "raw_words2", "target" - - "How do I get funding for my web based...", "How do I get seed funding...","1" - "Is honey a viable alternative ...", "How would you compare the United...","0" - "...","...","..." 
- - """ - - def __init__(self): - super().__init__() - - def _load(self, path: str): - ds = DataSet() - - with open(path, 'r', encoding='utf-8') as f: - for line in f: - line = line.strip() - if line: - parts = line.split('\t') - raw_words1 = parts[1] - raw_words2 = parts[2] - target = parts[0] - if raw_words1 and raw_words2 and target: - ds.append(Instance(raw_words1=raw_words1, raw_words2=raw_words2, target=target)) - return ds - - def download(self): - r""" - 由于版权限制,不能提供自动下载功能。可参考 - - https://www.kaggle.com/c/quora-question-pairs/data - - :return: - """ - raise RuntimeError("Quora cannot be downloaded automatically.") - - -class CNXNLILoader(Loader): - r""" - 数据集简介:中文句对NLI(本为multi-lingual的数据集,但是这里只取了中文的数据集)。原句子已被MOSES tokenizer处理,这里我们将其还原并重新按字tokenize - 原始数据数据为: - - Example:: - - premise hypo label - 我们 家里 有 一个 但 我 没 找到 我 可以 用 的 时间 我们 家里 有 一个 但 我 从来 没有 时间 使用 它 . entailment - - dev和test中的数据为csv或json格式,包括十多个field,这里只取与以上三个field中的数据 - 读取后的Dataset将具有以下数据结构: - - .. csv-table:: - :header: "raw_chars1", "raw_chars2", "target" - - "我们 家里 有 一个 但 我 没 找到 我 可以 用 的 时间", "我们 家里 有 一个 但 我 从来 没有 时间 使用 它 .", "0" - "...", "...", "..." - - """ - - def __init__(self): - super(CNXNLILoader, self).__init__() - - def _load(self, path: str = None): - ds_all = DataSet() - with open(path, 'r', encoding='utf-8') as f: - head_name_list = f.readline().strip().split('\t') - sentence1_index = head_name_list.index('sentence1') - sentence2_index = head_name_list.index('sentence2') - gold_label_index = head_name_list.index('gold_label') - language_index = head_name_list.index(('language')) - - for line in f: - line = line.strip() - raw_instance = line.split('\t') - sentence1 = raw_instance[sentence1_index] - sentence2 = raw_instance[sentence2_index] - gold_label = raw_instance[gold_label_index] - language = raw_instance[language_index] - if sentence1: - ds_all.append(Instance(sentence1=sentence1, sentence2=sentence2, gold_label=gold_label, language=language)) - - ds_zh = DataSet() - for i in ds_all: - if i['language'] == 'zh': - ds_zh.append(Instance(raw_chars1=i['sentence1'], raw_chars2=i['sentence2'], target=i['gold_label'])) - - return ds_zh - - def _load_train(self, path: str = None): - ds = DataSet() - - with open(path, 'r', encoding='utf-8') as f: - next(f) - for line in f: - raw_instance = line.strip().split('\t') - premise = "".join(raw_instance[0].split())# 把已经分好词的premise和hypo强制还原为character segmentation - hypo = "".join(raw_instance[1].split()) - label = "".join(raw_instance[-1].split()) - if premise: - ds.append(Instance(premise=premise, hypo=hypo, label=label)) - - ds.rename_field('label', 'target') - ds.rename_field('premise', 'raw_chars1') - ds.rename_field('hypo', 'raw_chars2') - ds.apply(lambda i: "".join(i['raw_chars1'].split()), new_field_name='raw_chars1') - ds.apply(lambda i: "".join(i['raw_chars2'].split()), new_field_name='raw_chars2') - return ds - - def load(self, paths: Union[str, Dict[str, str]] = None) -> DataBundle: - if paths is None: - paths = self.download() - paths = check_loader_paths(paths) - datasets = {} - for name, path in paths.items(): - if name == 'train': - datasets[name] = self._load_train(path) - else: - datasets[name] = self._load(path) - - data_bundle = DataBundle(datasets=datasets) - return data_bundle - - def download(self) -> str: - r""" - 自动下载数据,该数据取自 https://arxiv.org/abs/1809.05053 - 在 https://arxiv.org/pdf/1905.05526.pdf https://arxiv.org/pdf/1901.10125.pdf - https://arxiv.org/pdf/1809.05053.pdf 有使用 - :return: - """ - output_dir = self._get_dataset_path('cn-xnli') - 
return output_dir - - -class BQCorpusLoader(Loader): - r""" - 数据集简介:句子对二分类任务(判断是否具有相同的语义) - 原始数据结构为: - - Example:: - - sentence1,sentence2,label - 综合评分不足什么原因,综合评估的依据,0 - 什么时候我能使用微粒贷,你就赶快给我开通就行了,0 - - 读取后的Dataset将具有以下数据结构: - - .. csv-table:: - :header: "raw_chars1", "raw_chars2", "target" - - "综合评分不足什么原因", "综合评估的依据", "0" - "什么时候我能使用微粒贷", "你就赶快给我开通就行了", "0" - "...", "...", "..." - - """ - - def __init__(self): - super(BQCorpusLoader, self).__init__() - - def _load(self, path: str = None): - ds = DataSet() - with open(path, 'r', encoding='utf-8') as f: - next(f) - for line in f: - line = line.strip() - target = line[-1] - sep_index = line.index(',') - raw_chars1 = line[:sep_index] - raw_chars2 = line[sep_index + 1:-2] # 句子2位于第一个逗号与结尾的",label"之间 - - if raw_chars1: - ds.append(Instance(raw_chars1=raw_chars1, raw_chars2=raw_chars2, target=target)) - return ds - - def download(self): - r""" - 由于版权限制,不能提供自动下载功能。可参考 - - https://github.com/ymcui/Chinese-BERT-wwm - - :return: - """ - raise RuntimeError("BQCorpus cannot be downloaded automatically.") - - -class LCQMCLoader(Loader): - r""" - 数据集简介:句对匹配(question matching) - - 原始数据为: - - Example:: - - 喜欢打篮球的男生喜欢什么样的女生 爱打篮球的男生喜欢什么样的女生 1 - 你帮我设计小说的封面吧 谁能帮我给小说设计个封面? 0 - - 读取后的Dataset将具有以下的数据结构 - - .. csv-table:: - :header: "raw_chars1", "raw_chars2", "target" - - "喜欢打篮球的男生喜欢什么样的女生", "爱打篮球的男生喜欢什么样的女生", "1" - "你帮我设计小说的封面吧", "谁能帮我给小说设计个封面?", "0" - "...", "...", "..." - - """ - - def __init__(self): - super(LCQMCLoader, self).__init__() - - def _load(self, path: str = None): - ds = DataSet() - with open(path, 'r', encoding='utf-8') as f: - for line in f: - line = line.strip() - line_segments = line.split('\t') - assert len(line_segments) == 3 - - target = line_segments[-1] - - raw_chars1 = line_segments[0] - raw_chars2 = line_segments[1] - - if raw_chars1: - ds.append(Instance(raw_chars1=raw_chars1, raw_chars2=raw_chars2, target=target)) - return ds - - def download(self): - r""" - 由于版权限制,不能提供自动下载功能。可参考 - - https://github.com/ymcui/Chinese-BERT-wwm - - :return: - """ - raise RuntimeError("LCQMC cannot be downloaded automatically.") - - diff --git a/fastNLP/io/loader/qa.py b/fastNLP/io/loader/qa.py deleted file mode 100644 index 9b19c255..00000000 --- a/fastNLP/io/loader/qa.py +++ /dev/null @@ -1,74 +0,0 @@ -r""" -该文件中的Loader主要用于读取问答式任务的数据 - -""" - - -from . import Loader -import json -from ...core import DataSet, Instance - -__all__ = ['CMRC2018Loader'] - - -class CMRC2018Loader(Loader): - r""" - 请直接使用从fastNLP下载的数据进行处理。该数据集未提供测试集,测试需要通过上传到对应的系统进行评测 - - 读取之后训练集DataSet将具备以下的内容,每个问题的答案只有一个 - - .. csv-table:: - :header: "title", "context", "question", "answers", "answer_starts", "id" - - "范廷颂", "范廷颂枢机(,),圣名保禄·若瑟()...", "范廷颂是什么时候被任为主教的?", ["1963年"], ["30"], "TRAIN_186_QUERY_0" - "范廷颂", "范廷颂枢机(,),圣名保禄·若瑟()...", "1990年,范廷颂担任什么职务?", ["1990年被擢升为天..."], ["41"], "TRAIN_186_QUERY_1" - "...", "...", "...", "...", "...", "..." - - 其中title是文本的标题,多条记录可能是相同的title;id是该问题的id,具备唯一性 - - 验证集DataSet将具备以下的内容,每个问题的答案可能有三个(有时候只是3个重复的答案) - - .. csv-table:: - :header: "title", "context", "question", "answers", "answer_starts", "id" - - "战国无双3", "《战国无双3》()是由光荣和ω-force开发...", "《战国无双3》是由哪两个公司合作开发的?", "['光荣和ω-force', '光荣和ω-force', '光荣和ω-force']", "[30, 30, 30]", "DEV_0_QUERY_0" - "战国无双3", "《战国无双3》()是由光荣和ω-force开发...", "男女主角亦有专属声优这一模式是由谁改编的?", "['村雨城', '村雨城', '任天堂游戏谜之村雨城']", "[226, 226, 219]", "DEV_0_QUERY_1" - "...", "...", "...", "...", "...", "..."
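The answer_starts shown above are 0-based character offsets into context (see the note that follows). A quick sanity check over a loaded instance can therefore be written as below; the field names match what _load produces, while the toy data is made up for illustration::

    def check_answer_spans(instance):
        # each answer should literally occur in context at its recorded offset
        context = instance['context']
        for ans, start in zip(instance['answers'], instance['answer_starts']):
            start = int(start)  # offsets may arrive as str or int depending on the split
            assert context[start:start + len(ans)] == ans

    # toy instance mirroring the fields of CMRC2018Loader._load
    toy = {'context': '我来自a复旦大学?', 'answers': ['复旦大学'], 'answer_starts': [4]}
    check_answer_spans(toy)  # '复' sits at 0-based index 4, as in the example below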
- - 其中answer_starts是从0开始的index。例如"我来自a复旦大学?",其中"复"的开始index为4。另外"Russell评价说"中的说的index为9, 因为 - 英文和数字都直接按照character计量的。 - """ - def __init__(self): - super().__init__() - - def _load(self, path: str) -> DataSet: - with open(path, 'r', encoding='utf-8') as f: - data = json.load(f)['data'] - ds = DataSet() - for entry in data: - title = entry['title'] - para = entry['paragraphs'][0] - context = para['context'] - qas = para['qas'] - for qa in qas: - question = qa['question'] - ans = qa['answers'] - answers = [] - answer_starts = [] - id = qa['id'] - for an in ans: - answers.append(an['text']) - answer_starts.append(an['answer_start']) - ds.append(Instance(title=title, context=context, question=question, answers=answers, - answer_starts=answer_starts,id=id)) - return ds - - def download(self) -> str: - r""" - 如果您使用了本数据,请引用A Span-Extraction Dataset for Chinese Machine Reading Comprehension. Yiming Cui, Ting Liu, etc. - - :return: - """ - output_dir = self._get_dataset_path('cmrc2018') - return output_dir - diff --git a/fastNLP/io/loader/summarization.py b/fastNLP/io/loader/summarization.py deleted file mode 100644 index 3fe5f7a3..00000000 --- a/fastNLP/io/loader/summarization.py +++ /dev/null @@ -1,63 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "ExtCNNDMLoader" -] - -import os -from typing import Union, Dict - -from ..data_bundle import DataBundle -from ..utils import check_loader_paths -from .json import JsonLoader - - -class ExtCNNDMLoader(JsonLoader): - r""" - 读取之后的DataSet中的field情况为 - - .. csv-table:: - :header: "text", "summary", "label", "publication" - - ["I got new tires from them and... ","..."], ["The new tires...","..."], [0, 1], "cnndm" - ["Don't waste your time. We had two...","..."], ["Time is precious","..."], [1], "cnndm" - ["..."], ["..."], [], "cnndm" - - """ - - def __init__(self, fields=None): - fields = fields or {"text": None, "summary": None, "label": None, "publication": None} - super(ExtCNNDMLoader, self).__init__(fields=fields) - - def load(self, paths: Union[str, Dict[str, str]] = None): - r""" - 从指定一个或多个路径中的文件中读取数据,返回 :class:`~fastNLP.io.DataBundle` 。 - - 读取的field根据ExtCNNDMLoader初始化时传入的headers决定。 - - :param str paths: 传入一个目录, 将在该目录下寻找train.label.jsonl, dev.label.jsonl - test.label.jsonl三个文件(该目录还应该需要有一个名字为vocab的文件,在 :class:`~fastNLP.io.ExtCNNDMPipe` - 当中需要用到)。 - - :return: 返回 :class:`~fastNLP.io.DataBundle` - """ - if paths is None: - paths = self.download() - paths = check_loader_paths(paths) - if ('train' in paths) and ('test' not in paths): - paths['test'] = paths['train'] - paths.pop('train') - - datasets = {name: self._load(path) for name, path in paths.items()} - data_bundle = DataBundle(datasets=datasets) - return data_bundle - - def download(self): - r""" - 如果你使用了这个数据,请引用 - - https://arxiv.org/pdf/1506.03340.pdf - :return: - """ - output_dir = self._get_dataset_path('ext-cnndm') - return output_dir diff --git a/fastNLP/io/model_io.py b/fastNLP/io/model_io.py deleted file mode 100644 index 7fea4527..00000000 --- a/fastNLP/io/model_io.py +++ /dev/null @@ -1,69 +0,0 @@ -r""" -用于载入和保存模型 -""" -__all__ = [ - "ModelLoader", - "ModelSaver" -] - -import torch - - -class ModelLoader: - r""" - 用于读取模型 - """ - - def __init__(self): - super(ModelLoader, self).__init__() - - @staticmethod - def load_pytorch(empty_model, model_path): - r""" - 从 ".pkl" 文件读取 PyTorch 模型 - - :param empty_model: 初始化参数的 PyTorch 模型 - :param str model_path: 模型保存的路径 - """ - empty_model.load_state_dict(torch.load(model_path)) - - @staticmethod - def load_pytorch_model(model_path): - r""" - 读取整个模型 - - 
:param str model_path: 模型保存的路径 - """ - return torch.load(model_path) - - -class ModelSaver(object): - r""" - 用于保存模型 - - Example:: - - saver = ModelSaver("./save/model_ckpt_100.pkl") - saver.save_pytorch(model) - - """ - - def __init__(self, save_path): - r""" - - :param save_path: 模型保存的路径 - """ - self.save_path = save_path - - def save_pytorch(self, model, param_only=True): - r""" - 把 PyTorch 模型存入 ".pkl" 文件 - - :param model: PyTorch 模型 - :param bool param_only: 是否只保存模型的参数(否则保存整个模型) - - """ - if param_only is True: - torch.save(model.state_dict(), self.save_path) - else: - torch.save(model, self.save_path) diff --git a/fastNLP/io/pipe/__init__.py b/fastNLP/io/pipe/__init__.py deleted file mode 100644 index 35965ca3..00000000 --- a/fastNLP/io/pipe/__init__.py +++ /dev/null @@ -1,80 +0,0 @@ -r""" -Pipe用于处理通过 Loader 读取的数据,所有的 Pipe 都包含 ``process`` 和 ``process_from_file`` 两种方法。 -``process(data_bundle)`` 传入一个 :class:`~fastNLP.io.DataBundle` 类型的对象, 在传入的 `data_bundle` 上进行原位修改,并将其返回; -``process_from_file(paths)`` 传入的文件路径,返回一个 :class:`~fastNLP.io.DataBundle` 类型的对象。 -``process(data_bundle)`` 或者 ``process_from_file(paths)`` 的返回 `data_bundle` 中的 :class:`~fastNLP.DataSet` -一般都包含原文与转换为index的输入以及转换为index的target;除了 :class:`~fastNLP.DataSet` 之外, -`data_bundle` 还会包含将field转为index时所建立的词表。 - -""" -__all__ = [ - "Pipe", - - "CWSPipe", - - "CLSBasePipe", - "AGsNewsPipe", - "DBPediaPipe", - "YelpFullPipe", - "YelpPolarityPipe", - "SSTPipe", - "SST2Pipe", - "IMDBPipe", - "ChnSentiCorpPipe", - "THUCNewsPipe", - "WeiboSenti100kPipe", - "MRPipe", "R52Pipe", "R8Pipe", "OhsumedPipe", "NG20Pipe", - - "Conll2003NERPipe", - "OntoNotesNERPipe", - "MsraNERPipe", - "WeiboNERPipe", - "PeopleDailyPipe", - "Conll2003Pipe", - - "MatchingBertPipe", - "RTEBertPipe", - "SNLIBertPipe", - "QuoraBertPipe", - "QNLIBertPipe", - "MNLIBertPipe", - "CNXNLIBertPipe", - "BQCorpusBertPipe", - "LCQMCBertPipe", - "MatchingPipe", - "RTEPipe", - "SNLIPipe", - "QuoraPipe", - "QNLIPipe", - "MNLIPipe", - "LCQMCPipe", - "CNXNLIPipe", - "BQCorpusPipe", - "RenamePipe", - "GranularizePipe", - "MachingTruncatePipe", - - "CoReferencePipe", - - "CMRC2018BertPipe", - - "R52PmiGraphPipe", - "R8PmiGraphPipe", - "OhsumedPmiGraphPipe", - "NG20PmiGraphPipe", - "MRPmiGraphPipe" -] - -from .classification import CLSBasePipe, YelpFullPipe, YelpPolarityPipe, SSTPipe, SST2Pipe, IMDBPipe, ChnSentiCorpPipe, THUCNewsPipe, \ - WeiboSenti100kPipe, AGsNewsPipe, DBPediaPipe, MRPipe, R8Pipe, R52Pipe, OhsumedPipe, NG20Pipe -from .conll import Conll2003NERPipe, OntoNotesNERPipe, MsraNERPipe, WeiboNERPipe, PeopleDailyPipe -from .conll import Conll2003Pipe -from .coreference import CoReferencePipe -from .cws import CWSPipe -from .matching import MatchingBertPipe, RTEBertPipe, SNLIBertPipe, QuoraBertPipe, QNLIBertPipe, MNLIBertPipe, \ - MatchingPipe, RTEPipe, SNLIPipe, QuoraPipe, QNLIPipe, MNLIPipe, CNXNLIBertPipe, CNXNLIPipe, BQCorpusBertPipe, \ - LCQMCPipe, BQCorpusPipe, LCQMCBertPipe, RenamePipe, GranularizePipe, MachingTruncatePipe -from .pipe import Pipe -from .qa import CMRC2018BertPipe - -from .construct_graph import MRPmiGraphPipe, R8PmiGraphPipe, R52PmiGraphPipe, NG20PmiGraphPipe, OhsumedPmiGraphPipe diff --git a/fastNLP/io/pipe/classification.py b/fastNLP/io/pipe/classification.py deleted file mode 100644 index 41682d3e..00000000 --- a/fastNLP/io/pipe/classification.py +++ /dev/null @@ -1,944 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "CLSBasePipe", - "AGsNewsPipe", - "DBPediaPipe", - "YelpFullPipe", - "YelpPolarityPipe", - "SSTPipe", - "SST2Pipe", - 'IMDBPipe', - 
"ChnSentiCorpPipe", - "THUCNewsPipe", - "WeiboSenti100kPipe", - "MRPipe", "R8Pipe", "R52Pipe", "OhsumedPipe", "NG20Pipe" -] - -import re -import warnings - -try: - from nltk import Tree -except: - # only nltk in some versions can run - pass - -from .pipe import Pipe -from .utils import get_tokenizer, _indexize, _add_words_field, _add_chars_field, _granularize -from ..data_bundle import DataBundle -from ..loader.classification import ChnSentiCorpLoader, THUCNewsLoader, WeiboSenti100kLoader -from ..loader.classification import IMDBLoader, YelpFullLoader, SSTLoader, SST2Loader, YelpPolarityLoader, \ - AGsNewsLoader, DBPediaLoader, MRLoader, R52Loader, R8Loader, OhsumedLoader, NG20Loader -from ...core._logger import logger -from ...core.const import Const -from ...core.dataset import DataSet -from ...core.instance import Instance - - -class CLSBasePipe(Pipe): - - def __init__(self, lower: bool = False, tokenizer: str = 'spacy', lang='en'): - super().__init__() - self.lower = lower - self.tokenizer = get_tokenizer(tokenizer, lang=lang) - - def _tokenize(self, data_bundle, field_name=Const.INPUT, new_field_name=None): - r""" - 将DataBundle中的数据进行tokenize - - :param DataBundle data_bundle: - :param str field_name: - :param str new_field_name: - :return: 传入的DataBundle对象 - """ - new_field_name = new_field_name or field_name - for name, dataset in data_bundle.datasets.items(): - dataset.apply_field(self.tokenizer, field_name=field_name, new_field_name=new_field_name) - - return data_bundle - - def process(self, data_bundle: DataBundle): - r""" - 传入的DataSet应该具备如下的结构 - - .. csv-table:: - :header: "raw_words", "target" - - "I got 'new' tires from them and... ", "1" - "Don't waste your time. We had two...", "1" - "...", "..." - - :param data_bundle: - :return: - """ - # 复制一列words - data_bundle = _add_words_field(data_bundle, lower=self.lower) - # 进行tokenize - data_bundle = self._tokenize(data_bundle=data_bundle, field_name=Const.INPUT) - # 建立词表并index - data_bundle = _indexize(data_bundle=data_bundle) - - for name, dataset in data_bundle.datasets.items(): - dataset.add_seq_len(Const.INPUT) - - data_bundle.set_input(Const.INPUT, Const.INPUT_LEN) - data_bundle.set_target(Const.TARGET) - - return data_bundle - - def process_from_file(self, paths) -> DataBundle: - r""" - 传入文件路径,生成处理好的DataBundle对象。paths支持的路径形式可以参考 ::meth:`fastNLP.io.Loader.load()` - - :param paths: - :return: DataBundle - """ - raise NotImplementedError - - -class YelpFullPipe(CLSBasePipe): - r""" - 处理YelpFull的数据, 处理之后DataSet中的内容如下 - - .. csv-table:: 下面是使用YelpFullPipe处理后的DataSet所具备的field - :header: "raw_words", "target", "words", "seq_len" - - "I got 'new' tires from them and within...", 0 ,"[7, 110, 22, 107, 22, 499, 59, 140, 3,...]", 160 - " Don't waste your time. We had two dif... ", 0, "[277, 17, 278, 38, 30, 112, 24, 85, 27...", 40 - "...", ., "[...]", . 
- - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_words | target | words | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | False | True | True | - | is_target | False | True | False | False | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - """ - - def __init__(self, lower: bool = False, granularity=5, tokenizer: str = 'spacy'): - r""" - - :param bool lower: 是否对输入进行小写化。 - :param int granularity: 支持2, 3, 5。若为2, 则认为是2分类问题,将1、2归为1类,4、5归为一类,丢掉3;若为3, 则有3分类问题,将 - 1、2归为1类,3归为1类,4、5归为1类;若为5, 则有5分类问题。 - :param str tokenizer: 使用哪种tokenize方式将数据切成单词。支持'spacy'和'raw'。raw使用空格作为切分。 - """ - super().__init__(lower=lower, tokenizer=tokenizer, lang='en') - assert granularity in (2, 3, 5), "granularity can only be 2,3,5." - self.granularity = granularity - - if granularity == 2: - self.tag_map = {"1": "negative", "2": "negative", "4": "positive", "5": "positive"} - elif granularity == 3: - self.tag_map = {"1": "negative", "2": "negative", "3": "medium", "4": "positive", "5": "positive"} - else: - self.tag_map = None - - def process(self, data_bundle): - r""" - 传入的DataSet应该具备如下的结构 - - .. csv-table:: - :header: "raw_words", "target" - - "I got 'new' tires from them and... ", "1" - "Don't waste your time. We had two...", "1" - "...", "..." - - :param data_bundle: - :return: - """ - if self.tag_map is not None: - data_bundle = _granularize(data_bundle, self.tag_map) - - data_bundle = super().process(data_bundle) - - return data_bundle - - def process_from_file(self, paths=None): - r""" - - :param paths: - :return: DataBundle - """ - data_bundle = YelpFullLoader().load(paths) - return self.process(data_bundle=data_bundle) - - -class YelpPolarityPipe(CLSBasePipe): - r""" - 处理YelpPolarity的数据, 处理之后DataSet中的内容如下 - - .. csv-table:: 下面是使用YelpPolarityPipe处理后的DataSet所具备的field - :header: "raw_words", "target", "words", "seq_len" - - "I got 'new' tires from them and within...", 0, "[7, 110, 22, 107, 22, 499, 59, 140, 3,...]", 160 - " Don't waste your time. We had two dif... ", 0, "[277, 17, 278, 38, 30, 112, 24, 85, 27...", 40 - "...", ., "[...]", . - - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_words | target | words | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | False | True | True | - | is_target | False | True | False | False | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - """ - - def __init__(self, lower: bool = False, tokenizer: str = 'spacy'): - r""" - - :param bool lower: 是否对输入进行小写化。 - :param str tokenizer: 使用哪种tokenize方式将数据切成单词。支持'spacy'和'raw'。raw使用空格作为切分。 - """ - super().__init__(lower=lower, tokenizer=tokenizer, lang='en') - - def process_from_file(self, paths=None): - r""" - - :param str paths: - :return: DataBundle - """ - data_bundle = YelpPolarityLoader().load(paths) - return self.process(data_bundle=data_bundle) - - -class AGsNewsPipe(CLSBasePipe): - r""" - 处理AG's News的数据, 处理之后DataSet中的内容如下 - - .. csv-table:: 下面是使用AGsNewsPipe处理后的DataSet所具备的field - :header: "raw_words", "target", "words", "seq_len" - - "I got 'new' tires from them and within...", 0, "[7, 110, 22, 107, 22, 499, 59, 140, 3,...]", 160 - " Don't waste your time. We had two dif... 
", 0, "[277, 17, 278, 38, 30, 112, 24, 85, 27...", 40 - "...", ., "[...]", . - - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_words | target | words | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | False | True | True | - | is_target | False | True | False | False | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - """ - - def __init__(self, lower: bool = False, tokenizer: str = 'spacy'): - r""" - - :param bool lower: 是否对输入进行小写化。 - :param str tokenizer: 使用哪种tokenize方式将数据切成单词。支持'spacy'和'raw'。raw使用空格作为切分。 - """ - super().__init__(lower=lower, tokenizer=tokenizer, lang='en') - - def process_from_file(self, paths=None): - r""" - :param str paths: - :return: DataBundle - """ - data_bundle = AGsNewsLoader().load(paths) - return self.process(data_bundle=data_bundle) - - -class DBPediaPipe(CLSBasePipe): - r""" - 处理DBPedia的数据, 处理之后DataSet中的内容如下 - - .. csv-table:: 下面是使用DBPediaPipe处理后的DataSet所具备的field - :header: "raw_words", "target", "words", "seq_len" - - "I got 'new' tires from them and within...", 0 ,"[7, 110, 22, 107, 22, 499, 59, 140, 3,...]", 160 - " Don't waste your time. We had two dif... ", 0, "[277, 17, 278, 38, 30, 112, 24, 85, 27...", 40 - "...", ., "[...]", . - - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_words | target | words | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | False | True | True | - | is_target | False | True | False | False | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - """ - - def __init__(self, lower: bool = False, tokenizer: str = 'spacy'): - r""" - - :param bool lower: 是否对输入进行小写化。 - :param str tokenizer: 使用哪种tokenize方式将数据切成单词。支持'spacy'和'raw'。raw使用空格作为切分。 - """ - super().__init__(lower=lower, tokenizer=tokenizer, lang='en') - - def process_from_file(self, paths=None): - r""" - :param str paths: - :return: DataBundle - """ - data_bundle = DBPediaLoader().load(paths) - return self.process(data_bundle=data_bundle) - - -class SSTPipe(CLSBasePipe): - r""" - 经过该Pipe之后,DataSet中具备的field如下所示 - - .. csv-table:: 下面是使用SSTPipe处理后的DataSet所具备的field - :header: "raw_words", "words", "target", "seq_len" - - "It 's a lovely film with lovely perfor...", 1, "[187, 6, 5, 132, 120, 70, 132, 188, 25...", 13 - "No one goes unindicted here , which is...", 0, "[191, 126, 192, 193, 194, 4, 195, 17, ...", 13 - "...", ., "[...]", . 
- - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_words | target | words | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | False | True | True | - | is_target | False | True | False | False | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - """ - - def __init__(self, subtree=False, train_subtree=True, lower=False, granularity=5, tokenizer='spacy'): - r""" - - :param bool subtree: 是否将train, test, dev数据展开为子树,扩充数据量。 Default: ``False`` - :param bool train_subtree: 是否将train集通过子树扩展数据。 - :param bool lower: 是否对输入进行小写化。 - :param int granularity: 支持2, 3, 5。若为2, 则认为是2分类问题,将0、1归为1类,3、4归为一类,丢掉2;若为3, 则有3分类问题,将 - 0、1归为1类,2归为1类,3、4归为1类;若为5, 则有5分类问题。 - :param str tokenizer: 使用哪种tokenize方式将数据切成单词。支持'spacy'和'raw'。raw使用空格作为切分。 - """ - super().__init__(tokenizer=tokenizer, lang='en') - self.subtree = subtree - self.train_tree = train_subtree - self.lower = lower - assert granularity in (2, 3, 5), "granularity can only be 2,3,5." - self.granularity = granularity - - if granularity == 2: - self.tag_map = {"0": "negative", "1": "negative", "3": "positive", "4": "positive"} - elif granularity == 3: - self.tag_map = {"0": "negative", "1": "negative", "2": "medium", "3": "positive", "4": "positive"} - else: - self.tag_map = None - - def process(self, data_bundle: DataBundle): - r""" - 对DataBundle中的数据进行预处理。输入的DataSet应该至少拥有raw_words这一列,且内容类似与 - - .. csv-table:: 下面是使用SSTLoader读取的DataSet所具备的field - :header: "raw_words" - - "(2 (3 (3 Effective) (2 but)) (1 (1 too-tepid)..." - "(3 (3 (2 If) (3 (2 you) (3 (2 sometimes) ..." - "..." - - :param ~fastNLP.io.DataBundle data_bundle: 需要处理的DataBundle对象 - :return: - """ - # 先取出subtree - for name in list(data_bundle.datasets.keys()): - dataset = data_bundle.get_dataset(name) - ds = DataSet() - use_subtree = self.subtree or (name == 'train' and self.train_tree) - for ins in dataset: - raw_words = ins[Const.RAW_WORD] - tree = Tree.fromstring(raw_words) - if use_subtree: - for t in tree.subtrees(): - raw_words = " ".join(t.leaves()) - instance = Instance(raw_words=raw_words, target=t.label()) - ds.append(instance) - else: - instance = Instance(raw_words=' '.join(tree.leaves()), target=tree.label()) - ds.append(instance) - data_bundle.set_dataset(ds, name) - - # 根据granularity设置tag - data_bundle = _granularize(data_bundle, tag_map=self.tag_map) - - data_bundle = super().process(data_bundle) - - return data_bundle - - def process_from_file(self, paths=None): - data_bundle = SSTLoader().load(paths) - return self.process(data_bundle=data_bundle) - - -class SST2Pipe(CLSBasePipe): - r""" - 加载SST2的数据, 处理完成之后DataSet将拥有以下的field - - .. csv-table:: - :header: "raw_words", "target", "words", "seq_len" - - "it 's a charming and often affecting j... ", 1, "[19, 9, 6, 111, 5, 112, 113, 114, 3]", 9 - "unflinchingly bleak and desperate", 0, "[115, 116, 5, 117]", 4 - "...", "...", ., . 
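About the granularity remapping used by YelpFullPipe and SSTPipe above: _granularize (imported from .utils) applies the tag_map and, as the docstrings state, drops instances whose original label has no entry in it. A rough standalone approximation of that behaviour, not the actual implementation, assuming instances are plain dicts::

    def granularize(instances, tag_map):
        # keep only instances whose label appears in tag_map and rewrite the label;
        # with SSTPipe's granularity=2 mapping, label "2" is dropped entirely
        return [dict(ins, target=tag_map[ins['target']])
                for ins in instances if ins['target'] in tag_map]

    tag_map = {"0": "negative", "1": "negative", "3": "positive", "4": "positive"}
    data = [{'target': "0"}, {'target': "2"}, {'target': "4"}]
    assert [d['target'] for d in granularize(data, tag_map)] == ["negative", "positive"]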
- - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_words | target | words | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | False | True | True | - | is_target | False | True | False | False | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - """ - - def __init__(self, lower=False, tokenizer='spacy'): - r""" - - :param bool lower: 是否对输入进行小写化。 - :param str tokenizer: 使用哪种tokenize方式将数据切成单词。支持'spacy'和'raw'。raw使用空格作为切分。 - """ - super().__init__(lower=lower, tokenizer=tokenizer, lang='en') - - def process_from_file(self, paths=None): - r""" - - :param str paths: 如果为None,则自动下载并缓存到fastNLP的缓存地址。 - :return: DataBundle - """ - data_bundle = SST2Loader().load(paths) - return self.process(data_bundle) - - -class IMDBPipe(CLSBasePipe): - r""" - 经过本Pipe处理后DataSet将如下 - - .. csv-table:: 输出DataSet的field - :header: "raw_words", "target", "words", "seq_len" - - "Bromwell High is a cartoon ... ", 0, "[3, 5, 6, 9, ...]", 20 - "Story of a man who has ...", 1, "[20, 43, 9, 10, ...]", 31 - "...", ., "[...]", . - - 其中raw_words为str类型,是原文; words是转换为index的输入; target是转换为index的目标值; - words列被设置为input; target列被设置为target。 - - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_words | target | words | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | False | True | True | - | is_target | False | True | False | False | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - """ - - def __init__(self, lower: bool = False, tokenizer: str = 'spacy'): - r""" - - :param bool lower: 是否将words列的数据小写。 - :param str tokenizer: 使用什么tokenizer来将句子切分为words. 支持spacy, raw两种。raw即使用空格拆分。 - """ - super().__init__(tokenizer=tokenizer, lang='en') - self.lower = lower - - def process(self, data_bundle: DataBundle): - r""" - 期待的DataBunlde中输入的DataSet应该类似于如下,有两个field,raw_words和target,且均为str类型 - - .. csv-table:: 输入DataSet的field - :header: "raw_words", "target" - - "Bromwell High is a cartoon ... ", "pos" - "Story of a man who has ...", "neg" - "...", "..." - - :param DataBunlde data_bundle: 传入的DataBundle中的DataSet必须包含raw_words和target两个field,且raw_words列应该为str, - target列应该为str。 - :return: DataBundle - """ - - # 替换
- def replace_br(raw_words): - raw_words = raw_words.replace("<br />
", ' ') - return raw_words - - for name, dataset in data_bundle.datasets.items(): - dataset.apply_field(replace_br, field_name=Const.RAW_WORD, new_field_name=Const.RAW_WORD) - - data_bundle = super().process(data_bundle) - - return data_bundle - - def process_from_file(self, paths=None): - r""" - - :param paths: 支持路径类型参见 :class:`fastNLP.io.loader.Loader` 的load函数。 - :return: DataBundle - """ - # 读取数据 - data_bundle = IMDBLoader().load(paths) - data_bundle = self.process(data_bundle) - - return data_bundle - - -class ChnSentiCorpPipe(Pipe): - r""" - 处理之后的DataSet有以下的结构 - - .. csv-table:: - :header: "raw_chars", "target", "chars", "seq_len" - - "這間酒店環境和服務態度亦算不錯,但房間空間太小~~", 1, "[2, 3, 4, 5, ...]", 31 - "<荐书> 推荐所有喜欢<红楼>...", 1, "[10, 21, ....]", 25 - "..." - - 其中chars, seq_len是input,target是target - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_chars | target | chars | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | True | True | True | - | is_target | False | True | False | False | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - """ - - def __init__(self, bigrams=False, trigrams=False): - r""" - - :param bool bigrams: 是否增加一列bigrams. bigrams的构成是['复', '旦', '大', '学', ...]->["复旦", "旦大", ...]。如果 - 设置为True,返回的DataSet将有一列名为bigrams, 且已经转换为了index并设置为input,对应的vocab可以通过 - data_bundle.get_vocab('bigrams')获取. - :param bool trigrams: 是否增加一列trigrams. trigrams的构成是 ['复', '旦', '大', '学', ...]->["复旦大", "旦大学", ...] - 。如果设置为True,返回的DataSet将有一列名为trigrams, 且已经转换为了index并设置为input,对应的vocab可以通过 - data_bundle.get_vocab('trigrams')获取. - """ - super().__init__() - - self.bigrams = bigrams - self.trigrams = trigrams - - def _tokenize(self, data_bundle): - r""" - 将DataSet中的"复旦大学"拆分为["复", "旦", "大", "学"]. 未来可以通过扩展这个函数实现分词。 - - :param data_bundle: - :return: - """ - data_bundle.apply_field(list, field_name=Const.CHAR_INPUT, new_field_name=Const.CHAR_INPUT) - return data_bundle - - def process(self, data_bundle: DataBundle): - r""" - 可以处理的DataSet应该具备以下的field - - .. csv-table:: - :header: "raw_chars", "target" - - "這間酒店環境和服務態度亦算不錯,但房間空間太小~~", "1" - "<荐书> 推荐所有喜欢<红楼>...", "1" - "..." 
- - :param data_bundle: - :return: - """ - _add_chars_field(data_bundle, lower=False) - - data_bundle = self._tokenize(data_bundle) - - input_field_names = [Const.CHAR_INPUT] - if self.bigrams: - for name, dataset in data_bundle.iter_datasets(): - dataset.apply_field(lambda chars: [c1 + c2 for c1, c2 in zip(chars, chars[1:] + [''])], - field_name=Const.CHAR_INPUT, new_field_name='bigrams') - input_field_names.append('bigrams') - if self.trigrams: - for name, dataset in data_bundle.iter_datasets(): - dataset.apply_field(lambda chars: [c1 + c2 + c3 for c1, c2, c3 in - zip(chars, chars[1:] + [''], chars[2:] + [''] * 2)], - field_name=Const.CHAR_INPUT, new_field_name='trigrams') - input_field_names.append('trigrams') - - # index - _indexize(data_bundle, input_field_names, Const.TARGET) - - input_fields = [Const.TARGET, Const.INPUT_LEN] + input_field_names - target_fields = [Const.TARGET] - - for name, dataset in data_bundle.datasets.items(): - dataset.add_seq_len(Const.CHAR_INPUT) - - data_bundle.set_input(*input_fields) - data_bundle.set_target(*target_fields) - - return data_bundle - - def process_from_file(self, paths=None): - r""" - - :param paths: 支持路径类型参见 :class:`fastNLP.io.loader.Loader` 的load函数。 - :return: DataBundle - """ - # 读取数据 - data_bundle = ChnSentiCorpLoader().load(paths) - data_bundle = self.process(data_bundle) - - return data_bundle - - -class THUCNewsPipe(CLSBasePipe): - r""" - 处理之后的DataSet有以下的结构 - - .. csv-table:: - :header: "raw_chars", "target", "chars", "seq_len" - - "马晓旭意外受伤让国奥警惕 无奈大雨格外青睐殷家军记者傅亚雨沈阳报道...", 0, "[409, 1197, 2146, 213, ...]", 746 - "..." - - 其中chars, seq_len是input,target是target - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_chars | target | chars | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | True | True | True | - | is_target | False | True | False | False | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - :param bool bigrams: 是否增加一列bigrams. bigrams的构成是['复', '旦', '大', '学', ...]->["复旦", "旦大", ...]。如果 - 设置为True,返回的DataSet将有一列名为bigrams, 且已经转换为了index并设置为input,对应的vocab可以通过 - data_bundle.get_vocab('bigrams')获取. - :param bool trigrams: 是否增加一列trigrams. trigrams的构成是 ['复', '旦', '大', '学', ...]->["复旦大", "旦大学", ...] - 。如果设置为True,返回的DataSet将有一列名为trigrams, 且已经转换为了index并设置为input,对应的vocab可以通过 - data_bundle.get_vocab('trigrams')获取. - """ - - def __init__(self, bigrams=False, trigrams=False): - super().__init__() - - self.bigrams = bigrams - self.trigrams = trigrams - - def _chracter_split(self, sent): - return list(sent) - # return [w for w in sent] - - def _raw_split(self, sent): - return sent.split() - - def _tokenize(self, data_bundle, field_name=Const.INPUT, new_field_name=None): - new_field_name = new_field_name or field_name - for name, dataset in data_bundle.datasets.items(): - dataset.apply_field(self._chracter_split, field_name=field_name, new_field_name=new_field_name) - return data_bundle - - def process(self, data_bundle: DataBundle): - r""" - 可处理的DataSet应具备如下的field - - .. csv-table:: - :header: "raw_words", "target" - - "马晓旭意外受伤让国奥警惕 无奈大雨格外青睐殷家军记者傅亚雨沈阳报道 ... ", "体育" - "...", "..." 
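Tokenization in the character-level pipes above (_chracter_split, and ChnSentiCorpPipe's _tokenize before it) is simply exploding the string, one character per token::

    sent = '马晓旭意外受伤'
    print(list(sent))  # ['马', '晓', '旭', '意', '外', '受', '伤']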
- - :param data_bundle: - :return: - """ - # 根据granularity设置tag - tag_map = {'体育': 0, '财经': 1, '房产': 2, '家居': 3, '教育': 4, '科技': 5, '时尚': 6, '时政': 7, '游戏': 8, '娱乐': 9} - data_bundle = _granularize(data_bundle=data_bundle, tag_map=tag_map) - - # clean,lower - - # CWS(tokenize) - data_bundle = self._tokenize(data_bundle=data_bundle, field_name='raw_chars', new_field_name='chars') - - input_field_names = [Const.CHAR_INPUT] - - # n-grams - if self.bigrams: - for name, dataset in data_bundle.iter_datasets(): - dataset.apply_field(lambda chars: [c1 + c2 for c1, c2 in zip(chars, chars[1:] + [''])], - field_name=Const.CHAR_INPUT, new_field_name='bigrams') - input_field_names.append('bigrams') - if self.trigrams: - for name, dataset in data_bundle.iter_datasets(): - dataset.apply_field(lambda chars: [c1 + c2 + c3 for c1, c2, c3 in - zip(chars, chars[1:] + [''], chars[2:] + [''] * 2)], - field_name=Const.CHAR_INPUT, new_field_name='trigrams') - input_field_names.append('trigrams') - - # index - data_bundle = _indexize(data_bundle=data_bundle, input_field_names=Const.CHAR_INPUT) - - # add length - for name, dataset in data_bundle.datasets.items(): - dataset.add_seq_len(field_name=Const.CHAR_INPUT, new_field_name=Const.INPUT_LEN) - - input_fields = [Const.TARGET, Const.INPUT_LEN] + input_field_names - target_fields = [Const.TARGET] - - data_bundle.set_input(*input_fields) - data_bundle.set_target(*target_fields) - - return data_bundle - - def process_from_file(self, paths=None): - r""" - :param paths: 支持路径类型参见 :class:`fastNLP.io.loader.Loader` 的load函数。 - :return: DataBundle - """ - data_loader = THUCNewsLoader() # 此处需要实例化一个data_loader,否则传入load()的参数为None - data_bundle = data_loader.load(paths) - data_bundle = self.process(data_bundle) - return data_bundle - - -class WeiboSenti100kPipe(CLSBasePipe): - r""" - 处理之后的DataSet有以下的结构 - - .. csv-table:: - :header: "raw_chars", "target", "chars", "seq_len" - - "六一出生的?好讽刺…… //@祭春姬:他爸爸是外星人吧 //@面孔小高:现在的孩子都怎么了 [怒][怒][怒]", 0, "[0, 690, 18, ...]", 56 - "..." - - 其中chars, seq_len是input,target是target - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_chars | target | chars | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | True | True | True | - | is_target | False | True | False | False | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - :param bool bigrams: 是否增加一列bigrams. bigrams的构成是['复', '旦', '大', '学', ...]->["复旦", "旦大", ...]。如果 - 设置为True,返回的DataSet将有一列名为bigrams, 且已经转换为了index并设置为input,对应的vocab可以通过 - data_bundle.get_vocab('bigrams')获取. - :param bool trigrams: 是否增加一列trigrams. trigrams的构成是 ['复', '旦', '大', '学', ...]->["复旦大", "旦大学", ...] - 。如果设置为True,返回的DataSet将有一列名为trigrams, 且已经转换为了index并设置为input,对应的vocab可以通过 - data_bundle.get_vocab('trigrams')获取. - """ - - def __init__(self, bigrams=False, trigrams=False): - super().__init__() - - self.bigrams = bigrams - self.trigrams = trigrams - - def _chracter_split(self, sent): - return list(sent) - - def _tokenize(self, data_bundle, field_name=Const.INPUT, new_field_name=None): - new_field_name = new_field_name or field_name - for name, dataset in data_bundle.datasets.items(): - dataset.apply_field(self._chracter_split, field_name=field_name, new_field_name=new_field_name) - return data_bundle - - def process(self, data_bundle: DataBundle): - r""" - 可处理的DataSet应具备以下的field - - .. 
csv-table:: - :header: "raw_chars", "target" - - "六一出生的?好讽刺…… //@祭春姬:他爸爸是外星人吧 //@面孔小高:现在的孩子都怎么了 [怒][怒][怒]", "0" - "...", "..." - - :param data_bundle: - :return: - """ - # clean,lower - - # CWS(tokenize) - data_bundle = self._tokenize(data_bundle=data_bundle, field_name='raw_chars', new_field_name='chars') - - input_field_names = [Const.CHAR_INPUT] - - # n-grams - if self.bigrams: - for name, dataset in data_bundle.iter_datasets(): - dataset.apply_field(lambda chars: [c1 + c2 for c1, c2 in zip(chars, chars[1:] + [''])], - field_name=Const.CHAR_INPUT, new_field_name='bigrams') - input_field_names.append('bigrams') - if self.trigrams: - for name, dataset in data_bundle.iter_datasets(): - dataset.apply_field(lambda chars: [c1 + c2 + c3 for c1, c2, c3 in - zip(chars, chars[1:] + [''], chars[2:] + [''] * 2)], - field_name=Const.CHAR_INPUT, new_field_name='trigrams') - input_field_names.append('trigrams') - - # index - data_bundle = _indexize(data_bundle=data_bundle, input_field_names='chars') - - # add length - for name, dataset in data_bundle.datasets.items(): - dataset.add_seq_len(field_name=Const.CHAR_INPUT, new_field_name=Const.INPUT_LEN) - - input_fields = [Const.TARGET, Const.INPUT_LEN] + input_field_names - target_fields = [Const.TARGET] - - data_bundle.set_input(*input_fields) - data_bundle.set_target(*target_fields) - - return data_bundle - - def process_from_file(self, paths=None): - r""" - :param paths: 支持路径类型参见 :class:`fastNLP.io.loader.Loader` 的load函数。 - :return: DataBundle - """ - data_loader = WeiboSenti100kLoader() # 此处需要实例化一个data_loader,否则传入load()的参数为None - data_bundle = data_loader.load(paths) - data_bundle = self.process(data_bundle) - return data_bundle - -class MRPipe(CLSBasePipe): - def __init__(self, lower: bool = False, tokenizer: str = 'spacy'): - r""" - - :param bool lower: 是否将words列的数据小写。 - :param str tokenizer: 使用什么tokenizer来将句子切分为words. 支持spacy, raw两种。raw即使用空格拆分。 - """ - super().__init__(tokenizer=tokenizer, lang='en') - self.lower = lower - - def process_from_file(self, paths=None): - r""" - - :param paths: 支持路径类型参见 :class:`fastNLP.io.loader.Loader` 的load函数。 - :return: DataBundle - """ - # 读取数据 - data_bundle = MRLoader().load(paths) - data_bundle = self.process(data_bundle) - - return data_bundle - - -class R8Pipe(CLSBasePipe): - def __init__(self, lower: bool = False, tokenizer: str = 'spacy'): - r""" - - :param bool lower: 是否将words列的数据小写。 - :param str tokenizer: 使用什么tokenizer来将句子切分为words. 支持spacy, raw两种。raw即使用空格拆分。 - """ - super().__init__(tokenizer=tokenizer, lang='en') - self.lower = lower - - def process_from_file(self, paths=None): - r""" - - :param paths: 支持路径类型参见 :class:`fastNLP.io.loader.Loader` 的load函数。 - :return: DataBundle - """ - # 读取数据 - data_bundle = R8Loader().load(paths) - data_bundle = self.process(data_bundle) - - return data_bundle - - -class R52Pipe(CLSBasePipe): - def __init__(self, lower: bool = False, tokenizer: str = 'spacy'): - r""" - - :param bool lower: 是否将words列的数据小写。 - :param str tokenizer: 使用什么tokenizer来将句子切分为words. 
支持spacy, raw两种。raw即使用空格拆分。 - """ - super().__init__(tokenizer=tokenizer, lang='en') - self.lower = lower - - def process_from_file(self, paths=None): - r""" - - :param paths: 支持路径类型参见 :class:`fastNLP.io.loader.Loader` 的load函数。 - :return: DataBundle - """ - # 读取数据 - data_bundle = R52Loader().load(paths) - data_bundle = self.process(data_bundle) - - return data_bundle - - -class OhsumedPipe(CLSBasePipe): - def __init__(self, lower: bool = False, tokenizer: str = 'spacy'): - r""" - - :param bool lower: 是否将words列的数据小写。 - :param str tokenizer: 使用什么tokenizer来将句子切分为words. 支持spacy, raw两种。raw即使用空格拆分。 - """ - super().__init__(tokenizer=tokenizer, lang='en') - self.lower = lower - - def process_from_file(self, paths=None): - r""" - - :param paths: 支持路径类型参见 :class:`fastNLP.io.loader.Loader` 的load函数。 - :return: DataBundle - """ - # 读取数据 - data_bundle = OhsumedLoader().load(paths) - data_bundle = self.process(data_bundle) - - return data_bundle - - -class NG20Pipe(CLSBasePipe): - def __init__(self, lower: bool = False, tokenizer: str = 'spacy'): - r""" - - :param bool lower: 是否将words列的数据小写。 - :param str tokenizer: 使用什么tokenizer来将句子切分为words. 支持spacy, raw两种。raw即使用空格拆分。 - """ - super().__init__(tokenizer=tokenizer, lang='en') - self.lower = lower - - def process_from_file(self, paths=None): - r""" - - :param paths: 支持路径类型参见 :class:`fastNLP.io.loader.Loader` 的load函数。 - :return: DataBundle - """ - # 读取数据 - data_bundle = NG20Loader().load(paths) - data_bundle = self.process(data_bundle) - - return data_bundle \ No newline at end of file diff --git a/fastNLP/io/pipe/conll.py b/fastNLP/io/pipe/conll.py deleted file mode 100644 index ec029b7e..00000000 --- a/fastNLP/io/pipe/conll.py +++ /dev/null @@ -1,430 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "Conll2003NERPipe", - "Conll2003Pipe", - "OntoNotesNERPipe", - "MsraNERPipe", - "PeopleDailyPipe", - "WeiboNERPipe" -] - -from .pipe import Pipe -from .utils import _add_chars_field -from .utils import _indexize, _add_words_field -from .utils import iob2, iob2bioes -from .. import DataBundle -from ..loader.conll import Conll2003NERLoader, OntoNotesNERLoader -from ..loader.conll import PeopleDailyNERLoader, WeiboNERLoader, MsraNERLoader, ConllLoader -from ...core.const import Const -from ...core.vocabulary import Vocabulary - - -class _NERPipe(Pipe): - r""" - NER任务的处理Pipe, 该Pipe会(1)复制raw_words列,并命名为words; (2)在words, target列建立词表 - (创建 :class:`fastNLP.Vocabulary` 对象,所以在返回的DataBundle中将有两个Vocabulary); (3)将words,target列根据相应的 - Vocabulary转换为index。 - - raw_words列为List[str], 是未转换的原始数据; words列为List[int],是转换为index的输入数据; target列是List[int],是转换为index的 - target。返回的DataSet中被设置为input有words, target, seq_len; 设置为target有target, seq_len。 - """ - - def __init__(self, encoding_type: str = 'bio', lower: bool = False): - r""" - - :param: str encoding_type: target列使用什么类型的encoding方式,支持bioes, bio两种。 - :param bool lower: 是否将words小写化后再建立词表,绝大多数情况都不需要设置为True。 - """ - if encoding_type == 'bio': - self.convert_tag = iob2 - elif encoding_type == 'bioes': - self.convert_tag = lambda words: iob2bioes(iob2(words)) - else: - raise ValueError("encoding_type only supports `bio` and `bioes`.") - self.lower = lower - - def process(self, data_bundle: DataBundle) -> DataBundle: - r""" - 支持的DataSet的field为 - - .. 
csv-table:: - :header: "raw_words", "target" - - "[Nadim, Ladki]", "[B-PER, I-PER]" - "[AL-AIN, United, Arab, ...]", "[B-LOC, B-LOC, I-LOC, ...]" - "[...]", "[...]" - - :param ~fastNLP.DataBundle data_bundle: 传入的DataBundle中的DataSet必须包含raw_words和ner两个field,且两个field的内容均为List[str]在传入DataBundle基础上原位修改。 - :return DataBundle: - """ - # 转换tag - for name, dataset in data_bundle.datasets.items(): - dataset.apply_field(self.convert_tag, field_name=Const.TARGET, new_field_name=Const.TARGET) - - _add_words_field(data_bundle, lower=self.lower) - - # index - _indexize(data_bundle) - - input_fields = [Const.TARGET, Const.INPUT, Const.INPUT_LEN] - target_fields = [Const.TARGET, Const.INPUT_LEN] - - for name, dataset in data_bundle.datasets.items(): - dataset.add_seq_len(Const.INPUT) - - data_bundle.set_input(*input_fields) - data_bundle.set_target(*target_fields) - - return data_bundle - - -class Conll2003NERPipe(_NERPipe): - r""" - Conll2003的NER任务的处理Pipe, 该Pipe会(1)复制raw_words列,并命名为words; (2)在words, target列建立词表 - (创建 :class:`fastNLP.Vocabulary` 对象,所以在返回的DataBundle中将有两个Vocabulary); (3)将words,target列根据相应的 - Vocabulary转换为index。 - 经过该Pipe过后,DataSet中的内容如下所示 - - .. csv-table:: Following is a demo layout of DataSet returned by Conll2003Loader - :header: "raw_words", "target", "words", "seq_len" - - "[Nadim, Ladki]", "[1, 2]", "[2, 3]", 2 - "[AL-AIN, United, Arab, ...]", "[3, 4,...]", "[4, 5, 6,...]", 6 - "[...]", "[...]", "[...]", . - - raw_words列为List[str], 是未转换的原始数据; words列为List[int],是转换为index的输入数据; target列是List[int],是转换为index的 - target。返回的DataSet中被设置为input有words, target, seq_len; 设置为target有target。 - - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_words | target | words | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | True | True | True | - | is_target | False | True | False | True | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - """ - - def process_from_file(self, paths) -> DataBundle: - r""" - - :param paths: 支持路径类型参见 :class:`fastNLP.io.loader.ConllLoader` 的load函数。 - :return: DataBundle - """ - # 读取数据 - data_bundle = Conll2003NERLoader().load(paths) - data_bundle = self.process(data_bundle) - - return data_bundle - - -class Conll2003Pipe(Pipe): - r""" - 经过该Pipe后,DataSet中的内容如下 - - .. csv-table:: - :header: "raw_words" , "pos", "chunk", "ner", "words", "seq_len" - - "[Nadim, Ladki]", "[0, 0]", "[1, 2]", "[1, 2]", "[2, 3]", 2 - "[AL-AIN, United, Arab, ...]", "[1, 2...]", "[3, 4...]", "[3, 4...]", "[4, 5, 6,...]", 6 - "[...]", "[...]", "[...]", "[...]", "[...]", . 
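The encoding_type choice handled in _NERPipe.__init__ above only changes how entity spans are opened and closed. A standalone BIO-to-BIOES converter, equivalent in effect to iob2bioes(iob2(tags)) for tags already in valid BIO form (a sketch, not fastNLP's internal implementation)::

    def to_bioes(tags):
        # B-X stays B-X while the entity continues, otherwise it becomes S-X;
        # I-X stays I-X while the entity continues, otherwise it becomes E-X.
        out = []
        for i, tag in enumerate(tags):
            if tag == 'O':
                out.append(tag)
                continue
            prefix, label = tag.split('-', 1)
            nxt = tags[i + 1] if i + 1 < len(tags) else 'O'
            cont = nxt == 'I-' + label
            if prefix == 'B':
                out.append(('B-' if cont else 'S-') + label)
            else:
                out.append(('I-' if cont else 'E-') + label)
        return out

    print(to_bioes(['B-PER', 'I-PER', 'O', 'B-LOC']))
    # ['B-PER', 'E-PER', 'O', 'S-LOC']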
- - 其中words, seq_len是input; pos, chunk, ner, seq_len是target - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+-------+-------+-------+-------+---------+ - | field_names | raw_words | pos | chunk | ner | words | seq_len | - +-------------+-----------+-------+-------+-------+-------+---------+ - | is_input | False | False | False | False | True | True | - | is_target | False | True | True | True | False | True | - | ignore_type | | False | False | False | False | False | - | pad_value | | 0 | 0 | 0 | 0 | 0 | - +-------------+-----------+-------+-------+-------+-------+---------+ - - - """ - def __init__(self, chunk_encoding_type='bioes', ner_encoding_type='bioes', lower: bool = False): - r""" - - :param str chunk_encoding_type: 支持bioes, bio。 - :param str ner_encoding_type: 支持bioes, bio。 - :param bool lower: 是否将words列小写化后再建立词表 - """ - if chunk_encoding_type == 'bio': - self.chunk_convert_tag = iob2 - elif chunk_encoding_type == 'bioes': - self.chunk_convert_tag = lambda tags: iob2bioes(iob2(tags)) - else: - raise ValueError("chunk_encoding_type only supports `bio` and `bioes`.") - if ner_encoding_type == 'bio': - self.ner_convert_tag = iob2 - elif ner_encoding_type == 'bioes': - self.ner_convert_tag = lambda tags: iob2bioes(iob2(tags)) - else: - raise ValueError("ner_encoding_type only supports `bio` and `bioes`.") - self.lower = lower - - def process(self, data_bundle) -> DataBundle: - r""" - 输入的DataSet应该类似于如下的形式 - - .. csv-table:: - :header: "raw_words", "pos", "chunk", "ner" - - "[Nadim, Ladki]", "[NNP, NNP]", "[B-NP, I-NP]", "[B-PER, I-PER]" - "[AL-AIN, United, Arab, ...]", "[NNP, NNP...]", "[B-NP, B-NP, ...]", "[B-LOC, B-LOC,...]" - "[...]", "[...]", "[...]", "[...]", . - - :param data_bundle: - :return: 传入的DataBundle - """ - # 转换tag - for name, dataset in data_bundle.datasets.items(): - dataset.drop(lambda x: "-DOCSTART-" in x[Const.RAW_WORD]) - dataset.apply_field(self.chunk_convert_tag, field_name='chunk', new_field_name='chunk') - dataset.apply_field(self.ner_convert_tag, field_name='ner', new_field_name='ner') - - _add_words_field(data_bundle, lower=self.lower) - - # index - _indexize(data_bundle, input_field_names=Const.INPUT, target_field_names=['pos', 'ner']) - # chunk中存在一些tag只在dev中出现,没在train中 - tgt_vocab = Vocabulary(unknown=None, padding=None) - tgt_vocab.from_dataset(*data_bundle.datasets.values(), field_name='chunk') - tgt_vocab.index_dataset(*data_bundle.datasets.values(), field_name='chunk') - data_bundle.set_vocab(tgt_vocab, 'chunk') - - input_fields = [Const.INPUT, Const.INPUT_LEN] - target_fields = ['pos', 'ner', 'chunk', Const.INPUT_LEN] - - for name, dataset in data_bundle.datasets.items(): - dataset.add_seq_len(Const.INPUT) - - data_bundle.set_input(*input_fields) - data_bundle.set_target(*target_fields) - - return data_bundle - - def process_from_file(self, paths): - r""" - - :param paths: - :return: - """ - data_bundle = ConllLoader(headers=['raw_words', 'pos', 'chunk', 'ner']).load(paths) - return self.process(data_bundle) - - -class OntoNotesNERPipe(_NERPipe): - r""" - 处理OntoNotes的NER数据,处理之后DataSet中的field情况为 - - .. csv-table:: - :header: "raw_words", "target", "words", "seq_len" - - "[Nadim, Ladki]", "[1, 2]", "[2, 3]", 2 - "[AL-AIN, United, Arab, ...]", "[3, 4]", "[4, 5, 6,...]", 6 - "[...]", "[...]", "[...]", . 
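Hypothetical usage of Conll2003Pipe above (the path is a placeholder; the accepted forms are those of ConllLoader.load)::

    from fastNLP.io.pipe.conll import Conll2003Pipe

    pipe = Conll2003Pipe(chunk_encoding_type='bioes', ner_encoding_type='bio')
    data_bundle = pipe.process_from_file('/path/to/conll2003')
    print(data_bundle.get_vocab('chunk'))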
- - raw_words列为List[str], 是未转换的原始数据; words列为List[int],是转换为index的输入数据; target列是List[int],是转换为index的 - target。返回的DataSet中被设置为input有words, target, seq_len; 设置为target有target。 - - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_words | target | words | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | True | True | True | - | is_target | False | True | False | True | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - """ - - def process_from_file(self, paths): - data_bundle = OntoNotesNERLoader().load(paths) - return self.process(data_bundle) - - -class _CNNERPipe(Pipe): - r""" - 中文NER任务的处理Pipe, 该Pipe会(1)复制raw_chars列,并命名为chars; (2)在chars, target列建立词表 - (创建 :class:`fastNLP.Vocabulary` 对象,所以在返回的DataBundle中将有两个Vocabulary); (3)将chars,target列根据相应的 - Vocabulary转换为index。 - - raw_chars列为List[str], 是未转换的原始数据; chars列为List[int],是转换为index的输入数据; target列是List[int],是转换为index的 - target。返回的DataSet中被设置为input有chars, target, seq_len; 设置为target有target, seq_len。 - - """ - - def __init__(self, encoding_type: str = 'bio', bigrams=False, trigrams=False): - r""" - - :param str encoding_type: target列使用什么类型的encoding方式,支持bioes, bio两种。 - :param bool bigrams: 是否增加一列bigrams. bigrams的构成是['复', '旦', '大', '学', ...]->["复旦", "旦大", ...]。如果 - 设置为True,返回的DataSet将有一列名为bigrams, 且已经转换为了index并设置为input,对应的vocab可以通过 - data_bundle.get_vocab('bigrams')获取. - :param bool trigrams: 是否增加一列trigrams. trigrams的构成是 ['复', '旦', '大', '学', ...]->["复旦大", "旦大学", ...] - 。如果设置为True,返回的DataSet将有一列名为trigrams, 且已经转换为了index并设置为input,对应的vocab可以通过 - data_bundle.get_vocab('trigrams')获取. - """ - if encoding_type == 'bio': - self.convert_tag = iob2 - elif encoding_type == 'bioes': - self.convert_tag = lambda words: iob2bioes(iob2(words)) - else: - raise ValueError("encoding_type only supports `bio` and `bioes`.") - - self.bigrams = bigrams - self.trigrams = trigrams - - def process(self, data_bundle: DataBundle) -> DataBundle: - r""" - 支持的DataSet的field为 - - .. 
csv-table:: - :header: "raw_chars", "target" - - "[相, 比, 之, 下,...]", "[O, O, O, O, ...]" - "[青, 岛, 海, 牛, 队, 和, ...]", "[B-ORG, I-ORG, I-ORG, ...]" - "[...]", "[...]" - - raw_chars列为List[str], 是未转换的原始数据; chars列为List[int],是转换为index的输入数据; target列是List[int], - 是转换为index的target。返回的DataSet中被设置为input有chars, target, seq_len; 设置为target有target。 - - :param ~fastNLP.DataBundle data_bundle: 传入的DataBundle中的DataSet必须包含raw_words和ner两个field,且两个field的内容均为List[str]。在传入DataBundle基础上原位修改。 - :return: DataBundle - """ - # 转换tag - for name, dataset in data_bundle.datasets.items(): - dataset.apply_field(self.convert_tag, field_name=Const.TARGET, new_field_name=Const.TARGET) - - _add_chars_field(data_bundle, lower=False) - - input_field_names = [Const.CHAR_INPUT] - if self.bigrams: - for name, dataset in data_bundle.datasets.items(): - dataset.apply_field(lambda chars: [c1 + c2 for c1, c2 in zip(chars, chars[1:] + [''])], - field_name=Const.CHAR_INPUT, new_field_name='bigrams') - input_field_names.append('bigrams') - if self.trigrams: - for name, dataset in data_bundle.datasets.items(): - dataset.apply_field(lambda chars: [c1 + c2 + c3 for c1, c2, c3 in - zip(chars, chars[1:] + [''], chars[2:] + [''] * 2)], - field_name=Const.CHAR_INPUT, new_field_name='trigrams') - input_field_names.append('trigrams') - - # index - _indexize(data_bundle, input_field_names, Const.TARGET) - - input_fields = [Const.TARGET, Const.INPUT_LEN] + input_field_names - target_fields = [Const.TARGET, Const.INPUT_LEN] - - for name, dataset in data_bundle.datasets.items(): - dataset.add_seq_len(Const.CHAR_INPUT) - - data_bundle.set_input(*input_fields) - data_bundle.set_target(*target_fields) - - return data_bundle - - -class MsraNERPipe(_CNNERPipe): - r""" - 处理MSRA-NER的数据,处理之后的DataSet的field情况为 - - .. csv-table:: - :header: "raw_chars", "target", "chars", "seq_len" - - "[相, 比, 之, 下,...]", "[0, 0, 0, 0, ...]", "[2, 3, 4, 5, ...]", 11 - "[青, 岛, 海, 牛, 队, 和, ...]", "[1, 2, 3, ...]", "[10, 21, ....]", 21 - "[...]", "[...]", "[...]", . - - raw_chars列为List[str], 是未转换的原始数据; chars列为List[int],是转换为index的输入数据; target列是List[int],是转换为index的 - target。返回的DataSet中被设置为input有chars, target, seq_len; 设置为target有target。 - - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_chars | target | chars | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | True | True | True | - | is_target | False | True | False | True | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - """ - - def process_from_file(self, paths=None) -> DataBundle: - data_bundle = MsraNERLoader().load(paths) - return self.process(data_bundle) - - -class PeopleDailyPipe(_CNNERPipe): - r""" - 处理people daily的ner的数据,处理之后的DataSet的field情况为 - - .. csv-table:: - :header: "raw_chars", "target", "chars", "seq_len" - - "[相, 比, 之, 下,...]", "[0, 0, 0, 0, ...]", "[2, 3, 4, 5, ...]", 11 - "[青, 岛, 海, 牛, 队, 和, ...]", "[1, 2, 3, ...]", "[10, 21, ....]", 21 - "[...]", "[...]", "[...]", . 
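Hypothetical usage of the Chinese NER pipes above; with paths=None the call falls through to MsraNERLoader().load(None), which in fastNLP's loaders normally downloads and caches the dataset::

    from fastNLP.io.pipe.conll import MsraNERPipe

    pipe = MsraNERPipe(encoding_type='bioes', bigrams=True)
    data_bundle = pipe.process_from_file()
    print(data_bundle.get_dataset('train'))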
- - raw_chars列为List[str], 是未转换的原始数据; chars列为List[int],是转换为index的输入数据; target列是List[int],是转换为index的 - target。返回的DataSet中被设置为input有chars, target, seq_len; 设置为target有target。 - - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_chars | target | chars | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | True | True | True | - | is_target | False | True | False | True | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - """ - - def process_from_file(self, paths=None) -> DataBundle: - data_bundle = PeopleDailyNERLoader().load(paths) - return self.process(data_bundle) - - -class WeiboNERPipe(_CNNERPipe): - r""" - 处理weibo的ner的数据,处理之后的DataSet的field情况为 - - .. csv-table:: - :header: "raw_chars", "chars", "target", "seq_len" - - "['老', '百', '姓']", "[4, 3, 3]", "[38, 39, 40]", 3 - "['心']", "[0]", "[41]", 1 - "[...]", "[...]", "[...]", . - - raw_chars列为List[str], 是未转换的原始数据; chars列为List[int],是转换为index的输入数据; target列是List[int],是转换为index的 - target。返回的DataSet中被设置为input有chars, target, seq_len; 设置为target有target。 - - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_chars | target | chars | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | True | True | True | - | is_target | False | True | False | True | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - """ - - def process_from_file(self, paths=None) -> DataBundle: - data_bundle = WeiboNERLoader().load(paths) - return self.process(data_bundle) diff --git a/fastNLP/io/pipe/construct_graph.py b/fastNLP/io/pipe/construct_graph.py deleted file mode 100644 index d597da9d..00000000 --- a/fastNLP/io/pipe/construct_graph.py +++ /dev/null @@ -1,268 +0,0 @@ - -__all__ =[ - 'MRPmiGraphPipe', - 'R8PmiGraphPipe', - 'R52PmiGraphPipe', - 'OhsumedPmiGraphPipe', - 'NG20PmiGraphPipe' -] -try: - import networkx as nx - from sklearn.feature_extraction.text import CountVectorizer - from sklearn.feature_extraction.text import TfidfTransformer - from sklearn.pipeline import Pipeline -except: - pass -from collections import defaultdict -import itertools -import math -from tqdm import tqdm -import numpy as np - -from ..data_bundle import DataBundle -from ...core.const import Const -from ..loader.classification import MRLoader, OhsumedLoader, R52Loader, R8Loader, NG20Loader - - -def _get_windows(content_lst: list, window_size:int): - r""" - 滑动窗口处理文本,获取词频和共现词语的词频 - :param content_lst: - :param window_size: - :return: 词频,共现词频,窗口化后文本段的数量 - """ - word_window_freq = defaultdict(int) # w(i) 单词在窗口单位内出现的次数 - word_pair_count = defaultdict(int) # w(i, j) - windows_len = 0 - for words in tqdm(content_lst, desc="Split by window"): - windows = list() - - if isinstance(words, str): - words = words.split() - length = len(words) - - if length <= window_size: - windows.append(words) - else: - for j in range(length - window_size + 1): - window = words[j: j + window_size] - windows.append(list(set(window))) - - for window in windows: - for word in window: - word_window_freq[word] += 1 - - for word_pair in itertools.combinations(window, 2): - word_pair_count[word_pair] += 1 - - windows_len += len(windows) - return word_window_freq, word_pair_count, windows_len - -def 
_cal_pmi(W_ij, W, word_freq_i, word_freq_j): - r""" - params: w_ij:为词语i,j的共现词频 - w:文本数量 - word_freq_i: 词语i的词频 - word_freq_j: 词语j的词频 - return: 词语i,j的tfidf值 - """ - p_i = word_freq_i / W - p_j = word_freq_j / W - p_i_j = W_ij / W - pmi = math.log(p_i_j / (p_i * p_j)) - - return pmi - -def _count_pmi(windows_len, word_pair_count, word_window_freq, threshold): - r""" - params: windows_len: 文本段数量 - word_pair_count: 词共现频率字典 - word_window_freq: 词频率字典 - threshold: 阈值 - return 词语pmi的list列表,其中元素为[word1, word2, pmi] - """ - word_pmi_lst = list() - for word_pair, W_i_j in tqdm(word_pair_count.items(), desc="Calculate pmi between words"): - word_freq_1 = word_window_freq[word_pair[0]] - word_freq_2 = word_window_freq[word_pair[1]] - - pmi = _cal_pmi(W_i_j, windows_len, word_freq_1, word_freq_2) - if pmi <= threshold: - continue - word_pmi_lst.append([word_pair[0], word_pair[1], pmi]) - return word_pmi_lst - -class GraphBuilderBase: - def __init__(self, graph_type='pmi', widow_size=10, threshold=0.): - self.graph = nx.Graph() - self.word2id = dict() - self.graph_type = graph_type - self.window_size = widow_size - self.doc_node_num = 0 - self.tr_doc_index = None - self.te_doc_index = None - self.dev_doc_index = None - self.doc = None - self.threshold = threshold - - def _get_doc_edge(self, data_bundle: DataBundle): - r''' - 对输入的DataBundle进行处理,然后生成文档-单词的tfidf值 - :param: data_bundle中的文本若为英文,形式为[ 'This is the first document.'],若为中文则为['他 喜欢 吃 苹果'] - : return 返回带有具有tfidf边文档-单词稀疏矩阵 - ''' - tr_doc = list(data_bundle.get_dataset("train").get_field(Const.RAW_WORD)) - val_doc = list(data_bundle.get_dataset("dev").get_field(Const.RAW_WORD)) - te_doc = list(data_bundle.get_dataset("test").get_field(Const.RAW_WORD)) - doc = tr_doc + val_doc + te_doc - self.doc = doc - self.tr_doc_index = [ind for ind in range(len(tr_doc))] - self.dev_doc_index = [ind+len(tr_doc) for ind in range(len(val_doc))] - self.te_doc_index = [ind+len(tr_doc)+len(val_doc) for ind in range(len(te_doc))] - text_tfidf = Pipeline([('count', CountVectorizer(token_pattern=r'\S+', min_df=1, max_df=1.0)), - ('tfidf', TfidfTransformer(norm=None, use_idf=True, smooth_idf=False, sublinear_tf=False))]) - - tfidf_vec = text_tfidf.fit_transform(doc) - self.doc_node_num = tfidf_vec.shape[0] - vocab_lst = text_tfidf['count'].get_feature_names() - for ind, word in enumerate(vocab_lst): - self.word2id[word] = ind - for ind, row in enumerate(tfidf_vec): - for col_index, value in zip(row.indices, row.data): - self.graph.add_edge(ind, self.doc_node_num+col_index, weight=value) - return nx.to_scipy_sparse_matrix(self.graph) - - def _get_word_edge(self): - word_window_freq, word_pair_count, windows_len = _get_windows(self.doc, self.window_size) - pmi_edge_lst = _count_pmi(windows_len, word_pair_count, word_window_freq, self.threshold) - for edge_item in pmi_edge_lst: - word_indx1 = self.doc_node_num + self.word2id[edge_item[0]] - word_indx2 = self.doc_node_num + self.word2id[edge_item[1]] - if word_indx1 == word_indx2: - continue - self.graph.add_edge(word_indx1, word_indx2, weight=edge_item[2]) - - def build_graph(self, data_bundle: DataBundle): - r""" - 对输入的DataBundle进行处理,然后返回该scipy_sparse_matrix类型的邻接矩阵。 - - :param ~fastNLP.DataBundle data_bundle: 需要处理的DataBundle对象 - :return: - """ - raise NotImplementedError - - def build_graph_from_file(self, path: str): - r""" - 传入文件路径,生成处理好的scipy_sparse_matrix对象。paths支持的路径形式可以参考 ::meth:`fastNLP.io.Loader.load()` - - :param paths: - :return: scipy_sparse_matrix - """ - raise NotImplementedError - - -class 
MRPmiGraphPipe(GraphBuilderBase): - - def __init__(self, graph_type='pmi', widow_size=10, threshold=0.): - super().__init__(graph_type=graph_type, widow_size=widow_size, threshold=threshold) - - def build_graph(self, data_bundle: DataBundle): - r''' - params: ~fastNLP.DataBundle data_bundle: 需要处理的DataBundle对象. - return 返回csr类型的稀疏矩阵图;训练集,验证集,测试集,在图中的index. - ''' - self._get_doc_edge(data_bundle) - self._get_word_edge() - return nx.to_scipy_sparse_matrix(self.graph, - nodelist=list(range(self.graph.number_of_nodes())), - weight='weight', dtype=np.float32, format='csr'), (self.tr_doc_index, self.dev_doc_index, self.te_doc_index) - - def build_graph_from_file(self, path: str): - data_bundle = MRLoader().load(path) - return self.build_graph(data_bundle) - -class R8PmiGraphPipe(GraphBuilderBase): - - def __init__(self, graph_type='pmi', widow_size=10, threshold=0.): - super().__init__(graph_type=graph_type, widow_size=widow_size, threshold=threshold) - - def build_graph(self, data_bundle: DataBundle): - r''' - params: ~fastNLP.DataBundle data_bundle: 需要处理的DataBundle对象. - return 返回csr类型的稀疏矩阵图;训练集,验证集,测试集,在图中的index. - ''' - self._get_doc_edge(data_bundle) - self._get_word_edge() - return nx.to_scipy_sparse_matrix(self.graph, - nodelist=list(range(self.graph.number_of_nodes())), - weight='weight', dtype=np.float32, format='csr'), (self.tr_doc_index, self.dev_doc_index, self.te_doc_index) - - def build_graph_from_file(self, path: str): - data_bundle = R8Loader().load(path) - return self.build_graph(data_bundle) - -class R52PmiGraphPipe(GraphBuilderBase): - - def __init__(self, graph_type='pmi', widow_size=10, threshold=0.): - super().__init__(graph_type=graph_type, widow_size=widow_size, threshold=threshold) - - def build_graph(self, data_bundle: DataBundle): - r''' - params: ~fastNLP.DataBundle data_bundle: 需要处理的DataBundle对象. - return 返回csr类型的稀疏矩阵;训练集,验证集,测试集,在图中的index. - ''' - self._get_doc_edge(data_bundle) - self._get_word_edge() - return nx.to_scipy_sparse_matrix(self.graph, - nodelist=list(range(self.graph.number_of_nodes())), - weight='weight', dtype=np.float32, format='csr'), (self.tr_doc_index, self.dev_doc_index, self.te_doc_index) - - def build_graph_from_file(self, path: str): - data_bundle = R52Loader().load(path) - return self.build_graph(data_bundle) - -class OhsumedPmiGraphPipe(GraphBuilderBase): - - def __init__(self, graph_type='pmi', widow_size=10, threshold=0.): - super().__init__(graph_type=graph_type, widow_size=widow_size, threshold=threshold) - - def build_graph(self, data_bundle: DataBundle): - r''' - params: ~fastNLP.DataBundle data_bundle: 需要处理的DataBundle对象. - return 返回csr类型的稀疏矩阵图;训练集,验证集,测试集,在图中的index. - ''' - self._get_doc_edge(data_bundle) - self._get_word_edge() - return nx.to_scipy_sparse_matrix(self.graph, - nodelist=list(range(self.graph.number_of_nodes())), - weight='weight', dtype=np.float32, format='csr'), (self.tr_doc_index, self.dev_doc_index, self.te_doc_index) - - def build_graph_from_file(self, path: str): - data_bundle = OhsumedLoader().load(path) - return self.build_graph(data_bundle) - - -class NG20PmiGraphPipe(GraphBuilderBase): - - def __init__(self, graph_type='pmi', widow_size=10, threshold=0.): - super().__init__(graph_type=graph_type, widow_size=widow_size, threshold=threshold) - - def build_graph(self, data_bundle: DataBundle): - r''' - params: ~fastNLP.DataBundle data_bundle: 需要处理的DataBundle对象. - return 返回csr类型的稀疏矩阵图;训练集,验证集,测试集,在图中的index. 
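A worked instance of the PMI computed by _cal_pmi above (the counts are made up for illustration)::

    import math

    W = 100              # total number of windows
    W_ij = 5             # windows containing both words
    freq_i, freq_j = 20, 10

    pmi = math.log((W_ij / W) / ((freq_i / W) * (freq_j / W)))
    print(round(pmi, 3))  # 0.916 -- the edge is kept only if pmi exceeds the threshold (default 0.)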
- ''' - self._get_doc_edge(data_bundle) - self._get_word_edge() - return nx.to_scipy_sparse_matrix(self.graph, - nodelist=list(range(self.graph.number_of_nodes())), - weight='weight', dtype=np.float32, format='csr'), ( - self.tr_doc_index, self.dev_doc_index, self.te_doc_index) - - def build_graph_from_file(self, path: str): - r''' - param: path->数据集的路径. - return: 返回csr类型的稀疏矩阵图;训练集,验证集,测试集,在图中的index. - ''' - data_bundle = NG20Loader().load(path) - return self.build_graph(data_bundle) diff --git a/fastNLP/io/pipe/coreference.py b/fastNLP/io/pipe/coreference.py deleted file mode 100644 index 147b656d..00000000 --- a/fastNLP/io/pipe/coreference.py +++ /dev/null @@ -1,183 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "CoReferencePipe" -] - -import collections - -import numpy as np - -from fastNLP.core.vocabulary import Vocabulary -from .pipe import Pipe -from ..data_bundle import DataBundle -from ..loader.coreference import CoReferenceLoader -from ...core.const import Const - - -class CoReferencePipe(Pipe): - r""" - 对Coreference resolution问题进行处理,得到文章种类/说话者/字符级信息/序列长度。 - - 处理完成后数据包含文章类别、speaker信息、句子信息、句子对应的index、char、句子长度、target: - - .. csv-table:: - :header: "words1", "words2","words3","words4","chars","seq_len","target" - - "bc", "[[0,0],[1,1]]","[['I','am'],[]]","[[1,2],[]]","[[[1],[2,3]],[]]","[2,3]","[[[2,3],[6,7]],[[10,12],[20,22]]]" - "[...]", "[...]","[...]","[...]","[...]","[...]","[...]" - - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+--------+-------+---------+ - | field_names | raw_chars | target | chars | seq_len | - +-------------+-----------+--------+-------+---------+ - | is_input | False | True | True | True | - | is_target | False | True | False | True | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+--------+-------+---------+ - - """ - - def __init__(self, config): - super().__init__() - self.config = config - - def process(self, data_bundle: DataBundle): - r""" - 对load进来的数据进一步处理原始数据包含:raw_key,raw_speaker,raw_words,raw_clusters - - .. 
csv-table:: - :header: "raw_key", "raw_speaker","raw_words","raw_clusters" - - "bc/cctv/00/cctv_0000_0", "[[Speaker#1, Speaker#1],[]]","[['I','am'],[]]","[[[2,3],[6,7]],[[10,12],[20,22]]]" - "bc/cctv/00/cctv_0000_1", "[['Speaker#1', 'peaker#1'],[]]","[['He','is'],[]]","[[[2,3],[6,7]],[[10,12],[20,22]]]" - "[...]", "[...]","[...]","[...]" - - - :param data_bundle: - :return: - """ - genres = {g: i for i, g in enumerate(["bc", "bn", "mz", "nw", "pt", "tc", "wb"])} - vocab = Vocabulary().from_dataset(*data_bundle.datasets.values(), field_name= Const.RAW_WORDS(3)) - vocab.build_vocab() - word2id = vocab.word2idx - data_bundle.set_vocab(vocab, Const.INPUTS(0)) - if self.config.char_path: - char_dict = get_char_dict(self.config.char_path) - else: - char_set = set() - for i,w in enumerate(word2id): - if i < 2: - continue - for c in w: - char_set.add(c) - - char_dict = collections.defaultdict(int) - char_dict.update({c: i for i, c in enumerate(char_set)}) - - for name, ds in data_bundle.datasets.items(): - # genre - ds.apply(lambda x: genres[x[Const.RAW_WORDS(0)][:2]], new_field_name=Const.INPUTS(0)) - - # speaker_ids_np - ds.apply(lambda x: speaker2numpy(x[Const.RAW_WORDS(1)], self.config.max_sentences, is_train=name == 'train'), - new_field_name=Const.INPUTS(1)) - - # sentences - ds.rename_field(Const.RAW_WORDS(3),Const.INPUTS(2)) - - # doc_np - ds.apply(lambda x: doc2numpy(x[Const.INPUTS(2)], word2id, char_dict, max(self.config.filter), - self.config.max_sentences, is_train=name == 'train')[0], - new_field_name=Const.INPUTS(3)) - # char_index - ds.apply(lambda x: doc2numpy(x[Const.INPUTS(2)], word2id, char_dict, max(self.config.filter), - self.config.max_sentences, is_train=name == 'train')[1], - new_field_name=Const.CHAR_INPUT) - # seq len - ds.apply(lambda x: doc2numpy(x[Const.INPUTS(2)], word2id, char_dict, max(self.config.filter), - self.config.max_sentences, is_train=name == 'train')[2], - new_field_name=Const.INPUT_LEN) - - # clusters - ds.rename_field(Const.RAW_WORDS(2), Const.TARGET) - - ds.set_ignore_type(Const.TARGET) - ds.set_padder(Const.TARGET, None) - ds.set_input(Const.INPUTS(0), Const.INPUTS(1), Const.INPUTS(2), Const.INPUTS(3), Const.CHAR_INPUT, Const.INPUT_LEN) - ds.set_target(Const.TARGET) - - return data_bundle - - def process_from_file(self, paths): - bundle = CoReferenceLoader().load(paths) - return self.process(bundle) - - -# helper - -def doc2numpy(doc, word2id, chardict, max_filter, max_sentences, is_train): - docvec, char_index, length, max_len = _doc2vec(doc, word2id, chardict, max_filter, max_sentences, is_train) - assert max(length) == max_len - assert char_index.shape[0] == len(length) - assert char_index.shape[1] == max_len - doc_np = np.zeros((len(docvec), max_len), int) - for i in range(len(docvec)): - for j in range(len(docvec[i])): - doc_np[i][j] = docvec[i][j] - return doc_np, char_index, length - -def _doc2vec(doc,word2id,char_dict,max_filter,max_sentences,is_train): - max_len = 0 - max_word_length = 0 - docvex = [] - length = [] - if is_train: - sent_num = min(max_sentences,len(doc)) - else: - sent_num = len(doc) - - for i in range(sent_num): - sent = doc[i] - length.append(len(sent)) - if (len(sent) > max_len): - max_len = len(sent) - sent_vec =[] - for j,word in enumerate(sent): - if len(word)>max_word_length: - max_word_length = len(word) - if word in word2id: - sent_vec.append(word2id[word]) - else: - sent_vec.append(word2id["UNK"]) - docvex.append(sent_vec) - - char_index = np.zeros((sent_num, max_len, max_word_length),dtype=int) - for i in 
range(sent_num): - sent = doc[i] - for j,word in enumerate(sent): - char_index[i, j, :len(word)] = [char_dict[c] for c in word] - - return docvex,char_index,length,max_len - -def speaker2numpy(speakers_raw,max_sentences,is_train): - if is_train and len(speakers_raw)> max_sentences: - speakers_raw = speakers_raw[0:max_sentences] - speakers = flatten(speakers_raw) - speaker_dict = {s: i for i, s in enumerate(set(speakers))} - speaker_ids = np.array([speaker_dict[s] for s in speakers]) - return speaker_ids - -# 展平 -def flatten(l): - return [item for sublist in l for item in sublist] - -def get_char_dict(path): - vocab = [""] - with open(path) as f: - vocab.extend(c.strip() for c in f.readlines()) - char_dict = collections.defaultdict(int) - char_dict.update({c: i for i, c in enumerate(vocab)}) - return char_dict \ No newline at end of file diff --git a/fastNLP/io/pipe/cws.py b/fastNLP/io/pipe/cws.py deleted file mode 100644 index 3849a34b..00000000 --- a/fastNLP/io/pipe/cws.py +++ /dev/null @@ -1,283 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "CWSPipe" -] - -import re -from itertools import chain - -from .pipe import Pipe -from .utils import _indexize -from .. import DataBundle -from ..loader import CWSLoader -from ...core.const import Const - - -def _word_lens_to_bmes(word_lens): - r""" - - :param list word_lens: List[int], 每个词语的长度 - :return: List[str], BMES的序列 - """ - tags = [] - for word_len in word_lens: - if word_len == 1: - tags.append('S') - else: - tags.append('B') - tags.extend(['M'] * (word_len - 2)) - tags.append('E') - return tags - - -def _word_lens_to_segapp(word_lens): - r""" - - :param list word_lens: List[int], 每个词语的长度 - :return: List[str], BMES的序列 - """ - tags = [] - for word_len in word_lens: - if word_len == 1: - tags.append('SEG') - else: - tags.extend(['APP'] * (word_len - 1)) - tags.append('SEG') - return tags - - -def _alpha_span_to_special_tag(span): - r""" - 将span替换成特殊的字符 - - :param str span: - :return: - """ - if 'oo' == span.lower(): # speical case when represent 2OO8 - return span - if len(span) == 1: - return span - else: - return '' - - -def _find_and_replace_alpha_spans(line): - r""" - 传入原始句子,替换其中的字母为特殊标记 - - :param str line:原始数据 - :return: str - """ - new_line = '' - pattern = '[a-zA-Z]+(?=[\u4e00-\u9fff ,%,.。!<-“])' - prev_end = 0 - for match in re.finditer(pattern, line): - start, end = match.span() - span = line[start:end] - new_line += line[prev_end:start] + _alpha_span_to_special_tag(span) - prev_end = end - new_line += line[prev_end:] - return new_line - - -def _digit_span_to_special_tag(span): - r""" - - :param str span: 需要替换的str - :return: - """ - if span[0] == '0' and len(span) > 2: - return '' - decimal_point_count = 0 # one might have more than one decimal pointers - for idx, char in enumerate(span): - if char == '.' or char == '﹒' or char == '·': - decimal_point_count += 1 - if span[-1] == '.' or span[-1] == '﹒' or span[ - -1] == '·': # last digit being decimal point means this is not a number - if decimal_point_count == 1: - return span - else: - return '' - if decimal_point_count == 1: - return '' - elif decimal_point_count > 1: - return '' - else: - return '' - - -def _find_and_replace_digit_spans(line): - r""" - only consider words start with number, contains '.', characters. - - If ends with space, will be processed - - If ends with Chinese character, will be processed - - If ends with or contains english char, not handled. 
- - floats are replaced by - - otherwise unkdgt - """ - new_line = '' - pattern = r'\d[\d\\.﹒·]*(?=[\u4e00-\u9fff ,%%,。!<-“])' - prev_end = 0 - for match in re.finditer(pattern, line): - start, end = match.span() - span = line[start:end] - new_line += line[prev_end:start] + _digit_span_to_special_tag(span) - prev_end = end - new_line += line[prev_end:] - return new_line - - -class CWSPipe(Pipe): - r""" - 对CWS数据进行预处理, 处理之后的数据,具备以下的结构 - - .. csv-table:: - :header: "raw_words", "chars", "target", "seq_len" - - "共同 创造 美好...", "[2, 3, 4...]", "[0, 2, 0, 2,...]", 13 - "2001年 新年 钟声...", "[8, 9, 9, 7, ...]", "[0, 1, 1, 1, 2...]", 20 - "...", "[...]","[...]", . - - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+-----------+-------+--------+---------+ - | field_names | raw_words | chars | target | seq_len | - +-------------+-----------+-------+--------+---------+ - | is_input | False | True | True | True | - | is_target | False | False | True | True | - | ignore_type | | False | False | False | - | pad_value | | 0 | 0 | 0 | - +-------------+-----------+-------+--------+---------+ - - """ - - def __init__(self, dataset_name=None, encoding_type='bmes', replace_num_alpha=True, bigrams=False, trigrams=False): - r""" - - :param str,None dataset_name: 支持'pku', 'msra', 'cityu', 'as', None - :param str encoding_type: 可以选择'bmes', 'segapp'两种。"我 来自 复旦大学...", bmes的tag为[S, B, E, B, M, M, E...]; segapp - 的tag为[seg, app, seg, app, app, app, seg, ...] - :param bool replace_num_alpha: 是否将数字和字母用特殊字符替换。 - :param bool bigrams: 是否增加一列bigram. bigram的构成是['复', '旦', '大', '学', ...]->["复旦", "旦大", ...] - :param bool trigrams: 是否增加一列trigram. trigram的构成是 ['复', '旦', '大', '学', ...]->["复旦大", "旦大学", ...] - """ - if encoding_type == 'bmes': - self.word_lens_to_tags = _word_lens_to_bmes - else: - self.word_lens_to_tags = _word_lens_to_segapp - - self.dataset_name = dataset_name - self.bigrams = bigrams - self.trigrams = trigrams - self.replace_num_alpha = replace_num_alpha - - def _tokenize(self, data_bundle): - r""" - 将data_bundle中的'chars'列切分成一个一个的word. - 例如输入是"共同 创造 美好.."->[[共, 同], [创, 造], [...], ] - - :param data_bundle: - :return: - """ - def split_word_into_chars(raw_chars): - words = raw_chars.split() - chars = [] - for word in words: - char = [] - subchar = [] - for c in word: - if c == '<': - if subchar: - char.extend(subchar) - subchar = [] - subchar.append(c) - continue - if c == '>' and len(subchar)>0 and subchar[0] == '<': - subchar.append(c) - char.append(''.join(subchar)) - subchar = [] - continue - if subchar: - subchar.append(c) - else: - char.append(c) - char.extend(subchar) - chars.append(char) - return chars - - for name, dataset in data_bundle.datasets.items(): - dataset.apply_field(split_word_into_chars, field_name=Const.CHAR_INPUT, - new_field_name=Const.CHAR_INPUT) - return data_bundle - - def process(self, data_bundle: DataBundle) -> DataBundle: - r""" - 可以处理的DataSet需要包含raw_words列 - - .. csv-table:: - :header: "raw_words" - - "上海 浦东 开发 与 法制 建设 同步" - "新华社 上海 二月 十日 电 ( 记者 谢金虎 、 张持坚 )" - "..." 
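The target column above is produced by the word_lens_to_tags function chosen in __init__; both tagging schemes are pure functions of the word lengths::

    # word lengths of 共同 / 创造 / 美好 are [2, 2, 2]
    _word_lens_to_bmes([2, 2, 2])    # ['B', 'E', 'B', 'E', 'B', 'E']
    _word_lens_to_segapp([1, 2])     # ['SEG', 'APP', 'SEG']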
- - :param data_bundle: - :return: - """ - data_bundle.copy_field(Const.RAW_WORD, Const.CHAR_INPUT) - - if self.replace_num_alpha: - data_bundle.apply_field(_find_and_replace_alpha_spans, Const.CHAR_INPUT, Const.CHAR_INPUT) - data_bundle.apply_field(_find_and_replace_digit_spans, Const.CHAR_INPUT, Const.CHAR_INPUT) - - self._tokenize(data_bundle) - - for name, dataset in data_bundle.datasets.items(): - dataset.apply_field(lambda chars: self.word_lens_to_tags(map(len, chars)), field_name=Const.CHAR_INPUT, - new_field_name=Const.TARGET) - dataset.apply_field(lambda chars: list(chain(*chars)), field_name=Const.CHAR_INPUT, - new_field_name=Const.CHAR_INPUT) - input_field_names = [Const.CHAR_INPUT] - if self.bigrams: - for name, dataset in data_bundle.datasets.items(): - dataset.apply_field(lambda chars: [c1 + c2 for c1, c2 in zip(chars, chars[1:] + [''])], - field_name=Const.CHAR_INPUT, new_field_name='bigrams') - input_field_names.append('bigrams') - if self.trigrams: - for name, dataset in data_bundle.datasets.items(): - dataset.apply_field(lambda chars: [c1 + c2 + c3 for c1, c2, c3 in - zip(chars, chars[1:] + [''], chars[2:] + [''] * 2)], - field_name=Const.CHAR_INPUT, new_field_name='trigrams') - input_field_names.append('trigrams') - - _indexize(data_bundle, input_field_names, Const.TARGET) - - input_fields = [Const.TARGET, Const.INPUT_LEN] + input_field_names - target_fields = [Const.TARGET, Const.INPUT_LEN] - for name, dataset in data_bundle.datasets.items(): - dataset.add_seq_len(Const.CHAR_INPUT) - - data_bundle.set_input(*input_fields) - data_bundle.set_target(*target_fields) - - return data_bundle - - def process_from_file(self, paths=None) -> DataBundle: - r""" - - :param str paths: - :return: - """ - if self.dataset_name is None and paths is None: - raise RuntimeError( - "You have to set `paths` when calling process_from_file() or `dataset_name `when initialization.") - if self.dataset_name is not None and paths is not None: - raise RuntimeError("You cannot specify `paths` and `dataset_name` simultaneously") - data_bundle = CWSLoader(self.dataset_name).load(paths) - return self.process(data_bundle) diff --git a/fastNLP/io/pipe/matching.py b/fastNLP/io/pipe/matching.py deleted file mode 100644 index e43255b4..00000000 --- a/fastNLP/io/pipe/matching.py +++ /dev/null @@ -1,545 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "MatchingBertPipe", - "RTEBertPipe", - "SNLIBertPipe", - "QuoraBertPipe", - "QNLIBertPipe", - "MNLIBertPipe", - "CNXNLIBertPipe", - "BQCorpusBertPipe", - "LCQMCBertPipe", - "MatchingPipe", - "RTEPipe", - "SNLIPipe", - "QuoraPipe", - "QNLIPipe", - "MNLIPipe", - "LCQMCPipe", - "CNXNLIPipe", - "BQCorpusPipe", - "RenamePipe", - "GranularizePipe", - "MachingTruncatePipe", -] - -import warnings - -from .pipe import Pipe -from .utils import get_tokenizer -from ..data_bundle import DataBundle -from ..loader.matching import SNLILoader, MNLILoader, QNLILoader, RTELoader, QuoraLoader, BQCorpusLoader, CNXNLILoader, \ - LCQMCLoader -from ...core._logger import logger -from ...core.const import Const -from ...core.vocabulary import Vocabulary - - -class MatchingBertPipe(Pipe): - r""" - Matching任务的Bert pipe,输出的DataSet将包含以下的field - - .. csv-table:: - :header: "raw_words1", "raw_words2", "target", "words", "seq_len" - - "The new rights are...", "Everyone really likes..", 1, "[2, 3, 4, 5, ...]", 10 - "This site includes a...", "The Government Executive...", 0, "[11, 12, 13,...]", 5 - "...", "...", ., "[...]", . 
- - words列是将raw_words1(即premise), raw_words2(即hypothesis)使用"[SEP]"链接起来转换为index的。 - words列被设置为input,target列被设置为target和input(设置为input以方便在forward函数中计算loss, - 如果不在forward函数中计算loss也不影响,fastNLP将根据forward函数的形参名进行传参). - - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+------------+------------+--------+-------+---------+ - | field_names | raw_words1 | raw_words2 | target | words | seq_len | - +-------------+------------+------------+--------+-------+---------+ - | is_input | False | False | False | True | True | - | is_target | False | False | True | False | False | - | ignore_type | | | False | False | False | - | pad_value | | | 0 | 0 | 0 | - +-------------+------------+------------+--------+-------+---------+ - - """ - - def __init__(self, lower=False, tokenizer: str = 'raw'): - r""" - - :param bool lower: 是否将word小写化。 - :param str tokenizer: 使用什么tokenizer来将句子切分为words. 支持spacy, raw两种。raw即使用空格拆分。 - """ - super().__init__() - - self.lower = bool(lower) - self.tokenizer = get_tokenizer(tokenize_method=tokenizer) - - def _tokenize(self, data_bundle, field_names, new_field_names): - r""" - - :param DataBundle data_bundle: DataBundle. - :param list field_names: List[str], 需要tokenize的field名称 - :param list new_field_names: List[str], tokenize之后field的名称,与field_names一一对应。 - :return: 输入的DataBundle对象 - """ - for name, dataset in data_bundle.datasets.items(): - for field_name, new_field_name in zip(field_names, new_field_names): - dataset.apply_field(lambda words: self.tokenizer(words), field_name=field_name, - new_field_name=new_field_name) - return data_bundle - - def process(self, data_bundle): - r""" - 输入的data_bundle中的dataset需要具有以下结构: - - .. csv-table:: - :header: "raw_words1", "raw_words2", "target" - - "Dana Reeve, the widow of the actor...", "Christopher Reeve had an...", "not_entailment" - "...","..." 
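The concat helper defined just below is all the BERT-style pairing amounts to: the two tokenized sentences are joined around a literal [SEP] token before indexing, so '[SEP]' ends up as an ordinary entry in the word vocabulary::

    words0 = ['The', 'new', 'rights', 'are', 'nice']
    words1 = ['Everyone', 'really', 'likes', 'them']
    print(words0 + ['[SEP]'] + words1)
    # ['The', 'new', 'rights', 'are', 'nice', '[SEP]', 'Everyone', 'really', 'likes', 'them']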
- - :param data_bundle: - :return: - """ - for dataset in data_bundle.datasets.values(): - if dataset.has_field(Const.TARGET): - dataset.drop(lambda x: x[Const.TARGET] == '-') - - for name, dataset in data_bundle.datasets.items(): - dataset.copy_field(Const.RAW_WORDS(0), Const.INPUTS(0), ) - dataset.copy_field(Const.RAW_WORDS(1), Const.INPUTS(1), ) - - if self.lower: - for name, dataset in data_bundle.datasets.items(): - dataset[Const.INPUTS(0)].lower() - dataset[Const.INPUTS(1)].lower() - - data_bundle = self._tokenize(data_bundle, [Const.INPUTS(0), Const.INPUTS(1)], - [Const.INPUTS(0), Const.INPUTS(1)]) - - # concat两个words - def concat(ins): - words0 = ins[Const.INPUTS(0)] - words1 = ins[Const.INPUTS(1)] - words = words0 + ['[SEP]'] + words1 - return words - - for name, dataset in data_bundle.datasets.items(): - dataset.apply(concat, new_field_name=Const.INPUT) - dataset.delete_field(Const.INPUTS(0)) - dataset.delete_field(Const.INPUTS(1)) - - word_vocab = Vocabulary() - word_vocab.from_dataset(*[dataset for name, dataset in data_bundle.datasets.items() if 'train' in name], - field_name=Const.INPUT, - no_create_entry_dataset=[dataset for name, dataset in data_bundle.datasets.items() if - 'train' not in name]) - word_vocab.index_dataset(*data_bundle.datasets.values(), field_name=Const.INPUT) - - target_vocab = Vocabulary(padding=None, unknown=None) - target_vocab.from_dataset(*[ds for name, ds in data_bundle.iter_datasets() if 'train' in name], - field_name=Const.TARGET, - no_create_entry_dataset=[ds for name, ds in data_bundle.iter_datasets() - if ('train' not in name) and (ds.has_field(Const.TARGET))] - ) - if len(target_vocab._no_create_word) > 0: - warn_msg = f"There are {len(target_vocab._no_create_word)} target labels" \ - f" in {[name for name in data_bundle.datasets.keys() if 'train' not in name]} " \ - f"data set but not in train data set!." - warnings.warn(warn_msg) - logger.warning(warn_msg) - - has_target_datasets = [dataset for name, dataset in data_bundle.datasets.items() if - dataset.has_field(Const.TARGET)] - target_vocab.index_dataset(*has_target_datasets, field_name=Const.TARGET) - - data_bundle.set_vocab(word_vocab, Const.INPUT) - data_bundle.set_vocab(target_vocab, Const.TARGET) - - input_fields = [Const.INPUT, Const.INPUT_LEN] - target_fields = [Const.TARGET] - - for name, dataset in data_bundle.datasets.items(): - dataset.add_seq_len(Const.INPUT) - dataset.set_input(*input_fields, flag=True) - for fields in target_fields: - if dataset.has_field(fields): - dataset.set_target(fields, flag=True) - - return data_bundle - - -class RTEBertPipe(MatchingBertPipe): - def process_from_file(self, paths=None): - data_bundle = RTELoader().load(paths) - return self.process(data_bundle) - - -class SNLIBertPipe(MatchingBertPipe): - def process_from_file(self, paths=None): - data_bundle = SNLILoader().load(paths) - return self.process(data_bundle) - - -class QuoraBertPipe(MatchingBertPipe): - def process_from_file(self, paths): - data_bundle = QuoraLoader().load(paths) - return self.process(data_bundle) - - -class QNLIBertPipe(MatchingBertPipe): - def process_from_file(self, paths=None): - data_bundle = QNLILoader().load(paths) - return self.process(data_bundle) - - -class MNLIBertPipe(MatchingBertPipe): - def process_from_file(self, paths=None): - data_bundle = MNLILoader().load(paths) - return self.process(data_bundle) - - -class MatchingPipe(Pipe): - r""" - Matching任务的Pipe。输出的DataSet将包含以下的field - - .. 
csv-table:: - :header: "raw_words1", "raw_words2", "target", "words1", "words2", "seq_len1", "seq_len2" - - "The new rights are...", "Everyone really likes..", 1, "[2, 3, 4, 5, ...]", "[10, 20, 6]", 10, 13 - "This site includes a...", "The Government Executive...", 0, "[11, 12, 13,...]", "[2, 7, ...]", 6, 7 - "...", "...", ., "[...]", "[...]", ., . - - words1是premise,words2是hypothesis。其中words1,words2,seq_len1,seq_len2被设置为input;target被设置为target - 和input(设置为input以方便在forward函数中计算loss,如果不在forward函数中计算loss也不影响,fastNLP将根据forward函数 - 的形参名进行传参)。 - - dataset的print_field_meta()函数输出的各个field的被设置成input和target的情况为:: - - +-------------+------------+------------+--------+--------+--------+----------+----------+ - | field_names | raw_words1 | raw_words2 | target | words1 | words2 | seq_len1 | seq_len2 | - +-------------+------------+------------+--------+--------+--------+----------+----------+ - | is_input | False | False | False | True | True | True | True | - | is_target | False | False | True | False | False | False | False | - | ignore_type | | | False | False | False | False | False | - | pad_value | | | 0 | 0 | 0 | 0 | 0 | - +-------------+------------+------------+--------+--------+--------+----------+----------+ - - """ - - def __init__(self, lower=False, tokenizer: str = 'raw'): - r""" - - :param bool lower: 是否将所有raw_words转为小写。 - :param str tokenizer: 将原始数据tokenize的方式。支持spacy, raw. spacy是使用spacy切分,raw就是用空格切分。 - """ - super().__init__() - - self.lower = bool(lower) - self.tokenizer = get_tokenizer(tokenize_method=tokenizer) - - def _tokenize(self, data_bundle, field_names, new_field_names): - r""" - - :param ~fastNLP.DataBundle data_bundle: DataBundle. - :param list field_names: List[str], 需要tokenize的field名称 - :param list new_field_names: List[str], tokenize之后field的名称,与field_names一一对应。 - :return: 输入的DataBundle对象 - """ - for name, dataset in data_bundle.datasets.items(): - for field_name, new_field_name in zip(field_names, new_field_names): - dataset.apply_field(lambda words: self.tokenizer(words), field_name=field_name, - new_field_name=new_field_name) - return data_bundle - - def process(self, data_bundle): - r""" - 接受的DataBundle中的DataSet应该具有以下的field, target列可以没有 - - .. csv-table:: - :header: "raw_words1", "raw_words2", "target" - - "The new rights are...", "Everyone really likes..", "entailment" - "This site includes a...", "The Government Executive...", "not_entailment" - "...", "..." 
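-
-        A minimal usage sketch (illustrative only; it assumes ``process_from_file`` can fetch the
-        SNLI data automatically when ``paths`` is ``None``)::
-
-            from fastNLP.io.pipe.matching import SNLIPipe
-
-            data_bundle = SNLIPipe(lower=True, tokenizer='raw').process_from_file()
-            print(data_bundle)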
-
-        :param ~fastNLP.DataBundle data_bundle: the data_bundle produced by a loader, containing the raw contents of the datasets
-        :return: data_bundle
-        """
-        data_bundle = self._tokenize(data_bundle, [Const.RAW_WORDS(0), Const.RAW_WORDS(1)],
-                                     [Const.INPUTS(0), Const.INPUTS(1)])
-
-        for dataset in data_bundle.datasets.values():
-            if dataset.has_field(Const.TARGET):
-                dataset.drop(lambda x: x[Const.TARGET] == '-')
-
-        if self.lower:
-            for name, dataset in data_bundle.datasets.items():
-                dataset[Const.INPUTS(0)].lower()
-                dataset[Const.INPUTS(1)].lower()
-
-        word_vocab = Vocabulary()
-        word_vocab.from_dataset(*[dataset for name, dataset in data_bundle.datasets.items() if 'train' in name],
-                                field_name=[Const.INPUTS(0), Const.INPUTS(1)],
-                                no_create_entry_dataset=[dataset for name, dataset in data_bundle.datasets.items() if
-                                                         'train' not in name])
-        word_vocab.index_dataset(*data_bundle.datasets.values(), field_name=[Const.INPUTS(0), Const.INPUTS(1)])
-
-        target_vocab = Vocabulary(padding=None, unknown=None)
-        target_vocab.from_dataset(*[ds for name, ds in data_bundle.iter_datasets() if 'train' in name],
-                                  field_name=Const.TARGET,
-                                  no_create_entry_dataset=[ds for name, ds in data_bundle.iter_datasets()
-                                                           if ('train' not in name) and (ds.has_field(Const.TARGET))]
-                                  )
-        if len(target_vocab._no_create_word) > 0:
-            warn_msg = f"There are {len(target_vocab._no_create_word)} target labels" \
-                       f" in {[name for name in data_bundle.datasets.keys() if 'train' not in name]} " \
-                       f"data sets but not in the train data set!"
-            warnings.warn(warn_msg)
-            logger.warning(warn_msg)
-
-        has_target_datasets = [dataset for name, dataset in data_bundle.datasets.items() if
-                               dataset.has_field(Const.TARGET)]
-        target_vocab.index_dataset(*has_target_datasets, field_name=Const.TARGET)
-
-        data_bundle.set_vocab(word_vocab, Const.INPUTS(0))
-        data_bundle.set_vocab(target_vocab, Const.TARGET)
-
-        input_fields = [Const.INPUTS(0), Const.INPUTS(1), Const.INPUT_LENS(0), Const.INPUT_LENS(1)]
-        target_fields = [Const.TARGET]
-
-        for name, dataset in data_bundle.datasets.items():
-            dataset.add_seq_len(Const.INPUTS(0), Const.INPUT_LENS(0))
-            dataset.add_seq_len(Const.INPUTS(1), Const.INPUT_LENS(1))
-            dataset.set_input(*input_fields, flag=True)
-            for fields in target_fields:
-                if dataset.has_field(fields):
-                    dataset.set_target(fields, flag=True)
-
-        return data_bundle
-
-
-class RTEPipe(MatchingPipe):
-    def process_from_file(self, paths=None):
-        data_bundle = RTELoader().load(paths)
-        return self.process(data_bundle)
-
-
-class SNLIPipe(MatchingPipe):
-    def process_from_file(self, paths=None):
-        data_bundle = SNLILoader().load(paths)
-        return self.process(data_bundle)
-
-
-class QuoraPipe(MatchingPipe):
-    def process_from_file(self, paths):
-        data_bundle = QuoraLoader().load(paths)
-        return self.process(data_bundle)
-
-
-class QNLIPipe(MatchingPipe):
-    def process_from_file(self, paths=None):
-        data_bundle = QNLILoader().load(paths)
-        return self.process(data_bundle)
-
-
-class MNLIPipe(MatchingPipe):
-    def process_from_file(self, paths=None):
-        data_bundle = MNLILoader().load(paths)
-        return self.process(data_bundle)
-
-
-class LCQMCPipe(MatchingPipe):
-    def __init__(self, tokenizer='cn-char'):
-        super().__init__(tokenizer=tokenizer)
-
-    def process_from_file(self, paths=None):
-        data_bundle = LCQMCLoader().load(paths)
-        data_bundle = RenamePipe().process(data_bundle)
-        data_bundle = self.process(data_bundle)
-        data_bundle = RenamePipe().process(data_bundle)
-        return data_bundle
-
-
-class CNXNLIPipe(MatchingPipe):
-    def __init__(self, tokenizer='cn-char'):
-        super().__init__(tokenizer=tokenizer)
-
-    def process_from_file(self, paths=None):
-        data_bundle = CNXNLILoader().load(paths)
-        data_bundle = GranularizePipe(task='XNLI').process(data_bundle)
-        data_bundle = RenamePipe().process(data_bundle)  # rename the Chinese fields so MatchingPipe can process them
-        data_bundle = self.process(data_bundle)
-        data_bundle = RenamePipe().process(data_bundle)
-        return data_bundle
-
-
-class BQCorpusPipe(MatchingPipe):
-    def __init__(self, tokenizer='cn-char'):
-        super().__init__(tokenizer=tokenizer)
-
-    def process_from_file(self, paths=None):
-        data_bundle = BQCorpusLoader().load(paths)
-        data_bundle = RenamePipe().process(data_bundle)
-        data_bundle = self.process(data_bundle)
-        data_bundle = RenamePipe().process(data_bundle)
-        return data_bundle
-
-
-class RenamePipe(Pipe):
-    def __init__(self, task='cn-nli'):
-        super().__init__()
-        self.task = task
-
-    def process(self, data_bundle: DataBundle):  # rename field names for the Chinese matching datasets
-        if self.task == 'cn-nli':
-            for name, dataset in data_bundle.datasets.items():
-                if dataset.has_field(Const.RAW_CHARS(0)):
-                    dataset.rename_field(Const.RAW_CHARS(0), Const.RAW_WORDS(0))  # RAW_CHARS -> RAW_WORDS
-                    dataset.rename_field(Const.RAW_CHARS(1), Const.RAW_WORDS(1))
-                elif dataset.has_field(Const.INPUTS(0)):
-                    dataset.rename_field(Const.INPUTS(0), Const.CHAR_INPUTS(0))  # WORDS -> CHARS
-                    dataset.rename_field(Const.INPUTS(1), Const.CHAR_INPUTS(1))
-                    dataset.rename_field(Const.RAW_WORDS(0), Const.RAW_CHARS(0))
-                    dataset.rename_field(Const.RAW_WORDS(1), Const.RAW_CHARS(1))
-                else:
-                    raise RuntimeError(
-                        "The field names of the dataset are not qualified. It should have either RAW_CHARS or WORDS.")
-        elif self.task == 'cn-nli-bert':
-            for name, dataset in data_bundle.datasets.items():
-                if dataset.has_field(Const.RAW_CHARS(0)):
-                    dataset.rename_field(Const.RAW_CHARS(0), Const.RAW_WORDS(0))  # RAW_CHARS -> RAW_WORDS
-                    dataset.rename_field(Const.RAW_CHARS(1), Const.RAW_WORDS(1))
-                elif dataset.has_field(Const.RAW_WORDS(0)):
-                    dataset.rename_field(Const.RAW_WORDS(0), Const.RAW_CHARS(0))
-                    dataset.rename_field(Const.RAW_WORDS(1), Const.RAW_CHARS(1))
-                    dataset.rename_field(Const.INPUT, Const.CHAR_INPUT)
-                else:
-                    raise RuntimeError(
-                        "The field names of the dataset are not qualified. It should have either RAW_CHARS or RAW_WORDS."
-                    )
-        else:
-            raise RuntimeError(
-                "Only task='cn-nli' or 'cn-nli-bert' is supported."
-            )
-
-        return data_bundle
-
-
-class GranularizePipe(Pipe):
-    def __init__(self, task=None):
-        super().__init__()
-        self.task = task
-
-    def _granularize(self, data_bundle, tag_map):
-        r"""
-        Convert the content of the 'target' field of every dataset in data_bundle.
-
-        :param data_bundle:
-        :param dict tag_map: mapping applied to the tags in the target field; e.g. {"0":0, "1":0, "3":1, "4":1}
-            drops every instance whose target is "2" and treats "1" as class 0.
-        :return: the data_bundle passed in
-        """
-        for name in list(data_bundle.datasets.keys()):
-            dataset = data_bundle.get_dataset(name)
-            dataset.apply_field(lambda target: tag_map.get(target, -100), field_name=Const.TARGET,
-                                new_field_name=Const.TARGET)
-            dataset.drop(lambda ins: ins[Const.TARGET] == -100)
-            data_bundle.set_dataset(dataset, name)
-        return data_bundle
-
-    def process(self, data_bundle: DataBundle):
-        task_tag_dict = {
-            'XNLI': {'neutral': 0, 'entailment': 1, 'contradictory': 2, 'contradiction': 2}
-        }
-        if self.task in task_tag_dict:
-            data_bundle = self._granularize(data_bundle=data_bundle, tag_map=task_tag_dict[self.task])
-        else:
-            raise RuntimeError(f"Only {task_tag_dict.keys()} tasks are supported.")
-        return data_bundle
-
-
-class MachingTruncatePipe(Pipe):  # truncate sentences for bert, modify seq_len; currently an unimplemented stub
-    def __init__(self):
-        super().__init__()
-
-    def process(self, data_bundle: DataBundle):
-        for name, dataset in data_bundle.datasets.items():
-            pass
-        return data_bundle
-
-
-class LCQMCBertPipe(MatchingBertPipe):
-    def __init__(self, tokenizer='cn-char'):
-        super().__init__(tokenizer=tokenizer)
-
-    def process_from_file(self, paths=None):
-        data_bundle = LCQMCLoader().load(paths)
-        data_bundle = RenamePipe(task='cn-nli-bert').process(data_bundle)
-        data_bundle = self.process(data_bundle)
-        data_bundle = TruncateBertPipe(task='cn').process(data_bundle)
-        data_bundle = RenamePipe(task='cn-nli-bert').process(data_bundle)
-        return data_bundle
-
-
-class BQCorpusBertPipe(MatchingBertPipe):
-    def __init__(self, tokenizer='cn-char'):
-        super().__init__(tokenizer=tokenizer)
-
-    def process_from_file(self, paths=None):
-        data_bundle = BQCorpusLoader().load(paths)
-        data_bundle = RenamePipe(task='cn-nli-bert').process(data_bundle)
-        data_bundle = self.process(data_bundle)
-        data_bundle = TruncateBertPipe(task='cn').process(data_bundle)
-        data_bundle = RenamePipe(task='cn-nli-bert').process(data_bundle)
-        return data_bundle
-
-
-class CNXNLIBertPipe(MatchingBertPipe):
-    def __init__(self, tokenizer='cn-char'):
-        super().__init__(tokenizer=tokenizer)
-
-    def process_from_file(self, paths=None):
-        data_bundle = CNXNLILoader().load(paths)
-        data_bundle = GranularizePipe(task='XNLI').process(data_bundle)
-        data_bundle = RenamePipe(task='cn-nli-bert').process(data_bundle)
-        data_bundle = self.process(data_bundle)
-        data_bundle = TruncateBertPipe(task='cn').process(data_bundle)
-        data_bundle = RenamePipe(task='cn-nli-bert').process(data_bundle)
-        return data_bundle
-
-
-class TruncateBertPipe(Pipe):
-    def __init__(self, task='cn'):
-        super().__init__()
-        self.task = task
-
-    def _truncate(self, sentence_index: list, sep_index_vocab):
-        # locate [SEP] inside the dataset's field['words'] from its index in the vocabulary
-        sep_index_words = sentence_index.index(sep_index_vocab)
-        words_before_sep = sentence_index[:sep_index_words]
-        words_after_sep = sentence_index[sep_index_words:]  # note that this part still contains [SEP]
-        if self.task == 'cn':
-            # for Chinese tasks, truncate the text before and after [SEP] in Instance['words'] to at most 250 tokens each
-            words_before_sep = words_before_sep[:250]
-            words_after_sep =
words_after_sep[:250] - elif self.task == 'en': - # 英文任务将Instance['words']中在[SEP]前后的文本分别截至长度不超过215 - words_before_sep = words_before_sep[:215] - words_after_sep = words_after_sep[:215] - else: - raise RuntimeError("Only support 'cn' or 'en' task.") - - return words_before_sep + words_after_sep - - def process(self, data_bundle: DataBundle) -> DataBundle: - for name in data_bundle.datasets.keys(): - dataset = data_bundle.get_dataset(name) - sep_index_vocab = data_bundle.get_vocab('words').to_index('[SEP]') - dataset.apply_field(lambda sent_index: self._truncate(sentence_index=sent_index, sep_index_vocab=sep_index_vocab), field_name='words', new_field_name='words') - - # truncate之后需要更新seq_len - dataset.add_seq_len(field_name='words') - return data_bundle - diff --git a/fastNLP/io/pipe/pipe.py b/fastNLP/io/pipe/pipe.py deleted file mode 100644 index 0ff32d83..00000000 --- a/fastNLP/io/pipe/pipe.py +++ /dev/null @@ -1,41 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "Pipe", -] - -from .. import DataBundle - - -class Pipe: - r""" - Pipe是fastNLP中用于处理DataBundle的类,但实际是处理DataBundle中的DataSet。所有Pipe都会在其process()函数的文档中指出该Pipe可处理的DataSet应该具备怎样的格式;在Pipe - 文档中说明该Pipe返回后DataSet的格式以及其field的信息;以及新增的Vocabulary的信息。 - - 一般情况下Pipe处理包含以下的几个过程,(1)将raw_words或raw_chars进行tokenize以切分成不同的词或字; - (2) 再建立词或字的 :class:`~fastNLP.Vocabulary` , 并将词或字转换为index; (3)将target列建立词表并将target列转为index; - - Pipe中提供了两个方法 - - -process()函数,输入为DataBundle - -process_from_file()函数,输入为对应Loader的load函数可接受的类型。 - - """ - - def process(self, data_bundle: DataBundle) -> DataBundle: - r""" - 对输入的DataBundle进行处理,然后返回该DataBundle。 - - :param ~fastNLP.DataBundle data_bundle: 需要处理的DataBundle对象 - :return: DataBundle - """ - raise NotImplementedError - - def process_from_file(self, paths: str) -> DataBundle: - r""" - 传入文件路径,生成处理好的DataBundle对象。paths支持的路径形式可以参考 ::meth:`fastNLP.io.Loader.load()` - - :param str paths: - :return: DataBundle - """ - raise NotImplementedError diff --git a/fastNLP/io/pipe/qa.py b/fastNLP/io/pipe/qa.py deleted file mode 100644 index 8dbad2bc..00000000 --- a/fastNLP/io/pipe/qa.py +++ /dev/null @@ -1,146 +0,0 @@ -r""" -本文件中的Pipe主要用于处理问答任务的数据。 - -""" - - -from copy import deepcopy - -from .pipe import Pipe -from .. 
import DataBundle -from ..loader.qa import CMRC2018Loader -from .utils import get_tokenizer -from ...core import DataSet -from ...core import Vocabulary - -__all__ = ['CMRC2018BertPipe'] - - -def _concat_clip(data_bundle, max_len, concat_field_name='raw_chars'): - r""" - 处理data_bundle中的DataSet,将context与question按照character进行tokenize,然后使用[SEP]将两者连接起来。 - - 会新增field: context_len(int), raw_words(list[str]), target_start(int), target_end(int)其中target_start - 与target_end是与raw_chars等长的。其中target_start和target_end是前闭后闭的区间。 - - :param DataBundle data_bundle: 类似["a", "b", "[SEP]", "c", ] - :return: - """ - tokenizer = get_tokenizer('cn-char', lang='cn') - for name in list(data_bundle.datasets.keys()): - ds = data_bundle.get_dataset(name) - data_bundle.delete_dataset(name) - new_ds = DataSet() - for ins in ds: - new_ins = deepcopy(ins) - context = ins['context'] - question = ins['question'] - - cnt_lst = tokenizer(context) - q_lst = tokenizer(question) - - answer_start = -1 - - if len(cnt_lst) + len(q_lst) + 3 > max_len: # 预留开头的[CLS]和[SEP]和中间的[sep] - if 'answer_starts' in ins and 'answers' in ins: - answer_start = int(ins['answer_starts'][0]) - answer = ins['answers'][0] - answer_end = answer_start + len(answer) - if answer_end > max_len - 3 - len(q_lst): - span_start = answer_end + 3 + len(q_lst) - max_len - span_end = answer_end - else: - span_start = 0 - span_end = max_len - 3 - len(q_lst) - cnt_lst = cnt_lst[span_start:span_end] - answer_start = int(ins['answer_starts'][0]) - answer_start -= span_start - answer_end = answer_start + len(ins['answers'][0]) - else: - cnt_lst = cnt_lst[:max_len - len(q_lst) - 3] - else: - if 'answer_starts' in ins and 'answers' in ins: - answer_start = int(ins['answer_starts'][0]) - answer_end = answer_start + len(ins['answers'][0]) - - tokens = cnt_lst + ['[SEP]'] + q_lst - new_ins['context_len'] = len(cnt_lst) - new_ins[concat_field_name] = tokens - - if answer_start != -1: - new_ins['target_start'] = answer_start - new_ins['target_end'] = answer_end - 1 - - new_ds.append(new_ins) - data_bundle.set_dataset(new_ds, name) - - return data_bundle - - -class CMRC2018BertPipe(Pipe): - r""" - 处理之后的DataSet将新增以下的field(传入的field仍然保留) - - .. csv-table:: - :header: "context_len", "raw_chars", "target_start", "target_end", "chars" - - 492, ['范', '廷', '颂... ], 30, 34, "[21, 25, ...]" - 491, ['范', '廷', '颂... ], 41, 61, "[21, 25, ...]" - - ".", "...", "...","...", "..." - - raw_words列是context与question拼起来的结果(连接的地方加入了[SEP]),words是转为index的值, target_start为答案start的index,target_end为答案end的index - (闭区间);context_len指示的是words列中context的长度。 - - 其中各列的meta信息如下: - - .. code:: - - +-------------+-------------+-----------+--------------+------------+-------+---------+ - | field_names | context_len | raw_chars | target_start | target_end | chars | answers | - +-------------+-------------+-----------+--------------+------------+-------+---------| - | is_input | False | False | False | False | True | False | - | is_target | True | True | True | True | False | True | - | ignore_type | False | True | False | False | False | True | - | pad_value | 0 | 0 | 0 | 0 | 0 | 0 | - +-------------+-------------+-----------+--------------+------------+-------+---------+ - - """ - def __init__(self, max_len=510): - super().__init__() - self.max_len = max_len - - def process(self, data_bundle: DataBundle) -> DataBundle: - r""" - 传入的DataSet应该具备以下的field - - .. 
csv-table:: - :header:"title", "context", "question", "answers", "answer_starts", "id" - - "范廷颂", "范廷颂枢机(,),圣名保禄·若瑟()...", "范廷颂是什么时候被任为主教的?", ["1963年"], ["30"], "TRAIN_186_QUERY_0" - "范廷颂", "范廷颂枢机(,),圣名保禄·若瑟()...", "1990年,范廷颂担任什么职务?", ["1990年被擢升为天..."], ["41"],"TRAIN_186_QUERY_1" - "...", "...", "...","...", ".", "..." - - :param data_bundle: - :return: - """ - data_bundle = _concat_clip(data_bundle, max_len=self.max_len, concat_field_name='raw_chars') - - src_vocab = Vocabulary() - src_vocab.from_dataset(*[ds for name, ds in data_bundle.iter_datasets() if 'train' in name], - field_name='raw_chars', - no_create_entry_dataset=[ds for name, ds in data_bundle.iter_datasets() - if 'train' not in name] - ) - src_vocab.index_dataset(*data_bundle.datasets.values(), field_name='raw_chars', new_field_name='chars') - data_bundle.set_vocab(src_vocab, 'chars') - - data_bundle.set_ignore_type('raw_chars', 'answers', flag=True) - data_bundle.set_input('chars') - data_bundle.set_target('raw_chars', 'answers', 'target_start', 'target_end', 'context_len') - - return data_bundle - - def process_from_file(self, paths=None) -> DataBundle: - data_bundle = CMRC2018Loader().load(paths) - return self.process(data_bundle) \ No newline at end of file diff --git a/fastNLP/io/pipe/summarization.py b/fastNLP/io/pipe/summarization.py deleted file mode 100644 index e0988336..00000000 --- a/fastNLP/io/pipe/summarization.py +++ /dev/null @@ -1,197 +0,0 @@ -r"""undocumented""" -import os -import numpy as np - -from .pipe import Pipe -from .utils import _drop_empty_instance -from ..loader.summarization import ExtCNNDMLoader -from ..data_bundle import DataBundle -from ...core.const import Const -from ...core.vocabulary import Vocabulary -from ...core._logger import logger - - -WORD_PAD = "[PAD]" -WORD_UNK = "[UNK]" -DOMAIN_UNK = "X" -TAG_UNK = "X" - - -class ExtCNNDMPipe(Pipe): - r""" - 对CNN/Daily Mail数据进行适用于extractive summarization task的预处理,预处理之后的数据,具备以下结构: - - .. csv-table:: - :header: "text", "summary", "label", "publication", "text_wd", "words", "seq_len", "target" - - """ - def __init__(self, vocab_size, sent_max_len, doc_max_timesteps, vocab_path=None, domain=False): - r""" - - :param vocab_size: int, 词表大小 - :param sent_max_len: int, 句子最大长度,不足的句子将padding,超出的将截断 - :param doc_max_timesteps: int, 文章最多句子个数,不足的将padding,超出的将截断 - :param vocab_path: str, 外部词表路径 - :param domain: bool, 是否需要建立domain词表 - """ - self.vocab_size = vocab_size - self.vocab_path = vocab_path - self.sent_max_len = sent_max_len - self.doc_max_timesteps = doc_max_timesteps - self.domain = domain - - def process(self, data_bundle: DataBundle): - r""" - 传入的DataSet应该具备如下的结构 - - .. csv-table:: - :header: "text", "summary", "label", "publication" - - ["I got new tires from them and... ","..."], ["The new tires...","..."], [0, 1], "cnndm" - ["Don't waste your time. We had two...","..."], ["Time is precious","..."], [1], "cnndm" - ["..."], ["..."], [], "cnndm" - - :param data_bundle: - :return: 处理得到的数据包括 - .. csv-table:: - :header: "text_wd", "words", "seq_len", "target" - - [["I","got",..."."],...,["..."]], [[54,89,...,5],...,[9,43,..,0]], [1,1,...,0], [0,1,...,0] - [["Don't","waste",...,"."],...,["..."]], [[5234,653,...,5],...,[87,234,..,0]], [1,1,...,0], [1,1,...,0] - [[""],...,[""]], [[],...,[]], [], [] - """ - - if self.vocab_path is None: - error_msg = 'vocab file is not defined!' 
- logger.error(error_msg) - raise RuntimeError(error_msg) - data_bundle.apply(lambda x: _lower_text(x['text']), new_field_name='text') - data_bundle.apply(lambda x: _lower_text(x['summary']), new_field_name='summary') - data_bundle.apply(lambda x: _split_list(x['text']), new_field_name='text_wd') - data_bundle.apply(lambda x: _convert_label(x["label"], len(x["text"])), new_field_name=Const.TARGET) - - data_bundle.apply(lambda x: _pad_sent(x["text_wd"], self.sent_max_len), new_field_name=Const.INPUT) - # db.apply(lambda x: _token_mask(x["text_wd"], self.sent_max_len), new_field_name="pad_token_mask") - - # pad document - data_bundle.apply(lambda x: _pad_doc(x[Const.INPUT], self.sent_max_len, self.doc_max_timesteps), new_field_name=Const.INPUT) - data_bundle.apply(lambda x: _sent_mask(x[Const.INPUT], self.doc_max_timesteps), new_field_name=Const.INPUT_LEN) - data_bundle.apply(lambda x: _pad_label(x[Const.TARGET], self.doc_max_timesteps), new_field_name=Const.TARGET) - - data_bundle = _drop_empty_instance(data_bundle, "label") - - # set input and target - data_bundle.set_input(Const.INPUT, Const.INPUT_LEN) - data_bundle.set_target(Const.TARGET, Const.INPUT_LEN) - - # print("[INFO] Load existing vocab from %s!" % self.vocab_path) - word_list = [] - with open(self.vocab_path, 'r', encoding='utf8') as vocab_f: - cnt = 2 # pad and unk - for line in vocab_f: - pieces = line.split("\t") - word_list.append(pieces[0]) - cnt += 1 - if cnt > self.vocab_size: - break - vocabs = Vocabulary(max_size=self.vocab_size, padding=WORD_PAD, unknown=WORD_UNK) - vocabs.add_word_lst(word_list) - vocabs.build_vocab() - data_bundle.set_vocab(vocabs, "vocab") - - if self.domain is True: - domaindict = Vocabulary(padding=None, unknown=DOMAIN_UNK) - domaindict.from_dataset(data_bundle.get_dataset("train"), field_name="publication") - data_bundle.set_vocab(domaindict, "domain") - - return data_bundle - - def process_from_file(self, paths=None): - r""" - :param paths: dict or string - :return: DataBundle - """ - loader = ExtCNNDMLoader() - if self.vocab_path is None: - if paths is None: - paths = loader.download() - if not os.path.isdir(paths): - error_msg = 'vocab file is not defined!' 
- logger.error(error_msg) - raise RuntimeError(error_msg) - self.vocab_path = os.path.join(paths, 'vocab') - db = loader.load(paths=paths) - db = self.process(db) - for ds in db.datasets.values(): - db.get_vocab("vocab").index_dataset(ds, field_name=Const.INPUT, new_field_name=Const.INPUT) - - return db - - -def _lower_text(text_list): - return [text.lower() for text in text_list] - - -def _split_list(text_list): - return [text.split() for text in text_list] - - -def _convert_label(label, sent_len): - np_label = np.zeros(sent_len, dtype=int) - if label != []: - np_label[np.array(label)] = 1 - return np_label.tolist() - - -def _pad_sent(text_wd, sent_max_len): - pad_text_wd = [] - for sent_wd in text_wd: - if len(sent_wd) < sent_max_len: - pad_num = sent_max_len - len(sent_wd) - sent_wd.extend([WORD_PAD] * pad_num) - else: - sent_wd = sent_wd[:sent_max_len] - pad_text_wd.append(sent_wd) - return pad_text_wd - - -def _token_mask(text_wd, sent_max_len): - token_mask_list = [] - for sent_wd in text_wd: - token_num = len(sent_wd) - if token_num < sent_max_len: - mask = [1] * token_num + [0] * (sent_max_len - token_num) - else: - mask = [1] * sent_max_len - token_mask_list.append(mask) - return token_mask_list - - -def _pad_label(label, doc_max_timesteps): - text_len = len(label) - if text_len < doc_max_timesteps: - pad_label = label + [0] * (doc_max_timesteps - text_len) - else: - pad_label = label[:doc_max_timesteps] - return pad_label - - -def _pad_doc(text_wd, sent_max_len, doc_max_timesteps): - text_len = len(text_wd) - if text_len < doc_max_timesteps: - padding = [WORD_PAD] * sent_max_len - pad_text = text_wd + [padding] * (doc_max_timesteps - text_len) - else: - pad_text = text_wd[:doc_max_timesteps] - return pad_text - - -def _sent_mask(text_wd, doc_max_timesteps): - text_len = len(text_wd) - if text_len < doc_max_timesteps: - sent_mask = [1] * text_len + [0] * (doc_max_timesteps - text_len) - else: - sent_mask = [1] * doc_max_timesteps - return sent_mask - - diff --git a/fastNLP/io/pipe/utils.py b/fastNLP/io/pipe/utils.py deleted file mode 100644 index f3f0e649..00000000 --- a/fastNLP/io/pipe/utils.py +++ /dev/null @@ -1,224 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "iob2", - "iob2bioes", - "get_tokenizer", -] - -from typing import List -import warnings - -from ...core.const import Const -from ...core.vocabulary import Vocabulary -from ...core._logger import logger -from pkg_resources import parse_version - - -def iob2(tags: List[str]) -> List[str]: - r""" - 检查数据是否是合法的IOB数据,如果是IOB1会被自动转换为IOB2。两种格式的区别见 - https://datascience.stackexchange.com/questions/37824/difference-between-iob-and-iob2-format - - :param tags: 需要转换的tags - """ - for i, tag in enumerate(tags): - if tag == "O": - continue - split = tag.split("-") - if len(split) != 2 or split[0] not in ["I", "B"]: - raise TypeError("The encoding schema is not a valid IOB type.") - if split[0] == "B": - continue - elif i == 0 or tags[i - 1] == "O": # conversion IOB1 to IOB2 - tags[i] = "B" + tag[1:] - elif tags[i - 1][1:] == tag[1:]: - continue - else: # conversion IOB1 to IOB2 - tags[i] = "B" + tag[1:] - return tags - - -def iob2bioes(tags: List[str]) -> List[str]: - r""" - 将iob的tag转换为bioes编码 - :param tags: - :return: - """ - new_tags = [] - for i, tag in enumerate(tags): - if tag == 'O': - new_tags.append(tag) - else: - split = tag.split('-')[0] - if split == 'B': - if i + 1 != len(tags) and tags[i + 1].split('-')[0] == 'I': - new_tags.append(tag) - else: - new_tags.append(tag.replace('B-', 'S-')) - elif split == 'I': - if i + 1 < 
len(tags) and tags[i + 1].split('-')[0] == 'I':
-                new_tags.append(tag)
-            else:
-                new_tags.append(tag.replace('I-', 'E-'))
-        else:
-            raise TypeError("Invalid IOB format.")
-    return new_tags
-
-
-def get_tokenizer(tokenize_method: str, lang='en'):
-    r"""
-
-    :param str tokenize_method: which tokenizer to fetch
-    :param str lang: language; currently only en is supported
-    :return: the tokenize function
-    """
-    tokenizer_dict = {
-        'spacy': None,
-        'raw': _raw_split,
-        'cn-char': _cn_char_split,
-    }
-    if tokenize_method == 'spacy':
-        import spacy
-        spacy.prefer_gpu()
-        if lang != 'en':
-            raise RuntimeError("Spacy only supports en right now.")
-        if parse_version(spacy.__version__) >= parse_version('3.0'):
-            en = spacy.load('en_core_web_sm')
-        else:
-            en = spacy.load(lang)
-        tokenizer = lambda x: [w.text for w in en.tokenizer(x)]
-    elif tokenize_method in tokenizer_dict:
-        tokenizer = tokenizer_dict[tokenize_method]
-    else:
-        raise RuntimeError(f"Only {tokenizer_dict.keys()} tokenizers are supported.")
-    return tokenizer
-
-
-def _cn_char_split(sent):
-    return [chars for chars in sent]
-
-
-def _raw_split(sent):
-    return sent.split()
-
-
-def _indexize(data_bundle, input_field_names=Const.INPUT, target_field_names=Const.TARGET):
-    r"""
-    Build a vocabulary over the given input field(s) and over the Const.TARGET field of the datasets,
-    index those fields, and register the vocabularies in the data_bundle.
-
-    :param ~fastNLP.DataBundle data_bundle:
-    :param: str,list input_field_names:
-    :param: str,list target_field_names: the vocabulary of these fields has no unknown and no padding entry
-    :return:
-    """
-    if isinstance(input_field_names, str):
-        input_field_names = [input_field_names]
-    if isinstance(target_field_names, str):
-        target_field_names = [target_field_names]
-    for input_field_name in input_field_names:
-        src_vocab = Vocabulary()
-        src_vocab.from_dataset(*[ds for name, ds in data_bundle.iter_datasets() if 'train' in name],
-                               field_name=input_field_name,
-                               no_create_entry_dataset=[ds for name, ds in data_bundle.iter_datasets()
-                                                        if ('train' not in name) and (ds.has_field(input_field_name))]
-                               )
-        src_vocab.index_dataset(*data_bundle.datasets.values(), field_name=input_field_name)
-        data_bundle.set_vocab(src_vocab, input_field_name)
-
-    for target_field_name in target_field_names:
-        tgt_vocab = Vocabulary(unknown=None, padding=None)
-        tgt_vocab.from_dataset(*[ds for name, ds in data_bundle.iter_datasets() if 'train' in name],
-                               field_name=target_field_name,
-                               no_create_entry_dataset=[ds for name, ds in data_bundle.iter_datasets()
-                                                        if ('train' not in name) and (ds.has_field(target_field_name))]
-                               )
-        if len(tgt_vocab._no_create_word) > 0:
-            warn_msg = f"There are {len(tgt_vocab._no_create_word)} `{target_field_name}` labels" \
-                       f" in {[name for name in data_bundle.datasets.keys() if 'train' not in name]} " \
-                       f"data sets but not in the train data set!\n" \
-                       f"These label(s) are {tgt_vocab._no_create_word}"
-            warnings.warn(warn_msg)
-            logger.warning(warn_msg)
-        tgt_vocab.index_dataset(*[ds for ds in data_bundle.datasets.values() if ds.has_field(target_field_name)], field_name=target_field_name)
-        data_bundle.set_vocab(tgt_vocab, target_field_name)
-
-    return data_bundle
-
-
-def _add_words_field(data_bundle, lower=False):
-    r"""
-    Copy a words column into each dataset of data_bundle and lower-case it if lower is True.
-
-    :param data_bundle:
-    :param bool lower: whether to lower-case the words
-    :return: the DataBundle passed in
-    """
-    data_bundle.copy_field(field_name=Const.RAW_WORD, new_field_name=Const.INPUT, ignore_miss_dataset=True)
-
-    if lower:
-        for name, dataset in data_bundle.datasets.items():
-            dataset[Const.INPUT].lower()
-    return data_bundle
-
-
-def _add_chars_field(data_bundle, lower=False):
-    r"""
-    给data_bundle中的dataset中复制一列chars.
并根据lower参数判断是否需要小写化 - - :param data_bundle: - :param bool lower:是否要小写化 - :return: 传入的DataBundle - """ - data_bundle.copy_field(field_name=Const.RAW_CHAR, new_field_name=Const.CHAR_INPUT, ignore_miss_dataset=True) - - if lower: - for name, dataset in data_bundle.datasets.items(): - dataset[Const.CHAR_INPUT].lower() - return data_bundle - - -def _drop_empty_instance(data_bundle, field_name): - r""" - 删除data_bundle的DataSet中存在的某个field为空的情况 - - :param ~fastNLP.DataBundle data_bundle: - :param str field_name: 对哪个field进行检查,如果为None,则任意field为空都会删掉 - :return: 传入的DataBundle - """ - - def empty_instance(ins): - if field_name: - field_value = ins[field_name] - if field_value in ((), {}, [], ''): - return True - return False - for _, field_value in ins.items(): - if field_value in ((), {}, [], ''): - return True - return False - - for name, dataset in data_bundle.datasets.items(): - dataset.drop(empty_instance) - - return data_bundle - - -def _granularize(data_bundle, tag_map): - r""" - 该函数对data_bundle中'target'列中的内容进行转换。 - - :param data_bundle: - :param dict tag_map: 将target列中的tag做以下的映射,比如{"0":0, "1":0, "3":1, "4":1}, 则会删除target为"2"的instance, - 且将"1"认为是第0类。 - :return: 传入的data_bundle - """ - if tag_map is None: - return data_bundle - for name in list(data_bundle.datasets.keys()): - dataset = data_bundle.get_dataset(name) - dataset.apply_field(lambda target: tag_map.get(target, -100), field_name=Const.TARGET, - new_field_name=Const.TARGET) - dataset.drop(lambda ins: ins[Const.TARGET] == -100) - data_bundle.set_dataset(dataset, name) - return data_bundle diff --git a/fastNLP/io/utils.py b/fastNLP/io/utils.py deleted file mode 100644 index c5dc7fd7..00000000 --- a/fastNLP/io/utils.py +++ /dev/null @@ -1,82 +0,0 @@ -r""" -.. todo:: - doc -""" - -__all__ = [ - "check_loader_paths" -] - -import os -from pathlib import Path -from typing import Union, Dict - -from ..core import logger - - -def check_loader_paths(paths: Union[str, Dict[str, str]]) -> Dict[str, str]: - r""" - 检查传入dataloader的文件的合法性。如果为合法路径,将返回至少包含'train'这个key的dict。类似于下面的结果:: - - { - 'train': '/some/path/to/', # 一定包含,建词表应该在这上面建立,剩下的其它文件应该只需要处理并index。 - 'test': 'xxx' # 可能有,也可能没有 - ... - } - - 如果paths为不合法的,将直接进行raise相应的错误. 如果paths内不包含train也会报错。 - - :param str paths: 路径. 
可以为一个文件路径(则认为该文件就是train的文件); 可以为一个文件目录,将在该目录下寻找包含train(文件名 - 中包含train这个字段), test, dev这三个字段的文件或文件夹; 可以为一个dict, 则key是用户自定义的某个文件的名称,value是这个文件的路径。 - :return: - """ - if isinstance(paths, (str, Path)): - paths = os.path.abspath(os.path.expanduser(paths)) - if os.path.isfile(paths): - return {'train': paths} - elif os.path.isdir(paths): - filenames = os.listdir(paths) - filenames.sort() - files = {} - for filename in filenames: - path_pair = None - if 'train' in filename: - path_pair = ('train', filename) - if 'dev' in filename: - if path_pair: - raise Exception( - "Directory:{} in {} contains both `{}` and `dev`.".format(filename, paths, path_pair[0])) - path_pair = ('dev', filename) - if 'test' in filename: - if path_pair: - raise Exception( - "Directory:{} in {} contains both `{}` and `test`.".format(filename, paths, path_pair[0])) - path_pair = ('test', filename) - if path_pair: - if path_pair[0] in files: - raise FileExistsError(f"Two files contain `{path_pair[0]}` were found, please specify the " - f"filepath for `{path_pair[0]}`.") - files[path_pair[0]] = os.path.join(paths, path_pair[1]) - if 'train' not in files: - raise KeyError(f"There is no train file in {paths}.") - return files - else: - raise FileNotFoundError(f"{paths} is not a valid file path.") - - elif isinstance(paths, dict): - if paths: - if 'train' not in paths: - raise KeyError("You have to include `train` in your dict.") - for key, value in paths.items(): - if isinstance(key, str) and isinstance(value, str): - value = os.path.abspath(os.path.expanduser(value)) - if not os.path.exists(value): - raise TypeError(f"{value} is not a valid path.") - paths[key] = value - else: - raise TypeError("All keys and values in paths should be str.") - return paths - else: - raise ValueError("Empty paths is not allowed.") - else: - raise TypeError(f"paths only supports str and dict. not {type(paths)}.") diff --git a/fastNLP/models/__init__.py b/fastNLP/models/__init__.py deleted file mode 100644 index f00687b3..00000000 --- a/fastNLP/models/__init__.py +++ /dev/null @@ -1,53 +0,0 @@ -r""" -fastNLP 在 :mod:`~fastNLP.models` 模块中内置了如 :class:`~fastNLP.models.CNNText` 、 -:class:`~fastNLP.models.SeqLabeling` 等完整的模型,以供用户直接使用。 - -.. 
todo:: - 这些模型的介绍(与主页一致) - - -""" -__all__ = [ - "CNNText", - - "SeqLabeling", - "AdvSeqLabel", - "BiLSTMCRF", - - "ESIM", - - "StarTransEnc", - "STSeqLabel", - "STNLICls", - "STSeqCls", - - "BiaffineParser", - "GraphParser", - - "BertForSequenceClassification", - "BertForSentenceMatching", - "BertForMultipleChoice", - "BertForTokenClassification", - "BertForQuestionAnswering", - - "TransformerSeq2SeqModel", - "LSTMSeq2SeqModel", - "Seq2SeqModel", - - 'SequenceGeneratorModel' -] - -from .base_model import BaseModel -from .bert import BertForMultipleChoice, BertForQuestionAnswering, BertForSequenceClassification, \ - BertForTokenClassification, BertForSentenceMatching -from .biaffine_parser import BiaffineParser, GraphParser -from .cnn_text_classification import CNNText -from .sequence_labeling import SeqLabeling, AdvSeqLabel, BiLSTMCRF -from .snli import ESIM -from .star_transformer import StarTransEnc, STSeqCls, STNLICls, STSeqLabel -from .seq2seq_model import TransformerSeq2SeqModel, LSTMSeq2SeqModel, Seq2SeqModel -from .seq2seq_generator import SequenceGeneratorModel -import sys -from ..doc_utils import doc_process - -doc_process(sys.modules[__name__]) diff --git a/fastNLP/models/base_model.py b/fastNLP/models/base_model.py deleted file mode 100644 index f83f768f..00000000 --- a/fastNLP/models/base_model.py +++ /dev/null @@ -1,36 +0,0 @@ -r"""undocumented""" - -__all__ = [] - -import torch - -from ..modules.decoder.mlp import MLP - - -class BaseModel(torch.nn.Module): - r"""Base PyTorch model for all models. - """ - - def __init__(self): - super(BaseModel, self).__init__() - - def fit(self, train_data, dev_data=None, **train_args): - pass - - def predict(self, *args, **kwargs): - raise NotImplementedError - - -class NaiveClassifier(BaseModel): - r""" - 一个简单的分类器例子,可用于各种测试 - """ - def __init__(self, in_feature_dim, out_feature_dim): - super(NaiveClassifier, self).__init__() - self.mlp = MLP([in_feature_dim, in_feature_dim, out_feature_dim]) - - def forward(self, x): - return {"predict": torch.sigmoid(self.mlp(x))} - - def predict(self, x): - return {"predict": torch.sigmoid(self.mlp(x)) > 0.5} diff --git a/fastNLP/models/bert.py b/fastNLP/models/bert.py deleted file mode 100644 index 5851f8c8..00000000 --- a/fastNLP/models/bert.py +++ /dev/null @@ -1,271 +0,0 @@ -r""" -fastNLP提供了BERT应用到五个下游任务的模型代码,可以直接调用。这五个任务分别为 - - - 文本分类任务: :class:`~fastNLP.models.BertForSequenceClassification` - - Matching任务: :class:`~fastNLP.models.BertForSentenceMatching` - - 多选任务: :class:`~fastNLP.models.BertForMultipleChoice` - - 序列标注任务: :class:`~fastNLP.models.BertForTokenClassification` - - 抽取式QA任务: :class:`~fastNLP.models.BertForQuestionAnswering` - -每一个模型必须要传入一个名字为 `embed` 的 :class:`fastNLP.embeddings.BertEmbedding` ,这个参数包含了 -:class:`fastNLP.modules.encoder.BertModel` ,是下游模型的编码器(encoder)。 - -除此以外,还需要传入一个数字,这个数字在不同下游任务模型上的意义如下:: - - 下游任务模型 参数名称 含义 - BertForSequenceClassification num_labels 文本分类类别数目,默认值为2 - BertForSentenceMatching num_labels Matching任务类别数目,默认值为2 - BertForMultipleChoice num_choices 多选任务选项数目,默认值为2 - BertForTokenClassification num_labels 序列标注标签数目,无默认值 - BertForQuestionAnswering num_labels 抽取式QA列数,默认值为2(即第一列为start_span, 第二列为end_span) - -最后还可以传入dropout的大小,默认值为0.1。 - -""" - -__all__ = [ - "BertForSequenceClassification", - "BertForSentenceMatching", - "BertForMultipleChoice", - "BertForTokenClassification", - "BertForQuestionAnswering" -] - -import warnings - -import torch -from torch import nn - -from .base_model import BaseModel -from ..core._logger import logger -from ..core.const import Const -from 
..embeddings.bert_embedding import BertEmbedding - - -class BertForSequenceClassification(BaseModel): - r""" - BERT model for classification. - - """ - def __init__(self, embed: BertEmbedding, num_labels: int=2, dropout=0.1): - r""" - - :param fastNLP.embeddings.BertEmbedding embed: 下游模型的编码器(encoder). - :param int num_labels: 文本分类类别数目,默认值为2. - :param float dropout: dropout的大小,默认值为0.1. - """ - super(BertForSequenceClassification, self).__init__() - - self.num_labels = num_labels - self.bert = embed - self.dropout = nn.Dropout(p=dropout) - self.classifier = nn.Linear(self.bert.embedding_dim, num_labels) - - if not self.bert.model.include_cls_sep: - self.bert.model.include_cls_sep = True - warn_msg = "Bert for sequence classification excepts BertEmbedding `include_cls_sep` True, " \ - "but got False. FastNLP has changed it to True." - logger.warning(warn_msg) - warnings.warn(warn_msg) - - def forward(self, words): - r""" - 输入为 [[w1, w2, w3, ...], [...]], BERTEmbedding会在开头和结尾额外加入[CLS]与[SEP] - :param torch.LongTensor words: [batch_size, seq_len] - :return: { :attr:`fastNLP.Const.OUTPUT` : logits}: torch.Tensor [batch_size, num_labels] - """ - hidden = self.dropout(self.bert(words)) - cls_hidden = hidden[:, 0] - logits = self.classifier(cls_hidden) - if logits.size(-1) == 1: - logits = logits.squeeze(-1) - - return {Const.OUTPUT: logits} - - def predict(self, words): - r""" - :param torch.LongTensor words: [batch_size, seq_len] - :return: { :attr:`fastNLP.Const.OUTPUT` : logits}: torch.LongTensor [batch_size] - """ - logits = self.forward(words)[Const.OUTPUT] - if self.num_labels > 1: - return {Const.OUTPUT: torch.argmax(logits, dim=-1)} - else: - return {Const.OUTPUT: logits} - - -class BertForSentenceMatching(BaseModel): - r""" - BERT model for sentence matching. - - """ - def __init__(self, embed: BertEmbedding, num_labels: int=2, dropout=0.1): - r""" - - :param fastNLP.embeddings.BertEmbedding embed: 下游模型的编码器(encoder). - :param int num_labels: Matching任务类别数目,默认值为2. - :param float dropout: dropout的大小,默认值为0.1. - """ - super(BertForSentenceMatching, self).__init__() - self.num_labels = num_labels - self.bert = embed - self.dropout = nn.Dropout(p=dropout) - self.classifier = nn.Linear(self.bert.embedding_dim, num_labels) - - if not self.bert.model.include_cls_sep: - self.bert.model.include_cls_sep = True - warn_msg = "Bert for sentence matching excepts BertEmbedding `include_cls_sep` True, " \ - "but got False. FastNLP has changed it to True." - logger.warning(warn_msg) - warnings.warn(warn_msg) - - def forward(self, words): - r""" - 输入words的格式为 [sent1] + [SEP] + [sent2](BertEmbedding会在开头加入[CLS]和在结尾加入[SEP]),输出为batch_size x num_labels - - :param torch.LongTensor words: [batch_size, seq_len] - :return: { :attr:`fastNLP.Const.OUTPUT` : logits}: torch.Tensor [batch_size, num_labels] - """ - hidden = self.bert(words) - cls_hidden = self.dropout(hidden[:, 0]) - logits = self.classifier(cls_hidden) - - return {Const.OUTPUT: logits} - - def predict(self, words): - r""" - :param torch.LongTensor words: [batch_size, seq_len] - :return: { :attr:`fastNLP.Const.OUTPUT` : logits}: torch.LongTensor [batch_size] - """ - logits = self.forward(words)[Const.OUTPUT] - return {Const.OUTPUT: torch.argmax(logits, dim=-1)} - - -class BertForMultipleChoice(BaseModel): - r""" - BERT model for multiple choice. - - """ - def __init__(self, embed: BertEmbedding, num_choices=2, dropout=0.1): - r""" - - :param fastNLP.embeddings.BertEmbedding embed: 下游模型的编码器(encoder). - :param int num_choices: 多选任务选项数目,默认值为2. 
- :param float dropout: dropout的大小,默认值为0.1. - """ - super(BertForMultipleChoice, self).__init__() - - self.num_choices = num_choices - self.bert = embed - self.dropout = nn.Dropout(p=dropout) - self.classifier = nn.Linear(self.bert.embedding_dim, 1) - - if not self.bert.model.include_cls_sep: - self.bert.model.include_cls_sep = True - warn_msg = "Bert for multiple choice excepts BertEmbedding `include_cls_sep` True, " \ - "but got False. FastNLP has changed it to True." - logger.warning(warn_msg) - warnings.warn(warn_msg) - - def forward(self, words): - r""" - :param torch.LongTensor words: [batch_size, num_choices, seq_len] - :return: { :attr:`fastNLP.Const.OUTPUT` : logits}: torch.LongTensor [batch_size, num_choices] - """ - batch_size, num_choices, seq_len = words.size() - - input_ids = words.view(batch_size * num_choices, seq_len) - hidden = self.bert(input_ids) - pooled_output = self.dropout(hidden[:, 0]) - logits = self.classifier(pooled_output) - reshaped_logits = logits.view(-1, self.num_choices) - - return {Const.OUTPUT: reshaped_logits} - - def predict(self, words): - r""" - :param torch.LongTensor words: [batch_size, num_choices, seq_len] - :return: { :attr:`fastNLP.Const.OUTPUT` : logits}: torch.LongTensor [batch_size] - """ - logits = self.forward(words)[Const.OUTPUT] - return {Const.OUTPUT: torch.argmax(logits, dim=-1)} - - -class BertForTokenClassification(BaseModel): - r""" - BERT model for token classification. - - """ - def __init__(self, embed: BertEmbedding, num_labels, dropout=0.1): - r""" - - :param fastNLP.embeddings.BertEmbedding embed: 下游模型的编码器(encoder). - :param int num_labels: 序列标注标签数目,无默认值. - :param float dropout: dropout的大小,默认值为0.1. - """ - super(BertForTokenClassification, self).__init__() - - self.num_labels = num_labels - self.bert = embed - self.dropout = nn.Dropout(p=dropout) - self.classifier = nn.Linear(self.bert.embedding_dim, num_labels) - - if self.bert.model.include_cls_sep: - self.bert.model.include_cls_sep = False - warn_msg = "Bert for token classification excepts BertEmbedding `include_cls_sep` False, " \ - "but got True. FastNLP has changed it to False." - logger.warning(warn_msg) - warnings.warn(warn_msg) - - def forward(self, words): - r""" - :param torch.LongTensor words: [batch_size, seq_len] - :return: { :attr:`fastNLP.Const.OUTPUT` : logits}: torch.Tensor [batch_size, seq_len, num_labels] - """ - sequence_output = self.bert(words) # [batch_size, seq_len, embed_dim] - sequence_output = self.dropout(sequence_output) - logits = self.classifier(sequence_output) - - return {Const.OUTPUT: logits} - - def predict(self, words): - r""" - :param torch.LongTensor words: [batch_size, seq_len] - :return: { :attr:`fastNLP.Const.OUTPUT` : logits}: torch.LongTensor [batch_size, seq_len] - """ - logits = self.forward(words)[Const.OUTPUT] - return {Const.OUTPUT: torch.argmax(logits, dim=-1)} - - -class BertForQuestionAnswering(BaseModel): - r""" - 用于做Q&A的Bert模型,如果是Squad2.0请将BertEmbedding的include_cls_sep设置为True,Squad1.0或CMRC则设置为False - - """ - def __init__(self, embed: BertEmbedding): - r""" - - :param fastNLP.embeddings.BertEmbedding embed: 下游模型的编码器(encoder). - :param int num_labels: 抽取式QA列数,默认值为2(即第一列为start_span, 第二列为end_span). - """ - super(BertForQuestionAnswering, self).__init__() - - self.bert = embed - self.qa_outputs = nn.Linear(self.bert.embedding_dim, 2) - - def forward(self, words): - r""" - 输入words为question + [SEP] + [paragraph],BERTEmbedding在之后会额外加入开头的[CLS]和结尾的[SEP]. 
note: - 如果BERTEmbedding中include_cls_sep=True,则输出的start和end index相对输入words会增加一位;如果为BERTEmbedding中 - include_cls_sep=False, 则输出start和end index的位置与输入words的顺序完全一致 - - :param torch.LongTensor words: [batch_size, seq_len] - :return: 一个包含num_labels个logit的dict,每一个logit的形状都是[batch_size, seq_len + 2] - """ - sequence_output = self.bert(words) - logits = self.qa_outputs(sequence_output) # [batch_size, seq_len, num_labels] - - return {'pred_start': logits[:, :, 0], 'pred_end': logits[:, :, 1]} - - def predict(self, words): - return self.forward(words) diff --git a/fastNLP/models/biaffine_parser.py b/fastNLP/models/biaffine_parser.py deleted file mode 100644 index cd874e7c..00000000 --- a/fastNLP/models/biaffine_parser.py +++ /dev/null @@ -1,549 +0,0 @@ -r""" -Biaffine Dependency Parser 的 Pytorch 实现. -""" -__all__ = [ - "BiaffineParser", - "GraphParser" -] - -from collections import defaultdict - -import numpy as np -import torch -import torch.nn as nn -import torch.nn.functional as F - -from .base_model import BaseModel -from ..core.const import Const as C -from ..core.losses import LossFunc -from ..core.metrics import MetricBase -from ..core.utils import seq_len_to_mask -from ..embeddings.utils import get_embeddings -from ..modules.dropout import TimestepDropout -from ..modules.encoder.transformer import TransformerEncoder -from ..modules.encoder.variational_rnn import VarLSTM -from ..modules.utils import initial_parameter - - -def _mst(scores): - r""" - with some modification to support parser output for MST decoding - https://github.com/tdozat/Parser/blob/0739216129cd39d69997d28cbc4133b360ea3934/lib/models/nn.py#L692 - """ - length = scores.shape[0] - min_score = scores.min() - 1 - eye = np.eye(length) - scores = scores * (1 - eye) + min_score * eye - heads = np.argmax(scores, axis=1) - heads[0] = 0 - tokens = np.arange(1, length) - roots = np.where(heads[tokens] == 0)[0] + 1 - if len(roots) < 1: - root_scores = scores[tokens, 0] - head_scores = scores[tokens, heads[tokens]] - new_root = tokens[np.argmax(root_scores / head_scores)] - heads[new_root] = 0 - elif len(roots) > 1: - root_scores = scores[roots, 0] - scores[roots, 0] = 0 - new_heads = np.argmax(scores[roots][:, tokens], axis=1) + 1 - new_root = roots[np.argmin( - scores[roots, new_heads] / root_scores)] - heads[roots] = new_heads - heads[new_root] = 0 - - edges = defaultdict(set) - vertices = set((0,)) - for dep, head in enumerate(heads[tokens]): - vertices.add(dep + 1) - edges[head].add(dep + 1) - for cycle in _find_cycle(vertices, edges): - dependents = set() - to_visit = set(cycle) - while len(to_visit) > 0: - node = to_visit.pop() - if node not in dependents: - dependents.add(node) - to_visit.update(edges[node]) - cycle = np.array(list(cycle)) - old_heads = heads[cycle] - old_scores = scores[cycle, old_heads] - non_heads = np.array(list(dependents)) - scores[np.repeat(cycle, len(non_heads)), - np.repeat([non_heads], len(cycle), axis=0).flatten()] = min_score - new_heads = np.argmax(scores[cycle][:, tokens], axis=1) + 1 - new_scores = scores[cycle, new_heads] / old_scores - change = np.argmax(new_scores) - changed_cycle = cycle[change] - old_head = old_heads[change] - new_head = new_heads[change] - heads[changed_cycle] = new_head - edges[new_head].add(changed_cycle) - edges[old_head].remove(changed_cycle) - - return heads - - -def _find_cycle(vertices, edges): - r""" - https://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm - 
https://github.com/tdozat/Parser/blob/0739216129cd39d69997d28cbc4133b360ea3934/lib/etc/tarjan.py - """ - _index = 0 - _stack = [] - _indices = {} - _lowlinks = {} - _onstack = defaultdict(lambda: False) - _SCCs = [] - - def _strongconnect(v): - nonlocal _index - _indices[v] = _index - _lowlinks[v] = _index - _index += 1 - _stack.append(v) - _onstack[v] = True - - for w in edges[v]: - if w not in _indices: - _strongconnect(w) - _lowlinks[v] = min(_lowlinks[v], _lowlinks[w]) - elif _onstack[w]: - _lowlinks[v] = min(_lowlinks[v], _indices[w]) - - if _lowlinks[v] == _indices[v]: - SCC = set() - while True: - w = _stack.pop() - _onstack[w] = False - SCC.add(w) - if not (w != v): - break - _SCCs.append(SCC) - - for v in vertices: - if v not in _indices: - _strongconnect(v) - - return [SCC for SCC in _SCCs if len(SCC) > 1] - - -class GraphParser(BaseModel): - r""" - 基于图的parser base class, 支持贪婪解码和最大生成树解码 - """ - - def __init__(self): - super(GraphParser, self).__init__() - - @staticmethod - def greedy_decoder(arc_matrix, mask=None): - r""" - 贪心解码方式, 输入图, 输出贪心解码的parsing结果, 不保证合法的构成树 - - :param arc_matrix: [batch, seq_len, seq_len] 输入图矩阵 - :param mask: [batch, seq_len] 输入图的padding mask, 有内容的部分为 1, 否则为 0. - 若为 ``None`` 时, 默认为全1向量. Default: ``None`` - :return heads: [batch, seq_len] 每个元素在树中对应的head(parent)预测结果 - """ - _, seq_len, _ = arc_matrix.shape - matrix = arc_matrix + torch.diag(arc_matrix.new(seq_len).fill_(-np.inf)) - flip_mask = mask.eq(False) - matrix.masked_fill_(flip_mask.unsqueeze(1), -np.inf) - _, heads = torch.max(matrix, dim=2) - if mask is not None: - heads *= mask.long() - return heads - - @staticmethod - def mst_decoder(arc_matrix, mask=None): - r""" - 用最大生成树算法, 计算parsing结果, 保证输出合法的树结构 - - :param arc_matrix: [batch, seq_len, seq_len] 输入图矩阵 - :param mask: [batch, seq_len] 输入图的padding mask, 有内容的部分为 1, 否则为 0. - 若为 ``None`` 时, 默认为全1向量. Default: ``None`` - :return heads: [batch, seq_len] 每个元素在树中对应的head(parent)预测结果 - """ - batch_size, seq_len, _ = arc_matrix.shape - matrix = arc_matrix.clone() - ans = matrix.new_zeros(batch_size, seq_len).long() - lens = (mask.long()).sum(1) if mask is not None else torch.zeros(batch_size) + seq_len - for i, graph in enumerate(matrix): - len_i = lens[i] - ans[i, :len_i] = torch.as_tensor(_mst(graph.detach()[:len_i, :len_i].cpu().numpy()), device=ans.device) - if mask is not None: - ans *= mask.long() - return ans - - -class ArcBiaffine(nn.Module): - r""" - Biaffine Dependency Parser 的子模块, 用于构建预测边的图 - - """ - - def __init__(self, hidden_size, bias=True): - r""" - - :param hidden_size: 输入的特征维度 - :param bias: 是否使用bias. 
Default: ``True`` - """ - super(ArcBiaffine, self).__init__() - self.U = nn.Parameter(torch.Tensor(hidden_size, hidden_size), requires_grad=True) - self.has_bias = bias - if self.has_bias: - self.bias = nn.Parameter(torch.Tensor(hidden_size), requires_grad=True) - else: - self.register_parameter("bias", None) - initial_parameter(self) - - def forward(self, head, dep): - r""" - - :param head: arc-head tensor [batch, length, hidden] - :param dep: arc-dependent tensor [batch, length, hidden] - :return output: tensor [bacth, length, length] - """ - output = dep.matmul(self.U) - output = output.bmm(head.transpose(-1, -2)) - if self.has_bias: - output = output + head.matmul(self.bias).unsqueeze(1) - return output - - -class LabelBilinear(nn.Module): - r""" - Biaffine Dependency Parser 的子模块, 用于构建预测边类别的图 - - """ - - def __init__(self, in1_features, in2_features, num_label, bias=True): - r""" - - :param in1_features: 输入的特征1维度 - :param in2_features: 输入的特征2维度 - :param num_label: 边类别的个数 - :param bias: 是否使用bias. Default: ``True`` - """ - super(LabelBilinear, self).__init__() - self.bilinear = nn.Bilinear(in1_features, in2_features, num_label, bias=bias) - self.lin = nn.Linear(in1_features + in2_features, num_label, bias=False) - - def forward(self, x1, x2): - r""" - - :param x1: [batch, seq_len, hidden] 输入特征1, 即label-head - :param x2: [batch, seq_len, hidden] 输入特征2, 即label-dep - :return output: [batch, seq_len, num_cls] 每个元素对应类别的概率图 - """ - output = self.bilinear(x1, x2) - output = output + self.lin(torch.cat([x1, x2], dim=2)) - return output - - -class BiaffineParser(GraphParser): - r""" - Biaffine Dependency Parser 实现. - 论文参考 `Deep Biaffine Attention for Neural Dependency Parsing (Dozat and Manning, 2016) `_ . - - """ - - def __init__(self, - embed, - pos_vocab_size, - pos_emb_dim, - num_label, - rnn_layers=1, - rnn_hidden_size=200, - arc_mlp_size=100, - label_mlp_size=100, - dropout=0.3, - encoder='lstm', - use_greedy_infer=False): - r""" - - :param embed: 单词词典, 可以是 tuple, 包括(num_embedings, embedding_dim), 即 - embedding的大小和每个词的维度. 也可以传入 nn.Embedding 对象, - 此时就以传入的对象作为embedding - :param pos_vocab_size: part-of-speech 词典大小 - :param pos_emb_dim: part-of-speech 向量维度 - :param num_label: 边的类别个数 - :param rnn_layers: rnn encoder的层数 - :param rnn_hidden_size: rnn encoder 的隐状态维度 - :param arc_mlp_size: 边预测的MLP维度 - :param label_mlp_size: 类别预测的MLP维度 - :param dropout: dropout概率. - :param encoder: encoder类别, 可选 ('lstm', 'var-lstm', 'transformer'). Default: lstm - :param use_greedy_infer: 是否在inference时使用贪心算法. - 若 ``False`` , 使用更加精确但相对缓慢的MST算法. 
Default: ``False`` - """ - super(BiaffineParser, self).__init__() - rnn_out_size = 2 * rnn_hidden_size - word_hid_dim = pos_hid_dim = rnn_hidden_size - self.word_embedding = get_embeddings(embed) - word_emb_dim = self.word_embedding.embedding_dim - self.pos_embedding = nn.Embedding(num_embeddings=pos_vocab_size, embedding_dim=pos_emb_dim) - self.word_fc = nn.Linear(word_emb_dim, word_hid_dim) - self.pos_fc = nn.Linear(pos_emb_dim, pos_hid_dim) - self.word_norm = nn.LayerNorm(word_hid_dim) - self.pos_norm = nn.LayerNorm(pos_hid_dim) - self.encoder_name = encoder - self.max_len = 512 - if encoder == 'var-lstm': - self.encoder = VarLSTM(input_size=word_hid_dim + pos_hid_dim, - hidden_size=rnn_hidden_size, - num_layers=rnn_layers, - bias=True, - batch_first=True, - input_dropout=dropout, - hidden_dropout=dropout, - bidirectional=True) - elif encoder == 'lstm': - self.encoder = nn.LSTM(input_size=word_hid_dim + pos_hid_dim, - hidden_size=rnn_hidden_size, - num_layers=rnn_layers, - bias=True, - batch_first=True, - dropout=dropout, - bidirectional=True) - elif encoder == 'transformer': - n_head = 16 - d_k = d_v = int(rnn_out_size / n_head) - if (d_k * n_head) != rnn_out_size: - raise ValueError('unsupported rnn_out_size: {} for transformer'.format(rnn_out_size)) - self.position_emb = nn.Embedding(num_embeddings=self.max_len, - embedding_dim=rnn_out_size, ) - self.encoder = TransformerEncoder( num_layers=rnn_layers, d_model=rnn_out_size, - n_head=n_head, dim_ff=1024, dropout=dropout) - else: - raise ValueError('unsupported encoder type: {}'.format(encoder)) - - self.mlp = nn.Sequential(nn.Linear(rnn_out_size, arc_mlp_size * 2 + label_mlp_size * 2), - nn.ELU(), - TimestepDropout(p=dropout), ) - self.arc_mlp_size = arc_mlp_size - self.label_mlp_size = label_mlp_size - self.arc_predictor = ArcBiaffine(arc_mlp_size, bias=True) - self.label_predictor = LabelBilinear(label_mlp_size, label_mlp_size, num_label, bias=True) - self.use_greedy_infer = use_greedy_infer - self.reset_parameters() - self.dropout = dropout - - def reset_parameters(self): - for m in self.modules(): - if isinstance(m, nn.Embedding): - continue - elif isinstance(m, nn.LayerNorm): - nn.init.constant_(m.weight, 0.1) - nn.init.constant_(m.bias, 0) - else: - for p in m.parameters(): - nn.init.normal_(p, 0, 0.1) - - def forward(self, words1, words2, seq_len, target1=None): - r"""模型forward阶段 - - :param words1: [batch_size, seq_len] 输入word序列 - :param words2: [batch_size, seq_len] 输入pos序列 - :param seq_len: [batch_size, seq_len] 输入序列长度 - :param target1: [batch_size, seq_len] 输入真实标注的heads, 仅在训练阶段有效, - 用于训练label分类器. 
若为 ``None`` , 使用预测的heads输入到label分类器 - Default: ``None`` - :return dict: parsing - 结果:: - - pred1: [batch_size, seq_len, seq_len] 边预测logits - pred2: [batch_size, seq_len, num_label] label预测logits - pred3: [batch_size, seq_len] heads的预测结果, 在 ``target1=None`` 时预测 - - """ - # prepare embeddings - batch_size, length = words1.shape - # print('forward {} {}'.format(batch_size, seq_len)) - - # get sequence mask - mask = seq_len_to_mask(seq_len, max_len=length).long() - - word = self.word_embedding(words1) # [N,L] -> [N,L,C_0] - pos = self.pos_embedding(words2) # [N,L] -> [N,L,C_1] - - word, pos = self.word_fc(word), self.pos_fc(pos) - word, pos = self.word_norm(word), self.pos_norm(pos) - x = torch.cat([word, pos], dim=2) # -> [N,L,C] - - # encoder, extract features - if self.encoder_name.endswith('lstm'): - sort_lens, sort_idx = torch.sort(seq_len, dim=0, descending=True) - x = x[sort_idx] - x = nn.utils.rnn.pack_padded_sequence(x, sort_lens.cpu(), batch_first=True) - feat, _ = self.encoder(x) # -> [N,L,C] - feat, _ = nn.utils.rnn.pad_packed_sequence(feat, batch_first=True) - _, unsort_idx = torch.sort(sort_idx, dim=0, descending=False) - feat = feat[unsort_idx] - else: - seq_range = torch.arange(length, dtype=torch.long, device=x.device)[None, :] - x = x + self.position_emb(seq_range) - feat = self.encoder(x, mask.float()) - - # for arc biaffine - # mlp, reduce dim - feat = self.mlp(feat) - arc_sz, label_sz = self.arc_mlp_size, self.label_mlp_size - arc_dep, arc_head = feat[:, :, :arc_sz], feat[:, :, arc_sz:2 * arc_sz] - label_dep, label_head = feat[:, :, 2 * arc_sz:2 * arc_sz + label_sz], feat[:, :, 2 * arc_sz + label_sz:] - - # biaffine arc classifier - arc_pred = self.arc_predictor(arc_head, arc_dep) # [N, L, L] - - # use gold or predicted arc to predict label - if target1 is None or not self.training: - # use greedy decoding in training - if self.training or self.use_greedy_infer: - heads = self.greedy_decoder(arc_pred, mask) - else: - heads = self.mst_decoder(arc_pred, mask) - head_pred = heads - else: - assert self.training # must be training mode - if target1 is None: - heads = self.greedy_decoder(arc_pred, mask) - head_pred = heads - else: - head_pred = None - heads = target1 - - batch_range = torch.arange(start=0, end=batch_size, dtype=torch.long, device=words1.device).unsqueeze(1) - label_head = label_head[batch_range, heads].contiguous() - label_pred = self.label_predictor(label_head, label_dep) # [N, L, num_label] - res_dict = {C.OUTPUTS(0): arc_pred, C.OUTPUTS(1): label_pred} - if head_pred is not None: - res_dict[C.OUTPUTS(2)] = head_pred - return res_dict - - @staticmethod - def loss(pred1, pred2, target1, target2, seq_len): - r""" - 计算parser的loss - - :param pred1: [batch_size, seq_len, seq_len] 边预测logits - :param pred2: [batch_size, seq_len, num_label] label预测logits - :param target1: [batch_size, seq_len] 真实边的标注 - :param target2: [batch_size, seq_len] 真实类别的标注 - :param seq_len: [batch_size, seq_len] 真实目标的长度 - :return loss: scalar - """ - - batch_size, length, _ = pred1.shape - mask = seq_len_to_mask(seq_len, max_len=length) - flip_mask = (mask.eq(False)) - _arc_pred = pred1.clone() - _arc_pred = _arc_pred.masked_fill(flip_mask.unsqueeze(1), -float('inf')) - arc_logits = F.log_softmax(_arc_pred, dim=2) - label_logits = F.log_softmax(pred2, dim=2) - batch_index = torch.arange(batch_size, device=arc_logits.device, dtype=torch.long).unsqueeze(1) - child_index = torch.arange(length, device=arc_logits.device, dtype=torch.long).unsqueeze(0) - arc_loss = arc_logits[batch_index, 
child_index, target1] - label_loss = label_logits[batch_index, child_index, target2] - - arc_loss = arc_loss.masked_fill(flip_mask, 0) - label_loss = label_loss.masked_fill(flip_mask, 0) - arc_nll = -arc_loss.mean() - label_nll = -label_loss.mean() - return arc_nll + label_nll - - def predict(self, words1, words2, seq_len): - r"""模型预测API - - :param words1: [batch_size, seq_len] 输入word序列 - :param words2: [batch_size, seq_len] 输入pos序列 - :param seq_len: [batch_size, seq_len] 输入序列长度 - :return dict: parsing - 结果:: - - pred1: [batch_size, seq_len] heads的预测结果 - pred2: [batch_size, seq_len, num_label] label预测logits - - """ - res = self(words1, words2, seq_len) - output = {} - output[C.OUTPUTS(0)] = res.pop(C.OUTPUTS(2)) - _, label_pred = res.pop(C.OUTPUTS(1)).max(2) - output[C.OUTPUTS(1)] = label_pred - return output - - -class ParserLoss(LossFunc): - r""" - 计算parser的loss - - """ - - def __init__(self, pred1=None, pred2=None, - target1=None, target2=None, - seq_len=None): - r""" - - :param pred1: [batch_size, seq_len, seq_len] 边预测logits - :param pred2: [batch_size, seq_len, num_label] label预测logits - :param target1: [batch_size, seq_len] 真实边的标注 - :param target2: [batch_size, seq_len] 真实类别的标注 - :param seq_len: [batch_size, seq_len] 真实目标的长度 - :return loss: scalar - """ - super(ParserLoss, self).__init__(BiaffineParser.loss, - pred1=pred1, - pred2=pred2, - target1=target1, - target2=target2, - seq_len=seq_len) - - -class ParserMetric(MetricBase): - r""" - 评估parser的性能 - - """ - - def __init__(self, pred1=None, pred2=None, - target1=None, target2=None, seq_len=None): - r""" - - :param pred1: 边预测logits - :param pred2: label预测logits - :param target1: 真实边的标注 - :param target2: 真实类别的标注 - :param seq_len: 序列长度 - :return dict: 评估结果:: - - UAS: 不带label时, 边预测的准确率 - LAS: 同时预测边和label的准确率 - """ - super().__init__() - self._init_param_map(pred1=pred1, pred2=pred2, - target1=target1, target2=target2, - seq_len=seq_len) - self.num_arc = 0 - self.num_label = 0 - self.num_sample = 0 - - def get_metric(self, reset=True): - res = {'UAS': self.num_arc * 1.0 / self.num_sample, 'LAS': self.num_label * 1.0 / self.num_sample} - if reset: - self.num_sample = self.num_label = self.num_arc = 0 - return res - - def evaluate(self, pred1, pred2, target1, target2, seq_len=None): - r"""Evaluate the performance of prediction. - """ - if seq_len is None: - seq_mask = pred1.new_ones(pred1.size(), dtype=torch.long) - else: - seq_mask = seq_len_to_mask(seq_len.long()).long() - # mask out tag - seq_mask[:, 0] = 0 - head_pred_correct = (pred1 == target1).long() * seq_mask - label_pred_correct = (pred2 == target2).long() * head_pred_correct - self.num_arc += head_pred_correct.sum().item() - self.num_label += label_pred_correct.sum().item() - self.num_sample += seq_mask.sum().item() diff --git a/fastNLP/models/cnn_text_classification.py b/fastNLP/models/cnn_text_classification.py deleted file mode 100644 index 19f1e474..00000000 --- a/fastNLP/models/cnn_text_classification.py +++ /dev/null @@ -1,76 +0,0 @@ -r""" -.. todo:: - doc -""" - -__all__ = [ - "CNNText" -] - -import torch -import torch.nn as nn - -from ..core.const import Const as C -from ..core.utils import seq_len_to_mask -from ..embeddings import embedding -from ..modules import encoder - - -class CNNText(torch.nn.Module): - r""" - 使用CNN进行文本分类的模型 - 'Yoon Kim. 2014. Convolution Neural Networks for Sentence Classification.' 
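
As a concrete reading of the UAS/LAS bookkeeping in ParserMetric.evaluate above, a toy check with made-up tensors (position 0 is the masked-out root, exactly as `seq_mask[:, 0] = 0` enforces):

import torch

pred_heads  = torch.tensor([[0, 2, 0, 2]])   # [batch=1, seq_len=4]
gold_heads  = torch.tensor([[0, 2, 0, 3]])
pred_labels = torch.tensor([[0, 5, 1, 7]])
gold_labels = torch.tensor([[0, 5, 2, 7]])
seq_mask    = torch.tensor([[0, 1, 1, 1]])   # root position masked out

head_correct  = (pred_heads == gold_heads).long() * seq_mask         # 2 correct arcs
label_correct = (pred_labels == gold_labels).long() * head_correct   # 1 correct arc+label
uas = head_correct.sum().item() / seq_mask.sum().item()              # 2/3
las = label_correct.sum().item() / seq_mask.sum().item()             # 1/3
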
- - """ - - def __init__(self, embed, - num_classes, - kernel_nums=(30, 40, 50), - kernel_sizes=(1, 3, 5), - dropout=0.5): - r""" - - :param tuple(int,int),torch.FloatTensor,nn.Embedding,numpy.ndarray embed: Embedding的大小(传入tuple(int, int), - 第一个int为vocab_zie, 第二个int为embed_dim); 如果为Tensor, Embedding, ndarray等则直接使用该值初始化Embedding - :param int num_classes: 一共有多少类 - :param int,tuple(int) kernel_sizes: 输出channel的kernel大小。 - :param float dropout: Dropout的大小 - """ - super(CNNText, self).__init__() - - # no support for pre-trained embedding currently - self.embed = embedding.Embedding(embed) - self.conv_pool = encoder.ConvMaxpool( - in_channels=self.embed.embedding_dim, - out_channels=kernel_nums, - kernel_sizes=kernel_sizes) - self.dropout = nn.Dropout(dropout) - self.fc = nn.Linear(sum(kernel_nums), num_classes) - - def forward(self, words, seq_len=None): - r""" - - :param torch.LongTensor words: [batch_size, seq_len],句子中word的index - :param torch.LongTensor seq_len: [batch,] 每个句子的长度 - :return output: dict of torch.LongTensor, [batch_size, num_classes] - """ - x = self.embed(words) # [N,L] -> [N,L,C] - if seq_len is not None: - mask = seq_len_to_mask(seq_len) - x = self.conv_pool(x, mask) - else: - x = self.conv_pool(x) # [N,L,C] -> [N,C] - x = self.dropout(x) - x = self.fc(x) # [N,C] -> [N, N_class] - return {C.OUTPUT: x} - - def predict(self, words, seq_len=None): - r""" - :param torch.LongTensor words: [batch_size, seq_len],句子中word的index - :param torch.LongTensor seq_len: [batch,] 每个句子的长度 - - :return predict: dict of torch.LongTensor, [batch_size, ] - """ - output = self(words, seq_len) - _, predict = output[C.OUTPUT].max(dim=1) - return {C.OUTPUT: predict} diff --git a/fastNLP/models/seq2seq_generator.py b/fastNLP/models/seq2seq_generator.py deleted file mode 100644 index 77c43944..00000000 --- a/fastNLP/models/seq2seq_generator.py +++ /dev/null @@ -1,70 +0,0 @@ -r"""undocumented""" - -import torch -from torch import nn -from .seq2seq_model import Seq2SeqModel -from ..modules.generator.seq2seq_generator import SequenceGenerator - - -__all__ = ['SequenceGeneratorModel'] - - -class SequenceGeneratorModel(nn.Module): - """ - 通过使用本模型封装seq2seq_model使得其既可以用于训练也可以用于生成。训练的时候,本模型的forward函数会被调用,生成的时候本模型的predict - 函数会被调用。 - - """ - - def __init__(self, seq2seq_model: Seq2SeqModel, bos_token_id, eos_token_id=None, max_length=30, max_len_a=0.0, - num_beams=1, do_sample=True, temperature=1.0, top_k=50, top_p=1.0, - repetition_penalty=1, length_penalty=1.0, pad_token_id=0): - """ - - :param Seq2SeqModel seq2seq_model: 序列到序列模型 - :param int,None bos_token_id: 句子开头的token id - :param int,None eos_token_id: 句子结束的token id - :param int max_length: 生成句子的最大长度, 每句话的decode长度为max_length + max_len_a*src_len - :param float max_len_a: 每句话的decode长度为max_length + max_len_a*src_len。 如果不为0,需要保证State中包含encoder_mask - :param int num_beams: beam search的大小 - :param bool do_sample: 是否通过采样的方式生成 - :param float temperature: 只有在do_sample为True才有意义 - :param int top_k: 只从top_k中采样 - :param float top_p: 只从top_p的token中采样,nucles sample - :param float repetition_penalty: 多大程度上惩罚重复的token - :param float length_penalty: 对长度的惩罚,小于1鼓励长句,大于1鼓励短剧 - :param int pad_token_id: 当某句话生成结束之后,之后生成的内容用pad_token_id补充 - """ - super().__init__() - self.seq2seq_model = seq2seq_model - self.generator = SequenceGenerator(seq2seq_model.decoder, max_length=max_length, max_len_a=max_len_a, - num_beams=num_beams, - do_sample=do_sample, temperature=temperature, top_k=top_k, top_p=top_p, - bos_token_id=bos_token_id, - eos_token_id=eos_token_id, - 
repetition_penalty=repetition_penalty, length_penalty=length_penalty, - pad_token_id=pad_token_id) - - def forward(self, src_tokens, tgt_tokens, src_seq_len=None, tgt_seq_len=None): - """ - 透传调用seq2seq_model的forward。 - - :param torch.LongTensor src_tokens: bsz x max_len - :param torch.LongTensor tgt_tokens: bsz x max_len' - :param torch.LongTensor src_seq_len: bsz - :param torch.LongTensor tgt_seq_len: bsz - :return: - """ - return self.seq2seq_model(src_tokens, tgt_tokens, src_seq_len, tgt_seq_len) - - def predict(self, src_tokens, src_seq_len=None): - """ - 给定source的内容,输出generate的内容。 - - :param torch.LongTensor src_tokens: bsz x max_len - :param torch.LongTensor src_seq_len: bsz - :return: - """ - state = self.seq2seq_model.prepare_state(src_tokens, src_seq_len) - result = self.generator.generate(state) - return {'pred': result} diff --git a/fastNLP/models/seq2seq_model.py b/fastNLP/models/seq2seq_model.py deleted file mode 100644 index f6658fb6..00000000 --- a/fastNLP/models/seq2seq_model.py +++ /dev/null @@ -1,185 +0,0 @@ -r""" -主要包含组成Sequence-to-Sequence的model - -""" - -import torch -from torch import nn - -from ..embeddings import get_embeddings -from ..embeddings.utils import get_sinusoid_encoding_table -from ..modules.decoder.seq2seq_decoder import Seq2SeqDecoder, TransformerSeq2SeqDecoder, LSTMSeq2SeqDecoder -from ..modules.encoder.seq2seq_encoder import Seq2SeqEncoder, TransformerSeq2SeqEncoder, LSTMSeq2SeqEncoder - - -__all__ = ['Seq2SeqModel', 'TransformerSeq2SeqModel', 'LSTMSeq2SeqModel'] - - -class Seq2SeqModel(nn.Module): - def __init__(self, encoder: Seq2SeqEncoder, decoder: Seq2SeqDecoder): - """ - 可以用于在Trainer中训练的Seq2Seq模型。正常情况下,继承了该函数之后,只需要实现classmethod build_model即可。如果需要使用该模型 - 进行生成,需要把该模型输入到 :class:`~fastNLP.models.SequenceGeneratorModel` 中。在本模型中,forward()会把encoder后的 - 结果传入到decoder中,并将decoder的输出output出来。 - - :param encoder: Seq2SeqEncoder 对象,需要实现对应的forward()函数,接受两个参数,第一个为bsz x max_len的source tokens, 第二个为 - bsz的source的长度;需要返回两个tensor: encoder_outputs: bsz x max_len x hidden_size, encoder_mask: bsz x max_len - 为1的地方需要被attend。如果encoder的输出或者输入有变化,可以重载本模型的prepare_state()函数或者forward()函数 - :param decoder: Seq2SeqDecoder 对象,需要实现init_state()函数,输出为两个参数,第一个为bsz x max_len x hidden_size是 - encoder的输出; 第二个为bsz x max_len,为encoder输出的mask,为0的地方为pad。若decoder需要更多输入,请重载当前模型的 - prepare_state()或forward()函数 - """ - super().__init__() - self.encoder = encoder - self.decoder = decoder - - def forward(self, src_tokens, tgt_tokens, src_seq_len=None, tgt_seq_len=None): - """ - - :param torch.LongTensor src_tokens: source的token - :param torch.LongTensor tgt_tokens: target的token - :param torch.LongTensor src_seq_len: src的长度 - :param torch.LongTensor tgt_seq_len: target的长度,默认用不上 - :return: {'pred': torch.Tensor}, 其中pred的shape为bsz x max_len x vocab_size - """ - state = self.prepare_state(src_tokens, src_seq_len) - decoder_output = self.decoder(tgt_tokens, state) - if isinstance(decoder_output, torch.Tensor): - return {'pred': decoder_output} - elif isinstance(decoder_output, (tuple, list)): - return {'pred': decoder_output[0]} - else: - raise TypeError(f"Unsupported return type from Decoder:{type(self.decoder)}") - - def prepare_state(self, src_tokens, src_seq_len=None): - """ - 调用encoder获取state,会把encoder的encoder_output, encoder_mask直接传入到decoder.init_state中初始化一个state - - :param src_tokens: - :param src_seq_len: - :return: - """ - encoder_output, encoder_mask = self.encoder(src_tokens, src_seq_len) - state = self.decoder.init_state(encoder_output, encoder_mask) - return state - - @classmethod - def 
build_model(cls, *args, **kwargs):
-        """
-        Subclasses must implement this method to construct the Seq2SeqModel.
-
-        :return:
-        """
-        raise NotImplementedError
-
-
-class TransformerSeq2SeqModel(Seq2SeqModel):
-    """
-    Model whose encoder is a TransformerSeq2SeqEncoder and whose decoder is a TransformerSeq2SeqDecoder; initialize
-    it through the build_model method.
-
-    """
-
-    def __init__(self, encoder, decoder):
-        super().__init__(encoder, decoder)
-
-    @classmethod
-    def build_model(cls, src_embed, tgt_embed=None,
-                    pos_embed='sin', max_position=1024, num_layers=6, d_model=512, n_head=8, dim_ff=2048, dropout=0.1,
-                    bind_encoder_decoder_embed=False,
-                    bind_decoder_input_output_embed=True):
-        """
-        Initialize a TransformerSeq2SeqModel.
-
-        :param nn.Module, StaticEmbedding, Tuple[int, int] src_embed: source embedding
-        :param nn.Module, StaticEmbedding, Tuple[int, int] tgt_embed: target embedding; do not pass this value when
-            bind_encoder_decoder_embed is True
-        :param str pos_embed: either sin or learned
-        :param int max_position: maximum supported length
-        :param int num_layers: number of encoder and decoder layers
-        :param int d_model: input/output size of the encoder and decoder
-        :param int n_head: number of attention heads in the encoder and decoder
-        :param int dim_ff: hidden size of the FFN inside the encoder and decoder
-        :param float dropout: dropout applied to attention and the FFN
-        :param bool bind_encoder_decoder_embed: whether the encoder and decoder share the same embedding
-        :param bool bind_decoder_input_output_embed: whether the decoder output embedding shares the weights of its
-            input embedding
-        :return: TransformerSeq2SeqModel
-        """
-        if bind_encoder_decoder_embed and tgt_embed is not None:
-            raise RuntimeError("If you set `bind_encoder_decoder_embed=True`, please do not provide `tgt_embed`.")
-
-        src_embed = get_embeddings(src_embed)
-
-        if bind_encoder_decoder_embed:
-            tgt_embed = src_embed
-        else:
-            assert tgt_embed is not None, "You need to pass `tgt_embed` when `bind_encoder_decoder_embed=False`"
-            tgt_embed = get_embeddings(tgt_embed)
-
-        if pos_embed == 'sin':
-            encoder_pos_embed = nn.Embedding.from_pretrained(
-                get_sinusoid_encoding_table(max_position + 1, src_embed.embedding_dim, padding_idx=0),
-                freeze=True)  # index 0 is reserved for padding
-            decoder_pos_embed = nn.Embedding.from_pretrained(
-                get_sinusoid_encoding_table(max_position + 1, tgt_embed.embedding_dim, padding_idx=0),
-                freeze=True)  # index 0 is reserved for padding
-        elif pos_embed == 'learned':
-            encoder_pos_embed = get_embeddings((max_position + 1, src_embed.embedding_dim), padding_idx=0)
-            decoder_pos_embed = get_embeddings((max_position + 1, tgt_embed.embedding_dim), padding_idx=0)
-        else:
-            raise ValueError("pos_embed only supports sin or learned.")
-
-        encoder = TransformerSeq2SeqEncoder(embed=src_embed, pos_embed=encoder_pos_embed,
-                                            num_layers=num_layers, d_model=d_model, n_head=n_head, dim_ff=dim_ff,
-                                            dropout=dropout)
-        decoder = TransformerSeq2SeqDecoder(embed=tgt_embed, pos_embed=decoder_pos_embed,
-                                            d_model=d_model, num_layers=num_layers, n_head=n_head, dim_ff=dim_ff,
-                                            dropout=dropout,
-                                            bind_decoder_input_output_embed=bind_decoder_input_output_embed)
-
-        return cls(encoder, decoder)
-
-
-class LSTMSeq2SeqModel(Seq2SeqModel):
-    """
-    Model built from an LSTMSeq2SeqEncoder and an LSTMSeq2SeqDecoder.
-
-    """
-    def __init__(self, encoder, decoder):
-        super().__init__(encoder, decoder)
-
-    @classmethod
-    def build_model(cls, src_embed, tgt_embed=None,
-                    num_layers=3, hidden_size=400, dropout=0.3, bidirectional=True,
-                    attention=True, bind_encoder_decoder_embed=False,
-                    bind_decoder_input_output_embed=True):
-        """
-
-        :param nn.Module, StaticEmbedding, Tuple[int, int] src_embed: source embedding
-        :param nn.Module, StaticEmbedding, Tuple[int, int] tgt_embed: target embedding; do not pass this value when
-            bind_encoder_decoder_embed is True
-        :param int num_layers: number of layers for both the encoder and the decoder
-
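
As a usage sketch tying build_model to the SequenceGeneratorModel wrapper earlier in this diff (the vocabulary sizes, token ids and the src_tokens/tgt_tokens tensors are illustrative placeholders, not values from this repository):

model = TransformerSeq2SeqModel.build_model(src_embed=(8000, 512), tgt_embed=(6000, 512),
                                            pos_embed='sin', num_layers=6, d_model=512,
                                            n_head=8, dim_ff=2048, dropout=0.1)
gen_model = SequenceGeneratorModel(model, bos_token_id=1, eos_token_id=2,
                                   max_length=40, num_beams=4, do_sample=False)
out = gen_model(src_tokens, tgt_tokens, src_seq_len, tgt_seq_len)  # training: {'pred': logits}
res = gen_model.predict(src_tokens, src_seq_len)                   # inference: {'pred': token ids}
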
:param int hidden_size: encoder和decoder的隐藏层大小 - :param float dropout: 每层之间的Dropout的大小 - :param bool bidirectional: encoder是否使用双向LSTM - :param bool attention: decoder是否使用attention attend encoder在所有时刻的状态 - :param bool bind_encoder_decoder_embed: 是否对encoder和decoder使用相同的embedding - :param bool bind_decoder_input_output_embed: decoder的输出embedding是否与其输入embedding是一样的权重 - :return: LSTMSeq2SeqModel - """ - if bind_encoder_decoder_embed and tgt_embed is not None: - raise RuntimeError("If you set `bind_encoder_decoder_embed=True`, please do not provide `tgt_embed`.") - - src_embed = get_embeddings(src_embed) - - if bind_encoder_decoder_embed: - tgt_embed = src_embed - else: - assert tgt_embed is not None, "You need to pass `tgt_embed` when `bind_encoder_decoder_embed=False`" - tgt_embed = get_embeddings(tgt_embed) - - encoder = LSTMSeq2SeqEncoder(embed=src_embed, num_layers = num_layers, - hidden_size = hidden_size, dropout = dropout, bidirectional=bidirectional) - decoder = LSTMSeq2SeqDecoder(embed=tgt_embed, num_layers = num_layers, hidden_size = hidden_size, - dropout = dropout, bind_decoder_input_output_embed = bind_decoder_input_output_embed, - attention=attention) - return cls(encoder, decoder) diff --git a/fastNLP/models/sequence_labeling.py b/fastNLP/models/sequence_labeling.py deleted file mode 100644 index de7943c0..00000000 --- a/fastNLP/models/sequence_labeling.py +++ /dev/null @@ -1,273 +0,0 @@ -r""" -本模块实现了几种序列标注模型 -""" -__all__ = [ - "SeqLabeling", - "AdvSeqLabel", - "BiLSTMCRF" -] - -import torch -import torch.nn as nn -import torch.nn.functional as F - -from .base_model import BaseModel -from ..core.const import Const as C -from ..core.utils import seq_len_to_mask -from ..embeddings.utils import get_embeddings -from ..modules.decoder import ConditionalRandomField -from ..modules.encoder import LSTM -from ..modules import decoder, encoder -from ..modules.decoder.crf import allowed_transitions - - -class BiLSTMCRF(BaseModel): - r""" - 结构为embedding + BiLSTM + FC + Dropout + CRF. - - """ - def __init__(self, embed, num_classes, num_layers=1, hidden_size=100, dropout=0.5, - target_vocab=None): - r""" - - :param embed: 支持(1)fastNLP的各种Embedding, (2) tuple, 指明num_embedding, dimension, 如(1000, 100) - :param num_classes: 一共多少个类 - :param num_layers: BiLSTM的层数 - :param hidden_size: BiLSTM的hidden_size,实际hidden size为该值的两倍(前向、后向) - :param dropout: dropout的概率,0为不dropout - :param target_vocab: Vocabulary对象,target与index的对应关系。如果传入该值,将自动避免非法的解码序列。 - """ - super().__init__() - self.embed = get_embeddings(embed) - - if num_layers>1: - self.lstm = LSTM(self.embed.embedding_dim, num_layers=num_layers, hidden_size=hidden_size, bidirectional=True, - batch_first=True, dropout=dropout) - else: - self.lstm = LSTM(self.embed.embedding_dim, num_layers=num_layers, hidden_size=hidden_size, bidirectional=True, - batch_first=True) - - self.dropout = nn.Dropout(dropout) - self.fc = nn.Linear(hidden_size*2, num_classes) - - trans = None - if target_vocab is not None: - assert len(target_vocab)==num_classes, "The number of classes should be same with the length of target vocabulary." 
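
For intuition about what the allowed_transitions call on the next line produces, a small self-contained example (the tag map is hypothetical; the function itself appears in fastNLP/modules/decoder/crf.py later in this diff):

from fastNLP.modules.decoder.crf import allowed_transitions

tag_map = {0: 'B-PER', 1: 'I-PER', 2: 'O'}   # index first, tag second
print(allowed_transitions(tag_map, encoding_type='bio', include_start_end=False))
# [(0, 0), (0, 1), (0, 2), (1, 0), (1, 1), (1, 2), (2, 0), (2, 2)]
# every pair except 'O' -> 'I-PER', the one transition BIO forbids here
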
- trans = allowed_transitions(target_vocab.idx2word, include_start_end=True) - - self.crf = ConditionalRandomField(num_classes, include_start_end_trans=True, allowed_transitions=trans) - - def _forward(self, words, seq_len=None, target=None): - words = self.embed(words) - feats, _ = self.lstm(words, seq_len=seq_len) - feats = self.fc(feats) - feats = self.dropout(feats) - logits = F.log_softmax(feats, dim=-1) - mask = seq_len_to_mask(seq_len) - if target is None: - pred, _ = self.crf.viterbi_decode(logits, mask) - return {C.OUTPUT:pred} - else: - loss = self.crf(logits, target, mask).mean() - return {C.LOSS:loss} - - def forward(self, words, seq_len, target): - return self._forward(words, seq_len, target) - - def predict(self, words, seq_len): - return self._forward(words, seq_len) - - -class SeqLabeling(BaseModel): - r""" - 一个基础的Sequence labeling的模型。 - 用于做sequence labeling的基础类。结构包含一层Embedding,一层LSTM(单向,一层),一层FC,以及一层CRF。 - - """ - - def __init__(self, embed, hidden_size, num_classes): - r""" - - :param tuple(int,int),torch.FloatTensor,nn.Embedding,numpy.ndarray embed: Embedding的大小(传入tuple(int, int), - 第一个int为vocab_zie, 第二个int为embed_dim); 如果为Tensor, embedding, ndarray等则直接使用该值初始化Embedding - :param int hidden_size: LSTM隐藏层的大小 - :param int num_classes: 一共有多少类 - """ - super(SeqLabeling, self).__init__() - - self.embedding = get_embeddings(embed) - self.rnn = encoder.LSTM(self.embedding.embedding_dim, hidden_size) - self.fc = nn.Linear(hidden_size, num_classes) - self.crf = decoder.ConditionalRandomField(num_classes) - - def forward(self, words, seq_len, target): - r""" - :param torch.LongTensor words: [batch_size, max_len],序列的index - :param torch.LongTensor seq_len: [batch_size,], 这个序列的长度 - :param torch.LongTensor target: [batch_size, max_len], 序列的目标值 - :return y: If truth is None, return list of [decode path(list)]. Used in testing and predicting. - If truth is not None, return loss, a scalar. Used in training. - """ - mask = seq_len_to_mask(seq_len, max_len=words.size(1)) - x = self.embedding(words) - # [batch_size, max_len, word_emb_dim] - x, _ = self.rnn(x, seq_len) - # [batch_size, max_len, hidden_size * direction] - x = self.fc(x) - # [batch_size, max_len, num_classes] - return {C.LOSS: self._internal_loss(x, target, mask)} - - def predict(self, words, seq_len): - r""" - 用于在预测时使用 - - :param torch.LongTensor words: [batch_size, max_len] - :param torch.LongTensor seq_len: [batch_size,] - :return: {'pred': xx}, [batch_size, max_len] - """ - mask = seq_len_to_mask(seq_len, max_len=words.size(1)) - - x = self.embedding(words) - # [batch_size, max_len, word_emb_dim] - x, _ = self.rnn(x, seq_len) - # [batch_size, max_len, hidden_size * direction] - x = self.fc(x) - # [batch_size, max_len, num_classes] - pred = self._decode(x, mask) - return {C.OUTPUT: pred} - - def _internal_loss(self, x, y, mask): - r""" - Negative log likelihood loss. 
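
The target-switching convention of BiLSTMCRF._forward above gives two call patterns; model, words, seq_len and target are placeholders:

train_out = model(words, seq_len, target)   # {'loss': scalar CRF negative log-likelihood}
infer_out = model.predict(words, seq_len)   # {'pred': [batch, max_len] Viterbi paths}
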
- :param x: Tensor, [batch_size, max_len, tag_size] - :param y: Tensor, [batch_size, max_len] - :return loss: a scalar Tensor - - """ - x = x.float() - y = y.long() - total_loss = self.crf(x, y, mask) - return torch.mean(total_loss) - - def _decode(self, x, mask): - r""" - :param torch.FloatTensor x: [batch_size, max_len, tag_size] - :return prediction: [batch_size, max_len] - """ - tag_seq, _ = self.crf.viterbi_decode(x, mask) - return tag_seq - - -class AdvSeqLabel(nn.Module): - r""" - 更复杂的Sequence Labelling模型。结构为Embedding, LayerNorm, 双向LSTM(两层),FC,LayerNorm,DropOut,FC,CRF。 - """ - - def __init__(self, embed, hidden_size, num_classes, dropout=0.3, id2words=None, encoding_type='bmes'): - r""" - - :param tuple(int,int),torch.FloatTensor,nn.Embedding,numpy.ndarray embed: Embedding的大小(传入tuple(int, int), - 第一个int为vocab_zie, 第二个int为embed_dim); 如果为Tensor, Embedding, ndarray等则直接使用该值初始化Embedding - :param int hidden_size: LSTM的隐层大小 - :param int num_classes: 有多少个类 - :param float dropout: LSTM中以及DropOut层的drop概率 - :param dict id2words: tag id转为其tag word的表。用于在CRF解码时防止解出非法的顺序,比如'BMES'这个标签规范中,'S' - 不能出现在'B'之后。这里也支持类似与'B-NN',即'-'前为标签类型的指示,后面为具体的tag的情况。这里不但会保证 - 'B-NN'后面不为'S-NN'还会保证'B-NN'后面不会出现'M-xx'(任何非'M-NN'和'E-NN'的情况。) - :param str encoding_type: 支持"BIO", "BMES", "BEMSO", 只有在id2words不为None的情况有用。 - """ - super().__init__() - - self.Embedding = get_embeddings(embed) - self.norm1 = torch.nn.LayerNorm(self.Embedding.embedding_dim) - self.Rnn = encoder.LSTM(input_size=self.Embedding.embedding_dim, hidden_size=hidden_size, num_layers=2, - dropout=dropout, - bidirectional=True, batch_first=True) - self.Linear1 = nn.Linear(hidden_size * 2, hidden_size * 2 // 3) - self.norm2 = torch.nn.LayerNorm(hidden_size * 2 // 3) - self.relu = torch.nn.LeakyReLU() - self.drop = torch.nn.Dropout(dropout) - self.Linear2 = nn.Linear(hidden_size * 2 // 3, num_classes) - - if id2words is None: - self.Crf = decoder.crf.ConditionalRandomField(num_classes, include_start_end_trans=False) - else: - self.Crf = decoder.crf.ConditionalRandomField(num_classes, include_start_end_trans=False, - allowed_transitions=allowed_transitions(id2words, - encoding_type=encoding_type)) - - def _decode(self, x, mask): - r""" - :param torch.FloatTensor x: [batch_size, max_len, tag_size] - :param torch.ByteTensor mask: [batch_size, max_len] - :return torch.LongTensor, [batch_size, max_len] - """ - tag_seq, _ = self.Crf.viterbi_decode(x, mask) - return tag_seq - - def _internal_loss(self, x, y, mask): - r""" - Negative log likelihood loss. - :param x: Tensor, [batch_size, max_len, tag_size] - :param y: Tensor, [batch_size, max_len] - :param mask: Tensor, [batch_size, max_len] - :return loss: a scalar Tensor - - """ - x = x.float() - y = y.long() - total_loss = self.Crf(x, y, mask) - return torch.mean(total_loss) - - def _forward(self, words, seq_len, target=None): - r""" - :param torch.LongTensor words: [batch_size, mex_len] - :param torch.LongTensor seq_len:[batch_size, ] - :param torch.LongTensor target: [batch_size, max_len] - :return y: If truth is None, return list of [decode path(list)]. Used in testing and predicting. - If truth is not None, return loss, a scalar. Used in training. 
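
A minimal instantiation sketch for AdvSeqLabel above; the sizes and the tag map are hypothetical:

model = AdvSeqLabel(embed=(10000, 100), hidden_size=200, num_classes=3,
                    dropout=0.3, id2words={0: 'B-PER', 1: 'I-PER', 2: 'O'},
                    encoding_type='bio')
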
- """ - - words = words.long() - seq_len = seq_len.long() - mask = seq_len_to_mask(seq_len, max_len=words.size(1)) - - target = target.long() if target is not None else None - - if next(self.parameters()).is_cuda: - words = words.cuda() - - x = self.Embedding(words) - x = self.norm1(x) - # [batch_size, max_len, word_emb_dim] - - x, _ = self.Rnn(x, seq_len=seq_len) - - x = self.Linear1(x) - x = self.norm2(x) - x = self.relu(x) - x = self.drop(x) - x = self.Linear2(x) - if target is not None: - return {"loss": self._internal_loss(x, target, mask)} - else: - return {"pred": self._decode(x, mask)} - - def forward(self, words, seq_len, target): - r""" - - :param torch.LongTensor words: [batch_size, mex_len] - :param torch.LongTensor seq_len: [batch_size, ] - :param torch.LongTensor target: [batch_size, max_len], 目标 - :return torch.Tensor: a scalar loss - """ - return self._forward(words, seq_len, target) - - def predict(self, words, seq_len): - r""" - - :param torch.LongTensor words: [batch_size, mex_len] - :param torch.LongTensor seq_len: [batch_size, ] - :return torch.LongTensor: [batch_size, max_len] - """ - return self._forward(words, seq_len) diff --git a/fastNLP/models/snli.py b/fastNLP/models/snli.py deleted file mode 100644 index ae5c607a..00000000 --- a/fastNLP/models/snli.py +++ /dev/null @@ -1,178 +0,0 @@ -r""" -.. todo:: - doc -""" -__all__ = [ - "ESIM" -] - -import torch -import torch.nn as nn -import torch.nn.functional as F -from torch.nn import CrossEntropyLoss - -from .base_model import BaseModel -from ..core.const import Const -from ..core.utils import seq_len_to_mask -from ..embeddings.embedding import TokenEmbedding, Embedding -from ..modules.encoder import BiAttention - - -class ESIM(BaseModel): - r""" - ESIM model的一个PyTorch实现 - 论文参见: https://arxiv.org/pdf/1609.06038.pdf - - """ - - def __init__(self, embed, hidden_size=None, num_labels=3, dropout_rate=0.3, - dropout_embed=0.1): - r""" - - :param embed: 初始化的Embedding - :param int hidden_size: 隐藏层大小,默认值为Embedding的维度 - :param int num_labels: 目标标签种类数量,默认值为3 - :param float dropout_rate: dropout的比率,默认值为0.3 - :param float dropout_embed: 对Embedding的dropout比率,默认值为0.1 - """ - super(ESIM, self).__init__() - - if isinstance(embed, TokenEmbedding) or isinstance(embed, Embedding): - self.embedding = embed - else: - self.embedding = Embedding(embed) - self.dropout_embed = EmbedDropout(p=dropout_embed) - if hidden_size is None: - hidden_size = self.embedding.embed_size - self.rnn = BiRNN(self.embedding.embed_size, hidden_size, dropout_rate=dropout_rate) - # self.rnn = LSTM(self.embedding.embed_size, hidden_size, dropout=dropout_rate, bidirectional=True) - - self.interfere = nn.Sequential(nn.Dropout(p=dropout_rate), - nn.Linear(8 * hidden_size, hidden_size), - nn.ReLU()) - nn.init.xavier_uniform_(self.interfere[1].weight.data) - self.bi_attention = BiAttention() - - self.rnn_high = BiRNN(self.embedding.embed_size, hidden_size, dropout_rate=dropout_rate) - # self.rnn_high = LSTM(hidden_size, hidden_size, dropout=dropout_rate, bidirectional=True,) - - self.classifier = nn.Sequential(nn.Dropout(p=dropout_rate), - nn.Linear(8 * hidden_size, hidden_size), - nn.Tanh(), - nn.Dropout(p=dropout_rate), - nn.Linear(hidden_size, num_labels)) - - self.dropout_rnn = nn.Dropout(p=dropout_rate) - - nn.init.xavier_uniform_(self.classifier[1].weight.data) - nn.init.xavier_uniform_(self.classifier[4].weight.data) - - def forward(self, words1, words2, seq_len1, seq_len2, target=None): - r""" - :param words1: [batch, seq_len] - :param words2: [batch, seq_len] 
- :param seq_len1: [batch] - :param seq_len2: [batch] - :param target: - :return: - """ - mask1 = seq_len_to_mask(seq_len1, words1.size(1)) - mask2 = seq_len_to_mask(seq_len2, words2.size(1)) - a0 = self.embedding(words1) # B * len * emb_dim - b0 = self.embedding(words2) - a0, b0 = self.dropout_embed(a0), self.dropout_embed(b0) - a = self.rnn(a0, mask1.byte()) # a: [B, PL, 2 * H] - b = self.rnn(b0, mask2.byte()) - # a = self.dropout_rnn(self.rnn(a0, seq_len1)[0]) # a: [B, PL, 2 * H] - # b = self.dropout_rnn(self.rnn(b0, seq_len2)[0]) - - ai, bi = self.bi_attention(a, mask1, b, mask2) - - a_ = torch.cat((a, ai, a - ai, a * ai), dim=2) # ma: [B, PL, 8 * H] - b_ = torch.cat((b, bi, b - bi, b * bi), dim=2) - a_f = self.interfere(a_) - b_f = self.interfere(b_) - - a_h = self.rnn_high(a_f, mask1.byte()) # ma: [B, PL, 2 * H] - b_h = self.rnn_high(b_f, mask2.byte()) - # a_h = self.dropout_rnn(self.rnn_high(a_f, seq_len1)[0]) # ma: [B, PL, 2 * H] - # b_h = self.dropout_rnn(self.rnn_high(b_f, seq_len2)[0]) - - a_avg = self.mean_pooling(a_h, mask1, dim=1) - a_max, _ = self.max_pooling(a_h, mask1, dim=1) - b_avg = self.mean_pooling(b_h, mask2, dim=1) - b_max, _ = self.max_pooling(b_h, mask2, dim=1) - - out = torch.cat((a_avg, a_max, b_avg, b_max), dim=1) # v: [B, 8 * H] - logits = torch.tanh(self.classifier(out)) - - if target is not None: - loss_fct = CrossEntropyLoss() - loss = loss_fct(logits, target) - - return {Const.LOSS: loss, Const.OUTPUT: logits} - else: - return {Const.OUTPUT: logits} - - def predict(self, **kwargs): - pred = self.forward(**kwargs)[Const.OUTPUT].argmax(-1) - return {Const.OUTPUT: pred} - - # input [batch_size, len , hidden] - # mask [batch_size, len] (111...00) - @staticmethod - def mean_pooling(input, mask, dim=1): - masks = mask.view(mask.size(0), mask.size(1), -1).float() - return torch.sum(input * masks, dim=dim) / torch.sum(masks, dim=1) - - @staticmethod - def max_pooling(input, mask, dim=1): - my_inf = 10e12 - masks = mask.view(mask.size(0), mask.size(1), -1) - masks = masks.expand(-1, -1, input.size(2)).float() - return torch.max(input + masks.le(0.5).float() * -my_inf, dim=dim) - - -class EmbedDropout(nn.Dropout): - - def forward(self, sequences_batch): - ones = sequences_batch.data.new_ones(sequences_batch.shape[0], sequences_batch.shape[-1]) - dropout_mask = nn.functional.dropout(ones, self.p, self.training, inplace=False) - return dropout_mask.unsqueeze(1) * sequences_batch - - -class BiRNN(nn.Module): - def __init__(self, input_size, hidden_size, dropout_rate=0.3): - super(BiRNN, self).__init__() - self.dropout_rate = dropout_rate - self.rnn = nn.LSTM(input_size, hidden_size, - num_layers=1, - bidirectional=True, - batch_first=True) - - def forward(self, x, x_mask): - # Sort x - lengths = x_mask.data.eq(True).long().sum(1) - _, idx_sort = torch.sort(lengths, dim=0, descending=True) - _, idx_unsort = torch.sort(idx_sort, dim=0) - lengths = list(lengths[idx_sort]) - - x = x.index_select(0, idx_sort) - # Pack it up - rnn_input = nn.utils.rnn.pack_padded_sequence(x, lengths, batch_first=True) - # Apply dropout to input - if self.dropout_rate > 0: - dropout_input = F.dropout(rnn_input.data, p=self.dropout_rate, training=self.training) - rnn_input = nn.utils.rnn.PackedSequence(dropout_input, rnn_input.batch_sizes) - self.rnn.flatten_parameters() - output = self.rnn(rnn_input)[0] - # Unpack everything - output = nn.utils.rnn.pad_packed_sequence(output, batch_first=True)[0] - output = output.index_select(0, idx_unsort) - if output.size(1) != x_mask.size(1): - padding = 
torch.zeros(output.size(0), - x_mask.size(1) - output.size(1), - output.size(2)).type(output.data.type()) - output = torch.cat([output, padding], 1) - return output - diff --git a/fastNLP/models/star_transformer.py b/fastNLP/models/star_transformer.py deleted file mode 100644 index 7216657e..00000000 --- a/fastNLP/models/star_transformer.py +++ /dev/null @@ -1,307 +0,0 @@ -r""" -Star-Transformer 的 Pytorch 实现。 -""" -__all__ = [ - "StarTransEnc", - "STNLICls", - "STSeqCls", - "STSeqLabel", -] - -import torch -from torch import nn - -from ..core.const import Const -from ..core.utils import seq_len_to_mask -from ..embeddings.utils import get_embeddings -from ..modules.encoder.star_transformer import StarTransformer - - -class StarTransEnc(nn.Module): - r""" - 带word embedding的Star-Transformer Encoder - - """ - - def __init__(self, embed, - hidden_size, - num_layers, - num_head, - head_dim, - max_len, - emb_dropout, - dropout): - r""" - - :param embed: 单词词典, 可以是 tuple, 包括(num_embedings, embedding_dim), 即 - embedding的大小和每个词的维度. 也可以传入 nn.Embedding 对象,此时就以传入的对象作为embedding - :param hidden_size: 模型中特征维度. - :param num_layers: 模型层数. - :param num_head: 模型中multi-head的head个数. - :param head_dim: 模型中multi-head中每个head特征维度. - :param max_len: 模型能接受的最大输入长度. - :param emb_dropout: 词嵌入的dropout概率. - :param dropout: 模型除词嵌入外的dropout概率. - """ - super(StarTransEnc, self).__init__() - self.embedding = get_embeddings(embed) - emb_dim = self.embedding.embedding_dim - self.emb_fc = nn.Linear(emb_dim, hidden_size) - # self.emb_drop = nn.Dropout(emb_dropout) - self.encoder = StarTransformer(hidden_size=hidden_size, - num_layers=num_layers, - num_head=num_head, - head_dim=head_dim, - dropout=dropout, - max_len=max_len) - - def forward(self, x, mask): - r""" - :param FloatTensor x: [batch, length, hidden] 输入的序列 - :param ByteTensor mask: [batch, length] 输入序列的padding mask, 在没有内容(padding 部分) 为 0, - 否则为 1 - :return: [batch, length, hidden] 编码后的输出序列 - - [batch, hidden] 全局 relay 节点, 详见论文 - """ - x = self.embedding(x) - x = self.emb_fc(x) - nodes, relay = self.encoder(x, mask) - return nodes, relay - - -class _Cls(nn.Module): - def __init__(self, in_dim, num_cls, hid_dim, dropout=0.1): - super(_Cls, self).__init__() - self.fc = nn.Sequential( - nn.Linear(in_dim, hid_dim), - nn.LeakyReLU(), - nn.Dropout(dropout), - nn.Linear(hid_dim, num_cls), - ) - - def forward(self, x): - h = self.fc(x) - return h - - -class _NLICls(nn.Module): - def __init__(self, in_dim, num_cls, hid_dim, dropout=0.1): - super(_NLICls, self).__init__() - self.fc = nn.Sequential( - nn.Dropout(dropout), - nn.Linear(in_dim * 4, hid_dim), # 4 - nn.LeakyReLU(), - nn.Dropout(dropout), - nn.Linear(hid_dim, num_cls), - ) - - def forward(self, x1, x2): - x = torch.cat([x1, x2, torch.abs(x1 - x2), x1 * x2], 1) - h = self.fc(x) - return h - - -class STSeqLabel(nn.Module): - r""" - 用于序列标注的Star-Transformer模型 - - """ - - def __init__(self, embed, num_cls, - hidden_size=300, - num_layers=4, - num_head=8, - head_dim=32, - max_len=512, - cls_hidden_size=600, - emb_dropout=0.1, - dropout=0.1, ): - r""" - - :param embed: 单词词典, 可以是 tuple, 包括(num_embedings, embedding_dim), 即 - embedding的大小和每个词的维度. 也可以传入 nn.Embedding 对象, 此时就以传入的对象作为embedding - :param num_cls: 输出类别个数 - :param hidden_size: 模型中特征维度. Default: 300 - :param num_layers: 模型层数. Default: 4 - :param num_head: 模型中multi-head的head个数. Default: 8 - :param head_dim: 模型中multi-head中每个head特征维度. Default: 32 - :param max_len: 模型能接受的最大输入长度. Default: 512 - :param cls_hidden_size: 分类器隐层维度. Default: 600 - :param emb_dropout: 词嵌入的dropout概率. 
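
StarTransEnc above returns per-token states plus a global relay node, and the classifiers below combine them. A sketch of the readout, where enc is a StarTransEnc instance and words/mask are placeholders (the fused form is the one STSeqCls uses further down):

nodes, relay = enc(words, mask)              # nodes: [batch, len, hidden], relay: [batch, hidden]
sent_repr = 0.5 * (relay + nodes.max(1)[0])  # fuse relay node with max-pooled token states
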
Default: 0.1 - :param dropout: 模型除词嵌入外的dropout概率. Default: 0.1 - """ - super(STSeqLabel, self).__init__() - self.enc = StarTransEnc(embed=embed, - hidden_size=hidden_size, - num_layers=num_layers, - num_head=num_head, - head_dim=head_dim, - max_len=max_len, - emb_dropout=emb_dropout, - dropout=dropout) - self.cls = _Cls(hidden_size, num_cls, cls_hidden_size) - - def forward(self, words, seq_len): - r""" - - :param words: [batch, seq_len] 输入序列 - :param seq_len: [batch,] 输入序列的长度 - :return output: [batch, num_cls, seq_len] 输出序列中每个元素的分类的概率 - """ - mask = seq_len_to_mask(seq_len) - nodes, _ = self.enc(words, mask) - output = self.cls(nodes) - output = output.transpose(1, 2) # make hidden to be dim 1 - return {Const.OUTPUT: output} # [bsz, n_cls, seq_len] - - def predict(self, words, seq_len): - r""" - - :param words: [batch, seq_len] 输入序列 - :param seq_len: [batch,] 输入序列的长度 - :return output: [batch, seq_len] 输出序列中每个元素的分类 - """ - y = self.forward(words, seq_len) - _, pred = y[Const.OUTPUT].max(1) - return {Const.OUTPUT: pred} - - -class STSeqCls(nn.Module): - r""" - 用于分类任务的Star-Transformer - - """ - - def __init__(self, embed, num_cls, - hidden_size=300, - num_layers=4, - num_head=8, - head_dim=32, - max_len=512, - cls_hidden_size=600, - emb_dropout=0.1, - dropout=0.1, ): - r""" - - :param embed: 单词词典, 可以是 tuple, 包括(num_embedings, embedding_dim), 即 - embedding的大小和每个词的维度. 也可以传入 nn.Embedding 对象, 此时就以传入的对象作为embedding - :param num_cls: 输出类别个数 - :param hidden_size: 模型中特征维度. Default: 300 - :param num_layers: 模型层数. Default: 4 - :param num_head: 模型中multi-head的head个数. Default: 8 - :param head_dim: 模型中multi-head中每个head特征维度. Default: 32 - :param max_len: 模型能接受的最大输入长度. Default: 512 - :param cls_hidden_size: 分类器隐层维度. Default: 600 - :param emb_dropout: 词嵌入的dropout概率. Default: 0.1 - :param dropout: 模型除词嵌入外的dropout概率. Default: 0.1 - """ - super(STSeqCls, self).__init__() - self.enc = StarTransEnc(embed=embed, - hidden_size=hidden_size, - num_layers=num_layers, - num_head=num_head, - head_dim=head_dim, - max_len=max_len, - emb_dropout=emb_dropout, - dropout=dropout) - self.cls = _Cls(hidden_size, num_cls, cls_hidden_size, dropout=dropout) - - def forward(self, words, seq_len): - r""" - - :param words: [batch, seq_len] 输入序列 - :param seq_len: [batch,] 输入序列的长度 - :return output: [batch, num_cls] 输出序列的分类的概率 - """ - mask = seq_len_to_mask(seq_len) - nodes, relay = self.enc(words, mask) - y = 0.5 * (relay + nodes.max(1)[0]) - output = self.cls(y) # [bsz, n_cls] - return {Const.OUTPUT: output} - - def predict(self, words, seq_len): - r""" - - :param words: [batch, seq_len] 输入序列 - :param seq_len: [batch,] 输入序列的长度 - :return output: [batch, num_cls] 输出序列的分类 - """ - y = self.forward(words, seq_len) - _, pred = y[Const.OUTPUT].max(1) - return {Const.OUTPUT: pred} - - -class STNLICls(nn.Module): - r""" - 用于自然语言推断(NLI)的Star-Transformer - - """ - - def __init__(self, embed, num_cls, - hidden_size=300, - num_layers=4, - num_head=8, - head_dim=32, - max_len=512, - cls_hidden_size=600, - emb_dropout=0.1, - dropout=0.1, ): - r""" - - :param embed: 单词词典, 可以是 tuple, 包括(num_embedings, embedding_dim), 即 - embedding的大小和每个词的维度. 也可以传入 nn.Embedding 对象, 此时就以传入的对象作为embedding - :param num_cls: 输出类别个数 - :param hidden_size: 模型中特征维度. Default: 300 - :param num_layers: 模型层数. Default: 4 - :param num_head: 模型中multi-head的head个数. Default: 8 - :param head_dim: 模型中multi-head中每个head特征维度. Default: 32 - :param max_len: 模型能接受的最大输入长度. Default: 512 - :param cls_hidden_size: 分类器隐层维度. Default: 600 - :param emb_dropout: 词嵌入的dropout概率. 
Default: 0.1 - :param dropout: 模型除词嵌入外的dropout概率. Default: 0.1 - """ - super(STNLICls, self).__init__() - self.enc = StarTransEnc(embed=embed, - hidden_size=hidden_size, - num_layers=num_layers, - num_head=num_head, - head_dim=head_dim, - max_len=max_len, - emb_dropout=emb_dropout, - dropout=dropout) - self.cls = _NLICls(hidden_size, num_cls, cls_hidden_size) - - def forward(self, words1, words2, seq_len1, seq_len2): - r""" - - :param words1: [batch, seq_len] 输入序列1 - :param words2: [batch, seq_len] 输入序列2 - :param seq_len1: [batch,] 输入序列1的长度 - :param seq_len2: [batch,] 输入序列2的长度 - :return output: [batch, num_cls] 输出分类的概率 - """ - mask1 = seq_len_to_mask(seq_len1) - mask2 = seq_len_to_mask(seq_len2) - - def enc(seq, mask): - nodes, relay = self.enc(seq, mask) - return 0.5 * (relay + nodes.max(1)[0]) - - y1 = enc(words1, mask1) - y2 = enc(words2, mask2) - output = self.cls(y1, y2) # [bsz, n_cls] - return {Const.OUTPUT: output} - - def predict(self, words1, words2, seq_len1, seq_len2): - r""" - - :param words1: [batch, seq_len] 输入序列1 - :param words2: [batch, seq_len] 输入序列2 - :param seq_len1: [batch,] 输入序列1的长度 - :param seq_len2: [batch,] 输入序列2的长度 - :return output: [batch, num_cls] 输出分类的概率 - """ - y = self.forward(words1, words2, seq_len1, seq_len2) - _, pred = y[Const.OUTPUT].max(1) - return {Const.OUTPUT: pred} diff --git a/fastNLP/modules/__init__.py b/fastNLP/modules/__init__.py deleted file mode 100644 index 77144660..00000000 --- a/fastNLP/modules/__init__.py +++ /dev/null @@ -1,90 +0,0 @@ -r""" - -.. image:: figures/text_classification.png - -大部分用于的 NLP 任务神经网络都可以看做由 :mod:`embedding` 、 :mod:`~fastNLP.modules.encoder` 、 -:mod:`~fastNLP.modules.decoder` 三种模块组成。 本模块中实现了 fastNLP 提供的诸多模块组件, -可以帮助用户快速搭建自己所需的网络。几种模块的功能和常见组件如下: - -.. csv-table:: - :header: "类型", "功能", "常见组件" - - "embedding", 参见 :mod:`/fastNLP.embeddings` , "Elmo, Bert" - "encoder", "将输入编码为具有表示能力的向量", "CNN, LSTM, Transformer" - "decoder", "将具有某种表示意义的向量解码为需要的输出形式 ", "MLP, CRF" - "其它", "配合其它组件使用的组件", "Dropout" - - -""" -__all__ = [ - # "BertModel", - - "ConvolutionCharEncoder", - "LSTMCharEncoder", - - "ConvMaxpool", - - "LSTM", - - "StarTransformer", - - "TransformerEncoder", - - "VarRNN", - "VarLSTM", - "VarGRU", - - "MaxPool", - "MaxPoolWithMask", - "KMaxPool", - "AvgPool", - "AvgPoolWithMask", - - "MultiHeadAttention", - - "MLP", - "ConditionalRandomField", - "viterbi_decode", - "allowed_transitions", - - "TimestepDropout", - - 'summary', - - "BertTokenizer", - "BertModel", - - "RobertaTokenizer", - "RobertaModel", - - "GPT2Model", - "GPT2Tokenizer", - - "TransformerSeq2SeqEncoder", - "LSTMSeq2SeqEncoder", - "Seq2SeqEncoder", - - "TransformerSeq2SeqDecoder", - "LSTMSeq2SeqDecoder", - "Seq2SeqDecoder", - - "TransformerState", - "LSTMState", - "State", - - "SequenceGenerator" -] - -import sys - -from . import decoder -from . 
import encoder -from .decoder import * -from .dropout import TimestepDropout -from .encoder import * -from .generator import * -from .utils import summary -from ..doc_utils import doc_process -from .tokenizer import * - -doc_process(sys.modules[__name__]) diff --git a/fastNLP/modules/attention.py b/fastNLP/modules/attention.py deleted file mode 100644 index 85810670..00000000 --- a/fastNLP/modules/attention.py +++ /dev/null @@ -1,324 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "MultiHeadAttention", - "BiAttention", - "SelfAttention", -] - -import math - -import torch -import torch.nn.functional as F -from torch import nn - -from .utils import initial_parameter -from .decoder.seq2seq_state import TransformerState - - -class DotAttention(nn.Module): - r""" - Transformer当中的DotAttention - """ - - def __init__(self, key_size, value_size, dropout=0.0): - super(DotAttention, self).__init__() - self.key_size = key_size - self.value_size = value_size - self.scale = math.sqrt(key_size) - self.drop = nn.Dropout(dropout) - self.softmax = nn.Softmax(dim=-1) - - def forward(self, Q, K, V, mask_out=None): - r""" - - :param Q: [..., seq_len_q, key_size] - :param K: [..., seq_len_k, key_size] - :param V: [..., seq_len_k, value_size] - :param mask_out: [..., 1, seq_len] or [..., seq_len_q, seq_len_k] - """ - output = torch.matmul(Q, K.transpose(-1, -2)) / self.scale - if mask_out is not None: - output.masked_fill_(mask_out, -1e9) - output = self.softmax(output) - output = self.drop(output) - return torch.matmul(output, V) - - -class MultiHeadAttention(nn.Module): - """ - Attention is all you need中提到的多头注意力 - - """ - def __init__(self, d_model: int = 512, n_head: int = 8, dropout: float = 0.0, layer_idx: int = None): - super(MultiHeadAttention, self).__init__() - self.d_model = d_model - self.n_head = n_head - self.dropout = dropout - self.head_dim = d_model // n_head - self.layer_idx = layer_idx - assert d_model % n_head == 0, "d_model should be divisible by n_head" - self.scaling = self.head_dim ** -0.5 - - self.q_proj = nn.Linear(d_model, d_model) - self.k_proj = nn.Linear(d_model, d_model) - self.v_proj = nn.Linear(d_model, d_model) - self.out_proj = nn.Linear(d_model, d_model) - - self.reset_parameters() - - def forward(self, query, key, value, key_mask=None, attn_mask=None, state=None): - """ - - :param query: batch x seq x dim - :param key: batch x seq x dim - :param value: batch x seq x dim - :param key_mask: batch x seq 用于指示哪些key不要attend到;注意到mask为1的地方是要attend到的 - :param attn_mask: seq x seq, 用于mask掉attention map。 主要是用在训练时decoder端的self attention,下三角为1 - :param state: 过去的信息,在inference的时候会用到,比如encoder output、decoder的prev kv。这样可以减少计算。 - :return: - """ - assert key.size() == value.size() - if state is not None: - assert self.layer_idx is not None - qkv_same = query.data_ptr() == key.data_ptr() == value.data_ptr() - - q = self.q_proj(query) # batch x seq x dim - q *= self.scaling - k = v = None - prev_k = prev_v = None - - # 从state中取kv - if isinstance(state, TransformerState): # 说明此时在inference阶段 - if qkv_same: # 此时在decoder self attention - prev_k = state.decoder_prev_key[self.layer_idx] - prev_v = state.decoder_prev_value[self.layer_idx] - else: # 此时在decoder-encoder attention,直接将保存下来的key装载起来即可 - k = state.encoder_key[self.layer_idx] - v = state.encoder_value[self.layer_idx] - - if k is None: - k = self.k_proj(key) - v = self.v_proj(value) - - if prev_k is not None: - k = torch.cat((prev_k, k), dim=1) - v = torch.cat((prev_v, v), dim=1) - - # 更新state - if isinstance(state, TransformerState): - if 
qkv_same: - state.decoder_prev_key[self.layer_idx] = k - state.decoder_prev_value[self.layer_idx] = v - else: - state.encoder_key[self.layer_idx] = k - state.encoder_value[self.layer_idx] = v - - # 开始计算attention - batch_size, q_len, d_model = query.size() - k_len, v_len = k.size(1), v.size(1) - q = q.reshape(batch_size, q_len, self.n_head, self.head_dim) - k = k.reshape(batch_size, k_len, self.n_head, self.head_dim) - v = v.reshape(batch_size, v_len, self.n_head, self.head_dim) - - attn_weights = torch.einsum('bqnh,bknh->bqkn', q, k) # bs,q_len,k_len,n_head - if key_mask is not None: - _key_mask = ~key_mask[:, None, :, None].bool() # batch,1,k_len,1 - attn_weights = attn_weights.masked_fill(_key_mask, -float('inf')) - - if attn_mask is not None: - _attn_mask = attn_mask[None, :, :, None].eq(0) # 1,q_len,k_len,n_head - attn_weights = attn_weights.masked_fill(_attn_mask, -float('inf')) - - attn_weights = F.softmax(attn_weights, dim=2) - attn_weights = F.dropout(attn_weights, p=self.dropout, training=self.training) - - output = torch.einsum('bqkn,bknh->bqnh', attn_weights, v) # batch,q_len,n_head,head_dim - output = output.reshape(batch_size, q_len, -1) - output = self.out_proj(output) # batch,q_len,dim - - return output, attn_weights - - def reset_parameters(self): - nn.init.xavier_uniform_(self.q_proj.weight) - nn.init.xavier_uniform_(self.k_proj.weight) - nn.init.xavier_uniform_(self.v_proj.weight) - nn.init.xavier_uniform_(self.out_proj.weight) - - def set_layer_idx(self, layer_idx): - self.layer_idx = layer_idx - - -class AttentionLayer(nn.Module): - def __init__(selfu, input_size, key_dim, value_dim, bias=False): - """ - 可用于LSTM2LSTM的序列到序列模型的decode过程中,该attention是在decode过程中根据上一个step的hidden计算对encoder结果的attention - - :param int input_size: 输入的大小 - :param int key_dim: 一般就是encoder_output输出的维度 - :param int value_dim: 输出的大小维度, 一般就是decoder hidden的大小 - :param bias: - """ - super().__init__() - - selfu.input_proj = nn.Linear(input_size, key_dim, bias=bias) - selfu.output_proj = nn.Linear(input_size + key_dim, value_dim, bias=bias) - - def forward(self, input, encode_outputs, encode_mask): - """ - - :param input: batch_size x input_size - :param encode_outputs: batch_size x max_len x key_dim - :param encode_mask: batch_size x max_len, 为0的地方为padding - :return: hidden: batch_size x value_dim, scores: batch_size x max_len, normalized过的 - """ - - # x: bsz x encode_hidden_size - x = self.input_proj(input) - - # compute attention - attn_scores = torch.matmul(encode_outputs, x.unsqueeze(-1)).squeeze(-1) # b x max_len - - # don't attend over padding - if encode_mask is not None: - attn_scores = attn_scores.float().masked_fill_( - encode_mask.eq(0), - float('-inf') - ).type_as(attn_scores) # FP16 support: cast to float and back - - attn_scores = F.softmax(attn_scores, dim=-1) # srclen x bsz - - # sum weighted sources - x = torch.matmul(attn_scores.unsqueeze(1), encode_outputs).squeeze(1) # b x encode_hidden_size - - x = torch.tanh(self.output_proj(torch.cat((x, input), dim=1))) - return x, attn_scores - - -def _masked_softmax(tensor, mask): - tensor_shape = tensor.size() - reshaped_tensor = tensor.view(-1, tensor_shape[-1]) - - # Reshape the mask so it matches the size of the input tensor. - while mask.dim() < tensor.dim(): - mask = mask.unsqueeze(1) - mask = mask.expand_as(tensor).contiguous().float() - reshaped_mask = mask.view(-1, mask.size()[-1]) - result = F.softmax(reshaped_tensor * reshaped_mask, dim=-1) - result = result * reshaped_mask - # 1e-13 is added to avoid divisions by zero. 
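# (A row whose mask is all zero would otherwise be renormalised as 0/0 and turn
# into NaN; with the epsilon such rows simply stay all-zero after the division.)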
- result = result / (result.sum(dim=-1, keepdim=True) + 1e-13) - return result.view(*tensor_shape) - - -def _weighted_sum(tensor, weights, mask): - w_sum = weights.bmm(tensor) - while mask.dim() < w_sum.dim(): - mask = mask.unsqueeze(1) - mask = mask.transpose(-1, -2) - mask = mask.expand_as(w_sum).contiguous().float() - return w_sum * mask - - -class BiAttention(nn.Module): - r""" - Bi Attention module - - 对于给定的两个向量序列 :math:`a_i` 和 :math:`b_j` , BiAttention模块将通过以下的公式来计算attention结果 - - .. math:: - - \begin{array}{ll} \\ - e_{ij} = {a}^{\mathrm{T}}_{i}{b}_{j} \\ - {\hat{a}}_{i} = \sum_{j=1}^{\mathcal{l}_{b}}{\frac{\mathrm{exp}(e_{ij})}{\sum_{k=1}^{\mathcal{l}_{b}}{\mathrm{exp}(e_{ik})}}}{b}_{j} \\ - {\hat{b}}_{j} = \sum_{i=1}^{\mathcal{l}_{a}}{\frac{\mathrm{exp}(e_{ij})}{\sum_{k=1}^{\mathcal{l}_{a}}{\mathrm{exp}(e_{ik})}}}{a}_{i} \\ - \end{array} - - """ - - def forward(self, premise_batch, premise_mask, hypothesis_batch, hypothesis_mask): - r""" - :param torch.Tensor premise_batch: [batch_size, a_seq_len, hidden_size] - :param torch.Tensor premise_mask: [batch_size, a_seq_len] - :param torch.Tensor hypothesis_batch: [batch_size, b_seq_len, hidden_size] - :param torch.Tensor hypothesis_mask: [batch_size, b_seq_len] - :return: torch.Tensor attended_premises: [batch_size, a_seq_len, hidden_size] torch.Tensor attended_hypotheses: [batch_size, b_seq_len, hidden_size] - """ - similarity_matrix = premise_batch.bmm(hypothesis_batch.transpose(2, 1) - .contiguous()) - - prem_hyp_attn = _masked_softmax(similarity_matrix, hypothesis_mask) - hyp_prem_attn = _masked_softmax(similarity_matrix.transpose(1, 2) - .contiguous(), - premise_mask) - - attended_premises = _weighted_sum(hypothesis_batch, - prem_hyp_attn, - premise_mask) - attended_hypotheses = _weighted_sum(premise_batch, - hyp_prem_attn, - hypothesis_mask) - - return attended_premises, attended_hypotheses - - -class SelfAttention(nn.Module): - r""" - 这是一个基于论文 `A structured self-attentive sentence embedding `_ - 的Self Attention Module. 
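
A quick shape check for BiAttention above; the inputs are random placeholders and the masks all-ones:

import torch
bi_att = BiAttention()
a = torch.randn(2, 7, 128)   # [batch, a_seq_len, hidden]
b = torch.randn(2, 5, 128)   # [batch, b_seq_len, hidden]
a_hat, b_hat = bi_att(a, torch.ones(2, 7), b, torch.ones(2, 5))
# a_hat: [2, 7, 128] (premise aligned to hypothesis), b_hat: [2, 5, 128]
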
- """ - - def __init__(self, input_size, attention_unit=300, attention_hops=10, drop=0.5, initial_method=None, ): - r""" - - :param int input_size: 输入tensor的hidden维度 - :param int attention_unit: 输出tensor的hidden维度 - :param int attention_hops: - :param float drop: dropout概率,默认值为0.5 - :param str initial_method: 初始化参数方法 - """ - super(SelfAttention, self).__init__() - - self.attention_hops = attention_hops - self.ws1 = nn.Linear(input_size, attention_unit, bias=False) - self.ws2 = nn.Linear(attention_unit, attention_hops, bias=False) - self.I = torch.eye(attention_hops, requires_grad=False) - self.I_origin = self.I - self.drop = nn.Dropout(drop) - self.tanh = nn.Tanh() - initial_parameter(self, initial_method) - - def _penalization(self, attention): - r""" - compute the penalization term for attention module - """ - baz = attention.size(0) - size = self.I.size() - if len(size) != 3 or size[0] != baz: - self.I = self.I_origin.expand(baz, -1, -1) - self.I = self.I.to(device=attention.device) - attention_t = torch.transpose(attention, 1, 2).contiguous() - mat = torch.bmm(attention, attention_t) - self.I[:attention.size(0)] - ret = (torch.sum(torch.sum((mat ** 2), 2), 1).squeeze() + 1e-10) ** 0.5 - return torch.sum(ret) / size[0] - - def forward(self, input, input_origin): - r""" - :param torch.Tensor input: [batch_size, seq_len, hidden_size] 要做attention的矩阵 - :param torch.Tensor input_origin: [batch_size, seq_len] 原始token的index组成的矩阵,含有pad部分内容 - :return torch.Tensor output1: [batch_size, multi-head, hidden_size] 经过attention操作后输入矩阵的结果 - :return torch.Tensor output2: [1] attention惩罚项,是一个标量 - """ - input = input.contiguous() - size = input.size() # [bsz, len, nhid] - - input_origin = input_origin.expand(self.attention_hops, -1, -1) # [hops,baz, len] - input_origin = input_origin.transpose(0, 1).contiguous() # [baz, hops,len] - - y1 = self.tanh(self.ws1(self.drop(input))) # [baz,len,dim] -->[bsz,len, attention-unit] - attention = self.ws2(y1).transpose(1, 2).contiguous() - # [bsz,len, attention-unit]--> [bsz, len, hop]--> [baz,hop,len] - - attention = attention + (-999999 * (input_origin == 0).float()) # remove the weight on padding token. - attention = F.softmax(attention, 2) # [baz ,hop, len] - return torch.bmm(attention, input), self._penalization(attention) # output1 --> [baz ,hop ,nhid] diff --git a/fastNLP/modules/decoder/__init__.py b/fastNLP/modules/decoder/__init__.py deleted file mode 100644 index 93099be0..00000000 --- a/fastNLP/modules/decoder/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -r""" -.. 
todo:: - doc -""" -__all__ = [ - "MLP", - "ConditionalRandomField", - "viterbi_decode", - "allowed_transitions", - - "LSTMState", - "TransformerState", - "State", - - "TransformerSeq2SeqDecoder", - "LSTMSeq2SeqDecoder", - "Seq2SeqDecoder" -] - -from .crf import ConditionalRandomField -from .crf import allowed_transitions -from .mlp import MLP -from .utils import viterbi_decode -from .seq2seq_decoder import Seq2SeqDecoder, LSTMSeq2SeqDecoder, TransformerSeq2SeqDecoder -from .seq2seq_state import State, LSTMState, TransformerState diff --git a/fastNLP/modules/decoder/crf.py b/fastNLP/modules/decoder/crf.py deleted file mode 100644 index 0a05f6f4..00000000 --- a/fastNLP/modules/decoder/crf.py +++ /dev/null @@ -1,359 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "ConditionalRandomField", - "allowed_transitions" -] - -from typing import Union - -import torch -from torch import nn - -from ..utils import initial_parameter -from ...core.metrics import _get_encoding_type_from_tag_vocab, _check_tag_vocab_and_encoding_type -from ...core.vocabulary import Vocabulary - - -def allowed_transitions(tag_vocab:Union[Vocabulary, dict], encoding_type=None, include_start_end=False): - r""" - 给定一个id到label的映射表,返回所有可以跳转的(from_tag_id, to_tag_id)列表。 - - :param ~fastNLP.Vocabulary,dict tag_vocab: 支持类型为tag或tag-label。只有tag的,比如"B", "M"; 也可以是"B-NN", "M-NN", - tag和label之间一定要用"-"隔开。如果传入dict,格式需要形如{0:"O", 1:"B-tag1"},即index在前,tag在后。 - :param str encoding_type: 支持"bio", "bmes", "bmeso", "bioes"。默认为None,通过vocab自动推断 - :param bool include_start_end: 是否包含开始与结尾的转换。比如在bio中,b/o可以在开头,但是i不能在开头; - 为True,返回的结果中会包含(start_idx, b_idx), (start_idx, o_idx), 但是不包含(start_idx, i_idx); - start_idx=len(id2label), end_idx=len(id2label)+1。为False, 返回的结果中不含与开始结尾相关的内容 - :return: List[Tuple(int, int)]], 内部的Tuple是可以进行跳转的(from_tag_id, to_tag_id)。 - """ - if encoding_type is None: - encoding_type = _get_encoding_type_from_tag_vocab(tag_vocab) - else: - encoding_type = encoding_type.lower() - _check_tag_vocab_and_encoding_type(tag_vocab, encoding_type) - - pad_token = '' - unk_token = '' - - if isinstance(tag_vocab, Vocabulary): - id_label_lst = list(tag_vocab.idx2word.items()) - pad_token = tag_vocab.padding - unk_token = tag_vocab.unknown - else: - id_label_lst = list(tag_vocab.items()) - - num_tags = len(tag_vocab) - start_idx = num_tags - end_idx = num_tags + 1 - allowed_trans = [] - if include_start_end: - id_label_lst += [(start_idx, 'start'), (end_idx, 'end')] - def split_tag_label(from_label): - from_label = from_label.lower() - if from_label in ['start', 'end']: - from_tag = from_label - from_label = '' - else: - from_tag = from_label[:1] - from_label = from_label[2:] - return from_tag, from_label - - for from_id, from_label in id_label_lst: - if from_label in [pad_token, unk_token]: - continue - from_tag, from_label = split_tag_label(from_label) - for to_id, to_label in id_label_lst: - if to_label in [pad_token, unk_token]: - continue - to_tag, to_label = split_tag_label(to_label) - if _is_transition_allowed(encoding_type, from_tag, from_label, to_tag, to_label): - allowed_trans.append((from_id, to_id)) - return allowed_trans - - -def _is_transition_allowed(encoding_type, from_tag, from_label, to_tag, to_label): - r""" - - :param str encoding_type: 支持"BIO", "BMES", "BEMSO", 'bioes'。 - :param str from_tag: 比如"B", "M"之类的标注tag. 还包括start, end等两种特殊tag - :param str from_label: 比如"PER", "LOC"等label - :param str to_tag: 比如"B", "M"之类的标注tag. 
also including the two special tags start and end
-    :param str to_label: a label such as "PER" or "LOC"
-    :return: bool, whether the transition is allowed
-    """
-    if to_tag == 'start' or from_tag == 'end':
-        return False
-    encoding_type = encoding_type.lower()
-    if encoding_type == 'bio':
-        r"""
-        The first row is to_tag, the first column is from_tag. y: always allowed;
-        -: allowed only when the labels match; n: not allowed.
-        +-------+---+---+---+-------+-----+
-        |       | B | I | O | start | end |
-        +-------+---+---+---+-------+-----+
-        | B     | y | - | y | n     | y   |
-        +-------+---+---+---+-------+-----+
-        | I     | y | - | y | n     | y   |
-        +-------+---+---+---+-------+-----+
-        | O     | y | n | y | n     | y   |
-        +-------+---+---+---+-------+-----+
-        | start | y | n | y | n     | n   |
-        +-------+---+---+---+-------+-----+
-        | end   | n | n | n | n     | n   |
-        +-------+---+---+---+-------+-----+
-        """
-        if from_tag == 'start':
-            return to_tag in ('b', 'o')
-        elif from_tag in ['b', 'i']:
-            return any([to_tag in ['end', 'b', 'o'], to_tag == 'i' and from_label == to_label])
-        elif from_tag == 'o':
-            return to_tag in ['end', 'b', 'o']
-        else:
-            raise ValueError("Unexpected tag {}. Expect only 'B', 'I', 'O'.".format(from_tag))
-
-    elif encoding_type == 'bmes':
-        r"""
-        The first row is to_tag, the first column is from_tag. y: always allowed;
-        -: allowed only when the labels match; n: not allowed.
-        +-------+---+---+---+---+-------+-----+
-        |       | B | M | E | S | start | end |
-        +-------+---+---+---+---+-------+-----+
-        | B     | n | - | - | n | n     | n   |
-        +-------+---+---+---+---+-------+-----+
-        | M     | n | - | - | n | n     | n   |
-        +-------+---+---+---+---+-------+-----+
-        | E     | y | n | n | y | n     | y   |
-        +-------+---+---+---+---+-------+-----+
-        | S     | y | n | n | y | n     | y   |
-        +-------+---+---+---+---+-------+-----+
-        | start | y | n | n | y | n     | n   |
-        +-------+---+---+---+---+-------+-----+
-        | end   | n | n | n | n | n     | n   |
-        +-------+---+---+---+---+-------+-----+
-        """
-        if from_tag == 'start':
-            return to_tag in ['b', 's']
-        elif from_tag == 'b':
-            return to_tag in ['m', 'e'] and from_label == to_label
-        elif from_tag == 'm':
-            return to_tag in ['m', 'e'] and from_label == to_label
-        elif from_tag in ['e', 's']:
-            return to_tag in ['b', 's', 'end']
-        else:
-            raise ValueError("Unexpected tag type {}. Expect only 'B', 'M', 'E', 'S'.".format(from_tag))
-    elif encoding_type == 'bmeso':
-        if from_tag == 'start':
-            return to_tag in ['b', 's', 'o']
-        elif from_tag == 'b':
-            return to_tag in ['m', 'e'] and from_label == to_label
-        elif from_tag == 'm':
-            return to_tag in ['m', 'e'] and from_label == to_label
-        elif from_tag in ['e', 's', 'o']:
-            return to_tag in ['b', 's', 'end', 'o']
-        else:
-            raise ValueError("Unexpected tag type {}. Expect only 'B', 'M', 'E', 'S', 'O'.".format(from_tag))
-    elif encoding_type == 'bioes':
-        if from_tag == 'start':
-            return to_tag in ['b', 's', 'o']
-        elif from_tag == 'b':
-            return to_tag in ['i', 'e'] and from_label == to_label
-        elif from_tag == 'i':
-            return to_tag in ['i', 'e'] and from_label == to_label
-        elif from_tag in ['e', 's', 'o']:
-            return to_tag in ['b', 's', 'end', 'o']
-        else:
-            raise ValueError("Unexpected tag type {}. 
Expect only 'B', 'I', 'E', 'S', 'O'.".format(from_tag)) - else: - raise ValueError("Only support BIO, BMES, BMESO, BIOES encoding type, got {}.".format(encoding_type)) - - -class ConditionalRandomField(nn.Module): - r""" - 条件随机场。提供forward()以及viterbi_decode()两个方法,分别用于训练与inference。 - - """ - - def __init__(self, num_tags, include_start_end_trans=False, allowed_transitions=None, - initial_method=None): - r""" - - :param int num_tags: 标签的数量 - :param bool include_start_end_trans: 是否考虑各个tag作为开始以及结尾的分数。 - :param List[Tuple[from_tag_id(int), to_tag_id(int)]] allowed_transitions: 内部的Tuple[from_tag_id(int), - to_tag_id(int)]视为允许发生的跃迁,其他没有包含的跃迁认为是禁止跃迁,可以通过 - allowed_transitions()函数得到;如果为None,则所有跃迁均为合法 - :param str initial_method: 初始化方法。见initial_parameter - """ - super(ConditionalRandomField, self).__init__() - - self.include_start_end_trans = include_start_end_trans - self.num_tags = num_tags - - # the meaning of entry in this matrix is (from_tag_id, to_tag_id) score - self.trans_m = nn.Parameter(torch.randn(num_tags, num_tags)) - if self.include_start_end_trans: - self.start_scores = nn.Parameter(torch.randn(num_tags)) - self.end_scores = nn.Parameter(torch.randn(num_tags)) - - if allowed_transitions is None: - constrain = torch.zeros(num_tags + 2, num_tags + 2) - else: - constrain = torch.full((num_tags + 2, num_tags + 2), fill_value=-10000.0, dtype=torch.float) - has_start = False - has_end = False - for from_tag_id, to_tag_id in allowed_transitions: - constrain[from_tag_id, to_tag_id] = 0 - if from_tag_id==num_tags: - has_start = True - if to_tag_id==num_tags+1: - has_end = True - if not has_start: - constrain[num_tags, :].fill_(0) - if not has_end: - constrain[:, num_tags+1].fill_(0) - self._constrain = nn.Parameter(constrain, requires_grad=False) - - initial_parameter(self, initial_method) - - def _normalizer_likelihood(self, logits, mask): - r"""Computes the (batch_size,) denominator term for the log-likelihood, which is the - sum of the likelihoods across all possible state sequences. - - :param logits:FloatTensor, max_len x batch_size x num_tags - :param mask:ByteTensor, max_len x batch_size - :return:FloatTensor, batch_size - """ - seq_len, batch_size, n_tags = logits.size() - alpha = logits[0] - if self.include_start_end_trans: - alpha = alpha + self.start_scores.view(1, -1) - - flip_mask = mask.eq(False) - - for i in range(1, seq_len): - emit_score = logits[i].view(batch_size, 1, n_tags) - trans_score = self.trans_m.view(1, n_tags, n_tags) - tmp = alpha.view(batch_size, n_tags, 1) + emit_score + trans_score - alpha = torch.logsumexp(tmp, 1).masked_fill(flip_mask[i].view(batch_size, 1), 0) + \ - alpha.masked_fill(mask[i].eq(True).view(batch_size, 1), 0) - - if self.include_start_end_trans: - alpha = alpha + self.end_scores.view(1, -1) - - return torch.logsumexp(alpha, 1) - - def _gold_score(self, logits, tags, mask): - r""" - Compute the score for the gold path. 
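_normalizer_likelihood above is the forward algorithm: alpha holds, per tag, the log-sum of the scores of all partial paths ending in that tag. Stripped of masking and start/end scores, the recursion reduces to this illustrative sketch::

    import torch

    def log_partition(logits, trans):
        # logits: (seq_len, batch, n_tags); trans[i, j] scores the move from tag i to tag j.
        seq_len, batch, n_tags = logits.size()
        alpha = logits[0]
        for i in range(1, seq_len):
            emit = logits[i].view(batch, 1, n_tags)
            tmp = alpha.view(batch, n_tags, 1) + trans.view(1, n_tags, n_tags) + emit
            alpha = torch.logsumexp(tmp, dim=1)   # marginalize over the previous tag
        return torch.logsumexp(alpha, dim=1)      # (batch,) log Z

    log_z = log_partition(torch.randn(5, 2, 3), torch.randn(3, 3))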
- :param logits: FloatTensor, max_len x batch_size x num_tags - :param tags: LongTensor, max_len x batch_size - :param mask: ByteTensor, max_len x batch_size - :return:FloatTensor, batch_size - """ - seq_len, batch_size, _ = logits.size() - batch_idx = torch.arange(batch_size, dtype=torch.long, device=logits.device) - seq_idx = torch.arange(seq_len, dtype=torch.long, device=logits.device) - - # trans_socre [L-1, B] - mask = mask.eq(True) - flip_mask = mask.eq(False) - trans_score = self.trans_m[tags[:seq_len - 1], tags[1:]].masked_fill(flip_mask[1:, :], 0) - # emit_score [L, B] - emit_score = logits[seq_idx.view(-1, 1), batch_idx.view(1, -1), tags].masked_fill(flip_mask, 0) - # score [L-1, B] - score = trans_score + emit_score[:seq_len - 1, :] - score = score.sum(0) + emit_score[-1].masked_fill(flip_mask[-1], 0) - if self.include_start_end_trans: - st_scores = self.start_scores.view(1, -1).repeat(batch_size, 1)[batch_idx, tags[0]] - last_idx = mask.long().sum(0) - 1 - ed_scores = self.end_scores.view(1, -1).repeat(batch_size, 1)[batch_idx, tags[last_idx, batch_idx]] - score = score + st_scores + ed_scores - # return [B,] - return score - - def forward(self, feats, tags, mask): - r""" - 用于计算CRF的前向loss,返回值为一个batch_size的FloatTensor,可能需要mean()求得loss。 - - :param torch.FloatTensor feats: batch_size x max_len x num_tags,特征矩阵。 - :param torch.LongTensor tags: batch_size x max_len,标签矩阵。 - :param torch.ByteTensor mask: batch_size x max_len,为0的位置认为是padding。 - :return: torch.FloatTensor, (batch_size,) - """ - feats = feats.transpose(0, 1) - tags = tags.transpose(0, 1).long() - mask = mask.transpose(0, 1).float() - all_path_score = self._normalizer_likelihood(feats, mask) - gold_path_score = self._gold_score(feats, tags, mask) - - return all_path_score - gold_path_score - - def viterbi_decode(self, logits, mask, unpad=False): - r"""给定一个特征矩阵以及转移分数矩阵,计算出最佳的路径以及对应的分数 - - :param torch.FloatTensor logits: batch_size x max_len x num_tags,特征矩阵。 - :param torch.ByteTensor mask: batch_size x max_len, 为0的位置认为是pad;如果为None,则认为没有padding。 - :param bool unpad: 是否将结果删去padding。False, 返回的是batch_size x max_len的tensor; True,返回的是 - List[List[int]], 内部的List[int]为每个sequence的label,已经除去pad部分,即每个List[int]的长度是这 - 个sample的有效长度。 - :return: 返回 (paths, scores)。 - paths: 是解码后的路径, 其值参照unpad参数. 
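forward() above therefore returns log Z minus the gold-path score, i.e. a per-sample negative log-likelihood to be reduced (for example with mean()) before calling backward. A training sketch, again assuming the pre-removal fastNLP package::

    import torch
    from fastNLP.modules.decoder import ConditionalRandomField

    crf = ConditionalRandomField(num_tags=5)
    feats = torch.randn(2, 7, 5)                # batch_size x max_len x num_tags
    tags = torch.randint(0, 5, (2, 7))          # gold tag ids
    mask = torch.ones(2, 7, dtype=torch.uint8)  # 1 = real token, 0 = padding
    loss = crf(feats, tags, mask).mean()
    loss.backward()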
- scores: torch.FloatTensor, size为(batch_size,), 对应每个最优路径的分数。 - - """ - batch_size, max_len, n_tags = logits.size() - seq_len = mask.long().sum(1) - logits = logits.transpose(0, 1).data # L, B, H - mask = mask.transpose(0, 1).data.eq(True) # L, B - flip_mask = mask.eq(False) - - # dp - vpath = logits.new_zeros((max_len, batch_size, n_tags), dtype=torch.long) - vscore = logits[0] # bsz x n_tags - transitions = self._constrain.data.clone() - transitions[:n_tags, :n_tags] += self.trans_m.data - if self.include_start_end_trans: - transitions[n_tags, :n_tags] += self.start_scores.data - transitions[:n_tags, n_tags + 1] += self.end_scores.data - - vscore += transitions[n_tags, :n_tags] - - trans_score = transitions[:n_tags, :n_tags].view(1, n_tags, n_tags).data - end_trans_score = transitions[:n_tags, n_tags+1].view(1, 1, n_tags).repeat(batch_size, 1, 1) # bsz, 1, n_tags - - # 针对长度为1的句子 - vscore += transitions[:n_tags, n_tags+1].view(1, n_tags).repeat(batch_size, 1) \ - .masked_fill(seq_len.ne(1).view(-1, 1), 0) - for i in range(1, max_len): - prev_score = vscore.view(batch_size, n_tags, 1) - cur_score = logits[i].view(batch_size, 1, n_tags) + trans_score - score = prev_score + cur_score.masked_fill(flip_mask[i].view(batch_size, 1, 1), 0) # bsz x n_tag x n_tag - # 需要考虑当前位置是该序列的最后一个 - score += end_trans_score.masked_fill(seq_len.ne(i+1).view(-1, 1, 1), 0) - - best_score, best_dst = score.max(1) - vpath[i] = best_dst - # 由于最终是通过last_tags回溯,需要保持每个位置的vscore情况 - vscore = best_score.masked_fill(flip_mask[i].view(batch_size, 1), 0) + \ - vscore.masked_fill(mask[i].view(batch_size, 1), 0) - - # backtrace - batch_idx = torch.arange(batch_size, dtype=torch.long, device=logits.device) - seq_idx = torch.arange(max_len, dtype=torch.long, device=logits.device) - lens = (seq_len - 1) - # idxes [L, B], batched idx from seq_len-1 to 0 - idxes = (lens.view(1, -1) - seq_idx.view(-1, 1)) % max_len - - ans = logits.new_empty((max_len, batch_size), dtype=torch.long) - ans_score, last_tags = vscore.max(1) - ans[idxes[0], batch_idx] = last_tags - for i in range(max_len - 1): - last_tags = vpath[idxes[i], batch_idx, last_tags] - ans[idxes[i + 1], batch_idx] = last_tags - ans = ans.transpose(0, 1) - if unpad: - paths = [] - for idx, max_len in enumerate(lens): - paths.append(ans[idx, :max_len + 1].tolist()) - else: - paths = ans - return paths, ans_score diff --git a/fastNLP/modules/decoder/mlp.py b/fastNLP/modules/decoder/mlp.py deleted file mode 100644 index e4df542d..00000000 --- a/fastNLP/modules/decoder/mlp.py +++ /dev/null @@ -1,100 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "MLP" -] - -import torch -import torch.nn as nn - -from ..utils import initial_parameter - - -class MLP(nn.Module): - r""" - 多层感知器 - - - .. 
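The least obvious step in viterbi_decode above is the wrap-around index matrix used to backtrace batches of different lengths in lockstep. A standalone illustration::

    import torch

    max_len = 5
    lens = torch.tensor([4, 2])   # seq_len - 1 for sequences of valid length 5 and 3
    seq_idx = torch.arange(max_len)
    idxes = (lens.view(1, -1) - seq_idx.view(-1, 1)) % max_len
    # Column 0 counts 4, 3, 2, 1, 0 and column 1 counts 2, 1, 0, 4, 3: each sequence
    # is walked backwards from its last valid position, with padded steps wrapping.
    print(idxes)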
note:: - 隐藏层的激活函数通过activation定义。一个str/function或者一个str/function的list可以被传入activation。 - 如果只传入了一个str/function,那么所有隐藏层的激活函数都由这个str/function定义; - 如果传入了一个str/function的list,那么每一个隐藏层的激活函数由这个list中对应的元素定义,其中list的长度为隐藏层数。 - 输出层的激活函数由output_activation定义,默认值为None,此时输出层没有激活函数。 - - Examples:: - - >>> net1 = MLP([5, 10, 5]) - >>> net2 = MLP([5, 10, 5], 'tanh') - >>> net3 = MLP([5, 6, 7, 8, 5], 'tanh') - >>> net4 = MLP([5, 6, 7, 8, 5], 'relu', output_activation='tanh') - >>> net5 = MLP([5, 6, 7, 8, 5], ['tanh', 'relu', 'tanh'], 'tanh') - >>> for net in [net1, net2, net3, net4, net5]: - >>> x = torch.randn(5, 5) - >>> y = net(x) - >>> print(x) - >>> print(y) - """ - - def __init__(self, size_layer, activation='relu', output_activation=None, initial_method=None, dropout=0.0): - r""" - - :param List[int] size_layer: 一个int的列表,用来定义MLP的层数,列表中的数字为每一层是hidden数目。MLP的层数为 len(size_layer) - 1 - :param Union[str,func,List[str]] activation: 一个字符串或者函数的列表,用来定义每一个隐层的激活函数,字符串包括relu,tanh和 - sigmoid,默认值为relu - :param Union[str,func] output_activation: 字符串或者函数,用来定义输出层的激活函数,默认值为None,表示输出层没有激活函数 - :param str initial_method: 参数初始化方式 - :param float dropout: dropout概率,默认值为0 - """ - super(MLP, self).__init__() - self.hiddens = nn.ModuleList() - self.output = None - self.output_activation = output_activation - for i in range(1, len(size_layer)): - if i + 1 == len(size_layer): - self.output = nn.Linear(size_layer[i - 1], size_layer[i]) - else: - self.hiddens.append(nn.Linear(size_layer[i - 1], size_layer[i])) - - self.dropout = nn.Dropout(p=dropout) - - actives = { - 'relu': nn.ReLU(), - 'tanh': nn.Tanh(), - 'sigmoid': nn.Sigmoid(), - } - if not isinstance(activation, list): - activation = [activation] * (len(size_layer) - 2) - elif len(activation) == len(size_layer) - 2: - pass - else: - raise ValueError( - f"the length of activation function list except {len(size_layer) - 2} but got {len(activation)}!") - self.hidden_active = [] - for func in activation: - if callable(func): - self.hidden_active.append(func) - elif func.lower() in actives: - self.hidden_active.append(actives[func]) - else: - raise ValueError("should set activation correctly: {}".format(activation)) - if self.output_activation is not None: - if callable(self.output_activation): - pass - elif self.output_activation.lower() in actives: - self.output_activation = actives[self.output_activation] - else: - raise ValueError("should set activation correctly: {}".format(activation)) - initial_parameter(self, initial_method) - - def forward(self, x): - r""" - :param torch.Tensor x: MLP接受的输入 - :return: torch.Tensor : MLP的输出结果 - """ - for layer, func in zip(self.hiddens, self.hidden_active): - x = self.dropout(func(layer(x))) - x = self.output(x) - if self.output_activation is not None: - x = self.output_activation(x) - x = self.dropout(x) - return x diff --git a/fastNLP/modules/decoder/seq2seq_decoder.py b/fastNLP/modules/decoder/seq2seq_decoder.py deleted file mode 100644 index 320a7254..00000000 --- a/fastNLP/modules/decoder/seq2seq_decoder.py +++ /dev/null @@ -1,416 +0,0 @@ -r"""undocumented""" -from typing import Union, Tuple -import math - -import torch -from torch import nn -import torch.nn.functional as F -from ..attention import AttentionLayer, MultiHeadAttention -from ...embeddings import StaticEmbedding -from ...embeddings.utils import get_embeddings -from .seq2seq_state import State, LSTMState, TransformerState - - -__all__ = ['Seq2SeqDecoder', 'TransformerSeq2SeqDecoder', 'LSTMSeq2SeqDecoder'] - - -class Seq2SeqDecoder(nn.Module): - """ - Sequence-to-Sequence 
Decoder的基类。一定需要实现forward、decode函数,剩下的函数根据需要实现。每个Seq2SeqDecoder都应该有相应的State对象 - 用来承载该Decoder所需要的Encoder输出、Decoder需要记录的历史信息(例如LSTM的hidden信息)。 - - """ - def __init__(self): - super().__init__() - - def forward(self, tokens, state, **kwargs): - """ - - :param torch.LongTensor tokens: bsz x max_len - :param State state: state包含了encoder的输出以及decode之前的内容 - :return: 返回值可以为bsz x max_len x vocab_size的Tensor,也可以是一个list,但是第一个元素必须是词的预测分布 - """ - raise NotImplemented - - def reorder_states(self, indices, states): - """ - 根据indices重新排列states中的状态,在beam search进行生成时,会用到该函数。 - - :param torch.LongTensor indices: - :param State states: - :return: - """ - assert isinstance(states, State), f"`states` should be of type State instead of {type(states)}" - states.reorder_state(indices) - - def init_state(self, encoder_output, encoder_mask): - """ - 初始化一个state对象,用来记录了encoder的输出以及decode已经完成的部分。 - - :param Union[torch.Tensor, list, tuple] encoder_output: 如果不为None,内部元素需要为torch.Tensor, 默认其中第一维是batch - 维度 - :param Union[torch.Tensor, list, tuple] encoder_mask: 如果部位None,内部元素需要torch.Tensor, 默认其中第一维是batch - 维度 - :param kwargs: - :return: State, 返回一个State对象,记录了encoder的输出 - """ - state = State(encoder_output, encoder_mask) - return state - - def decode(self, tokens, state): - """ - 根据states中的内容,以及tokens中的内容进行之后的生成。 - - :param torch.LongTensor tokens: bsz x max_len, 截止到上一个时刻所有的token输出。 - :param State state: 记录了encoder输出与decoder过去状态 - :return: torch.FloatTensor: bsz x vocab_size, 输出的是下一个时刻的分布 - """ - outputs = self(state=state, tokens=tokens) - if isinstance(outputs, torch.Tensor): - return outputs[:, -1] - else: - raise RuntimeError("Unrecognized output from the `forward()` function. Please override the `decode()` function.") - - -class TiedEmbedding(nn.Module): - """ - 用于将weight和原始weight绑定 - - """ - def __init__(self, weight): - super().__init__() - self.weight = weight # vocab_size x embed_size - - def forward(self, x): - """ - - :param torch.FloatTensor x: bsz x * x embed_size - :return: torch.FloatTensor bsz x * x vocab_size - """ - return torch.matmul(x, self.weight.t()) - - -def get_binded_decoder_output_embed(embed): - """ - 给定一个embedding,输出对应的绑定的embedding,输出对象为TiedEmbedding - - :param embed: - :return: - """ - if isinstance(embed, StaticEmbedding): - for idx, map2idx in enumerate(embed.words_to_words): - assert idx == map2idx, "Invalid StaticEmbedding for Decoder, please check:(1) whether the vocabulary " \ - "include `no_create_entry=True` word; (2) StaticEmbedding should not initialize with " \ - "`lower=True` or `min_freq!=1`." - elif not isinstance(embed, nn.Embedding): - raise TypeError("Only nn.Embedding or StaticEmbedding is allowed for binding.") - - return TiedEmbedding(embed.weight) - - -class LSTMSeq2SeqDecoder(Seq2SeqDecoder): - def __init__(self, embed: Union[nn.Module, StaticEmbedding, Tuple[int, int]], num_layers = 3, hidden_size = 300, - dropout = 0.3, bind_decoder_input_output_embed = True, attention=True): - """ - LSTM的Decoder - - :param nn.Module,tuple embed: decoder输入的embedding. - :param int num_layers: 多少层LSTM - :param int hidden_size: 隐藏层大小, 该值也被认为是encoder的输出维度大小 - :param dropout: Dropout的大小 - :param bool bind_decoder_input_output_embed: 是否将输出层和输入层的词向量绑定在一起(即为同一个),若embed为StaticEmbedding, - 则StaticEmbedding的vocab不能包含no_create_entry的token,同时StaticEmbedding初始化时lower为False, min_freq=1. 
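TiedEmbedding above implements output-layer weight tying as nothing more than a matmul with the transposed embedding matrix, so the softmax layer adds no parameters of its own and a word's input and output representations stay identical. Self-contained sketch::

    import torch
    from torch import nn

    embed = nn.Embedding(100, 16)               # vocab_size x embed_size
    x = torch.randn(2, 7, 16)                   # decoder states projected back to embed_size
    logits = torch.matmul(x, embed.weight.t())  # (2, 7, 100): one score per vocabulary entry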
- :param bool attention: 是否使用attention - """ - super().__init__() - self.embed = get_embeddings(init_embed=embed) - self.embed_dim = embed.embedding_dim - - if bind_decoder_input_output_embed: - self.output_layer = get_binded_decoder_output_embed(self.embed) - else: # 不需要bind - self.output_embed = get_embeddings((self.embed.num_embeddings, self.embed.embedding_dim)) - self.output_layer = TiedEmbedding(self.output_embed.weight) - - self.hidden_size = hidden_size - self.num_layers = num_layers - self.lstm = nn.LSTM(input_size=self.embed_dim + hidden_size, hidden_size=hidden_size, num_layers=num_layers, - batch_first=True, bidirectional=False, dropout=dropout if num_layers>1 else 0) - - self.attention_layer = AttentionLayer(hidden_size, hidden_size, hidden_size) if attention else None - self.output_proj = nn.Linear(hidden_size, self.embed_dim) - self.dropout_layer = nn.Dropout(dropout) - - def forward(self, tokens, state, return_attention=False): - """ - - :param torch.LongTensor tokens: batch x max_len - :param LSTMState state: 保存encoder输出和decode状态的State对象 - :param bool return_attention: 是否返回attention的的score - :return: bsz x max_len x vocab_size; 如果return_attention=True, 还会返回bsz x max_len x encode_length - """ - src_output = state.encoder_output - encoder_mask = state.encoder_mask - - assert tokens.size(1)>state.decode_length, "The state does not match the tokens." - tokens = tokens[:, state.decode_length:] - x = self.embed(tokens) - - attn_weights = [] if self.attention_layer is not None else None # 保存attention weight, batch,tgt_seq,src_seq - input_feed = state.input_feed - decoder_out = [] - - cur_hidden = state.hidden - cur_cell = state.cell - - # 开始计算 - for i in range(tokens.size(1)): - input = torch.cat( - (x[:, i:i + 1, :], - input_feed[:, None, :] - ), - dim=2 - ) # batch,1,2*dim - _, (cur_hidden, cur_cell) = self.lstm(input, hx=(cur_hidden, cur_cell)) # hidden/cell保持原来的size - if self.attention_layer is not None: - input_feed, attn_weight = self.attention_layer(cur_hidden[-1], src_output, encoder_mask) - attn_weights.append(attn_weight) - else: - input_feed = cur_hidden[-1] - - state.input_feed = input_feed # batch, hidden - state.hidden = cur_hidden - state.cell = cur_cell - state.decode_length += 1 - decoder_out.append(input_feed) - - decoder_out = torch.stack(decoder_out, dim=1) # batch,seq_len,hidden - decoder_out = self.dropout_layer(decoder_out) - if attn_weights is not None: - attn_weights = torch.cat(attn_weights, dim=1) # batch, tgt_len, src_len - - decoder_out = self.output_proj(decoder_out) - feats = self.output_layer(decoder_out) - - if return_attention: - return feats, attn_weights - return feats - - def init_state(self, encoder_output, encoder_mask) -> LSTMState: - """ - - :param encoder_output: 输入可以有两种情况(1) 输入为一个tuple,包含三个内容(encoder_output, (hidden, cell)),其中encoder_output: - bsz x max_len x hidden_size, hidden: bsz x hidden_size, cell:bsz x hidden_size,一般使用LSTMEncoder的最后一层的 - hidden state和cell state来赋值这两个值 - (2) 只有encoder_output: bsz x max_len x hidden_size, 这种情况下hidden和cell使用0初始化 - :param torch.ByteTensor encoder_mask: bsz x max_len, 为0的位置是padding, 用来指示source中哪些不需要attend - :return: - """ - if not isinstance(encoder_output, torch.Tensor): - encoder_output, (hidden, cell) = encoder_output - else: - hidden = cell = None - assert encoder_output.ndim==3 - assert encoder_mask.size()==encoder_output.size()[:2] - assert encoder_output.size(-1)==self.hidden_size, "The dimension of encoder outputs should be the same with " \ - "the hidden_size." 
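The decode loop in forward() above implements input feeding: at every step the previous attention output is concatenated onto the current token embedding before entering the LSTM. The shape bookkeeping in isolation::

    import torch

    embed_dim, hidden_size = 3, 4
    x_t = torch.randn(2, 1, embed_dim)          # current token embedding
    input_feed = torch.randn(2, hidden_size)    # previous attention output (or hidden state)
    step_input = torch.cat((x_t, input_feed[:, None, :]), dim=2)  # (2, 1, embed_dim + hidden_size)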
- - t = [hidden, cell] - for idx in range(2): - v = t[idx] - if v is None: - v = encoder_output.new_zeros(self.num_layers, encoder_output.size(0), self.hidden_size) - else: - assert v.dim()==2 - assert v.size(-1)==self.hidden_size - v = v[None].repeat(self.num_layers, 1, 1) # num_layers x bsz x hidden_size - t[idx] = v - - state = LSTMState(encoder_output, encoder_mask, t[0], t[1]) - - return state - - -class TransformerSeq2SeqDecoderLayer(nn.Module): - def __init__(self, d_model = 512, n_head = 8, dim_ff = 2048, dropout = 0.1, layer_idx = None): - """ - - :param int d_model: 输入、输出的维度 - :param int n_head: 多少个head,需要能被d_model整除 - :param int dim_ff: - :param float dropout: - :param int layer_idx: layer的编号 - """ - super().__init__() - self.d_model = d_model - self.n_head = n_head - self.dim_ff = dim_ff - self.dropout = dropout - self.layer_idx = layer_idx # 记录layer的层索引,以方便获取state的信息 - - self.self_attn = MultiHeadAttention(d_model, n_head, dropout, layer_idx) - self.self_attn_layer_norm = nn.LayerNorm(d_model) - - self.encoder_attn = MultiHeadAttention(d_model, n_head, dropout, layer_idx) - self.encoder_attn_layer_norm = nn.LayerNorm(d_model) - - self.ffn = nn.Sequential(nn.Linear(self.d_model, self.dim_ff), - nn.ReLU(), - nn.Dropout(dropout), - nn.Linear(self.dim_ff, self.d_model), - nn.Dropout(dropout)) - - self.final_layer_norm = nn.LayerNorm(self.d_model) - - def forward(self, x, encoder_output, encoder_mask=None, self_attn_mask=None, state=None): - """ - - :param x: (batch, seq_len, dim), decoder端的输入 - :param encoder_output: (batch,src_seq_len,dim), encoder的输出 - :param encoder_mask: batch,src_seq_len, 为1的地方需要attend - :param self_attn_mask: seq_len, seq_len,下三角的mask矩阵,只在训练时传入 - :param TransformerState state: 只在inference阶段传入 - :return: - """ - - # self attention part - residual = x - x = self.self_attn_layer_norm(x) - x, _ = self.self_attn(query=x, - key=x, - value=x, - attn_mask=self_attn_mask, - state=state) - - x = F.dropout(x, p=self.dropout, training=self.training) - x = residual + x - - # encoder attention part - residual = x - x = self.encoder_attn_layer_norm(x) - x, attn_weight = self.encoder_attn(query=x, - key=encoder_output, - value=encoder_output, - key_mask=encoder_mask, - state=state) - x = F.dropout(x, p=self.dropout, training=self.training) - x = residual + x - - # ffn - residual = x - x = self.final_layer_norm(x) - x = self.ffn(x) - x = residual + x - - return x, attn_weight - - -class TransformerSeq2SeqDecoder(Seq2SeqDecoder): - def __init__(self, embed: Union[nn.Module, StaticEmbedding, Tuple[int, int]], pos_embed: nn.Module = None, - d_model = 512, num_layers=6, n_head = 8, dim_ff = 2048, dropout = 0.1, - bind_decoder_input_output_embed = True): - """ - - :param embed: 输入token的embedding - :param nn.Module pos_embed: 位置embedding - :param int d_model: 输出、输出的大小 - :param int num_layers: 多少层 - :param int n_head: 多少个head - :param int dim_ff: FFN 的中间大小 - :param float dropout: Self-Attention和FFN中的dropout的大小 - :param bool bind_decoder_input_output_embed: 是否将输出层和输入层的词向量绑定在一起(即为同一个),若embed为StaticEmbedding, - 则StaticEmbedding的vocab不能包含no_create_entry的token,同时StaticEmbedding初始化时lower为False, min_freq=1. 
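Each sublayer above follows the same pre-norm residual pattern: LayerNorm first, then the sublayer, dropout, and the residual add. Factored out as a standalone helper (a sketch, not the fastNLP API)::

    import torch
    from torch import nn
    import torch.nn.functional as F

    def pre_norm_sublayer(x, norm, sublayer, p, training):
        residual = x
        x = F.dropout(sublayer(norm(x)), p=p, training=training)
        return residual + x

    y = pre_norm_sublayer(torch.randn(2, 5, 8), nn.LayerNorm(8), nn.Linear(8, 8), p=0.1, training=True)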
- """ - super().__init__() - - self.embed = get_embeddings(embed) - self.pos_embed = pos_embed - - if bind_decoder_input_output_embed: - self.output_layer = get_binded_decoder_output_embed(self.embed) - else: # 不需要bind - self.output_embed = get_embeddings((self.embed.num_embeddings, self.embed.embedding_dim)) - self.output_layer = TiedEmbedding(self.output_embed.weight) - - self.num_layers = num_layers - self.d_model = d_model - self.n_head = n_head - self.dim_ff = dim_ff - self.dropout = dropout - - self.input_fc = nn.Linear(self.embed.embedding_dim, d_model) - self.layer_stacks = nn.ModuleList([TransformerSeq2SeqDecoderLayer(d_model, n_head, dim_ff, dropout, layer_idx) - for layer_idx in range(num_layers)]) - - self.embed_scale = math.sqrt(d_model) - self.layer_norm = nn.LayerNorm(d_model) - self.output_fc = nn.Linear(self.d_model, self.embed.embedding_dim) - - def forward(self, tokens, state, return_attention=False): - """ - - :param torch.LongTensor tokens: batch x tgt_len,decode的词 - :param TransformerState state: 用于记录encoder的输出以及decode状态的对象,可以通过init_state()获取 - :param bool return_attention: 是否返回对encoder结果的attention score - :return: bsz x max_len x vocab_size; 如果return_attention=True, 还会返回bsz x max_len x encode_length - """ - - encoder_output = state.encoder_output - encoder_mask = state.encoder_mask - - assert state.decode_length1: - triangle_mask = self._get_triangle_mask(tokens) - else: - triangle_mask = None - - for layer in self.layer_stacks: - x, attn_weight = layer(x=x, - encoder_output=encoder_output, - encoder_mask=encoder_mask, - self_attn_mask=triangle_mask, - state=state - ) - - x = self.layer_norm(x) # batch, tgt_len, dim - x = self.output_fc(x) - feats = self.output_layer(x) - - if return_attention: - return feats, attn_weight - return feats - - def init_state(self, encoder_output, encoder_mask): - """ - 初始化一个TransformerState用于forward - - :param torch.FloatTensor encoder_output: bsz x max_len x d_model, encoder的输出 - :param torch.ByteTensor encoder_mask: bsz x max_len, 为1的位置需要attend。 - :return: TransformerState - """ - if isinstance(encoder_output, torch.Tensor): - encoder_output = encoder_output - elif isinstance(encoder_output, (list, tuple)): - encoder_output = encoder_output[0] # 防止是LSTMEncoder的输出结果 - else: - raise TypeError("Unsupported `encoder_output` for TransformerSeq2SeqDecoder") - state = TransformerState(encoder_output, encoder_mask, num_decoder_layer=self.num_layers) - return state - - @staticmethod - def _get_triangle_mask(tokens): - tensor = tokens.new_ones(tokens.size(1), tokens.size(1)) - return torch.tril(tensor).byte() - - diff --git a/fastNLP/modules/decoder/seq2seq_state.py b/fastNLP/modules/decoder/seq2seq_state.py deleted file mode 100644 index de200f86..00000000 --- a/fastNLP/modules/decoder/seq2seq_state.py +++ /dev/null @@ -1,145 +0,0 @@ -r""" -每个Decoder都有对应的State用来记录encoder的输出以及Decode的历史记录 - -""" - -__all__ = [ - 'State', - "LSTMState", - "TransformerState" -] - -from typing import Union -import torch - - -class State: - def __init__(self, encoder_output=None, encoder_mask=None, **kwargs): - """ - 每个Decoder都有对应的State对象用来承载encoder的输出以及当前时刻之前的decode状态。 - - :param Union[torch.Tensor, list, tuple] encoder_output: 如果不为None,内部元素需要为torch.Tensor, 默认其中第一维是batch - 维度 - :param Union[torch.Tensor, list, tuple] encoder_mask: 如果部位None,内部元素需要torch.Tensor, 默认其中第一维是batch - 维度 - :param kwargs: - """ - self.encoder_output = encoder_output - self.encoder_mask = encoder_mask - self._decode_length = 0 - - @property - def num_samples(self): - """ - 
返回的State中包含的是多少个sample的encoder状态,主要用于Generate的时候确定batch的大小。 - - :return: - """ - if self.encoder_output is not None: - return self.encoder_output.size(0) - else: - return None - - @property - def decode_length(self): - """ - 当前Decode到哪个token了,decoder只会从decode_length之后的token开始decode, 为0说明还没开始decode。 - - :return: - """ - return self._decode_length - - @decode_length.setter - def decode_length(self, value): - self._decode_length = value - - def _reorder_state(self, state: Union[torch.Tensor, list, tuple], indices: torch.LongTensor, dim: int = 0): - if isinstance(state, torch.Tensor): - state = state.index_select(index=indices, dim=dim) - elif isinstance(state, list): - for i in range(len(state)): - assert state[i] is not None - state[i] = self._reorder_state(state[i], indices, dim) - elif isinstance(state, tuple): - tmp_list = [] - for i in range(len(state)): - assert state[i] is not None - tmp_list.append(self._reorder_state(state[i], indices, dim)) - state = tuple(tmp_list) - else: - raise TypeError(f"Cannot reorder data of type:{type(state)}") - - return state - - def reorder_state(self, indices: torch.LongTensor): - if self.encoder_mask is not None: - self.encoder_mask = self._reorder_state(self.encoder_mask, indices) - if self.encoder_output is not None: - self.encoder_output = self._reorder_state(self.encoder_output, indices) - - -class LSTMState(State): - def __init__(self, encoder_output, encoder_mask, hidden, cell): - """ - LSTMDecoder对应的State,保存encoder的输出以及LSTM解码过程中的一些中间状态 - - :param torch.FloatTensor encoder_output: bsz x src_seq_len x encode_output_size,encoder的输出 - :param torch.BoolTensor encoder_mask: bsz x src_seq_len, 为0的地方是padding - :param torch.FloatTensor hidden: num_layers x bsz x hidden_size, 上个时刻的hidden状态 - :param torch.FloatTensor cell: num_layers x bsz x hidden_size, 上个时刻的cell状态 - """ - super().__init__(encoder_output, encoder_mask) - self.hidden = hidden - self.cell = cell - self._input_feed = hidden[0] # 默认是上一个时刻的输出 - - @property - def input_feed(self): - """ - LSTMDecoder中每个时刻的输入会把上个token的embedding和input_feed拼接起来输入到下个时刻,在LSTMDecoder不使用attention时, - input_feed即上个时刻的hidden state, 否则是attention layer的输出。 - :return: torch.FloatTensor, bsz x hidden_size - """ - return self._input_feed - - @input_feed.setter - def input_feed(self, value): - self._input_feed = value - - def reorder_state(self, indices: torch.LongTensor): - super().reorder_state(indices) - self.hidden = self._reorder_state(self.hidden, indices, dim=1) - self.cell = self._reorder_state(self.cell, indices, dim=1) - if self.input_feed is not None: - self.input_feed = self._reorder_state(self.input_feed, indices, dim=0) - - -class TransformerState(State): - def __init__(self, encoder_output, encoder_mask, num_decoder_layer): - """ - 与TransformerSeq2SeqDecoder对应的State, - - :param torch.FloatTensor encoder_output: bsz x encode_max_len x encoder_output_size, encoder的输出 - :param torch.ByteTensor encoder_mask: bsz x encode_max_len 为1的地方需要attend - :param int num_decoder_layer: decode有多少层 - """ - super().__init__(encoder_output, encoder_mask) - self.encoder_key = [None] * num_decoder_layer # 每一个元素 bsz x encoder_max_len x key_dim - self.encoder_value = [None] * num_decoder_layer # 每一个元素 bsz x encoder_max_len x value_dim - self.decoder_prev_key = [None] * num_decoder_layer # 每一个元素 bsz x decode_length x key_dim - self.decoder_prev_value = [None] * num_decoder_layer # 每一个元素 bsz x decode_length x key_dim - - def reorder_state(self, indices: torch.LongTensor): - super().reorder_state(indices) - self.encoder_key = 
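reorder_state above exists for beam search: after each step the surviving hypotheses are re-ranked, so every cached tensor must be re-indexed along its batch dimension, which _reorder_state does with index_select. In isolation::

    import torch

    encoder_output = torch.arange(6.).view(3, 2)  # three samples, feature dim 2
    indices = torch.tensor([2, 2, 0])             # keep sample 2 twice, drop sample 1
    print(encoder_output.index_select(dim=0, index=indices))
    # tensor([[4., 5.],
    #         [4., 5.],
    #         [0., 1.]])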
self._reorder_state(self.encoder_key, indices) - self.encoder_value = self._reorder_state(self.encoder_value, indices) - self.decoder_prev_key = self._reorder_state(self.decoder_prev_key, indices) - self.decoder_prev_value = self._reorder_state(self.decoder_prev_value, indices) - - @property - def decode_length(self): - if self.decoder_prev_key[0] is not None: - return self.decoder_prev_key[0].size(1) - return 0 - - diff --git a/fastNLP/modules/decoder/utils.py b/fastNLP/modules/decoder/utils.py deleted file mode 100644 index 2600bee0..00000000 --- a/fastNLP/modules/decoder/utils.py +++ /dev/null @@ -1,79 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "viterbi_decode" -] -import torch - - -def viterbi_decode(logits, transitions, mask=None, unpad=False): - r""" - 给定一个特征矩阵以及转移分数矩阵,计算出最佳的路径以及对应的分数 - - :param torch.FloatTensor logits: batch_size x max_len x num_tags,特征矩阵。 - :param torch.FloatTensor transitions: n_tags x n_tags,[i, j]位置的值认为是从tag i到tag j的转换; 或者(n_tags+2) x - (n_tags+2), 其中n_tag是start的index, n_tags+1是end的index; 如果要i->j之间不允许越迁,就把transitions中(i,j)设置为很小的 - 负数,例如-10000000.0 - :param torch.ByteTensor mask: batch_size x max_len, 为0的位置认为是pad;如果为None,则认为没有padding。 - :param bool unpad: 是否将结果删去padding。False, 返回的是batch_size x max_len的tensor; True,返回的是 - List[List[int]], 内部的List[int]为每个sequence的label,已经除去pad部分,即每个List[int]的长度是这 - 个sample的有效长度。 - :return: 返回 (paths, scores)。 - paths: 是解码后的路径, 其值参照unpad参数. - scores: torch.FloatTensor, size为(batch_size,), 对应每个最优路径的分数。 - - """ - batch_size, seq_len, n_tags = logits.size() - if transitions.size(0) == n_tags+2: - include_start_end_trans = True - elif transitions.size(0) == n_tags: - include_start_end_trans = False - else: - raise RuntimeError("The shapes of transitions and feats are not " \ - "compatible.") - logits = logits.transpose(0, 1).data # L, B, H - if mask is not None: - mask = mask.transpose(0, 1).data.eq(True) # L, B - else: - mask = logits.new_ones((seq_len, batch_size), dtype=torch.uint8).eq(1) - - trans_score = transitions[:n_tags, :n_tags].view(1, n_tags, n_tags).data - - # dp - vpath = logits.new_zeros((seq_len, batch_size, n_tags), dtype=torch.long) - vscore = logits[0] - if include_start_end_trans: - vscore += transitions[n_tags, :n_tags] - - for i in range(1, seq_len): - prev_score = vscore.view(batch_size, n_tags, 1) - cur_score = logits[i].view(batch_size, 1, n_tags) - score = prev_score + trans_score + cur_score - best_score, best_dst = score.max(1) - vpath[i] = best_dst - vscore = best_score.masked_fill(mask[i].eq(False).view(batch_size, 1), 0) + \ - vscore.masked_fill(mask[i].view(batch_size, 1), 0) - - if include_start_end_trans: - vscore += transitions[:n_tags, n_tags + 1].view(1, -1) - # backtrace - batch_idx = torch.arange(batch_size, dtype=torch.long, device=logits.device) - seq_idx = torch.arange(seq_len, dtype=torch.long, device=logits.device) - lens = (mask.long().sum(0) - 1) - # idxes [L, B], batched idx from seq_len-1 to 0 - idxes = (lens.view(1, -1) - seq_idx.view(-1, 1)) % seq_len - - ans = logits.new_empty((seq_len, batch_size), dtype=torch.long) - ans_score, last_tags = vscore.max(1) - ans[idxes[0], batch_idx] = last_tags - for i in range(seq_len - 1): - last_tags = vpath[idxes[i], batch_idx, last_tags] - ans[idxes[i + 1], batch_idx] = last_tags - ans = ans.transpose(0, 1) - if unpad: - paths = [] - for idx, seq_len in enumerate(lens): - paths.append(ans[idx, :seq_len + 1].tolist()) - else: - paths = ans - return paths, ans_score diff --git a/fastNLP/modules/dropout.py b/fastNLP/modules/dropout.py deleted file 
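A usage sketch for the standalone viterbi_decode above, assuming the pre-removal package; as the code shows, transitions may be either n_tags x n_tags or (n_tags+2) x (n_tags+2) with start/end scores, and the shape decides which::

    import torch
    from fastNLP.modules.decoder import viterbi_decode

    logits = torch.randn(2, 6, 4)    # batch_size x max_len x n_tags
    transitions = torch.randn(4, 4)  # no start/end scores in this variant
    mask = torch.tensor([[1, 1, 1, 1, 1, 1],
                         [1, 1, 1, 0, 0, 0]], dtype=torch.uint8)
    paths, scores = viterbi_decode(logits, transitions, mask=mask, unpad=True)
    # paths[1] has length 3: padded positions are dropped when unpad=True.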
mode 100644 index 62b039b4..00000000 --- a/fastNLP/modules/dropout.py +++ /dev/null @@ -1,24 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "TimestepDropout" -] - -import torch - - -class TimestepDropout(torch.nn.Dropout): - r""" - 传入参数的shape为 ``(batch_size, num_timesteps, embedding_dim)`` - 使用同一个shape为 ``(batch_size, embedding_dim)`` 的mask在每个timestamp上做dropout。 - """ - - def forward(self, x): - dropout_mask = x.new_ones(x.shape[0], x.shape[-1]) - torch.nn.functional.dropout(dropout_mask, self.p, self.training, inplace=True) - dropout_mask = dropout_mask.unsqueeze(1) # [batch_size, 1, embedding_dim] - if self.inplace: - x *= dropout_mask - return - else: - return x * dropout_mask diff --git a/fastNLP/modules/encoder/__init__.py b/fastNLP/modules/encoder/__init__.py deleted file mode 100644 index f9a637a7..00000000 --- a/fastNLP/modules/encoder/__init__.py +++ /dev/null @@ -1,54 +0,0 @@ -r""" -.. todo:: - doc -""" - -__all__ = [ - "ConvolutionCharEncoder", - "LSTMCharEncoder", - - "ConvMaxpool", - - "LSTM", - - "StarTransformer", - - "TransformerEncoder", - - "VarRNN", - "VarLSTM", - "VarGRU", - - "MaxPool", - "MaxPoolWithMask", - "KMaxPool", - "AvgPool", - "AvgPoolWithMask", - - "MultiHeadAttention", - "BiAttention", - "SelfAttention", - - "BertModel", - - "RobertaModel", - - "GPT2Model", - - "LSTMSeq2SeqEncoder", - "TransformerSeq2SeqEncoder", - "Seq2SeqEncoder" -] - -from fastNLP.modules.attention import MultiHeadAttention, BiAttention, SelfAttention -from .bert import BertModel -from .roberta import RobertaModel -from .gpt2 import GPT2Model -from .char_encoder import ConvolutionCharEncoder, LSTMCharEncoder -from .conv_maxpool import ConvMaxpool -from .lstm import LSTM -from .pooling import MaxPool, MaxPoolWithMask, AvgPool, AvgPoolWithMask, KMaxPool -from .star_transformer import StarTransformer -from .transformer import TransformerEncoder -from .variational_rnn import VarRNN, VarLSTM, VarGRU -from .seq2seq_encoder import LSTMSeq2SeqEncoder, TransformerSeq2SeqEncoder, Seq2SeqEncoder diff --git a/fastNLP/modules/encoder/_elmo.py b/fastNLP/modules/encoder/_elmo.py deleted file mode 100644 index 7a2cf4bc..00000000 --- a/fastNLP/modules/encoder/_elmo.py +++ /dev/null @@ -1,540 +0,0 @@ -r"""undocumented -这个页面的代码大量参考了 allenNLP -""" - -__all__ = [] - -from typing import Optional, Tuple, List, Callable - -import torch -import torch.nn as nn -import torch.nn.functional as F -from torch.nn.utils.rnn import PackedSequence, pad_packed_sequence - -from ..utils import get_dropout_mask - - -class LstmCellWithProjection(torch.nn.Module): - r""" - An LSTM with Recurrent Dropout and a projected and clipped hidden state and - memory. Note: this implementation is slower than the native Pytorch LSTM because - it cannot make use of CUDNN optimizations for stacked RNNs due to and - variational dropout and the custom nature of the cell state. - Parameters - ---------- - input_size : ``int``, required. - The dimension of the inputs to the LSTM. - hidden_size : ``int``, required. - The dimension of the outputs of the LSTM. - cell_size : ``int``, required. - The dimension of the memory cell used for the LSTM. - go_forward: ``bool``, optional (default = True) - The direction in which the LSTM is applied to the sequence. - Forwards by default, or backwards if False. - recurrent_dropout_probability: ``float``, optional (default = 0.0) - The dropout probability to be used in a dropout scheme as stated in - `A Theoretically Grounded Application of Dropout in Recurrent Neural Networks - `_ . 
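TimestepDropout above draws a single Bernoulli mask per sequence and reuses it at every timestep, unlike ordinary dropout, which resamples per position. The equivalent arithmetic written out, with p = 0.3 as an arbitrary example::

    import torch

    x = torch.randn(2, 5, 4)                          # batch, num_timesteps, embedding_dim
    keep = (torch.rand(2, 1, 4) > 0.3).float() / 0.7  # one rescaled mask per sample
    y = x * keep                                      # same channels zeroed at every timestep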
Implementation wise, this simply - applies a fixed dropout mask per sequence to the recurrent connection of the - LSTM. - state_projection_clip_value: ``float``, optional, (default = None) - The magnitude with which to clip the hidden_state after projecting it. - memory_cell_clip_value: ``float``, optional, (default = None) - The magnitude with which to clip the memory cell. - Returns - ------- - output_accumulator : ``torch.FloatTensor`` - The outputs of the LSTM for each timestep. A tensor of shape - (batch_size, max_timesteps, hidden_size) where for a given batch - element, all outputs past the sequence length for that batch are - zero tensors. - final_state: ``Tuple[torch.FloatTensor, torch.FloatTensor]`` - The final (state, memory) states of the LSTM, with shape - (1, batch_size, hidden_size) and (1, batch_size, cell_size) - respectively. The first dimension is 1 in order to match the Pytorch - API for returning stacked LSTM states. - """ - - def __init__(self, - input_size: int, - hidden_size: int, - cell_size: int, - go_forward: bool = True, - recurrent_dropout_probability: float = 0.0, - memory_cell_clip_value: Optional[float] = None, - state_projection_clip_value: Optional[float] = None) -> None: - super(LstmCellWithProjection, self).__init__() - # Required to be wrapped with a :class:`PytorchSeq2SeqWrapper`. - self.input_size = input_size - self.hidden_size = hidden_size - self.cell_size = cell_size - - self.go_forward = go_forward - self.state_projection_clip_value = state_projection_clip_value - self.memory_cell_clip_value = memory_cell_clip_value - self.recurrent_dropout_probability = recurrent_dropout_probability - - # We do the projections for all the gates all at once. - self.input_linearity = torch.nn.Linear(input_size, 4 * cell_size, bias=False) - self.state_linearity = torch.nn.Linear(hidden_size, 4 * cell_size, bias=True) - - # Additional projection matrix for making the hidden state smaller. - self.state_projection = torch.nn.Linear(cell_size, hidden_size, bias=False) - self.reset_parameters() - - def reset_parameters(self): - # Use sensible default initializations for parameters. - nn.init.orthogonal_(self.input_linearity.weight.data) - nn.init.orthogonal_(self.state_linearity.weight.data) - - self.state_linearity.bias.data.fill_(0.0) - # Initialize forget gate biases to 1.0 as per An Empirical - # Exploration of Recurrent Network Architectures, (Jozefowicz, 2015). - self.state_linearity.bias.data[self.cell_size:2 * self.cell_size].fill_(1.0) - - def forward(self, # pylint: disable=arguments-differ - inputs: torch.FloatTensor, - batch_lengths: List[int], - initial_state: Optional[Tuple[torch.Tensor, torch.Tensor]] = None): - r""" - Parameters - ---------- - inputs : ``torch.FloatTensor``, required. - A tensor of shape (batch_size, num_timesteps, input_size) - to apply the LSTM over. - batch_lengths : ``List[int]``, required. - A list of length batch_size containing the lengths of the sequences in batch. - initial_state : ``Tuple[torch.Tensor, torch.Tensor]``, optional, (default = None) - A tuple (state, memory) representing the initial hidden state and memory - of the LSTM. The ``state`` has shape (1, batch_size, hidden_size) and the - ``memory`` has shape (1, batch_size, cell_size). - Returns - ------- - output_accumulator : ``torch.FloatTensor`` - The outputs of the LSTM for each timestep. A tensor of shape - (batch_size, max_timesteps, hidden_size) where for a given batch - element, all outputs past the sequence length for that batch are - zero tensors. 
- final_state : ``Tuple[``torch.FloatTensor, torch.FloatTensor]`` - A tuple (state, memory) representing the initial hidden state and memory - of the LSTM. The ``state`` has shape (1, batch_size, hidden_size) and the - ``memory`` has shape (1, batch_size, cell_size). - """ - batch_size = inputs.size()[0] - total_timesteps = inputs.size()[1] - - # We have to use this '.data.new().fill_' pattern to create tensors with the correct - # type - forward has no knowledge of whether these are torch.Tensors or torch.cuda.Tensors. - output_accumulator = inputs.data.new(batch_size, - total_timesteps, - self.hidden_size).fill_(0) - if initial_state is None: - full_batch_previous_memory = inputs.data.new(batch_size, - self.cell_size).fill_(0) - full_batch_previous_state = inputs.data.new(batch_size, - self.hidden_size).fill_(0) - else: - full_batch_previous_state = initial_state[0].squeeze(0) - full_batch_previous_memory = initial_state[1].squeeze(0) - - current_length_index = batch_size - 1 if self.go_forward else 0 - if self.recurrent_dropout_probability > 0.0 and self.training: - dropout_mask = get_dropout_mask(self.recurrent_dropout_probability, - full_batch_previous_state) - else: - dropout_mask = None - - for timestep in range(total_timesteps): - # The index depends on which end we start. - index = timestep if self.go_forward else total_timesteps - timestep - 1 - - # What we are doing here is finding the index into the batch dimension - # which we need to use for this timestep, because the sequences have - # variable length, so once the index is greater than the length of this - # particular batch sequence, we no longer need to do the computation for - # this sequence. The key thing to recognise here is that the batch inputs - # must be _ordered_ by length from longest (first in batch) to shortest - # (last) so initially, we are going forwards with every sequence and as we - # pass the index at which the shortest elements of the batch finish, - # we stop picking them up for the computation. - if self.go_forward: - while batch_lengths[current_length_index] <= index: - current_length_index -= 1 - # If we're going backwards, we are _picking up_ more indices. - else: - # First conditional: Are we already at the maximum number of elements in the batch? - # Second conditional: Does the next shortest sequence beyond the current batch - # index require computation use this timestep? - while current_length_index < (len(batch_lengths) - 1) and \ - batch_lengths[current_length_index + 1] > index: - current_length_index += 1 - - # Actually get the slices of the batch which we - # need for the computation at this timestep. - # shape (batch_size, cell_size) - previous_memory = full_batch_previous_memory[0: current_length_index + 1].clone() - # Shape (batch_size, hidden_size) - previous_state = full_batch_previous_state[0: current_length_index + 1].clone() - # Shape (batch_size, input_size) - timestep_input = inputs[0: current_length_index + 1, index] - - # Do the projections for all the gates all at once. - # Both have shape (batch_size, 4 * cell_size) - projected_input = self.input_linearity(timestep_input) - projected_state = self.state_linearity(previous_state) - - # Main LSTM equations using relevant chunks of the big linear - # projections of the hidden state and inputs. 
- input_gate = torch.sigmoid(projected_input[:, (0 * self.cell_size):(1 * self.cell_size)] + - projected_state[:, (0 * self.cell_size):(1 * self.cell_size)]) - forget_gate = torch.sigmoid(projected_input[:, (1 * self.cell_size):(2 * self.cell_size)] + - projected_state[:, (1 * self.cell_size):(2 * self.cell_size)]) - memory_init = torch.tanh(projected_input[:, (2 * self.cell_size):(3 * self.cell_size)] + - projected_state[:, (2 * self.cell_size):(3 * self.cell_size)]) - output_gate = torch.sigmoid(projected_input[:, (3 * self.cell_size):(4 * self.cell_size)] + - projected_state[:, (3 * self.cell_size):(4 * self.cell_size)]) - memory = input_gate * memory_init + forget_gate * previous_memory - - # Here is the non-standard part of this LSTM cell; first, we clip the - # memory cell, then we project the output of the timestep to a smaller size - # and again clip it. - - if self.memory_cell_clip_value: - # pylint: disable=invalid-unary-operand-type - memory = torch.clamp(memory, -self.memory_cell_clip_value, self.memory_cell_clip_value) - - # shape (current_length_index, cell_size) - pre_projection_timestep_output = output_gate * torch.tanh(memory) - - # shape (current_length_index, hidden_size) - timestep_output = self.state_projection(pre_projection_timestep_output) - if self.state_projection_clip_value: - # pylint: disable=invalid-unary-operand-type - timestep_output = torch.clamp(timestep_output, - -self.state_projection_clip_value, - self.state_projection_clip_value) - - # Only do dropout if the dropout prob is > 0.0 and we are in training mode. - if dropout_mask is not None: - timestep_output = timestep_output * dropout_mask[0: current_length_index + 1] - - # We've been doing computation with less than the full batch, so here we create a new - # variable for the the whole batch at this timestep and insert the result for the - # relevant elements of the batch into it. - full_batch_previous_memory = full_batch_previous_memory.data.clone() - full_batch_previous_state = full_batch_previous_state.data.clone() - full_batch_previous_memory[0:current_length_index + 1] = memory - full_batch_previous_state[0:current_length_index + 1] = timestep_output - output_accumulator[0:current_length_index + 1, index] = timestep_output - - # Mimic the pytorch API by returning state in the following shape: - # (num_layers * num_directions, batch_size, ...). As this - # LSTM cell cannot be stacked, the first dimension here is just 1. 
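The gate block above is the standard LSTM cell arithmetic, with the non-standard twist that the cell output is clipped and then projected from cell_size down to hidden_size. The core equations as a shape sketch, with random tensors standing in for the learned projections::

    import torch

    batch, cell_size, hidden_size = 3, 6, 4
    projected_input = torch.randn(batch, 4 * cell_size)  # stand-in for input_linearity(x_t)
    projected_state = torch.randn(batch, 4 * cell_size)  # stand-in for state_linearity(h_{t-1})
    previous_memory = torch.randn(batch, cell_size)
    proj_w = torch.randn(cell_size, hidden_size)         # stand-in for the state projection

    i_in, f_in, g_in, o_in = projected_input.chunk(4, dim=1)
    i_st, f_st, g_st, o_st = projected_state.chunk(4, dim=1)
    input_gate = torch.sigmoid(i_in + i_st)
    forget_gate = torch.sigmoid(f_in + f_st)
    memory_init = torch.tanh(g_in + g_st)
    output_gate = torch.sigmoid(o_in + o_st)
    memory = input_gate * memory_init + forget_gate * previous_memory
    hidden = (output_gate * torch.tanh(memory)) @ proj_w  # (batch, hidden_size)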
- final_state = (full_batch_previous_state.unsqueeze(0), - full_batch_previous_memory.unsqueeze(0)) - - return output_accumulator, final_state - - -class LstmbiLm(nn.Module): - def __init__(self, config): - super(LstmbiLm, self).__init__() - self.config = config - self.encoder = nn.LSTM(self.config['lstm']['projection_dim'], - self.config['lstm']['dim'], - num_layers=self.config['lstm']['n_layers'], - bidirectional=True, - batch_first=True, - dropout=self.config['dropout']) - self.projection = nn.Linear(self.config['lstm']['dim'], self.config['lstm']['projection_dim'], bias=True) - - def forward(self, inputs, seq_len): - sort_lens, sort_idx = torch.sort(seq_len, dim=0, descending=True) - inputs = inputs[sort_idx] - inputs = nn.utils.rnn.pack_padded_sequence(inputs, sort_lens.cpu(), batch_first=self.batch_first) - output, hx = self.encoder(inputs, None) # -> [N,L,C] - output, _ = nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first) - _, unsort_idx = torch.sort(sort_idx, dim=0, descending=False) - output = output[unsort_idx] - forward, backward = output.split(self.config['lstm']['dim'], 2) - return torch.cat([self.projection(forward), self.projection(backward)], dim=2) - - -class ElmobiLm(torch.nn.Module): - def __init__(self, config): - super(ElmobiLm, self).__init__() - self.config = config - input_size = config['lstm']['projection_dim'] - hidden_size = config['lstm']['projection_dim'] - cell_size = config['lstm']['dim'] - num_layers = config['lstm']['n_layers'] - memory_cell_clip_value = config['lstm']['cell_clip'] - state_projection_clip_value = config['lstm']['proj_clip'] - recurrent_dropout_probability = 0.0 - - self.input_size = input_size - self.hidden_size = hidden_size - self.num_layers = num_layers - self.cell_size = cell_size - - forward_layers = [] - backward_layers = [] - - lstm_input_size = input_size - go_forward = True - for layer_index in range(num_layers): - forward_layer = LstmCellWithProjection(lstm_input_size, - hidden_size, - cell_size, - go_forward, - recurrent_dropout_probability, - memory_cell_clip_value, - state_projection_clip_value) - backward_layer = LstmCellWithProjection(lstm_input_size, - hidden_size, - cell_size, - not go_forward, - recurrent_dropout_probability, - memory_cell_clip_value, - state_projection_clip_value) - lstm_input_size = hidden_size - - self.add_module('forward_layer_{}'.format(layer_index), forward_layer) - self.add_module('backward_layer_{}'.format(layer_index), backward_layer) - forward_layers.append(forward_layer) - backward_layers.append(backward_layer) - self.forward_layers = forward_layers - self.backward_layers = backward_layers - - def forward(self, inputs, seq_len): - r""" - - :param inputs: batch_size x max_len x embed_size - :param seq_len: batch_size - :return: torch.FloatTensor. num_layers x batch_size x max_len x hidden_size - """ - max_len = inputs.size(1) - sort_lens, sort_idx = torch.sort(seq_len, dim=0, descending=True) - inputs = inputs[sort_idx] - inputs = nn.utils.rnn.pack_padded_sequence(inputs, sort_lens.cpu(), batch_first=True) - output, _ = self._lstm_forward(inputs, None) - _, unsort_idx = torch.sort(sort_idx, dim=0, descending=False) - output = output[:, unsort_idx] - return output - - def _lstm_forward(self, - inputs: PackedSequence, - initial_state: Optional[Tuple[torch.Tensor, torch.Tensor]] = None) -> \ - Tuple[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]]: - r""" - Parameters - ---------- - inputs : ``PackedSequence``, required. 
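Both forward methods above lean on the usual sort -> pack -> run -> unpack -> unsort dance that PyTorch packed sequences require. (Note that LstmbiLm.forward reads self.batch_first, which its __init__ never sets; the standalone sketch below simply passes batch_first=True.)::

    import torch
    from torch import nn

    rnn = nn.LSTM(8, 8, batch_first=True)
    inputs = torch.randn(3, 5, 8)
    seq_len = torch.tensor([3, 5, 2])

    sort_lens, sort_idx = torch.sort(seq_len, dim=0, descending=True)
    packed = nn.utils.rnn.pack_padded_sequence(inputs[sort_idx], sort_lens.cpu(), batch_first=True)
    output, _ = rnn(packed)
    output, _ = nn.utils.rnn.pad_packed_sequence(output, batch_first=True)
    _, unsort_idx = torch.sort(sort_idx, dim=0, descending=False)
    output = output[unsort_idx]  # rows restored to the original batch order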
- A batch first ``PackedSequence`` to run the stacked LSTM over. - initial_state : ``Tuple[torch.Tensor, torch.Tensor]``, optional, (default = None) - A tuple (state, memory) representing the initial hidden state and memory - of the LSTM, with shape (num_layers, batch_size, 2 * hidden_size) and - (num_layers, batch_size, 2 * cell_size) respectively. - Returns - ------- - output_sequence : ``torch.FloatTensor`` - The encoded sequence of shape (num_layers, batch_size, sequence_length, hidden_size) - final_states: ``Tuple[torch.FloatTensor, torch.FloatTensor]`` - The per-layer final (state, memory) states of the LSTM, with shape - (num_layers, batch_size, 2 * hidden_size) and (num_layers, batch_size, 2 * cell_size) - respectively. The last dimension is duplicated because it contains the state/memory - for both the forward and backward layers. - """ - - if initial_state is None: - hidden_states: List[Optional[Tuple[torch.Tensor, - torch.Tensor]]] = [None] * len(self.forward_layers) - elif initial_state[0].size()[0] != len(self.forward_layers): - raise Exception("Initial states were passed to forward() but the number of " - "initial states does not match the number of layers.") - else: - hidden_states = list(zip(initial_state[0].split(1, 0), initial_state[1].split(1, 0))) - - inputs, batch_lengths = pad_packed_sequence(inputs, batch_first=True) - forward_output_sequence = inputs - backward_output_sequence = inputs - - final_states = [] - sequence_outputs = [] - for layer_index, state in enumerate(hidden_states): - forward_layer = getattr(self, 'forward_layer_{}'.format(layer_index)) - backward_layer = getattr(self, 'backward_layer_{}'.format(layer_index)) - - forward_cache = forward_output_sequence - backward_cache = backward_output_sequence - - if state is not None: - forward_hidden_state, backward_hidden_state = state[0].split(self.hidden_size, 2) - forward_memory_state, backward_memory_state = state[1].split(self.cell_size, 2) - forward_state = (forward_hidden_state, forward_memory_state) - backward_state = (backward_hidden_state, backward_memory_state) - else: - forward_state = None - backward_state = None - - forward_output_sequence, forward_state = forward_layer(forward_output_sequence, - batch_lengths, - forward_state) - backward_output_sequence, backward_state = backward_layer(backward_output_sequence, - batch_lengths, - backward_state) - # Skip connections, just adding the input to the output. - if layer_index != 0: - forward_output_sequence += forward_cache - backward_output_sequence += backward_cache - - sequence_outputs.append(torch.cat([forward_output_sequence, - backward_output_sequence], -1)) - # Append the state tuples in a list, so that we can return - # the final states for all the layers. - final_states.append((torch.cat([forward_state[0], backward_state[0]], -1), - torch.cat([forward_state[1], backward_state[1]], -1))) - - stacked_sequence_outputs: torch.FloatTensor = torch.stack(sequence_outputs) - # Stack the hidden state and memory for each layer in。to 2 tensors of shape - # (num_layers, batch_size, hidden_size) and (num_layers, batch_size, cell_size) - # respectively. 
- final_hidden_states, final_memory_states = zip(*final_states) - final_state_tuple: Tuple[torch.FloatTensor, - torch.FloatTensor] = (torch.cat(final_hidden_states, 0), - torch.cat(final_memory_states, 0)) - return stacked_sequence_outputs, final_state_tuple - - -class ConvTokenEmbedder(nn.Module): - def __init__(self, config, weight_file, word_emb_layer, char_emb_layer): - super(ConvTokenEmbedder, self).__init__() - self.weight_file = weight_file - self.word_emb_layer = word_emb_layer - self.char_emb_layer = char_emb_layer - - self.output_dim = config['lstm']['projection_dim'] - self._options = config - - char_cnn_options = self._options['char_cnn'] - if char_cnn_options['activation'] == 'tanh': - self.activation = torch.tanh - elif char_cnn_options['activation'] == 'relu': - self.activation = torch.nn.functional.relu - else: - raise Exception("Unknown activation") - - if char_emb_layer is not None: - self.char_conv = [] - cnn_config = config['char_cnn'] - filters = cnn_config['filters'] - char_embed_dim = cnn_config['embedding']['dim'] - convolutions = [] - - for i, (width, num) in enumerate(filters): - conv = torch.nn.Conv1d( - in_channels=char_embed_dim, - out_channels=num, - kernel_size=width, - bias=True - ) - convolutions.append(conv) - self.add_module('char_conv_{}'.format(i), conv) - - self._convolutions = convolutions - - n_filters = sum(f[1] for f in filters) - n_highway = cnn_config['n_highway'] - - self._highways = Highway(n_filters, n_highway, activation=torch.nn.functional.relu) - - self._projection = torch.nn.Linear(n_filters, self.output_dim, bias=True) - - def forward(self, words, chars): - r""" - :param words: - :param chars: Tensor Shape ``(batch_size, sequence_length, 50)``: - :return Tensor Shape ``(batch_size, sequence_length + 2, embedding_dim)`` : - """ - # the character id embedding - # (batch_size * sequence_length, max_chars_per_token, embed_dim) - # character_embedding = torch.nn.functional.embedding( - # chars.view(-1, max_chars_per_token), - # self._char_embedding_weights - # ) - batch_size, sequence_length, max_char_len = chars.size() - character_embedding = self.char_emb_layer(chars).reshape(batch_size * sequence_length, max_char_len, -1) - # run convolutions - - # (batch_size * sequence_length, embed_dim, max_chars_per_token) - character_embedding = torch.transpose(character_embedding, 1, 2) - convs = [] - for i in range(len(self._convolutions)): - conv = getattr(self, 'char_conv_{}'.format(i)) - convolved = conv(character_embedding) - # (batch_size * sequence_length, n_filters for this width) - convolved, _ = torch.max(convolved, dim=-1) - convolved = self.activation(convolved) - convs.append(convolved) - - # (batch_size * sequence_length, n_filters) - token_embedding = torch.cat(convs, dim=-1) - - # apply the highway layers (batch_size * sequence_length, n_filters) - token_embedding = self._highways(token_embedding) - - # final projection (batch_size * sequence_length, embedding_dim) - token_embedding = self._projection(token_embedding) - - # reshape to (batch_size, sequence_length+2, embedding_dim) - return token_embedding.view(batch_size, sequence_length, -1) - - -class Highway(torch.nn.Module): - r""" - A `Highway layer `_ does a gated combination of a linear - transformation and a non-linear transformation of its input. :math:`y = g * x + (1 - g) * - f(A(x))`, where :math:`A` is a linear transformation, :math:`f` is an element-wise - non-linearity, and :math:`g` is an element-wise gate, computed as :math:`sigmoid(B(x))`. 
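The convolution loop above is a character CNN with max-over-time pooling: each filter width yields one feature vector per token, taken as the maximum over character positions. One filter in isolation::

    import torch
    from torch import nn

    conv = nn.Conv1d(in_channels=16, out_channels=32, kernel_size=3)
    chars = torch.randn(10, 16, 50)     # (batch*seq_len, char_embed_dim, max_chars_per_token)
    feats, _ = conv(chars).max(dim=-1)  # max over character positions -> (10, 32)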
- This module will apply a fixed number of highway layers to its input, returning the final - result. - Parameters - ---------- - input_dim : ``int`` - The dimensionality of :math:`x`. We assume the input has shape ``(batch_size, - input_dim)``. - num_layers : ``int``, optional (default=``1``) - The number of highway layers to apply to the input. - activation : ``Callable[[torch.Tensor], torch.Tensor]``, optional (default=``torch.nn.functional.relu``) - The non-linearity to use in the highway layers. - """ - - def __init__(self, - input_dim: int, - num_layers: int = 1, - activation: Callable[[torch.Tensor], torch.Tensor] = torch.nn.functional.relu) -> None: - super(Highway, self).__init__() - self._input_dim = input_dim - self._layers = torch.nn.ModuleList([torch.nn.Linear(input_dim, input_dim * 2) - for _ in range(num_layers)]) - self._activation = activation - for layer in self._layers: - # We should bias the highway layer to just carry its input forward. We do that by - # setting the bias on `B(x)` to be positive, because that means `g` will be biased to - # be high, to we will carry the input forward. The bias on `B(x)` is the second half - # of the bias vector in each Linear layer. - layer.bias[input_dim:].data.fill_(1) - - def forward(self, inputs: torch.Tensor) -> torch.Tensor: # pylint: disable=arguments-differ - current_input = inputs - for layer in self._layers: - projected_input = layer(current_input) - linear_part = current_input - # NOTE: if you modify this, think about whether you should modify the initialization - # above, too. - nonlinear_part = projected_input[:, (0 * self._input_dim):(1 * self._input_dim)] - gate = projected_input[:, (1 * self._input_dim):(2 * self._input_dim)] - nonlinear_part = self._activation(nonlinear_part) - gate = torch.sigmoid(gate) - current_input = gate * linear_part + (1 - gate) * nonlinear_part - return current_input diff --git a/fastNLP/modules/encoder/bert.py b/fastNLP/modules/encoder/bert.py deleted file mode 100644 index 55e79d63..00000000 --- a/fastNLP/modules/encoder/bert.py +++ /dev/null @@ -1,662 +0,0 @@ -r"""undocumented -这个页面的代码很大程度上参考(复制粘贴)了https://github.com/huggingface/pytorch-pretrained-BERT的代码, 如果你发现该代码对你 - 有用,也请引用一下他们。 -""" - -__all__ = [ - "BertModel", -] - -import copy -import json -import math -import os - -import torch -from torch import nn -import numpy as np - -from ...io.file_utils import _get_file_name_base_on_postfix -from ...io.file_utils import _get_bert_dir -from ...core import logger - - -CONFIG_FILE = 'config.json' -WEIGHTS_NAME = 'pytorch_model.bin' - -BERT_KEY_RENAME_MAP_1 = { - 'gamma': 'weight', - 'beta': 'bias', - 'distilbert.embeddings': 'bert.embeddings', - 'distilbert.transformer': 'bert.encoder', -} - -BERT_KEY_RENAME_MAP_2 = { - 'q_lin': 'self.query', - 'k_lin': 'self.key', - 'v_lin': 'self.value', - 'out_lin': 'output.dense', - 'sa_layer_norm': 'attention.output.LayerNorm', - 'ffn.lin1': 'intermediate.dense', - 'ffn.lin2': 'output.dense', - 'output_layer_norm': 'output.LayerNorm', -} - - -class BertConfig(object): - r"""Configuration class to store the configuration of a `BertModel`. - """ - - def __init__(self, - vocab_size_or_config_json_file, - hidden_size=768, - num_hidden_layers=12, - num_attention_heads=12, - intermediate_size=3072, - hidden_act="gelu", - hidden_dropout_prob=0.1, - attention_probs_dropout_prob=0.1, - max_position_embeddings=512, - type_vocab_size=2, - initializer_range=0.02, - layer_norm_eps=1e-12, - architectures='bert'): - r"""Constructs BertConfig. 
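One highway layer from the class above, unrolled: a single Linear produces both the nonlinear candidate and the gate, and the gate bias is initialized positive so the layer initially carries its input through almost unchanged::

    import torch
    from torch import nn

    dim = 8
    layer = nn.Linear(dim, dim * 2)
    layer.bias.data[dim:].fill_(1)  # bias the gate towards carrying the input

    x = torch.randn(2, dim)
    proj = layer(x)
    nonlinear = torch.relu(proj[:, :dim])
    gate = torch.sigmoid(proj[:, dim:])
    y = gate * x + (1 - gate) * nonlinear  # y = g * x + (1 - g) * f(A(x))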
- - Args: - vocab_size_or_config_json_file: Vocabulary size of `inputs_ids` in `BertModel`. - hidden_size: Size of the encoder layers and the pooler layer. - num_hidden_layers: Number of hidden layers in the Transformer encoder. - num_attention_heads: Number of attention heads for each attention layer in - the Transformer encoder. - intermediate_size: The size of the "intermediate" (i.e., feed-forward) - layer in the Transformer encoder. - hidden_act: The non-linear activation function (function or string) in the - encoder and pooler. If string, "gelu", "relu" and "swish" are supported. - hidden_dropout_prob: The dropout probabilitiy for all fully connected - layers in the embeddings, encoder, and pooler. - attention_probs_dropout_prob: The dropout ratio for the attention - probabilities. - max_position_embeddings: The maximum sequence length that this model might - ever be used with. Typically set this to something large just in case - (e.g., 512 or 1024 or 2048). - type_vocab_size: The vocabulary size of the `token_type_ids` passed into - `BertModel`. - initializer_range: The sttdev of the truncated_normal_initializer for - initializing all weight matrices. - layer_norm_eps: The epsilon used by LayerNorm. - """ - if isinstance(vocab_size_or_config_json_file, str): - with open(vocab_size_or_config_json_file, "r", encoding='utf-8') as reader: - json_config = json.loads(reader.read()) - for key, value in json_config.items(): - self.__dict__[key] = value - elif isinstance(vocab_size_or_config_json_file, int): - self.vocab_size = vocab_size_or_config_json_file - self.hidden_size = hidden_size - self.num_hidden_layers = num_hidden_layers - self.num_attention_heads = num_attention_heads - self.hidden_act = hidden_act - self.intermediate_size = intermediate_size - self.hidden_dropout_prob = hidden_dropout_prob - self.attention_probs_dropout_prob = attention_probs_dropout_prob - self.max_position_embeddings = max_position_embeddings - self.type_vocab_size = type_vocab_size - self.initializer_range = initializer_range - self.layer_norm_eps = layer_norm_eps - self.architectures = architectures - else: - raise ValueError("First argument must be either a vocabulary size (int)" - "or the path to a pretrained model config file (str)") - - @classmethod - def from_dict(cls, json_object): - r"""Constructs a `BertConfig` from a Python dictionary of parameters.""" - config = BertConfig(vocab_size_or_config_json_file=-1) - for key, value in json_object.items(): - config.__dict__[key] = value - return config - - @classmethod - def from_json_file(cls, json_file): - r"""Constructs a `BertConfig` from a json file of parameters.""" - with open(json_file, "r", encoding='utf-8') as reader: - text = reader.read() - return cls.from_dict(json.loads(text)) - - def __repr__(self): - return str(self.to_json_string()) - - def to_dict(self): - r"""Serializes this instance to a Python dictionary.""" - output = copy.deepcopy(self.__dict__) - return output - - def to_json_string(self): - r"""Serializes this instance to a JSON string.""" - return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n" - - def to_json_file(self, json_file_path): - r""" Save this instance to a json file.""" - if os.path.isdir(json_file_path): - json_file_path = os.path.join(json_file_path, CONFIG_FILE) - with open(json_file_path, "w", encoding='utf-8') as writer: - writer.write(self.to_json_string()) - - def save_pretrained(self, save_directory): - self.to_json_file(save_directory) - -def gelu(x): - return x * 0.5 * (1.0 + torch.erf(x / 
math.sqrt(2.0))) - - -def swish(x): - return x * torch.sigmoid(x) - - -ACT2FN = {"gelu": gelu, "relu": torch.nn.functional.relu, "swish": swish} - - -BertLayerNorm = torch.nn.LayerNorm - - -class DistilBertEmbeddings(nn.Module): - def __init__(self, config): - super(DistilBertEmbeddings, self).__init__() - - def create_sinusoidal_embeddings(n_pos, dim, out): - position_enc = np.array([ - [pos / np.power(10000, 2 * (j // 2) / dim) for j in range(dim)] - for pos in range(n_pos) - ]) - out[:, 0::2] = torch.FloatTensor(np.sin(position_enc[:, 0::2])) - out[:, 1::2] = torch.FloatTensor(np.cos(position_enc[:, 1::2])) - out.detach_() - out.requires_grad = False - - self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=0) - self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size) - if config.sinusoidal_pos_embds: - create_sinusoidal_embeddings(n_pos=config.max_position_embeddings, - dim=config.hidden_size, - out=self.position_embeddings.weight) - - self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=1e-12) - self.dropout = nn.Dropout(config.hidden_dropout_prob) - - def forward(self, input_ids, token_type_ids, position_ids=None): - r""" - Parameters - ---------- - input_ids: torch.tensor(bs, max_seq_length) - The token ids to embed. - token_type_ids: no used. - position_ids: no used. - Outputs - ------- - embeddings: torch.tensor(bs, max_seq_length, dim) - The embedded tokens (plus position embeddings, no token_type embeddings) - """ - seq_length = input_ids.size(1) - if position_ids is None: - position_ids = torch.arange(seq_length, dtype=torch.long, device=input_ids.device) # (max_seq_length) - position_ids = position_ids.unsqueeze(0).expand_as(input_ids) # (bs, max_seq_length) - - word_embeddings = self.word_embeddings(input_ids) # (bs, max_seq_length, dim) - position_embeddings = self.position_embeddings(position_ids) # (bs, max_seq_length, dim) - - embeddings = word_embeddings + position_embeddings # (bs, max_seq_length, dim) - embeddings = self.LayerNorm(embeddings) # (bs, max_seq_length, dim) - embeddings = self.dropout(embeddings) # (bs, max_seq_length, dim) - return embeddings - - -class BertEmbeddings(nn.Module): - r"""Construct the embeddings from word, position and token_type embeddings. 
- """ - - def __init__(self, config): - super(BertEmbeddings, self).__init__() - self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=0) - self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size) - self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size) - - # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load - # any TensorFlow checkpoint file - self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layer_norm_eps) - self.dropout = nn.Dropout(config.hidden_dropout_prob) - - def forward(self, input_ids, token_type_ids=None, position_ids=None, words_embeddings=None): - seq_length = input_ids.size(1) - if position_ids is None: - position_ids = torch.arange(seq_length, dtype=torch.long, device=input_ids.device) - position_ids = position_ids.unsqueeze(0).expand_as(input_ids) - if token_type_ids is None: - token_type_ids = torch.zeros_like(input_ids) - - if words_embeddings is None: - words_embeddings = self.word_embeddings(input_ids) - else: - assert input_ids.size() == words_embeddings.size()[: -1] - position_embeddings = self.position_embeddings(position_ids) - token_type_embeddings = self.token_type_embeddings(token_type_ids) - - embeddings = words_embeddings + position_embeddings + token_type_embeddings - embeddings = self.LayerNorm(embeddings) - embeddings = self.dropout(embeddings) - return embeddings - - -class BertSelfAttention(nn.Module): - def __init__(self, config): - super(BertSelfAttention, self).__init__() - if config.hidden_size % config.num_attention_heads != 0: - raise ValueError( - "The hidden size (%d) is not a multiple of the number of attention " - "heads (%d)" % (config.hidden_size, config.num_attention_heads)) - self.num_attention_heads = config.num_attention_heads - self.attention_head_size = int(config.hidden_size / config.num_attention_heads) - self.all_head_size = self.num_attention_heads * self.attention_head_size - - self.query = nn.Linear(config.hidden_size, self.all_head_size) - self.key = nn.Linear(config.hidden_size, self.all_head_size) - self.value = nn.Linear(config.hidden_size, self.all_head_size) - - self.dropout = nn.Dropout(config.attention_probs_dropout_prob) - - def transpose_for_scores(self, x): - new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size) - x = x.view(*new_x_shape) - return x.permute(0, 2, 1, 3) - - def forward(self, hidden_states, attention_mask): - mixed_query_layer = self.query(hidden_states) - mixed_key_layer = self.key(hidden_states) - mixed_value_layer = self.value(hidden_states) - - query_layer = self.transpose_for_scores(mixed_query_layer) - key_layer = self.transpose_for_scores(mixed_key_layer) - value_layer = self.transpose_for_scores(mixed_value_layer) - - # Take the dot product between "query" and "key" to get the raw attention scores. - attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2)) - attention_scores = attention_scores / math.sqrt(self.attention_head_size) - # Apply the attention mask is (precomputed for all layers in BertModel forward() function) - attention_scores = attention_scores + attention_mask - - # Normalize the attention scores to probabilities. - attention_probs = nn.Softmax(dim=-1)(attention_scores) - - # This is actually dropping out entire tokens to attend to, which might - # seem a bit unusual, but is taken from the original Transformer paper. 
- attention_probs = self.dropout(attention_probs) - - context_layer = torch.matmul(attention_probs, value_layer) - context_layer = context_layer.permute(0, 2, 1, 3).contiguous() - new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,) - context_layer = context_layer.view(*new_context_layer_shape) - return context_layer - - -class BertSelfOutput(nn.Module): - def __init__(self, config): - super(BertSelfOutput, self).__init__() - self.dense = nn.Linear(config.hidden_size, config.hidden_size) - self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layer_norm_eps) - self.dropout = nn.Dropout(config.hidden_dropout_prob) - - def forward(self, hidden_states, input_tensor): - hidden_states = self.dense(hidden_states) - hidden_states = self.dropout(hidden_states) - hidden_states = self.LayerNorm(hidden_states + input_tensor) - return hidden_states - - -class BertAttention(nn.Module): - def __init__(self, config): - super(BertAttention, self).__init__() - self.self = BertSelfAttention(config) - self.output = BertSelfOutput(config) - - def forward(self, input_tensor, attention_mask): - self_output = self.self(input_tensor, attention_mask) - attention_output = self.output(self_output, input_tensor) - return attention_output - - -class BertIntermediate(nn.Module): - def __init__(self, config): - super(BertIntermediate, self).__init__() - self.dense = nn.Linear(config.hidden_size, config.intermediate_size) - if isinstance(config.hidden_act, str): - self.intermediate_act_fn = ACT2FN[config.hidden_act] - else: - self.intermediate_act_fn = config.hidden_act - - def forward(self, hidden_states): - hidden_states = self.dense(hidden_states) - hidden_states = self.intermediate_act_fn(hidden_states) - return hidden_states - - -class BertOutput(nn.Module): - def __init__(self, config): - super(BertOutput, self).__init__() - self.dense = nn.Linear(config.intermediate_size, config.hidden_size) - self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layer_norm_eps) - self.dropout = nn.Dropout(config.hidden_dropout_prob) - - def forward(self, hidden_states, input_tensor): - hidden_states = self.dense(hidden_states) - hidden_states = self.dropout(hidden_states) - hidden_states = self.LayerNorm(hidden_states + input_tensor) - return hidden_states - - -class BertLayer(nn.Module): - def __init__(self, config): - super(BertLayer, self).__init__() - self.attention = BertAttention(config) - self.intermediate = BertIntermediate(config) - self.output = BertOutput(config) - - def forward(self, hidden_states, attention_mask): - attention_output = self.attention(hidden_states, attention_mask) - intermediate_output = self.intermediate(attention_output) - layer_output = self.output(intermediate_output, attention_output) - return layer_output - - -class BertEncoder(nn.Module): - def __init__(self, config, num_output_layer=-1): - super(BertEncoder, self).__init__() - layer = BertLayer(config) - self.layer = nn.ModuleList([copy.deepcopy(layer) for _ in range(config.num_hidden_layers)]) - num_output_layer = num_output_layer if num_output_layer >= 0 else (len(self.layer) + num_output_layer) - self.num_output_layer = max(min(num_output_layer, len(self.layer)), 0) - if self.num_output_layer + 1 < len(self.layer): - logger.info(f'The transformer encoder will early exit after layer-{self.num_output_layer} ' - f'(layer 0 means embedding layer)!') - - def forward(self, hidden_states, attention_mask, output_all_encoded_layers=True): - all_encoder_layers = [] - for idx, layer_module in 
enumerate(self.layer):
-            if idx >= self.num_output_layer:
-                break
-            hidden_states = layer_module(hidden_states, attention_mask)
-            if output_all_encoded_layers:
-                all_encoder_layers.append(hidden_states)
-        if not output_all_encoded_layers:
-            all_encoder_layers.append(hidden_states)
-        return all_encoder_layers
-
-
-class BertPooler(nn.Module):
-    def __init__(self, config):
-        super(BertPooler, self).__init__()
-        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
-        self.activation = nn.Tanh()
-
-    def forward(self, hidden_states):
-        # We "pool" the model by simply taking the hidden state corresponding
-        # to the first token.
-        first_token_tensor = hidden_states[:, 0]
-        pooled_output = self.dense(first_token_tensor)
-        pooled_output = self.activation(pooled_output)
-        return pooled_output
-
-
-class BertModel(nn.Module):
-    r"""
-    BERT (Bidirectional Encoder Representations from Transformers).
-
-    Build a BERT model from pretrained weights::
-
-        model = BertModel.from_pretrained(model_dir_or_name)
-
-    Build a BERT model with randomly initialized weights::
-
-        model = BertModel()
-
-    :param int vocab_size: vocabulary size; defaults to 30522, the vocabulary size of English uncased BERT
-    :param int hidden_size: hidden size; defaults to 768 (BERT-base)
-    :param int num_hidden_layers: number of hidden layers; defaults to 12 (BERT-base)
-    :param int num_attention_heads: number of attention heads; defaults to 12 (BERT-base)
-    :param int intermediate_size: FFN hidden size; defaults to 3072 (BERT-base)
-    :param str hidden_act: FFN activation function; defaults to ``gelu``
-    :param float hidden_dropout_prob: dropout of the FFN hidden layers; defaults to 0.1
-    :param float attention_probs_dropout_prob: dropout of the attention probabilities; defaults to 0.1
-    :param int max_position_embeddings: maximum sequence length; defaults to 512
-    :param int type_vocab_size: maximum number of segments; defaults to 2
-    :param float initializer_range: standard deviation for weight initialization; defaults to 0.02
-    """
-
-    def __init__(self, config, *inputs, **kwargs):
-        super(BertModel, self).__init__()
-        if not isinstance(config, BertConfig):
-            raise ValueError(
-                "Parameter config in `{}(config)` should be an instance of class `BertConfig`. "
-                "To create a model from a Google pretrained model use "
-                "`model = {}.from_pretrained(PRETRAINED_MODEL_NAME)`".format(
-                    self.__class__.__name__, self.__class__.__name__
-                ))
-        self.config = config
-        self.hidden_size = self.config.hidden_size
-        self.model_type = 'bert'
-        neg_num_output_layer = kwargs.get('neg_num_output_layer', -1)
-        pos_num_output_layer = kwargs.get('pos_num_output_layer', self.config.num_hidden_layers)
-        self.num_output_layer = max(neg_num_output_layer + 1 + self.config.num_hidden_layers, pos_num_output_layer)
-        if hasattr(config, 'sinusoidal_pos_embds'):
-            self.model_type = 'distilbert'
-        elif 'model_type' in kwargs:
-            self.model_type = kwargs['model_type'].lower()
-
-        if self.model_type == 'distilbert':
-            self.embeddings = DistilBertEmbeddings(config)
-        else:
-            self.embeddings = BertEmbeddings(config)
-
-        self.encoder = BertEncoder(config, num_output_layer=self.num_output_layer)
-        if self.model_type != 'distilbert':
-            self.pooler = BertPooler(config)
-        else:
-            logger.info('DistilBert has no pooler; the hidden state of the [CLS] token is used as the pooled output.')
-        self.apply(self.init_bert_weights)
-
-    @property
-    def dtype(self):
-        """
-        :obj:`torch.dtype`: The dtype of the module (assuming that all the module parameters have the same dtype).
- """ - try: - return next(self.parameters()).dtype - except StopIteration: - # For nn.DataParallel compatibility in PyTorch 1.5 - - def find_tensor_attributes(module: nn.Module): - tuples = [(k, v) for k, v in module.__dict__.items() if torch.is_tensor(v)] - return tuples - - gen = self._named_members(get_members_fn=find_tensor_attributes) - first_tuple = next(gen) - return first_tuple[1].dtype - - def init_bert_weights(self, module): - r""" Initialize the weights. - """ - if isinstance(module, (nn.Linear, nn.Embedding)): - # Slightly different from the TF version which uses truncated_normal for initialization - # cf https://github.com/pytorch/pytorch/pull/5617 - module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) - elif isinstance(module, BertLayerNorm): - module.bias.data.zero_() - module.weight.data.fill_(1.0) - if isinstance(module, nn.Linear) and module.bias is not None: - module.bias.data.zero_() - - def forward(self, input_ids, token_type_ids=None, attention_mask=None, output_all_encoded_layers=True, - position_ids=None): - """ - - :param torch.LongTensor input_ids: bsz x max_len的输入id - :param torch.LongTensor token_type_ids: bsz x max_len,如果不输入认为全为0,一般第一个sep(含)及以前为0, 一个sep之后为1 - :param attention_mask: 需要attend的为1,不需要为0 - :param bool output_all_encoded_layers: 是否输出所有层,默认输出token embedding(包含bpe, position以及type embedding) - 及每一层的hidden states。如果为False,只输出最后一层的结果 - :param torch.LongTensor position_ids: bsz x max_len, position的id - :return: encode_layers: 如果output_all_encoded_layers为True,返回list(共num_layers+1个元素),每个元素为 - bsz x max_len x hidden_size否则返回bsz x max_len x hidden_size的tensor; - pooled_output: bsz x hidden_size为cls的表示,可以用于句子的分类 - """ - if attention_mask is None: - attention_mask = torch.ones_like(input_ids) - if token_type_ids is None: - token_type_ids = torch.zeros_like(input_ids) - - # We create a 3D attention mask from a 2D tensor mask. - # Sizes are [batch_size, 1, 1, to_seq_length] - # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length] - # this attention mask is more simple than the triangular masking of causal attention - # used in OpenAI GPT, we just need to prepare the broadcast dimension here. - extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2) - - # Since attention_mask is 1.0 for positions we want to attend and 0.0 for - # masked positions, this operation will create a tensor which is 0.0 for - # positions we want to attend and -10000.0 for masked positions. - # Since we are adding it to the raw scores before the softmax, this is - # effectively the same as removing these entirely. 
- # this will case an issue when DataParallel: https://github.com/pytorch/pytorch/issues/40457#issuecomment-648396469 - # extended_attention_mask = extended_attention_mask.to(dtype=next(self.parameters()).dtype) # fp16 compatibility - extended_attention_mask = extended_attention_mask.to(self.dtype) - extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0 - - embedding_output = self.embeddings(input_ids, token_type_ids=token_type_ids, position_ids=position_ids) - encoded_layers = self.encoder(embedding_output, - extended_attention_mask, - output_all_encoded_layers=output_all_encoded_layers) - encoded_layers.insert(0, embedding_output) - sequence_output = encoded_layers[-1] - if self.model_type != 'distilbert': - pooled_output = self.pooler(sequence_output) - else: - pooled_output = sequence_output[:, 0] - if not output_all_encoded_layers: - encoded_layers = encoded_layers[-1] - return encoded_layers, pooled_output - - @classmethod - def from_pretrained(cls, model_dir_or_name, *inputs, **kwargs): - state_dict = kwargs.get('state_dict', None) - kwargs.pop('state_dict', None) - kwargs.pop('cache_dir', None) - kwargs.pop('from_tf', None) - - # get model dir from name or dir - pretrained_model_dir = _get_bert_dir(model_dir_or_name) - - # Load config - config_file = _get_file_name_base_on_postfix(pretrained_model_dir, '.json') - config = BertConfig.from_json_file(config_file) - - if state_dict is None: - weights_path = _get_file_name_base_on_postfix(pretrained_model_dir, '.bin') - state_dict = torch.load(weights_path, map_location='cpu') - else: - logger.error(f'Cannot load parameters through `state_dict` variable.') - raise RuntimeError(f'Cannot load parameters through `state_dict` variable.') - - model_type = 'BERT' - old_keys = [] - new_keys = [] - for key in state_dict.keys(): - new_key = None - if 'bert' not in key: - new_key = 'bert.' + key - if new_key: - old_keys.append(key) - new_keys.append(new_key) - for old_key, new_key in zip(old_keys, new_keys): - state_dict[new_key] = state_dict.pop(old_key) - - old_keys = [] - new_keys = [] - for key in state_dict.keys(): - new_key = None - for key_name in BERT_KEY_RENAME_MAP_1: - if key_name in key: - new_key = key.replace(key_name, BERT_KEY_RENAME_MAP_1[key_name]) - if 'distilbert' in key: - model_type = 'DistilBert' - break - if new_key: - old_keys.append(key) - new_keys.append(new_key) - for old_key, new_key in zip(old_keys, new_keys): - state_dict[new_key] = state_dict.pop(old_key) - - old_keys = [] - new_keys = [] - for key in state_dict.keys(): - new_key = None - for key_name in BERT_KEY_RENAME_MAP_2: - if key_name in key: - new_key = key.replace(key_name, BERT_KEY_RENAME_MAP_2[key_name]) - break - if new_key: - old_keys.append(key) - new_keys.append(new_key) - for old_key, new_key in zip(old_keys, new_keys): - state_dict[new_key] = state_dict.pop(old_key) - - # Instantiate model. 
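        # (Editorial note: by this point every checkpoint key has been rewritten
        # to the `bert.*` layout used here -- e.g. `gamma` -> `weight`, `beta` ->
        # `bias`, `distilbert.transformer` -> `bert.encoder` -- so DistilBERT and
        # BERT checkpoints load through the same code path below.)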
- model = cls(config, model_type=model_type, *inputs, **kwargs) - - missing_keys = [] - unexpected_keys = [] - error_msgs = [] - # copy state_dict so _load_from_state_dict can modify it - metadata = getattr(state_dict, '_metadata', None) - state_dict = state_dict.copy() - if metadata is not None: - state_dict._metadata = metadata - - def load(module, prefix=''): - local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {}) - module._load_from_state_dict( - state_dict, prefix, local_metadata, True, missing_keys, unexpected_keys, error_msgs) - for name, child in module._modules.items(): - if child is not None: - load(child, prefix + name + '.') - - load(model, prefix='' if hasattr(model, 'bert') else 'bert.') - if len(missing_keys) > 0: - logger.warning("Weights of {} not initialized from pretrained model: {}".format( - model.__class__.__name__, missing_keys)) - if len(unexpected_keys) > 0: - logger.debug("Weights from pretrained model not used in {}: {}".format( - model.__class__.__name__, unexpected_keys)) - - logger.info(f"Load pre-trained {model_type} parameters from file {weights_path}.") - return model - - def save_pretrained(self, save_directory): - """ 保存模型到某个folder - """ - assert os.path.isdir( - save_directory - ), "Saving path should be a directory where the model and configuration can be saved" - - # Only save the model itself if we are using distributed training - model_to_save = self.module if hasattr(self, "module") else self - - # Attach architecture to the config - model_to_save.config.architectures = [model_to_save.__class__.__name__] - - # Save configuration file - model_to_save.config.save_pretrained(save_directory) - - # If we save using the predefined names, we can load using `from_pretrained` - output_model_file = os.path.join(save_directory, WEIGHTS_NAME) - torch.save(model_to_save.state_dict(), output_model_file) - logger.debug("Model weights saved in {}".format(output_model_file)) diff --git a/fastNLP/modules/encoder/char_encoder.py b/fastNLP/modules/encoder/char_encoder.py deleted file mode 100644 index f40c8bb5..00000000 --- a/fastNLP/modules/encoder/char_encoder.py +++ /dev/null @@ -1,96 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "ConvolutionCharEncoder", - "LSTMCharEncoder" -] -import torch -import torch.nn as nn - -from ..utils import initial_parameter - - -# from torch.nn.init import xavier_uniform -class ConvolutionCharEncoder(nn.Module): - r""" - char级别的卷积编码器. - - """ - - def __init__(self, char_emb_size=50, feature_maps=(40, 30, 30), kernels=(1, 3, 5), initial_method=None): - r""" - - :param int char_emb_size: char级别embedding的维度. Default: 50 - :例: 有26个字符, 每一个的embedding是一个50维的向量, 所以输入的向量维度为50. - :param tuple feature_maps: 一个由int组成的tuple. tuple的长度是char级别卷积操作的数目, 第`i`个int表示第`i`个卷积操作的filter. - :param tuple kernels: 一个由int组成的tuple. tuple的长度是char级别卷积操作的数目, 第`i`个int表示第`i`个卷积操作的卷积核. 
- :param initial_method: 初始化参数的方式, 默认为`xavier normal` - """ - super(ConvolutionCharEncoder, self).__init__() - self.convs = nn.ModuleList([ - nn.Conv2d(1, feature_maps[i], kernel_size=(char_emb_size, kernels[i]), bias=True, - padding=(0, kernels[i] // 2)) - for i in range(len(kernels))]) - - initial_parameter(self, initial_method) - - def forward(self, x): - r""" - :param torch.Tensor x: ``[batch_size * sent_length, word_length, char_emb_size]`` 输入字符的embedding - :return: torch.Tensor : 卷积计算的结果, 维度为[batch_size * sent_length, sum(feature_maps), 1] - """ - x = x.contiguous().view(x.size(0), 1, x.size(1), x.size(2)) - # [batch_size*sent_length, channel, width, height] - x = x.transpose(2, 3) - # [batch_size*sent_length, channel, height, width] - return self._convolute(x).unsqueeze(2) - - def _convolute(self, x): - feats = [] - for conv in self.convs: - y = conv(x) - # [batch_size*sent_length, feature_maps[i], 1, width - kernels[i] + 1] - y = torch.squeeze(y, 2) - # [batch_size*sent_length, feature_maps[i], width - kernels[i] + 1] - y = torch.tanh(y) - y, __ = torch.max(y, 2) - # [batch_size*sent_length, feature_maps[i]] - feats.append(y) - return torch.cat(feats, 1) # [batch_size*sent_length, sum(feature_maps)] - - -class LSTMCharEncoder(nn.Module): - r""" - char级别基于LSTM的encoder. - """ - - def __init__(self, char_emb_size=50, hidden_size=None, initial_method=None): - r""" - :param int char_emb_size: char级别embedding的维度. Default: 50 - 例: 有26个字符, 每一个的embedding是一个50维的向量, 所以输入的向量维度为50. - :param int hidden_size: LSTM隐层的大小, 默认为char的embedding维度 - :param initial_method: 初始化参数的方式, 默认为`xavier normal` - """ - super(LSTMCharEncoder, self).__init__() - self.hidden_size = char_emb_size if hidden_size is None else hidden_size - - self.lstm = nn.LSTM(input_size=char_emb_size, - hidden_size=self.hidden_size, - num_layers=1, - bias=True, - batch_first=True) - initial_parameter(self, initial_method) - - def forward(self, x): - r""" - :param torch.Tensor x: ``[ n_batch*n_word, word_length, char_emb_size]`` 输入字符的embedding - :return: torch.Tensor : [ n_batch*n_word, char_emb_size]经过LSTM编码的结果 - """ - batch_size = x.shape[0] - h0 = torch.empty(1, batch_size, self.hidden_size) - h0 = nn.init.orthogonal_(h0) - c0 = torch.empty(1, batch_size, self.hidden_size) - c0 = nn.init.orthogonal_(c0) - - _, hidden = self.lstm(x, (h0, c0)) - return hidden[0].squeeze().unsqueeze(2) diff --git a/fastNLP/modules/encoder/conv_maxpool.py b/fastNLP/modules/encoder/conv_maxpool.py deleted file mode 100644 index 20a844c6..00000000 --- a/fastNLP/modules/encoder/conv_maxpool.py +++ /dev/null @@ -1,87 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "ConvMaxpool" -] -import torch -import torch.nn as nn -import torch.nn.functional as F - - -class ConvMaxpool(nn.Module): - r""" - 集合了Convolution和Max-Pooling于一体的层。给定一个batch_size x max_len x input_size的输入,返回batch_size x - sum(output_channels) 大小的matrix。在内部,是先使用CNN给输入做卷积,然后经过activation激活层,在通过在长度(max_len) - 这一维进行max_pooling。最后得到每个sample的一个向量表示。 - - """ - - def __init__(self, in_channels, out_channels, kernel_sizes, activation="relu"): - r""" - - :param int in_channels: 输入channel的大小,一般是embedding的维度; 或encoder的output维度 - :param int,tuple(int) out_channels: 输出channel的数量。如果为list,则需要与kernel_sizes的数量保持一致 - :param int,tuple(int) kernel_sizes: 输出channel的kernel大小。 - :param str activation: Convolution后的结果将通过该activation后再经过max-pooling。支持relu, sigmoid, tanh - """ - super(ConvMaxpool, self).__init__() - - for kernel_size in kernel_sizes: - assert kernel_size % 2 == 1, "kernel size has to be odd numbers." 
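        # (Editorial note: odd kernel sizes are required because each Conv1d
        # below uses padding=ks // 2 with stride 1, which preserves the length
        # max_len exactly only when ks is odd, so the per-width outputs can be
        # max-pooled and concatenated. The assert also assumes kernel_sizes is
        # already iterable; a bare int is converted to a list only in the
        # branch below.)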
- - # convolution - if isinstance(kernel_sizes, (list, tuple, int)): - if isinstance(kernel_sizes, int) and isinstance(out_channels, int): - out_channels = [out_channels] - kernel_sizes = [kernel_sizes] - elif isinstance(kernel_sizes, (tuple, list)) and isinstance(out_channels, (tuple, list)): - assert len(out_channels) == len( - kernel_sizes), "The number of out_channels should be equal to the number" \ - " of kernel_sizes." - else: - raise ValueError("The type of out_channels and kernel_sizes should be the same.") - - self.convs = nn.ModuleList([nn.Conv1d( - in_channels=in_channels, - out_channels=oc, - kernel_size=ks, - stride=1, - padding=ks // 2, - dilation=1, - groups=1, - bias=None) - for oc, ks in zip(out_channels, kernel_sizes)]) - - else: - raise Exception( - 'Incorrect kernel sizes: should be list, tuple or int') - - # activation function - if activation == 'relu': - self.activation = F.relu - elif activation == 'sigmoid': - self.activation = F.sigmoid - elif activation == 'tanh': - self.activation = F.tanh - else: - raise Exception( - "Undefined activation function: choose from: relu, tanh, sigmoid") - - def forward(self, x, mask=None): - r""" - - :param torch.FloatTensor x: batch_size x max_len x input_size, 一般是经过embedding后的值 - :param mask: batch_size x max_len, pad的地方为0。不影响卷积运算,max-pool一定不会pool到pad为0的位置 - :return: - """ - # [N,L,C] -> [N,C,L] - x = torch.transpose(x, 1, 2) - # convolution - xs = [self.activation(conv(x)) for conv in self.convs] # [[N,C,L], ...] - if mask is not None: - mask = mask.unsqueeze(1) # B x 1 x L - xs = [x.masked_fill_(mask.eq(False), float('-inf')) for x in xs] - # max-pooling - xs = [F.max_pool1d(input=i, kernel_size=i.size(2)).squeeze(2) - for i in xs] # [[N, C], ...] - return torch.cat(xs, dim=-1) # [N, C] diff --git a/fastNLP/modules/encoder/gpt2.py b/fastNLP/modules/encoder/gpt2.py deleted file mode 100644 index f6bf6dde..00000000 --- a/fastNLP/modules/encoder/gpt2.py +++ /dev/null @@ -1,1073 +0,0 @@ -r""" - -""" - -__all__ = ['GPT2Model'] - - -from torch import nn -import torch -from fastNLP.core import logger -import os -import copy -import json -import math -from torch.nn import CrossEntropyLoss -from fastNLP.io.file_utils import _get_file_name_base_on_postfix - -from ..decoder.seq2seq_decoder import Seq2SeqDecoder, State -from ..generator.seq2seq_generator import SequenceGenerator - - -GELU_CONSTANT = math.sqrt(2 / math.pi) - - -from ...io.file_utils import _get_gpt2_dir - - -class GPT2Config: - """Configuration class to store the configuration of a `GPT2Model`. - - Args: - vocab_size: Vocabulary size of `inputs_ids` in `GPT2Model` or a configuration json file. - n_positions: Number of positional embeddings. - n_ctx: Size of the causal mask (usually same as n_positions). - n_embd: Dimensionality of the embeddings and hidden states. - n_layer: Number of hidden layers in the Transformer encoder. - n_head: Number of attention heads for each attention layer in - the Transformer encoder. - layer_norm_epsilon: epsilon to use in the layer norm layers - resid_pdrop: The dropout probabilitiy for all fully connected - layers in the embeddings, encoder, and pooler. - attn_pdrop: The dropout ratio for the attention - probabilities. - embd_pdrop: The dropout ratio for the embeddings. - initializer_range: The sttdev of the truncated_normal_initializer for - initializing all weight matrices. 
- """ - - def __init__( - self, - vocab_size=50257, - n_positions=1024, - n_ctx=1024, - n_embd=768, - n_layer=12, - n_head=12, - resid_pdrop=0.1, - embd_pdrop=0.1, - attn_pdrop=0.1, - layer_norm_epsilon=1e-5, - initializer_range=0.02, - summary_type="cls_index", - summary_use_proj=True, - summary_activation=None, - summary_proj_to_labels=True, - summary_first_dropout=0.1, - **kwargs - ): - """Constructs GPT2Config. - - Args: - vocab_size: Vocabulary size of `inputs_ids` in `GPT2Model` or a configuration json file. - n_positions: Number of positional embeddings. - n_ctx: Size of the causal mask (usually same as n_positions). - n_embd: Dimensionality of the embeddings and hidden states. - n_layer: Number of hidden layers in the Transformer encoder. - n_head: Number of attention heads for each attention layer in - the Transformer encoder. - layer_norm_epsilon: epsilon to use in the layer norm layers - resid_pdrop: The dropout probabilitiy for all fully connected - layers in the embeddings, encoder, and pooler. - attn_pdrop: The dropout ratio for the attention - probabilities. - embd_pdrop: The dropout ratio for the embeddings. - initializer_range: The sttdev of the truncated_normal_initializer for - initializing all weight matrices. - """ - self.output_attentions = kwargs.pop("output_attentions", False) - self.output_hidden_states = kwargs.pop("output_hidden_states", False) - self.output_past = kwargs.pop("output_past", True) # Not used by all models - self.torchscript = kwargs.pop("torchscript", False) # Only used by PyTorch models - self.use_bfloat16 = kwargs.pop("use_bfloat16", False) - self.pruned_heads = kwargs.pop("pruned_heads", {}) - - # Is decoder is used in encoder-decoder models to differentiate encoder from decoder - self.is_decoder = kwargs.pop("is_decoder", False) - - # Parameters for sequence generation - self.max_length = kwargs.pop("max_length", 20) - self.do_sample = kwargs.pop("do_sample", False) - self.num_beams = kwargs.pop("num_beams", 1) - self.temperature = kwargs.pop("temperature", 1.0) - self.top_k = kwargs.pop("top_k", 50) - self.top_p = kwargs.pop("top_p", 1.0) - self.repetition_penalty = kwargs.pop("repetition_penalty", 1.0) - self.bos_token_id = kwargs.pop("bos_token_id", 0) - self.pad_token_id = kwargs.pop("pad_token_id", 0) - self.eos_token_ids = kwargs.pop("eos_token_ids", 0) - self.length_penalty = kwargs.pop("length_penalty", 1.0) - self.num_return_sequences = kwargs.pop("num_return_sequences", 1) - - # Fine-tuning task arguments - self.finetuning_task = kwargs.pop("finetuning_task", None) - self.num_labels = kwargs.pop("num_labels", 2) - self.id2label = kwargs.pop("id2label", {i: "LABEL_{}".format(i) for i in range(self.num_labels)}) - self.id2label = dict((int(key), value) for key, value in self.id2label.items()) - self.label2id = kwargs.pop("label2id", dict(zip(self.id2label.values(), self.id2label.keys()))) - self.label2id = dict((key, int(value)) for key, value in self.label2id.items()) - - # Additional attributes without default values - for key, value in kwargs.items(): - try: - setattr(self, key, value) - except AttributeError as err: - logger.error("Can't set {} with value {} for {}".format(key, value, self)) - raise err - - self.vocab_size = vocab_size - self.n_ctx = n_ctx - self.n_positions = n_positions - self.n_embd = n_embd - self.n_layer = n_layer - self.n_head = n_head - self.resid_pdrop = resid_pdrop - self.embd_pdrop = embd_pdrop - self.attn_pdrop = attn_pdrop - self.layer_norm_epsilon = layer_norm_epsilon - self.initializer_range = 
initializer_range - self.summary_type = summary_type - self.summary_use_proj = summary_use_proj - self.summary_activation = summary_activation - self.summary_first_dropout = summary_first_dropout - self.summary_proj_to_labels = summary_proj_to_labels - - @property - def max_position_embeddings(self): - return self.n_positions - - @property - def hidden_size(self): - return self.n_embd - - @property - def num_attention_heads(self): - return self.n_head - - @property - def num_hidden_layers(self): - return self.n_layer - - def save_pretrained(self, save_directory): - """ Save a configuration object to the directory `save_directory`, so that it - can be re-loaded using the :func:`~transformers.PretrainedConfig.from_pretrained` class method. - """ - assert os.path.isdir( - save_directory - ), "Saving path should be a directory where the model and configuration can be saved" - - # If we save using the predefined names, we can load using `from_pretrained` - output_config_file = os.path.join(save_directory, 'config.json') - - self.to_json_file(output_config_file) - - def to_json_file(self, json_file_path): - """ Save this instance to a json file.""" - with open(json_file_path, "w", encoding="utf-8") as writer: - writer.write(self.to_json_string()) - - def to_dict(self): - """Serializes this instance to a Python dictionary.""" - output = copy.deepcopy(self.__dict__) - return output - - def to_json_string(self): - """Serializes this instance to a JSON string.""" - return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n" - - @classmethod - def from_json_file(cls, json_file): - """Constructs a `Config` from a json file of parameters.""" - with open(json_file, "r", encoding="utf-8") as reader: - text = reader.read() - dict_obj = json.loads(text) - return cls(**dict_obj) - - @classmethod - def from_pretrained(cls, model_dir_or_name, **kwargs): - r""" Instantiate a :class:`~transformers.PretrainedConfig` (or a derived class) from a pre-trained model configuration. - - Parameters: - model_dir_or_name: - - """ - model_dir = _get_gpt2_dir(model_dir_or_name) - tokenizer_config_file = _get_file_name_base_on_postfix(model_dir, 'config.json') - - config = cls.from_json_file(tokenizer_config_file) - - # if resolved_config_file == config_file: - # logger.info("loading configuration file {}".format(config_file)) - # else: - # logger.info("loading configuration file {} from cache at {}".format(config_file, resolved_config_file)) - - if hasattr(config, "pruned_heads"): - config.pruned_heads = dict((int(key), value) for key, value in config.pruned_heads.items()) - - # Update config with kwargs if needed - to_remove = [] - for key, value in kwargs.items(): - if hasattr(config, key): - setattr(config, key, value) - to_remove.append(key) - for key in to_remove: - kwargs.pop(key, None) - - return config - - -def gelu(x): - return 0.5 * x * (1 + torch.tanh(GELU_CONSTANT * (x + 0.044715 * torch.pow(x, 3)))) - - -def prune_conv1d_layer(layer, index, dim=1): - """ Prune a Conv1D layer (a model parameters) to keep only entries in index. - A Conv1D work as a Linear layer (see e.g. BERT) but the weights are transposed. - Return the pruned layer as a new layer with requires_grad=True. - Used to remove heads. 
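    (Illustrative numbers, not from the original docs: for a 768-dim GPT-2
    attention with 64-dim heads, pruning one head of c_proj with dim=0 keeps
    a weight of shape (704, 768), i.e. the input features shrink by exactly
    one head's width.)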
- """ - index = index.to(layer.weight.device) - W = layer.weight.index_select(dim, index).clone().detach() - if dim == 0: - b = layer.bias.clone().detach() - else: - b = layer.bias[index].clone().detach() - new_size = list(layer.weight.size()) - new_size[dim] = len(index) - new_layer = Conv1D(new_size[1], new_size[0]).to(layer.weight.device) - new_layer.weight.requires_grad = False - new_layer.weight.copy_(W.contiguous()) - new_layer.weight.requires_grad = True - new_layer.bias.requires_grad = False - new_layer.bias.copy_(b.contiguous()) - new_layer.bias.requires_grad = True - return new_layer - - -class Attention(nn.Module): - def __init__(self, nx, n_ctx, config, scale=False): - super(Attention, self).__init__() - - n_state = nx # in Attention: n_state=768 (nx=n_embd) - # [switch nx => n_state from Block to Attention to keep identical to TF implem] - assert n_state % config.n_head == 0 - self.register_buffer("bias", torch.tril(torch.ones(n_ctx, n_ctx)).view(1, 1, n_ctx, n_ctx)) - self.n_head = config.n_head - self.split_size = n_state - self.scale = scale - - self.c_attn = Conv1D(n_state * 3, nx) - self.c_proj = Conv1D(n_state, nx) - self.attn_dropout = nn.Dropout(config.attn_pdrop) - self.resid_dropout = nn.Dropout(config.resid_pdrop) - self.pruned_heads = set() - - def prune_heads(self, heads): - if len(heads) == 0: - return - mask = torch.ones(self.n_head, self.split_size // self.n_head) - heads = set(heads) - self.pruned_heads # Convert to set and emove already pruned heads - for head in heads: - # Compute how many pruned heads are before the head and move the index accordingly - head = head - sum(1 if h < head else 0 for h in self.pruned_heads) - mask[head] = 0 - mask = mask.view(-1).contiguous().eq(1) - index = torch.arange(len(mask))[mask].long() - index_attn = torch.cat([index, index + self.split_size, index + (2 * self.split_size)]) - - # Prune conv1d layers - self.c_attn = prune_conv1d_layer(self.c_attn, index_attn, dim=1) - self.c_proj = prune_conv1d_layer(self.c_proj, index, dim=0) - - # Update hyper params - self.split_size = (self.split_size // self.n_head) * (self.n_head - len(heads)) - self.n_head = self.n_head - len(heads) - self.pruned_heads = self.pruned_heads.union(heads) - - def _attn(self, q, k, v, attention_mask=None, head_mask=None): - w = torch.matmul(q, k) # batch_size x n_head x pre_len x (past_len+pre_len) - if self.scale: - w = w / math.sqrt(v.size(-1)) - nd, ns = w.size(-2), w.size(-1) - b = self.bias[:, :, ns - nd : ns, :ns] # 1 x 1 x pre_len x (past_len + pre_len) - w = w * b - 1e4 * (1 - b) # batch_size x n_head x pre_len x (past_len + pre_len) - - if attention_mask is not None: - # Apply the attention mask - w = w + attention_mask - - w = nn.Softmax(dim=-1)(w) - w = self.attn_dropout(w) - - # Mask heads if we want to - if head_mask is not None: - w = w * head_mask - - outputs = [torch.matmul(w, v)] - outputs.append(w) - return outputs - - def merge_heads(self, x): - x = x.permute(0, 2, 1, 3).contiguous() - new_x_shape = x.size()[:-2] + (x.size(-2) * x.size(-1),) - return x.view(*new_x_shape) # in Tensorflow implem: fct merge_states - - def split_heads(self, x, k=False): - new_x_shape = x.size()[:-1] + (self.n_head, x.size(-1) // self.n_head) - x = x.view(*new_x_shape) # in Tensorflow implem: fct split_states - if k: - return x.permute(0, 2, 3, 1) # (batch, head, head_features, seq_length) - else: - return x.permute(0, 2, 1, 3) # (batch, head, seq_length, head_features) - - def forward(self, x, layer_past=None, attention_mask=None, head_mask=None): - x = 
self.c_attn(x) - query, key, value = x.split(self.split_size, dim=2) - query = self.split_heads(query) # (batch, head, seq_length, head_features) - key = self.split_heads(key, k=True) - value = self.split_heads(value) - if layer_past is not None: - past_key, past_value = layer_past[0].transpose(-2, -1), layer_past[1] # transpose back cf below - # key: (batch, head, head_features, seq_length) - key = torch.cat((past_key, key), dim=-1) - # value: (batch, head, seq_length, head_features) - value = torch.cat((past_value, value), dim=-2) - present = torch.stack((key.transpose(-2, -1), value)) # transpose to have same shapes for stacking - - attn_outputs = self._attn(query, key, value, attention_mask, head_mask) - a = attn_outputs[0] - - a = self.merge_heads(a) - a = self.c_proj(a) - a = self.resid_dropout(a) - - outputs = [a, present] + attn_outputs[1:] - return outputs # a, present, (attentions) - - -class Conv1D(nn.Module): - def __init__(self, nf, nx): - """ Conv1D layer as defined by Radford et al. for OpenAI GPT (and also used in GPT-2) - Basically works like a Linear layer but the weights are transposed - """ - super(Conv1D, self).__init__() - self.nf = nf - w = torch.empty(nx, nf) - nn.init.normal_(w, std=0.02) - self.weight = nn.Parameter(w) - self.bias = nn.Parameter(torch.zeros(nf)) - - def forward(self, x): - size_out = x.size()[:-1] + (self.nf,) - x = torch.addmm(self.bias, x.view(-1, x.size(-1)), self.weight) - x = x.view(*size_out) - return x - - -class MLP(nn.Module): - def __init__(self, n_state, config): # in MLP: n_state=3072 (4 * n_embd) - super(MLP, self).__init__() - nx = config.n_embd - self.c_fc = Conv1D(n_state, nx) - self.c_proj = Conv1D(nx, n_state) - self.act = gelu - self.dropout = nn.Dropout(config.resid_pdrop) - - def forward(self, x): - h = self.act(self.c_fc(x)) - h2 = self.c_proj(h) - return self.dropout(h2) - - -class Block(nn.Module): - def __init__(self, n_ctx, config, scale=False): - super(Block, self).__init__() - nx = config.n_embd - self.ln_1 = nn.LayerNorm(nx, eps=config.layer_norm_epsilon) - self.attn = Attention(nx, n_ctx, config, scale) - self.ln_2 = nn.LayerNorm(nx, eps=config.layer_norm_epsilon) - self.mlp = MLP(4 * nx, config) - - def forward(self, x, layer_past=None, attention_mask=None, head_mask=None): - output_attn = self.attn( - self.ln_1(x), layer_past=layer_past, attention_mask=attention_mask, head_mask=head_mask - ) - a = output_attn[0] # output_attn: a, present, (attentions) - - x = x + a - m = self.mlp(self.ln_2(x)) - x = x + m - - outputs = [x] + output_attn[1:] - return outputs # x, present, (attentions) - - -class GPT2PreTrainedModel(nn.Module): - """ An abstract class to handle weights initialization and - a simple interface for dowloading and loading pretrained models. - """ - - config_class = GPT2Config - base_model_prefix = "transformer" - - def _init_weights(self, module): - """ Initialize the weights. 
- """ - if isinstance(module, (nn.Linear, nn.Embedding, Conv1D)): - # Slightly different from the TF version which uses truncated_normal for initialization - # cf https://github.com/pytorch/pytorch/pull/5617 - module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) - if isinstance(module, (nn.Linear, Conv1D)) and module.bias is not None: - module.bias.data.zero_() - elif isinstance(module, nn.LayerNorm): - module.bias.data.zero_() - module.weight.data.fill_(1.0) - - def __init__(self, config, *inputs, **kwargs): - super().__init__() - if not isinstance(config, GPT2Config): - raise ValueError( - "Parameter config in `{}(config)` should be an instance of class `PretrainedConfig`. " - "To create a model from a pretrained model use " - "`model = {}.from_pretrained(PRETRAINED_MODEL_NAME)`".format( - self.__class__.__name__, self.__class__.__name__ - ) - ) - # Save config in model - self.config = config - - @property - def base_model(self): - return getattr(self, self.base_model_prefix, self) - - def get_input_embeddings(self): - """ Get model's input embeddings - """ - base_model = getattr(self, self.base_model_prefix, self) - if base_model is not self: - return base_model.get_input_embeddings() - else: - raise NotImplementedError - - def set_input_embeddings(self, value): - """ Set model's input embeddings - """ - base_model = getattr(self, self.base_model_prefix, self) - if base_model is not self: - base_model.set_input_embeddings(value) - else: - raise NotImplementedError - - def get_output_embeddings(self): - """ Get model's output embeddings - Return None if the model doesn't have output embeddings - """ - return None # Overwrite for models with output embeddings - - def tie_weights(self): - """ Make sure we are sharing the input and output embeddings. - Export to TorchScript can't handle parameter sharing so we are cloning them instead. - """ - output_embeddings = self.get_output_embeddings() - if output_embeddings is not None: - self._tie_or_clone_weights(output_embeddings, self.get_input_embeddings()) - - def _tie_or_clone_weights(self, output_embeddings, input_embeddings): - """ Tie or clone module weights depending of weither we are using TorchScript or not - """ - if self.config.torchscript: - output_embeddings.weight = nn.Parameter(input_embeddings.weight.clone()) - else: - output_embeddings.weight = input_embeddings.weight - - if hasattr(output_embeddings, "bias") and output_embeddings.bias is not None: - output_embeddings.bias.data = torch.nn.functional.pad( - output_embeddings.bias.data, - (0, output_embeddings.weight.shape[0] - output_embeddings.bias.shape[0]), - "constant", - 0, - ) - if hasattr(output_embeddings, "out_features") and hasattr(input_embeddings, "num_embeddings"): - output_embeddings.out_features = input_embeddings.num_embeddings - - def init_weights(self): - """ Initialize and prunes weights if needed. """ - # Initialize weights - self.apply(self._init_weights) - - # Prune heads if needed - if self.config.pruned_heads: - self.prune_heads(self.config.pruned_heads) - - # Tie weights if needed - self.tie_weights() - - def prune_heads(self, heads_to_prune): - """ Prunes heads of the base model. - - Arguments: - - heads_to_prune: dict with keys being selected layer indices (`int`) and associated values being the list of heads to prune in said layer (list of `int`). - E.g. {1: [0, 2], 2: [2, 3]} will prune heads 0 and 2 on layer 1 and heads 2 and 3 on layer 2. 
- """ - # save new sets of pruned heads as union of previously stored pruned heads and newly pruned heads - for layer, heads in heads_to_prune.items(): - union_heads = set(self.config.pruned_heads.get(layer, [])) | set(heads) - self.config.pruned_heads[layer] = list(union_heads) # Unfortunately we have to store it as list for JSON - - self.base_model._prune_heads(heads_to_prune) - - def save_pretrained(self, save_directory): - """ Save a model and its configuration file to a directory, so that it - can be re-loaded using the `:func:`~transformers.PreTrainedModel.from_pretrained`` class method. - """ - assert os.path.isdir( - save_directory - ), "Saving path should be a directory where the model and configuration can be saved" - - # Only save the model itself if we are using distributed training - model_to_save = self.module if hasattr(self, "module") else self - - # Save configuration file - model_to_save.config.save_pretrained(save_directory) - - # If we save using the predefined names, we can load using `from_pretrained` - output_model_file = os.path.join(save_directory, "pytorch_model.bin") - torch.save(model_to_save.state_dict(), output_model_file) - logger.info("Model weights saved in {}".format(output_model_file)) - - @classmethod - def from_pretrained(cls, model_dir_or_name, *model_args, **kwargs): - r"""Instantiate a pretrained pytorch model from a pre-trained model configuration. - - The model is set in evaluation mode by default using ``model.eval()`` (Dropout modules are deactivated) - To train the model, you should first set it back in training mode with ``model.train()`` - - The warning ``Weights from XXX not initialized from pretrained model`` means that the weights of XXX do not come pre-trained with the rest of the model. - It is up to you to train those weights with a downstream fine-tuning task. - - The warning ``Weights from XXX not used in YYY`` means that the layer XXX is not used by YYY, therefore those weights are discarded. - - Parameters: - model_dir_or_name: either: - - - a string with the `shortcut name` of a pre-trained model to load from cache or download, e.g.: ``bert-base-uncased``. - - a string with the `identifier name` of a pre-trained model that was user-uploaded to our S3, e.g.: ``dbmdz/bert-base-german-cased``. - - a path to a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/``. - - a path or url to a `tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf`` should be set to True and a configuration object should be provided as ``config`` argument. This loading path is slower than converting the TensorFlow checkpoint in a PyTorch model using the provided conversion scripts and loading the PyTorch model afterwards. - - None if you are both providing the configuration and state dictionary (resp. with keyword arguments ``config`` and ``state_dict``) - - Examples:: - - model = BertModel.from_pretrained('bert-base-uncased') # Download model and configuration from S3 and cache. - model = BertModel.from_pretrained('./test/saved_model/') # E.g. 
model was saved using `save_pretrained('./test/saved_model/')` - model = BertModel.from_pretrained('bert-base-uncased', output_attention=True) # Update configuration during loading - assert model.config.output_attention == True - # Loading from a TF checkpoint file instead of a PyTorch model (slower) - config = BertConfig.from_json_file('./tf_model/my_tf_model_config.json') - model = BertModel.from_pretrained('./tf_model/my_tf_checkpoint.ckpt.index', from_tf=True, config=config) - - """ - config = kwargs.pop("config", None) - state_dict = kwargs.pop("state_dict", None) - - model_dir = _get_gpt2_dir(model_dir_or_name) - - # Load config if we don't provide a configuration - model_kwargs = {} - if not isinstance(config, GPT2Config): - config = cls.config_class.from_pretrained( - model_dir, - *model_args, - **kwargs - ) - else: - model_kwargs = kwargs - - # Instantiate model. - model = cls(config, *model_args, **model_kwargs) - - model_path = _get_file_name_base_on_postfix(model_dir, 'model.bin') - state_dict = torch.load(model_path, map_location="cpu") - - missing_keys = [] - unexpected_keys = [] - error_msgs = [] - - # Convert old format to new format if needed from a PyTorch state_dict - old_keys = [] - new_keys = [] - for key in state_dict.keys(): - new_key = None - if "gamma" in key: - new_key = key.replace("gamma", "weight") - if "beta" in key: - new_key = key.replace("beta", "bias") - if new_key: - old_keys.append(key) - new_keys.append(new_key) - for old_key, new_key in zip(old_keys, new_keys): - state_dict[new_key] = state_dict.pop(old_key) - - # copy state_dict so _load_from_state_dict can modify it - metadata = getattr(state_dict, "_metadata", None) - state_dict = state_dict.copy() - if metadata is not None: - state_dict._metadata = metadata - - # PyTorch's `_load_from_state_dict` does not copy parameters in a module's descendants - # so we need to apply the function recursively. - def load(module, prefix=""): - local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {}) - module._load_from_state_dict( - state_dict, prefix, local_metadata, True, missing_keys, unexpected_keys, error_msgs - ) - for name, child in module._modules.items(): - if child is not None: - load(child, prefix + name + ".") - - # Make sure we are able to load base models as well as derived models (with heads) - start_prefix = "" - model_to_load = model - if not hasattr(model, cls.base_model_prefix) and any( - s.startswith(cls.base_model_prefix) for s in state_dict.keys() - ): - start_prefix = cls.base_model_prefix + "." 
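        # (Editorial note: the two branches cover mismatched prefixes in either
        # direction -- a bare base model receiving `transformer.*`-prefixed keys
        # loads with start_prefix set, while a model with a head receiving bare
        # keys loads into its `transformer` submodule instead.)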
-        if hasattr(model, cls.base_model_prefix) and not any(
-            s.startswith(cls.base_model_prefix) for s in state_dict.keys()
-        ):
-            model_to_load = getattr(model, cls.base_model_prefix)
-
-        load(model_to_load, prefix=start_prefix)
-        if len(missing_keys) > 0:
-            logger.info(
-                "Weights of {} not initialized from pretrained model: {}".format(
-                    model.__class__.__name__, missing_keys
-                )
-            )
-        if len(unexpected_keys) > 0:
-            logger.info(
-                "Weights from pretrained model not used in {}: {}".format(
-                    model.__class__.__name__, unexpected_keys
-                )
-            )
-        if len(error_msgs) > 0:
-            raise RuntimeError(
-                "Error(s) in loading state_dict for {}:\n\t{}".format(
-                    model.__class__.__name__, "\n\t".join(error_msgs)
-                )
-            )
-
-        model.tie_weights()  # make sure word embedding weights are still tied if needed
-
-        # Set model in evaluation mode to deactivate DropOut modules by default
-        model.eval()
-
-        return model
-
-    def prepare_inputs_for_generation(self, input_ids, **kwargs):
-        return {"input_ids": input_ids, **kwargs}
-
-    @torch.no_grad()
-    def generate(
-            self,
-            input_ids,
-            max_length=None,
-            do_sample=None,
-            num_beams=None,
-            temperature=None,
-            top_k=None,
-            top_p=None,
-            repetition_penalty=None,
-            bos_token_id=None,
-            pad_token_id=None,
-            eos_token_ids=None,
-            length_penalty=None):
-        """ Sequence generator for models with a LM head.
-
-        The method currently supports greedy or penalized greedy decoding, sampling with top-k or nucleus sampling
-        and beam-search.
-
-        Params:
-            **input_ids**: (`optional`) `torch.LongTensor` of shape (1, sequence_length)
-                The sequence used as a prompt for the generation. If `None` the method initializes
-                it as an empty `torch.LongTensor` of shape (1,)
-            **max_length**: (`optional`) int
-                The max length of the sequence to be generated. Between 1 and infinity. Default to 20.
-            **do_sample**: (`optional`) bool
-                If set to `False` we use greedy decoding; otherwise sampling. Default to greedy decoding.
-            **num_beams**: (`optional`) int
-                Number of beams for beam search. 1 means no beam search. Default to 1.
-            **temperature**: (`optional`) float
-                The value used to modulate the next token probabilities.
-            **top_k**: (`optional`) int
-                The number of highest probability vocabulary tokens to keep for top-k-filtering. Between 1 and infinity. Default to 50.
-            **top_p**: (`optional`) float
-                The cumulative probability threshold for nucleus sampling: only the most probable tokens whose probabilities add up to `top_p` are kept. Must be between 0 and 1. Default to 1.
-            **repetition_penalty**: (`optional`) float
-                The parameter for repetition penalty. Between 1.0 and +infinity. 1.0 means no penalty. Default to 1.
-            **bos_token_id**: (`optional`) int
-                Beginning of sentence token if no prompt is provided. Default to 0.
-            **eos_token_ids**: (`optional`) int or list of int
-                End of sequence token or list of tokens to stop the generation. Default to 0.
-            **length_penalty**: (`optional`) float
-                Exponential penalty to the length. Default to 1.
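        Example (an editorial sketch, not from the original docstring; assumes a
        subclass with a LM head and an already-tokenized prompt)::

            prompt = torch.LongTensor([[464, 3290]])  # batch_size 1, two token ids
            output_ids = model.generate(prompt, max_length=20, do_sample=True,
                                        top_k=50, top_p=0.9, temperature=1.0,
                                        num_beams=1, repetition_penalty=1.0,
                                        bos_token_id=0, pad_token_id=0,
                                        eos_token_ids=0, length_penalty=1.0)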
- """ - decoder = _GPT2Decoder(self) - generator = SequenceGenerator(decoder=decoder, max_length=max_length, num_beams=num_beams, - do_sample=do_sample, temperature=temperature, top_k=top_k, top_p=top_p, - bos_token_id=bos_token_id, eos_token_id=eos_token_ids, - repetition_penalty=repetition_penalty, length_penalty=length_penalty, - pad_token_id=pad_token_id) - results = generator.generate(tokens=input_ids, state=GPT2State()) - return results - - -class GPT2Model(GPT2PreTrainedModel): - r""" - Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs: - **last_hidden_state**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, hidden_size)`` - Sequence of hidden-states at the last layer of the model. - **past**: - list of ``torch.FloatTensor`` (one for each layer) of shape ``(2, batch_size, num_heads, sequence_length, embed_size_per_head)``: - that contains pre-computed hidden-states (key and values in the attention blocks). - Can be used (see `past` input) to speed up sequential decoding. The token ids which have their past given to this model - should not be passed as input ids as they have already been computed. - **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``) - list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings) - of shape ``(batch_size, sequence_length, hidden_size)``: - Hidden-states of the model at the output of each layer plus the initial embedding outputs. - **attentions**: (`optional`, returned when ``config.output_attentions=True``) - list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: - Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. - - Examples:: - - tokenizer = GPT2Tokenizer.from_pretrained('gpt2') - model = GPT2Model.from_pretrained('gpt2') - input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute", add_special_tokens=True)).unsqueeze(0) # Batch size 1 - outputs = model(input_ids) - last_hidden_states = outputs[0] # The last hidden-state is the first element of the output tuple - - """ - - def __init__(self, config): - super().__init__(config) - - self.wte = nn.Embedding(config.vocab_size, config.n_embd) - self.wpe = nn.Embedding(config.n_positions, config.n_embd) - self.drop = nn.Dropout(config.embd_pdrop) - self.h = nn.ModuleList([Block(config.n_ctx, config, scale=True) for _ in range(config.n_layer)]) - self.ln_f = nn.LayerNorm(config.n_embd, eps=config.layer_norm_epsilon) - - self.init_weights() - - def get_input_embeddings(self): - return self.wte - - def set_input_embeddings(self, new_embeddings): - self.wte = new_embeddings - - def _prune_heads(self, heads_to_prune): - """ Prunes heads of the model. - heads_to_prune: dict of {layer_num: list of heads to prune in this layer} - """ - for layer, heads in heads_to_prune.items(): - self.h[layer].attn.prune_heads(heads) - - @property - def dtype(self): - """ - :obj:`torch.dtype`: The dtype of the module (assuming that all the module parameters have the same dtype). 
- """ - try: - return next(self.parameters()).dtype - except StopIteration: - # For nn.DataParallel compatibility in PyTorch 1.5 - - def find_tensor_attributes(module: nn.Module): - tuples = [(k, v) for k, v in module.__dict__.items() if torch.is_tensor(v)] - return tuples - - gen = self._named_members(get_members_fn=find_tensor_attributes) - first_tuple = next(gen) - return first_tuple[1].dtype - - def forward(self, input_ids, state=None, attention_mask=None, token_type_ids=None, position_ids=None, - head_mask=None, output_attentions=True): - """ - - :param torch.LongTensor input_ids: batch_size x max_len or batch_size x beam_size x 1 - :param GPT2State state: 之前的状态 - :param torch.ByteTensor attention_mask: batch_size x (pre_len+past_len), 与input_ids与state的concat一样大。 - 为0的地方为padding。 - :param torch.LongTensor token_type_ids: batch_size x max_len。 - :param torch.LongTensor position_ids: 与input_ids对应的位置 - :param head_mask: - :param bool output_attentions: 是否输出attention状态 - :return: - """ - input_shape = input_ids.size() # batch_size x max_len 或 batch_size x beam_size x 1 - input_ids = input_ids.view(-1, input_shape[-1]) # input_shape是 batch_size' x max_len - - if token_type_ids is not None: - token_type_ids = token_type_ids.view(-1, input_shape[-1]) - if position_ids is not None: - position_ids = position_ids.view(-1, input_shape[-1]) - - if state is None or len(state)==0: - past_length = 0 - state = [None] * len(self.h) # len(self.h) 是layer的层数 - else: - past_length = state[0][0].size(-2) - if position_ids is None: # 如果没有position id则生成 - device = input_ids.device - position_ids = torch.arange(past_length, input_shape[-1] + past_length, dtype=torch.long, device=device) - position_ids = position_ids.unsqueeze(0).view(-1, input_shape[-1]) - - # Attention mask. - if attention_mask is not None: - attention_mask = attention_mask.view(-1, input_shape[-1]) - # We create a 3D attention mask from a 2D tensor mask. - # Sizes are [batch_size, 1, 1, to_seq_length] - # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length] - # this attention mask is more simple than the triangular masking of causal attention - # used in OpenAI GPT, we just need to prepare the broadcast dimension here. - attention_mask = attention_mask.unsqueeze(1).unsqueeze(2) - - # Since attention_mask is 1.0 for positions we want to attend and 0.0 for - # masked positions, this operation will create a tensor which is 0.0 for - # positions we want to attend and -10000.0 for masked positions. - # Since we are adding it to the raw scores before the softmax, this is - # effectively the same as removing these entirely. 
-            # this will cause an issue with DataParallel: https://github.com/pytorch/pytorch/issues/40457#issuecomment-648396469
-            # attention_mask = attention_mask.to(dtype=next(self.parameters()).dtype)  # fp16 compatibility
-            attention_mask = attention_mask.to(self.dtype)
-            attention_mask = (1.0 - attention_mask) * -10000.0
-            # attention_mask = attention_mask.masked_fill(attention_mask.eq(0), -10000.0)
-
-        # Prepare head mask if needed
-        # 1.0 in head_mask indicates we keep the head
-        # attention_probs has shape bsz x n_heads x N x N
-        # head_mask has shape n_layer x batch x n_heads x N x N
-        if head_mask is not None:
-            if head_mask.dim() == 1:
-                head_mask = head_mask.unsqueeze(0).unsqueeze(0).unsqueeze(-1).unsqueeze(-1)
-                head_mask = head_mask.expand(self.config.n_layer, -1, -1, -1, -1)
-            elif head_mask.dim() == 2:
-                head_mask = (
-                    head_mask.unsqueeze(1).unsqueeze(-1).unsqueeze(-1)
-                )  # We can specify head_mask for each layer
-            head_mask = head_mask.to(
-                dtype=next(self.parameters()).dtype
-            )  # switch to float if needed + fp16 compatibility
-        else:
-            head_mask = [None] * self.config.n_layer
-
-        inputs_embeds = self.wte(input_ids)
-        position_embeds = self.wpe(position_ids)
-        if token_type_ids is not None:
-            token_type_embeds = self.wte(token_type_ids)
-        else:
-            token_type_embeds = 0
-        hidden_states = inputs_embeds + position_embeds + token_type_embeds
-        hidden_states = self.drop(hidden_states)
-
-        # batch_size x max_len x embed_size
-        output_shape = input_shape + (hidden_states.size(-1),)
-
-        presents = ()
-        all_attentions = []
-        all_hidden_states = ()
-        for i, (block, layer_past) in enumerate(zip(self.h, state)):
-            all_hidden_states = all_hidden_states + (hidden_states.view(*output_shape),)
-
-            outputs = block(
-                hidden_states, layer_past=layer_past, attention_mask=attention_mask, head_mask=head_mask[i]
-            )
-
-            hidden_states, present = outputs[:2]
-            presents = presents + (present,)
-
-            all_attentions.append(outputs[2])
-
-        hidden_states = self.ln_f(hidden_states)
-
-        hidden_states = hidden_states.view(*output_shape)
-        # Add last hidden state
-        all_hidden_states = all_hidden_states + (hidden_states,)
-
-        outputs = (hidden_states,)
-        outputs = outputs + (presents,)
-
-        outputs = outputs + (all_hidden_states,)
-        if output_attentions:
-            # let the number of heads free (-1) so we can extract attention even after head pruning
-            attention_output_shape = input_shape[:-1] + (-1,) + all_attentions[0].shape[-2:]
-            all_attentions = tuple(t.view(*attention_output_shape) for t in all_attentions)
-            outputs = outputs + (all_attentions,)
-        # Shapes of all outputs:
-        # last hidden states, Tensor: batch_size x max_len x embed_size
-        # presents, tuple: n_layer x 2 x batch_size x n_head x (max_len+past_len) x head_dim; in the dim of size 2, the first slice is the key and the second the value
-        # all hidden states, tuple: n_layer x batch_size x max_len x embed_size
-        # attentions, tuple: n_layer x batch_size x n_head' x src_len x tgt_len
-        return outputs  # last hidden state, presents, all hidden_states, (attentions)
-
-
-class GPT2State(State):
-    def __init__(self):
-        super().__init__(None, None)
-        self.state = None  # tuple [n_layer, 2 x batch_size x n_head x past_len x head_dim]
-
-    @property
-    def num_samples(self):
-        if self.state is not None:
-            return self.state[0].size(1)
-        return None
-
-    @property
-    def decode_length(self):
-        if self.state is None:
-            return 0
-        return self.state[0].size(-2)
-
-    def reorder_state(self, indices):
-        if self.state:
-            for i in range(len(self.state)):
-                assert self.state[i] is not None
-                self.state[i] = self.state[i].index_select(index=indices, dim=1)
-
-    def __iter__(self):
-        for p in self.state:
-            yield p
-
-    def __getitem__(self, item):
-        assert isinstance(item, int)
-        return self.state[item]
-
-    def __len__(self):
-        if self.state is not None:
-            return len(self.state)
-        return 0
-
-
-class _GPT2Decoder(Seq2SeqDecoder):
-    """
-    Wraps GPT-2 so that it can be used inside SequenceGenerator.
-    """
-    def __init__(self, gpt_model):
-        super().__init__()
-        self.gpt_model = gpt_model
-
-    def decode(self, tokens, state=None) -> torch.Tensor:
-        if state is None:
-            state = GPT2State()
-        lm_logits, presents, _ = self.gpt_model(input_ids=tokens[:, state.decode_length:],
-                                                state=state,
-                                                attention_mask=None,
-                                                token_type_ids=None,
-                                                position_ids=None,
-                                                head_mask=None,
-                                                output_attentions=False)
-        state.state = list(presents)
-        return lm_logits[:, -1]
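The decode() contract above is what makes incremental decoding work: each call feeds the model only the suffix of tokens that the cached key/value state has not seen yet, then advances the cache. A schematic greedy loop under that contract, using toy stand-ins rather than a real model:

    import torch

    class ToyState:
        # minimal stand-in for GPT2State: tracks how many positions are cached
        def __init__(self):
            self.cached = 0

    def toy_decode(tokens, state):
        new_suffix = tokens[:, state.cached:]       # feed only what the cache lacks
        state.cached = tokens.size(1)
        return torch.randn(new_suffix.size(0), 10)  # fake next-token logits

    tokens, state = torch.tensor([[0]]), ToyState()
    for _ in range(3):
        next_token = toy_decode(tokens, state).argmax(dim=-1, keepdim=True)
        tokens = torch.cat([tokens, next_token], dim=1)
    print(tokens.shape)  # torch.Size([1, 4])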
-
-
-class GPT2LMHeadModel(GPT2PreTrainedModel):
-    r"""
-    **labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
-        Labels for language modeling.
-        Note that the labels **are shifted** inside the model, i.e. you can set ``lm_labels = input_ids``
-        Indices are selected in ``[-100, 0, ..., config.vocab_size]``
-        All labels set to ``-100`` are ignored (masked), the loss is only
-        computed for labels in ``[0, ..., config.vocab_size]``
-
-    Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
-        **loss**: (`optional`, returned when ``labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``:
-            Language modeling loss.
-        **prediction_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, config.vocab_size)``
-            Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
-        **past**:
-            list of ``torch.FloatTensor`` (one for each layer) of shape ``(2, batch_size, num_heads, sequence_length, embed_size_per_head)``:
-            that contains pre-computed hidden-states (key and values in the attention blocks).
-            Can be used (see the ``state`` input) to speed up sequential decoding. The token ids which have their past given to this model
-            should not be passed as input ids as they have already been computed.
-        **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
-            list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
-            of shape ``(batch_size, sequence_length, hidden_size)``:
-            Hidden-states of the model at the output of each layer plus the initial embedding outputs.
-        **attentions**: (`optional`, returned when ``config.output_attentions=True``)
-            list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
-            Attention weights after the attention softmax, used to compute the weighted average in the self-attention heads.
-    """
-
-    def __init__(self, config):
-        super(GPT2LMHeadModel, self).__init__(config)
-        self.transformer = GPT2Model(config)
-        self.lm_head = nn.Linear(config.n_embd, config.vocab_size, bias=False)
-
-        self.init_weights()
-
-    def get_output_embeddings(self):
-        return self.lm_head
-
-    def get_input_embeddings(self):
-        return self.transformer.wte
-
-    def forward(self, input_ids, state=None, attention_mask=None, token_type_ids=None, position_ids=None,
-                head_mask=None, labels=None, output_attentions=False):
-        """
-
-        :param torch.LongTensor input_ids: batch_size x max_len or batch_size x beam_size x 1
-        :param tuple state: num_layers x 2 x batch_size x n_head x max_len' x head_dim. The presents from the previous step can be passed in here.
-        :param torch.ByteTensor attention_mask: batch_size x max_len, the same size as input_ids. Positions that are 0 are padding.
-        :param torch.LongTensor token_type_ids: batch_size x max_len.
-        :param torch.LongTensor position_ids: positions corresponding to input_ids
-        :param head_mask:
-        :param labels: targets the language model should predict. If None, no extra LM loss is computed. Padding positions are best set to -100 so that the LM loss is not computed over them.
-        :param output_attentions: whether to return the attention weights
-        :return:
-        """
-        transformer_outputs = self.transformer(
-            input_ids,
-            state=state,
-            attention_mask=attention_mask,
-            token_type_ids=token_type_ids,
-            position_ids=position_ids,
-            head_mask=head_mask,
-            output_attentions=output_attentions
-        )
-        hidden_states = transformer_outputs[0]
-
-        lm_logits = self.lm_head(hidden_states)
-
-        outputs = (lm_logits,) + transformer_outputs[1:]
-        if labels is not None:
-            # Shift so that tokens < n predict n
-            shift_logits = lm_logits[..., :-1, :].contiguous()
-            shift_labels = labels[..., 1:].contiguous()
-            # Flatten the tokens
-            loss_fct = CrossEntropyLoss()
-            loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1))
-            outputs = (loss,) + outputs
-
-        # Return values
-        # loss: torch.FloatTensor, absent if labels is None
-        # lm_logits: batch_size x max_len x vocab_size
-        # presents, tuple: n_layer x 2 x batch_size x n_head x (max_len+past_len) x head_dim; in the dim of size 2, the first slice is the key and the second the value
-        # all hidden states, tuple: n_layer x batch_size x max_len x embed_size
-        # attentions, tuple: n_layer x batch_size x n_head' x src_len x tgt_len
-        return outputs  # (loss), lm_logits, presents, all hidden_states, (attentions)
diff --git a/fastNLP/modules/encoder/lstm.py b/fastNLP/modules/encoder/lstm.py
deleted file mode 100644
index 06f8bbb7..00000000
--- a/fastNLP/modules/encoder/lstm.py
+++ /dev/null
@@ -1,84 +0,0 @@
-r"""undocumented
-A lightweight wrapper around the PyTorch LSTM module.
-Sequence lengths can be passed to forward so that padding is handled appropriately.
-"""
-
-__all__ = [
-    "LSTM"
-]
-
-import torch
-import torch.nn as nn
-import torch.nn.utils.rnn as rnn
-
-
-class LSTM(nn.Module):
-    r"""
-    LSTM module, a light wrapper around the PyTorch LSTM. When seq_len is provided, pack_padded_sequence is used
-    automatically; the forget-gate bias is initialized to 1 by default; and it works around the problems of using
-    an LSTM inside DataParallel.
-
-    """
-
-    def __init__(self, input_size, hidden_size=100, num_layers=1, dropout=0.0, batch_first=True,
-                 bidirectional=False, bias=True):
-        r"""
-
-        :param input_size: feature dimension of the input `x`
-        :param hidden_size: feature dimension of the hidden state `h`. If bidirectional is True, the output dimension is hidden_size*2
-        :param num_layers: number of RNN layers. Default: 1
-        :param dropout: dropout probability between layers. Default: 0
-        :param bidirectional: if ``True``, use a bidirectional RNN.
Default: ``False`` - :param batch_first: 若为 ``True``, 输入和输出 ``Tensor`` 形状为 - :(batch, seq, feature). Default: ``False`` - :param bias: 如果为 ``False``, 模型将不会使用bias. Default: ``True`` - """ - super(LSTM, self).__init__() - self.batch_first = batch_first - self.lstm = nn.LSTM(input_size, hidden_size, num_layers, bias=bias, batch_first=batch_first, - dropout=dropout, bidirectional=bidirectional) - self.init_param() - - def init_param(self): - for name, param in self.named_parameters(): - if 'bias' in name: - # based on https://github.com/pytorch/pytorch/issues/750#issuecomment-280671871 - param.data.fill_(0) - n = param.size(0) - start, end = n // 4, n // 2 - param.data[start:end].fill_(1) - else: - nn.init.xavier_uniform_(param) - - def forward(self, x, seq_len=None, h0=None, c0=None): - r""" - - :param x: [batch, seq_len, input_size] 输入序列 - :param seq_len: [batch, ] 序列长度, 若为 ``None``, 所有输入看做一样长. Default: ``None`` - :param h0: [batch, hidden_size] 初始隐状态, 若为 ``None`` , 设为全0向量. Default: ``None`` - :param c0: [batch, hidden_size] 初始Cell状态, 若为 ``None`` , 设为全0向量. Default: ``None`` - :return (output, (ht, ct)): output: [batch, seq_len, hidden_size*num_direction] 输出序列 - 和 ht,ct: [num_layers*num_direction, batch, hidden_size] 最后时刻隐状态. - """ - batch_size, max_len, _ = x.size() - if h0 is not None and c0 is not None: - hx = (h0, c0) - else: - hx = None - if seq_len is not None and not isinstance(x, rnn.PackedSequence): - sort_lens, sort_idx = torch.sort(seq_len, dim=0, descending=True) - if self.batch_first: - x = x[sort_idx] - else: - x = x[:, sort_idx] - x = rnn.pack_padded_sequence(x, sort_lens.cpu(), batch_first=self.batch_first) - output, hx = self.lstm(x, hx) # -> [N,L,C] - output, _ = rnn.pad_packed_sequence(output, batch_first=self.batch_first, total_length=max_len) - _, unsort_idx = torch.sort(sort_idx, dim=0, descending=False) - if self.batch_first: - output = output[unsort_idx] - else: - output = output[:, unsort_idx] - hx = hx[0][:, unsort_idx], hx[1][:, unsort_idx] - else: - output, hx = self.lstm(x, hx) - return output, hx diff --git a/fastNLP/modules/encoder/pooling.py b/fastNLP/modules/encoder/pooling.py deleted file mode 100644 index a097da67..00000000 --- a/fastNLP/modules/encoder/pooling.py +++ /dev/null @@ -1,144 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "MaxPool", - "MaxPoolWithMask", - "KMaxPool", - "AvgPool", - "AvgPoolWithMask" -] -import torch -import torch.nn as nn - - -class MaxPool(nn.Module): - r""" - Max-pooling模块。 - - """ - - def __init__(self, stride=None, padding=0, dilation=1, dimension=1, kernel_size=None, ceil_mode=False): - r""" - - :param stride: 窗口移动大小,默认为kernel_size - :param padding: padding的内容,默认为0 - :param dilation: 控制窗口内元素移动距离的大小 - :param dimension: MaxPool的维度,支持1,2,3维。 - :param kernel_size: max pooling的窗口大小,默认为tensor最后k维,其中k为dimension - :param ceil_mode: - """ - super(MaxPool, self).__init__() - assert dimension in [1, 2, 3], f'Now we only support 1d, 2d, or 3d Pooling' - self.dimension = dimension - self.stride = stride - self.padding = padding - self.dilation = dilation - self.kernel_size = kernel_size - self.ceil_mode = ceil_mode - - def forward(self, x): - if self.dimension == 1: - x = torch.transpose(x, 1, 2) # [N,L,C] -> [N,C,L] - pooling = nn.MaxPool1d( - stride=self.stride, padding=self.padding, dilation=self.dilation, - kernel_size=self.kernel_size if self.kernel_size is not None else x.size(-1), - return_indices=False, ceil_mode=self.ceil_mode - ) - elif self.dimension == 2: - pooling = nn.MaxPool2d( - stride=self.stride, padding=self.padding, 
dilation=self.dilation, - kernel_size=self.kernel_size if self.kernel_size is not None else (x.size(-2), x.size(-1)), - return_indices=False, ceil_mode=self.ceil_mode - ) - else: - pooling = nn.MaxPool3d( - stride=self.stride, padding=self.padding, dilation=self.dilation, - kernel_size=self.kernel_size if self.kernel_size is not None else (x.size(-3), x.size(-2), x.size(-1)), - return_indices=False, ceil_mode=self.ceil_mode - ) - x = pooling(x) - return x.squeeze(dim=-1) # [N,C,1] -> [N,C] - - -class MaxPoolWithMask(nn.Module): - r""" - 带mask矩阵的max pooling。在做max-pooling的时候不会考虑mask值为0的位置。 - """ - - def __init__(self): - super(MaxPoolWithMask, self).__init__() - self.inf = 10e12 - - def forward(self, tensor, mask, dim=1): - r""" - :param torch.FloatTensor tensor: [batch_size, seq_len, channels] 初始tensor - :param torch.LongTensor mask: [batch_size, seq_len] 0/1的mask矩阵 - :param int dim: 需要进行max pooling的维度 - :return: - """ - masks = mask.view(mask.size(0), mask.size(1), -1) - masks = masks.expand(-1, -1, tensor.size(2)).float() - return torch.max(tensor + masks.le(0.5).float() * -self.inf, dim=dim)[0] - - -class KMaxPool(nn.Module): - r"""K max-pooling module.""" - - def __init__(self, k=1): - super(KMaxPool, self).__init__() - self.k = k - - def forward(self, x): - r""" - :param torch.Tensor x: [N, C, L] 初始tensor - :return: torch.Tensor x: [N, C*k] k-max pool后的结果 - """ - x, index = torch.topk(x, self.k, dim=-1, sorted=False) - x = torch.reshape(x, (x.size(0), -1)) - return x - - -class AvgPool(nn.Module): - r""" - 给定形如[batch_size, max_len, hidden_size]的输入,在最后一维进行avg pooling. 输出为[batch_size, hidden_size] - """ - - def __init__(self, stride=None, padding=0): - super(AvgPool, self).__init__() - self.stride = stride - self.padding = padding - - def forward(self, x): - r""" - :param torch.Tensor x: [N, C, L] 初始tensor - :return: torch.Tensor x: [N, C] avg pool后的结果 - """ - # [N,C,L] -> [N,C] - kernel_size = x.size(2) - pooling = nn.AvgPool1d( - kernel_size=kernel_size, - stride=self.stride, - padding=self.padding) - x = pooling(x) - return x.squeeze(dim=-1) - - -class AvgPoolWithMask(nn.Module): - r""" - 给定形如[batch_size, max_len, hidden_size]的输入,在最后一维进行avg pooling. 
输出为[batch_size, hidden_size], pooling - 的时候只会考虑mask为1的位置 - """ - - def __init__(self): - super(AvgPoolWithMask, self).__init__() - self.inf = 10e12 - - def forward(self, tensor, mask, dim=1): - r""" - :param torch.FloatTensor tensor: [batch_size, seq_len, channels] 初始tensor - :param torch.LongTensor mask: [batch_size, seq_len] 0/1的mask矩阵 - :param int dim: 需要进行max pooling的维度 - :return: - """ - masks = mask.view(mask.size(0), mask.size(1), -1).float() - return torch.sum(tensor * masks.float(), dim=dim) / torch.sum(masks.float(), dim=1) diff --git a/fastNLP/modules/encoder/roberta.py b/fastNLP/modules/encoder/roberta.py deleted file mode 100644 index aab89efc..00000000 --- a/fastNLP/modules/encoder/roberta.py +++ /dev/null @@ -1,182 +0,0 @@ - -r"""undocumented -这个页面的代码很大程度上参考(复制粘贴)了https://github.com/huggingface/pytorch-pretrained-BERT的代码, 如果你发现该代码对你 - 有用,也请引用一下他们。 -""" - -__all__ = [ - 'RobertaModel' -] - -import torch -import torch.nn as nn - -from .bert import BertEmbeddings, BertModel, BertConfig -from fastNLP.io.file_utils import _get_file_name_base_on_postfix -from ...io.file_utils import _get_roberta_dir -from ...core import logger - -PRETRAINED_ROBERTA_POSITIONAL_EMBEDDINGS_SIZES = { - "roberta-base": 512, - "roberta-large": 512, - "roberta-large-mnli": 512, - "distilroberta-base": 512, - "roberta-base-openai-detector": 512, - "roberta-large-openai-detector": 512, -} - - -class RobertaEmbeddings(BertEmbeddings): - """ - Same as BertEmbeddings with a tiny tweak for positional embeddings indexing. - """ - - def __init__(self, config): - super().__init__(config) - self.padding_idx = 1 - self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=self.padding_idx) - self.position_embeddings = nn.Embedding( - config.max_position_embeddings, config.hidden_size, padding_idx=self.padding_idx - ) - - def forward(self, input_ids, token_type_ids, words_embeddings=None, **kwargs): - position_ids = self.create_position_ids_from_input_ids(input_ids) - - return super().forward( - input_ids, token_type_ids=token_type_ids, position_ids=position_ids, words_embeddings=words_embeddings - ) - - def create_position_ids_from_input_ids(self, x): - """ Replace non-padding symbols with their position numbers. Position numbers begin at - padding_idx+1. Padding symbols are ignored. This is modified from fairseq's - `utils.make_positions`. 
- - :param torch.Tensor x: - :return torch.Tensor: - """ - mask = x.ne(self.padding_idx).long() - incremental_indicies = torch.cumsum(mask, dim=1) * mask - return incremental_indicies + self.padding_idx - - -class RobertaModel(BertModel): - r""" - undocumented - """ - - def __init__(self, config, *inputs, **kwargs): - super().__init__(config, *inputs, **kwargs) - - self.embeddings = RobertaEmbeddings(config) - self.apply(self.init_bert_weights) - - @classmethod - def from_pretrained(cls, model_dir_or_name, *inputs, **kwargs): - state_dict = kwargs.get('state_dict', None) - kwargs.pop('state_dict', None) - kwargs.pop('cache_dir', None) - kwargs.pop('from_tf', None) - - # get model dir from name or dir - pretrained_model_dir = _get_roberta_dir(model_dir_or_name) - - # Load config - config_file = _get_file_name_base_on_postfix(pretrained_model_dir, 'config.json') - config = BertConfig.from_json_file(config_file) - - # Load model - if state_dict is None: - weights_path = _get_file_name_base_on_postfix(pretrained_model_dir, '.bin') - state_dict = torch.load(weights_path, map_location='cpu') - else: - logger.error(f'Cannot load parameters through `state_dict` variable.') - raise RuntimeError(f'Cannot load parameters through `state_dict` variable.') - - # Instantiate model. - model = cls(config, *inputs, **kwargs) - - missing_keys = [] - unexpected_keys = [] - error_msgs = [] - - # Convert old format to new format if needed from a PyTorch state_dict - old_keys = [] - new_keys = [] - for key in state_dict.keys(): - new_key = None - if "gamma" in key: - new_key = key.replace("gamma", "weight") - if "beta" in key: - new_key = key.replace("beta", "bias") - if new_key: - old_keys.append(key) - new_keys.append(new_key) - for old_key, new_key in zip(old_keys, new_keys): - state_dict[new_key] = state_dict.pop(old_key) - - # copy state_dict so _load_from_state_dict can modify it - metadata = getattr(state_dict, "_metadata", None) - state_dict = state_dict.copy() - if metadata is not None: - state_dict._metadata = metadata - - # PyTorch's `_load_from_state_dict` does not copy parameters in a module's descendants - # so we need to apply the function recursively. - def load(module: nn.Module, prefix=""): - local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {}) - module._load_from_state_dict( - state_dict, prefix, local_metadata, True, missing_keys, unexpected_keys, error_msgs, - ) - for name, child in module._modules.items(): - if child is not None: - load(child, prefix + name + ".") - - # Make sure we are able to load base models as well as derived models (with heads) - start_prefix = "" - model_to_load = model - if not hasattr(model, 'roberta') and any( - s.startswith('roberta') for s in state_dict.keys() - ): - start_prefix = 'roberta.' 
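create_position_ids_from_input_ids above derives position ids with a cumulative sum over the non-padding mask, so every padding position receives padding_idx and real tokens count up from padding_idx + 1. Worked through on a toy batch:

    import torch

    padding_idx = 1                      # RoBERTa's pad token id
    x = torch.tensor([[5, 6, 7, 1, 1]])  # trailing 1s are <pad>
    mask = x.ne(padding_idx).long()      # [[1, 1, 1, 0, 0]]
    position_ids = torch.cumsum(mask, dim=1) * mask + padding_idx
    print(position_ids)                  # tensor([[2, 3, 4, 1, 1]])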
-        if hasattr(model, 'roberta') and not any(
-            s.startswith('roberta') for s in state_dict.keys()
-        ):
-            model_to_load = getattr(model, 'roberta')
-
-        load(model_to_load, prefix=start_prefix)
-
-        if model.__class__.__name__ != model_to_load.__class__.__name__:
-            base_model_state_dict = model_to_load.state_dict().keys()
-            head_model_state_dict_without_base_prefix = [
-                key.split('roberta.')[-1] for key in model.state_dict().keys()
-            ]
-
-            # set difference: head-model keys that have no counterpart in the base model are missing
-            missing_keys.extend(set(head_model_state_dict_without_base_prefix) - set(base_model_state_dict))
-
-        if len(missing_keys) > 0:
-            logger.info(
-                "Weights of {} not initialized from pretrained model: {}".format(
-                    model.__class__.__name__, missing_keys
-                )
-            )
-        if len(unexpected_keys) > 0:
-            logger.info(
-                "Weights from pretrained model not used in {}: {}".format(
-                    model.__class__.__name__, unexpected_keys
-                )
-            )
-        if len(error_msgs) > 0:
-            raise RuntimeError(
-                "Error(s) in loading state_dict for {}:\n\t{}".format(
-                    model.__class__.__name__, "\n\t".join(error_msgs)
-                )
-            )
-
-        # Set model in evaluation mode to deactivate Dropout modules by default
-        model.eval()
-
-        logger.info(f"Loaded pre-trained RoBERTa parameters from file {weights_path}.")
-
-        return model
-
-
diff --git a/fastNLP/modules/encoder/seq2seq_encoder.py b/fastNLP/modules/encoder/seq2seq_encoder.py
deleted file mode 100644
index 5eae1e6d..00000000
--- a/fastNLP/modules/encoder/seq2seq_encoder.py
+++ /dev/null
@@ -1,193 +0,0 @@
-r"""undocumented"""
-import torch.nn as nn
-import torch
-from torch.nn import LayerNorm
-import torch.nn.functional as F
-from typing import Union, Tuple
-from ...core.utils import seq_len_to_mask
-import math
-from ...modules.encoder.lstm import LSTM
-from fastNLP.modules.attention import MultiHeadAttention
-from ...embeddings import StaticEmbedding
-from ...embeddings.utils import get_embeddings
-
-
-__all__ = ['Seq2SeqEncoder', 'TransformerSeq2SeqEncoder', 'LSTMSeq2SeqEncoder']
-
-
-class Seq2SeqEncoder(nn.Module):
-    """
-    Base class for all sequence-to-sequence encoders. Subclasses must implement the forward function.
-
-    """
-    def __init__(self):
-        super().__init__()
-
-    def forward(self, tokens, seq_len):
-        """
-
-        :param torch.LongTensor tokens: bsz x max_len, input to the encoder
-        :param torch.LongTensor seq_len: bsz
-        :return:
-        """
-        raise NotImplementedError
-
-
-class TransformerSeq2SeqEncoderLayer(nn.Module):
-    def __init__(self, d_model: int = 512, n_head: int = 8, dim_ff: int = 2048,
-                 dropout: float = 0.1):
-        """
-        Self-attention layer.
-
-        :param int d_model: dimension of the input and output
-        :param int n_head: number of heads; each head has dimension d_model/n_head
-        :param int dim_ff: hidden dimension of the FFN
-        :param float dropout: dropout for self-attention and FFN; 0 means no dropout
-        """
-        super(TransformerSeq2SeqEncoderLayer, self).__init__()
-        self.d_model = d_model
-        self.n_head = n_head
-        self.dim_ff = dim_ff
-        self.dropout = dropout
-
-        self.self_attn = MultiHeadAttention(d_model, n_head, dropout)
-        self.attn_layer_norm = LayerNorm(d_model)
-        self.ffn_layer_norm = LayerNorm(d_model)
-
-        self.ffn = nn.Sequential(nn.Linear(self.d_model, self.dim_ff),
-                                 nn.ReLU(),
-                                 nn.Dropout(dropout),
-                                 nn.Linear(self.dim_ff, self.d_model),
-                                 nn.Dropout(dropout))
-
-    def forward(self, x, mask):
-        """
-
-        :param x: batch x src_seq x d_model
-        :param mask: batch x src_seq, positions that are 0 are padding
-        :return:
-        """
-        # attention
-        residual = x
-        x = self.attn_layer_norm(x)
-        x, _ = self.self_attn(query=x,
-                              key=x,
-                              value=x,
-                              key_mask=mask)
-        x = F.dropout(x, p=self.dropout, training=self.training)
-        x = residual + x
-
-        # ffn
-        residual = x
-        x = self.ffn_layer_norm(x)
-        x = self.ffn(x)
-        x = residual + x
- - return x - - -class TransformerSeq2SeqEncoder(Seq2SeqEncoder): - def __init__(self, embed: Union[nn.Module, StaticEmbedding, Tuple[int, int]], pos_embed = None, - num_layers = 6, d_model = 512, n_head = 8, dim_ff = 2048, dropout = 0.1): - """ - 基于Transformer的Encoder - - :param embed: encoder输入token的embedding - :param nn.Module pos_embed: position embedding - :param int num_layers: 多少层的encoder - :param int d_model: 输入输出的维度 - :param int n_head: 多少个head - :param int dim_ff: FFN中间的维度大小 - :param float dropout: Attention和FFN的dropout大小 - """ - super(TransformerSeq2SeqEncoder, self).__init__() - self.embed = get_embeddings(embed) - self.embed_scale = math.sqrt(d_model) - self.pos_embed = pos_embed - self.num_layers = num_layers - self.d_model = d_model - self.n_head = n_head - self.dim_ff = dim_ff - self.dropout = dropout - - self.input_fc = nn.Linear(self.embed.embedding_dim, d_model) - self.layer_stacks = nn.ModuleList([TransformerSeq2SeqEncoderLayer(d_model, n_head, dim_ff, dropout) - for _ in range(num_layers)]) - self.layer_norm = LayerNorm(d_model) - - def forward(self, tokens, seq_len): - """ - - :param tokens: batch x max_len - :param seq_len: [batch] - :return: bsz x max_len x d_model, bsz x max_len(为0的地方为padding) - """ - x = self.embed(tokens) * self.embed_scale # batch, seq, dim - batch_size, max_src_len, _ = x.size() - device = x.device - if self.pos_embed is not None: - position = torch.arange(1, max_src_len + 1).unsqueeze(0).long().to(device) - x += self.pos_embed(position) - - x = self.input_fc(x) - x = F.dropout(x, p=self.dropout, training=self.training) - - encoder_mask = seq_len_to_mask(seq_len, max_len=max_src_len) - encoder_mask = encoder_mask.to(device) - - for layer in self.layer_stacks: - x = layer(x, encoder_mask) - - x = self.layer_norm(x) - - return x, encoder_mask - - -class LSTMSeq2SeqEncoder(Seq2SeqEncoder): - def __init__(self, embed: Union[nn.Module, StaticEmbedding, Tuple[int, int]], num_layers = 3, - hidden_size = 400, dropout = 0.3, bidirectional=True): - """ - LSTM的Encoder - - :param embed: encoder的token embed - :param int num_layers: 多少层 - :param int hidden_size: LSTM隐藏层、输出的大小 - :param float dropout: LSTM层之间的Dropout是多少 - :param bool bidirectional: 是否使用双向 - """ - super().__init__() - self.embed = get_embeddings(embed) - self.num_layers = num_layers - self.dropout = dropout - self.hidden_size = hidden_size - self.bidirectional = bidirectional - hidden_size = hidden_size//2 if bidirectional else hidden_size - self.lstm = LSTM(input_size=embed.embedding_dim, hidden_size=hidden_size, bidirectional=bidirectional, - batch_first=True, dropout=dropout if num_layers>1 else 0, num_layers=num_layers) - - def forward(self, tokens, seq_len): - """ - - :param torch.LongTensor tokens: bsz x max_len - :param torch.LongTensor seq_len: bsz - :return: (output, (hidden, cell)), encoder_mask - output: bsz x max_len x hidden_size, - hidden,cell: batch_size x hidden_size, 最后一层的隐藏状态或cell状态 - encoder_mask: bsz x max_len, 为0的地方是padding - """ - x = self.embed(tokens) - device = x.device - x, (final_hidden, final_cell) = self.lstm(x, seq_len) - encoder_mask = seq_len_to_mask(seq_len).to(device) - - # x: batch,seq_len,dim; h/c: num_layers*2,batch,dim - - if self.bidirectional: - final_hidden = self.concat_bidir(final_hidden) # 将双向的hidden state拼接起来,用于接下来的decoder的input - final_cell = self.concat_bidir(final_cell) - - return (x, (final_hidden[-1], final_cell[-1])), encoder_mask # 为了配合Seq2SeqBaseModel的forward,这边需要分为两个return - - def concat_bidir(self, input): - output = 
input.view(self.num_layers, 2, input.size(1), -1).transpose(1, 2) - return output.reshape(self.num_layers, input.size(1), -1) diff --git a/fastNLP/modules/encoder/star_transformer.py b/fastNLP/modules/encoder/star_transformer.py deleted file mode 100644 index 7e719e69..00000000 --- a/fastNLP/modules/encoder/star_transformer.py +++ /dev/null @@ -1,166 +0,0 @@ -r"""undocumented -Star-Transformer 的encoder部分的 Pytorch 实现 -""" - -__all__ = [ - "StarTransformer" -] - -import numpy as NP -import torch -from torch import nn -from torch.nn import functional as F - - -class StarTransformer(nn.Module): - r""" - Star-Transformer 的encoder部分。 输入3d的文本输入, 返回相同长度的文本编码 - - paper: https://arxiv.org/abs/1902.09113 - - """ - - def __init__(self, hidden_size, num_layers, num_head, head_dim, dropout=0.1, max_len=None): - r""" - - :param int hidden_size: 输入维度的大小。同时也是输出维度的大小。 - :param int num_layers: star-transformer的层数 - :param int num_head: head的数量。 - :param int head_dim: 每个head的维度大小。 - :param float dropout: dropout 概率. Default: 0.1 - :param int max_len: int or None, 如果为int,输入序列的最大长度, - 模型会为输入序列加上position embedding。 - 若为`None`,忽略加上position embedding的步骤. Default: `None` - """ - super(StarTransformer, self).__init__() - self.iters = num_layers - - self.norm = nn.ModuleList([nn.LayerNorm(hidden_size, eps=1e-6) for _ in range(self.iters)]) - # self.emb_fc = nn.Conv2d(hidden_size, hidden_size, 1) - self.emb_drop = nn.Dropout(dropout) - self.ring_att = nn.ModuleList( - [_MSA1(hidden_size, nhead=num_head, head_dim=head_dim, dropout=0.0) - for _ in range(self.iters)]) - self.star_att = nn.ModuleList( - [_MSA2(hidden_size, nhead=num_head, head_dim=head_dim, dropout=0.0) - for _ in range(self.iters)]) - - if max_len is not None: - self.pos_emb = nn.Embedding(max_len, hidden_size) - else: - self.pos_emb = None - - def forward(self, data, mask): - r""" - :param FloatTensor data: [batch, length, hidden] 输入的序列 - :param ByteTensor mask: [batch, length] 输入序列的padding mask, 在没有内容(padding 部分) 为 0, - 否则为 1 - :return: [batch, length, hidden] 编码后的输出序列 - - [batch, hidden] 全局 relay 节点, 详见论文 - """ - - def norm_func(f, x): - # B, H, L, 1 - return f(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) - - B, L, H = data.size() - mask = (mask.eq(False)) # flip the mask for masked_fill_ - smask = torch.cat([torch.zeros(B, 1, ).byte().to(mask), mask], 1) - - embs = data.permute(0, 2, 1)[:, :, :, None] # B H L 1 - if self.pos_emb: - P = self.pos_emb(torch.arange(L, dtype=torch.long, device=embs.device) \ - .view(1, L)).permute(0, 2, 1).contiguous()[:, :, :, None] # 1 H L 1 - embs = embs + P - embs = norm_func(self.emb_drop, embs) - nodes = embs - relay = embs.mean(2, keepdim=True) - ex_mask = mask[:, None, :, None].expand(B, H, L, 1) - r_embs = embs.view(B, H, 1, L) - for i in range(self.iters): - ax = torch.cat([r_embs, relay.expand(B, H, 1, L)], 2) - nodes = F.leaky_relu(self.ring_att[i](norm_func(self.norm[i], nodes), ax=ax)) - # nodes = F.leaky_relu(self.ring_att[i](nodes, ax=ax)) - relay = F.leaky_relu(self.star_att[i](relay, torch.cat([relay, nodes], 2), smask)) - - nodes = nodes.masked_fill_(ex_mask, 0) - - nodes = nodes.view(B, H, L).permute(0, 2, 1) - - return nodes, relay.view(B, H) - - -class _MSA1(nn.Module): - def __init__(self, nhid, nhead=10, head_dim=10, dropout=0.1): - super(_MSA1, self).__init__() - # Multi-head Self Attention Case 1, doing self-attention for small regions - # Due to the architecture of GPU, using hadamard production and summation are faster than dot production when unfold_size is very small - self.WQ = nn.Conv2d(nhid, 
nhead * head_dim, 1) - self.WK = nn.Conv2d(nhid, nhead * head_dim, 1) - self.WV = nn.Conv2d(nhid, nhead * head_dim, 1) - self.WO = nn.Conv2d(nhead * head_dim, nhid, 1) - - self.drop = nn.Dropout(dropout) - - # print('NUM_HEAD', nhead, 'DIM_HEAD', head_dim) - self.nhid, self.nhead, self.head_dim, self.unfold_size = nhid, nhead, head_dim, 3 - - def forward(self, x, ax=None): - # x: B, H, L, 1, ax : B, H, X, L append features - nhid, nhead, head_dim, unfold_size = self.nhid, self.nhead, self.head_dim, self.unfold_size - B, H, L, _ = x.shape - - q, k, v = self.WQ(x), self.WK(x), self.WV(x) # x: (B,H,L,1) - - if ax is not None: - aL = ax.shape[2] - ak = self.WK(ax).view(B, nhead, head_dim, aL, L) - av = self.WV(ax).view(B, nhead, head_dim, aL, L) - q = q.view(B, nhead, head_dim, 1, L) - k = F.unfold(k.view(B, nhead * head_dim, L, 1), (unfold_size, 1), padding=(unfold_size // 2, 0)) \ - .view(B, nhead, head_dim, unfold_size, L) - v = F.unfold(v.view(B, nhead * head_dim, L, 1), (unfold_size, 1), padding=(unfold_size // 2, 0)) \ - .view(B, nhead, head_dim, unfold_size, L) - if ax is not None: - k = torch.cat([k, ak], 3) - v = torch.cat([v, av], 3) - - alphas = self.drop(F.softmax((q * k).sum(2, keepdim=True) / NP.sqrt(head_dim), 3)) # B N L 1 U - att = (alphas * v).sum(3).view(B, nhead * head_dim, L, 1) - - ret = self.WO(att) - - return ret - - -class _MSA2(nn.Module): - def __init__(self, nhid, nhead=10, head_dim=10, dropout=0.1): - # Multi-head Self Attention Case 2, a broadcastable query for a sequence key and value - super(_MSA2, self).__init__() - self.WQ = nn.Conv2d(nhid, nhead * head_dim, 1) - self.WK = nn.Conv2d(nhid, nhead * head_dim, 1) - self.WV = nn.Conv2d(nhid, nhead * head_dim, 1) - self.WO = nn.Conv2d(nhead * head_dim, nhid, 1) - - self.drop = nn.Dropout(dropout) - - # print('NUM_HEAD', nhead, 'DIM_HEAD', head_dim) - self.nhid, self.nhead, self.head_dim, self.unfold_size = nhid, nhead, head_dim, 3 - - def forward(self, x, y, mask=None): - # x: B, H, 1, 1, 1 y: B H L 1 - nhid, nhead, head_dim, unfold_size = self.nhid, self.nhead, self.head_dim, self.unfold_size - B, H, L, _ = y.shape - - q, k, v = self.WQ(x), self.WK(y), self.WV(y) - - q = q.view(B, nhead, 1, head_dim) # B, H, 1, 1 -> B, N, 1, h - k = k.view(B, nhead, head_dim, L) # B, H, L, 1 -> B, N, h, L - v = v.view(B, nhead, head_dim, L).permute(0, 1, 3, 2) # B, H, L, 1 -> B, N, L, h - pre_a = torch.matmul(q, k) / NP.sqrt(head_dim) - if mask is not None: - pre_a = pre_a.masked_fill(mask[:, None, None, :], -float('inf')) - alphas = self.drop(F.softmax(pre_a, 3)) # B, N, 1, L - att = torch.matmul(alphas, v).view(B, -1, 1, 1) # B, N, 1, h -> B, N*h, 1, 1 - return self.WO(att) diff --git a/fastNLP/modules/encoder/transformer.py b/fastNLP/modules/encoder/transformer.py deleted file mode 100644 index 3597c1be..00000000 --- a/fastNLP/modules/encoder/transformer.py +++ /dev/null @@ -1,42 +0,0 @@ -r"""undocumented""" - -__all__ = [ - "TransformerEncoder" -] -from torch import nn - -from .seq2seq_encoder import TransformerSeq2SeqEncoderLayer - - -class TransformerEncoder(nn.Module): - r""" - transformer的encoder模块,不包含embedding层 - - """ - def __init__(self, num_layers, d_model=512, n_head=8, dim_ff=2048, dropout=0.1): - """ - - :param int num_layers: 多少层Transformer - :param int d_model: input和output的大小 - :param int n_head: 多少个head - :param int dim_ff: FFN中间hidden大小 - :param float dropout: 多大概率drop attention和ffn中间的表示 - """ - super(TransformerEncoder, self).__init__() - self.layers = nn.ModuleList([TransformerSeq2SeqEncoderLayer(d_model = 
d_model, n_head = n_head, dim_ff = dim_ff, - dropout = dropout) for _ in range(num_layers)]) - self.norm = nn.LayerNorm(d_model, eps=1e-6) - - def forward(self, x, seq_mask=None): - r""" - :param x: [batch, seq_len, model_size] 输入序列 - :param seq_mask: [batch, seq_len] 输入序列的padding mask, 若为 ``None`` , 生成全1向量. 为1的地方需要attend - Default: ``None`` - :return: [batch, seq_len, model_size] 输出序列 - """ - output = x - if seq_mask is None: - seq_mask = x.new_ones(x.size(0), x.size(1)).bool() - for layer in self.layers: - output = layer(output, seq_mask) - return self.norm(output) diff --git a/fastNLP/modules/encoder/variational_rnn.py b/fastNLP/modules/encoder/variational_rnn.py deleted file mode 100644 index fd466268..00000000 --- a/fastNLP/modules/encoder/variational_rnn.py +++ /dev/null @@ -1,306 +0,0 @@ -r"""undocumented -Variational RNN 及相关模型的 fastNLP实现,相关论文参考: -`A Theoretically Grounded Application of Dropout in Recurrent Neural Networks (Yarin Gal and Zoubin Ghahramani, 2016) `_ -""" - -__all__ = [ - "VarRNN", - "VarLSTM", - "VarGRU" -] - -import torch -import torch.nn as nn -from torch.nn.utils.rnn import PackedSequence, pack_padded_sequence, pad_packed_sequence - -try: - from torch import flip -except ImportError: - def flip(x, dims): - indices = [slice(None)] * x.dim() - for dim in dims: - indices[dim] = torch.arange( - x.size(dim) - 1, -1, -1, dtype=torch.long, device=x.device) - return x[tuple(indices)] - -from ..utils import initial_parameter - - -class VarRnnCellWrapper(nn.Module): - r""" - Wrapper for normal RNN Cells, make it support variational dropout - """ - - def __init__(self, cell, hidden_size, input_p, hidden_p): - super(VarRnnCellWrapper, self).__init__() - self.cell = cell - self.hidden_size = hidden_size - self.input_p = input_p - self.hidden_p = hidden_p - - def forward(self, input_x, hidden, mask_x, mask_h, is_reversed=False): - r""" - :param PackedSequence input_x: [seq_len, batch_size, input_size] - :param hidden: for LSTM, tuple of (h_0, c_0), [batch_size, hidden_size] - for other RNN, h_0, [batch_size, hidden_size] - :param mask_x: [batch_size, input_size] dropout mask for input - :param mask_h: [batch_size, hidden_size] dropout mask for hidden - :return PackedSequence output: [seq_len, bacth_size, hidden_size] - hidden: for LSTM, tuple of (h_n, c_n), [batch_size, hidden_size] - for other RNN, h_n, [batch_size, hidden_size] - """ - - def get_hi(hi, h0, size): - h0_size = size - hi.size(0) - if h0_size > 0: - return torch.cat([hi, h0[:h0_size]], dim=0) - return hi[:size] - - is_lstm = isinstance(hidden, tuple) - input, batch_sizes = input_x.data, input_x.batch_sizes - output = [] - cell = self.cell - if is_reversed: - batch_iter = flip(batch_sizes, [0]) - idx = input.size(0) - else: - batch_iter = batch_sizes - idx = 0 - - if is_lstm: - hn = (hidden[0].clone(), hidden[1].clone()) - else: - hn = hidden.clone() - hi = hidden - for size in batch_iter: - if is_reversed: - input_i = input[idx - size: idx] * mask_x[:size] - idx -= size - else: - input_i = input[idx: idx + size] * mask_x[:size] - idx += size - mask_hi = mask_h[:size] - if is_lstm: - hx, cx = hi - hi = (get_hi(hx, hidden[0], size) * - mask_hi, get_hi(cx, hidden[1], size)) - hi = cell(input_i, hi) - hn[0][:size] = hi[0] - hn[1][:size] = hi[1] - output.append(hi[0]) - else: - hi = get_hi(hi, hidden, size) * mask_hi - hi = cell(input_i, hi) - hn[:size] = hi - output.append(hi) - - if is_reversed: - output = list(reversed(output)) - output = torch.cat(output, dim=0) - return PackedSequence(output, batch_sizes), hn - 
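VarRnnCellWrapper above implements the variational-dropout idea: one dropout mask is sampled per sequence and reused at every timestep, rather than resampling per step as ordinary dropout would. A standalone sketch of such a shared mask (not fastNLP API):

    import torch
    import torch.nn.functional as F

    batch_size, input_size, p = 4, 8, 0.5
    # One mask per sample, already rescaled by 1/(1-p); reuse it across timesteps.
    mask_x = F.dropout(torch.ones(batch_size, input_size), p=p, training=True)
    step1 = torch.randn(batch_size, input_size) * mask_x
    step2 = torch.randn(batch_size, input_size) * mask_x  # same units dropped again
    print((mask_x == 0).float().mean())  # roughly p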
- -class VarRNNBase(nn.Module): - r""" - Variational Dropout RNN 实现. - - 论文参考: `A Theoretically Grounded Application of Dropout in Recurrent Neural Networks (Yarin Gal and Zoubin Ghahramani, 2016) - https://arxiv.org/abs/1512.05287`. - - """ - - def __init__(self, mode, Cell, input_size, hidden_size, num_layers=1, - bias=True, batch_first=False, - input_dropout=0, hidden_dropout=0, bidirectional=False): - r""" - - :param mode: rnn 模式, (lstm or not) - :param Cell: rnn cell 类型, (lstm, gru, etc) - :param input_size: 输入 `x` 的特征维度 - :param hidden_size: 隐状态 `h` 的特征维度 - :param num_layers: rnn的层数. Default: 1 - :param bias: 如果为 ``False``, 模型将不会使用bias. Default: ``True`` - :param batch_first: 若为 ``True``, 输入和输出 ``Tensor`` 形状为 - (batch, seq, feature). Default: ``False`` - :param input_dropout: 对输入的dropout概率. Default: 0 - :param hidden_dropout: 对每个隐状态的dropout概率. Default: 0 - :param bidirectional: 若为 ``True``, 使用双向的RNN. Default: ``False`` - """ - super(VarRNNBase, self).__init__() - self.mode = mode - self.input_size = input_size - self.hidden_size = hidden_size - self.num_layers = num_layers - self.bias = bias - self.batch_first = batch_first - self.input_dropout = input_dropout - self.hidden_dropout = hidden_dropout - self.bidirectional = bidirectional - self.num_directions = 2 if bidirectional else 1 - self._all_cells = nn.ModuleList() - for layer in range(self.num_layers): - for direction in range(self.num_directions): - input_size = self.input_size if layer == 0 else self.hidden_size * self.num_directions - cell = Cell(input_size, self.hidden_size, bias) - self._all_cells.append(VarRnnCellWrapper( - cell, self.hidden_size, input_dropout, hidden_dropout)) - initial_parameter(self) - self.is_lstm = (self.mode == "LSTM") - - def _forward_one(self, n_layer, n_direction, input, hx, mask_x, mask_h): - is_lstm = self.is_lstm - idx = self.num_directions * n_layer + n_direction - cell = self._all_cells[idx] - hi = (hx[0][idx], hx[1][idx]) if is_lstm else hx[idx] - output_x, hidden_x = cell( - input, hi, mask_x, mask_h, is_reversed=(n_direction == 1)) - return output_x, hidden_x - - def forward(self, x, hx=None): - r""" - - :param x: [batch, seq_len, input_size] 输入序列 - :param hx: [batch, hidden_size] 初始隐状态, 若为 ``None`` , 设为全1向量. 
Default: ``None`` - :return (output, ht): [batch, seq_len, hidden_size*num_direction] 输出序列 - 和 [batch, hidden_size*num_direction] 最后时刻隐状态 - """ - is_lstm = self.is_lstm - is_packed = isinstance(x, PackedSequence) - if not is_packed: - seq_len = x.size(1) if self.batch_first else x.size(0) - max_batch_size = x.size(0) if self.batch_first else x.size(1) - seq_lens = torch.LongTensor( - [seq_len for _ in range(max_batch_size)]) - x = pack_padded_sequence(x, seq_lens, batch_first=self.batch_first) - else: - max_batch_size = int(x.batch_sizes[0]) - x, batch_sizes = x.data, x.batch_sizes - - if hx is None: - hx = x.new_zeros(self.num_layers * self.num_directions, - max_batch_size, self.hidden_size, requires_grad=True) - if is_lstm: - hx = (hx, hx.new_zeros(hx.size(), requires_grad=True)) - - mask_x = x.new_ones((max_batch_size, self.input_size)) - mask_out = x.new_ones( - (max_batch_size, self.hidden_size * self.num_directions)) - mask_h_ones = x.new_ones((max_batch_size, self.hidden_size)) - nn.functional.dropout(mask_x, p=self.input_dropout, - training=self.training, inplace=True) - nn.functional.dropout(mask_out, p=self.hidden_dropout, - training=self.training, inplace=True) - - hidden = x.new_zeros( - (self.num_layers * self.num_directions, max_batch_size, self.hidden_size)) - if is_lstm: - cellstate = x.new_zeros( - (self.num_layers * self.num_directions, max_batch_size, self.hidden_size)) - for layer in range(self.num_layers): - output_list = [] - input_seq = PackedSequence(x, batch_sizes) - mask_h = nn.functional.dropout( - mask_h_ones, p=self.hidden_dropout, training=self.training, inplace=False) - for direction in range(self.num_directions): - output_x, hidden_x = self._forward_one(layer, direction, input_seq, hx, - mask_x if layer == 0 else mask_out, mask_h) - output_list.append(output_x.data) - idx = self.num_directions * layer + direction - if is_lstm: - hidden[idx] = hidden_x[0] - cellstate[idx] = hidden_x[1] - else: - hidden[idx] = hidden_x - x = torch.cat(output_list, dim=-1) - - if is_lstm: - hidden = (hidden, cellstate) - - if is_packed: - output = PackedSequence(x, batch_sizes) - else: - x = PackedSequence(x, batch_sizes) - output, _ = pad_packed_sequence(x, batch_first=self.batch_first) - - return output, hidden - - -class VarLSTM(VarRNNBase): - r""" - Variational Dropout LSTM. - 相关论文参考:`A Theoretically Grounded Application of Dropout in Recurrent Neural Networks (Yarin Gal and Zoubin Ghahramani, 2016) `_ - - """ - - def __init__(self, *args, **kwargs): - r""" - - :param input_size: 输入 `x` 的特征维度 - :param hidden_size: 隐状态 `h` 的特征维度 - :param num_layers: rnn的层数. Default: 1 - :param bias: 如果为 ``False``, 模型将不会使用bias. Default: ``True`` - :param batch_first: 若为 ``True``, 输入和输出 ``Tensor`` 形状为 - (batch, seq, feature). Default: ``False`` - :param input_dropout: 对输入的dropout概率. Default: 0 - :param hidden_dropout: 对每个隐状态的dropout概率. Default: 0 - :param bidirectional: 若为 ``True``, 使用双向的LSTM. Default: ``False`` - """ - super(VarLSTM, self).__init__( - mode="LSTM", Cell=nn.LSTMCell, *args, **kwargs) - - def forward(self, x, hx=None): - return super(VarLSTM, self).forward(x, hx) - - -class VarRNN(VarRNNBase): - r""" - Variational Dropout RNN. - 相关论文参考:`A Theoretically Grounded Application of Dropout in Recurrent Neural Networks (Yarin Gal and Zoubin Ghahramani, 2016) `_ - - """ - - def __init__(self, *args, **kwargs): - r""" - - :param input_size: 输入 `x` 的特征维度 - :param hidden_size: 隐状态 `h` 的特征维度 - :param num_layers: rnn的层数. Default: 1 - :param bias: 如果为 ``False``, 模型将不会使用bias. 
Default: ``True`` - :param batch_first: if ``True``, input and output ``Tensor`` are of shape - (batch, seq, feature). Default: ``False`` - :param input_dropout: dropout probability applied to the input. Default: 0 - :param hidden_dropout: dropout probability applied to each hidden state. Default: 0 - :param bidirectional: if ``True``, use a bidirectional RNN. Default: ``False`` - """ - super(VarRNN, self).__init__( - mode="RNN", Cell=nn.RNNCell, *args, **kwargs) - - def forward(self, x, hx=None): - return super(VarRNN, self).forward(x, hx) - - -class VarGRU(VarRNNBase): - r""" - Variational Dropout GRU. - See the related paper: `A Theoretically Grounded Application of Dropout in Recurrent Neural Networks (Yarin Gal and Zoubin Ghahramani, 2016) `_ - - """ - - def __init__(self, *args, **kwargs): - r""" - - :param input_size: feature dimension of the input `x` - :param hidden_size: feature dimension of the hidden state `h` - :param num_layers: number of RNN layers. Default: 1 - :param bias: if ``False``, the model uses no bias. Default: ``True`` - :param batch_first: if ``True``, input and output ``Tensor`` are of shape - (batch, seq, feature). Default: ``False`` - :param input_dropout: dropout probability applied to the input. Default: 0 - :param hidden_dropout: dropout probability applied to each hidden state. Default: 0 - :param bidirectional: if ``True``, use a bidirectional GRU. Default: ``False`` - """ - super(VarGRU, self).__init__( - mode="GRU", Cell=nn.GRUCell, *args, **kwargs) - - def forward(self, x, hx=None): - return super(VarGRU, self).forward(x, hx) diff --git a/fastNLP/modules/generator/__init__.py b/fastNLP/modules/generator/__init__.py deleted file mode 100644 index 512a95d7..00000000 --- a/fastNLP/modules/generator/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -r""" - -""" - -__all__ = [ - "SequenceGenerator" -] - -from .seq2seq_generator import SequenceGenerator \ No newline at end of file diff --git a/fastNLP/modules/generator/seq2seq_generator.py b/fastNLP/modules/generator/seq2seq_generator.py deleted file mode 100644 index c4e0cd87..00000000 --- a/fastNLP/modules/generator/seq2seq_generator.py +++ /dev/null @@ -1,519 +0,0 @@ -r""" - -""" - -__all__ = [ - 'SequenceGenerator' -] - -import torch -from ..decoder.seq2seq_decoder import Seq2SeqDecoder, State -import torch.nn.functional as F -from ...core.utils import _get_model_device -from functools import partial - - -class SequenceGenerator: - """ - Given a Seq2SeqDecoder, decode sentences from it. The decoder object must provide a decode() function whose first argument is everything decoded so far, - and whose second argument is the state. SequenceGenerator never modifies the state itself. - - """ - def __init__(self, decoder: Seq2SeqDecoder, max_length=20, max_len_a=0.0, num_beams=1, - do_sample=True, temperature=1.0, top_k=50, top_p=1.0, bos_token_id=None, eos_token_id=None, - repetition_penalty=1, length_penalty=1.0, pad_token_id=0): - """ - - :param Seq2SeqDecoder decoder: the Decoder object - :param int max_length: maximum length of generated sentences; each sentence is decoded for at most max_length + max_len_a*src_len steps - :param float max_len_a: each sentence is decoded for at most max_length + max_len_a*src_len steps. If non-zero, the State must contain encoder_mask - :param int num_beams: beam size for beam search - :param bool do_sample: whether to generate by sampling - :param float temperature: only meaningful when do_sample is True - :param int top_k: sample only from the top_k tokens - :param float top_p: sample only from tokens within cumulative probability top_p (nucleus sampling) - :param int,None bos_token_id: token id that opens a sentence - :param int,None eos_token_id: token id that ends a sentence - :param float repetition_penalty: how strongly repeated tokens are penalized - :param float length_penalty: length penalty; values below 1 encourage longer sentences, values above 1 encourage shorter ones - :param int pad_token_id: once a sentence has finished, everything generated afterwards is filled with pad_token_id - """ - if do_sample: - self.generate_func = partial(sample_generate, decoder=decoder, max_length=max_length, max_len_a=max_len_a, - num_beams=num_beams, - temperature=temperature, top_k=top_k, top_p=top_p, bos_token_id=bos_token_id, - eos_token_id=eos_token_id, repetition_penalty=repetition_penalty, - length_penalty=length_penalty, pad_token_id=pad_token_id) - else: - self.generate_func = partial(greedy_generate, decoder=decoder, max_length=max_length, max_len_a=max_len_a, - num_beams=num_beams, - bos_token_id=bos_token_id, eos_token_id=eos_token_id, - repetition_penalty=repetition_penalty, - length_penalty=length_penalty, pad_token_id=pad_token_id) - self.do_sample = do_sample - self.max_length = max_length - self.num_beams = num_beams - self.temperature = temperature - self.top_k = top_k - self.top_p = top_p - self.bos_token_id = bos_token_id - self.eos_token_id = eos_token_id - self.repetition_penalty = repetition_penalty - self.length_penalty = length_penalty - self.decoder = decoder - - @torch.no_grad() - def generate(self, state, tokens=None): - """ - - :param State state: the State produced by the encoder; it is meant to be used together with the Decoder - :param torch.LongTensor,None tokens: batch_size x length, the tokens to start from. If None, bos_token is used as the opening token - for generation. - :return: bsz x max_length' generated token sequences. If eos_token_id is not None, every sequence is guaranteed to end with eos_token_id - """ - - return self.generate_func(tokens=tokens, state=state) - - -@torch.no_grad() -def greedy_generate(decoder, tokens=None, state=None, max_length=20, max_len_a=0.0, num_beams=1, - bos_token_id=None, eos_token_id=None, pad_token_id=0, - repetition_penalty=1, length_penalty=1.0): - """ - Search for sentences greedily - - :param Decoder decoder: the Decoder object - :param torch.LongTensor tokens: batch_size x len, input to the decoder; if None, generation starts automatically from bos_token_id - :param State state: should contain some outputs of the encoder. - :param int max_length: maximum length of generated sentences; each sentence is decoded for at most max_length + max_len_a*src_len steps - :param float max_len_a: each sentence is decoded for at most max_length + max_len_a*src_len steps. If non-zero, the State must contain encoder_mask - :param int num_beams: beam size used for decoding. - :param int bos_token_id: if tokens is None, decoding starts from bos_token_id. - :param int eos_token_id: the end token; if None, decoding always runs up to max_length. - :param int pad_token_id: id of the pad token - :param float repetition_penalty: how strongly repeated tokens are penalized. - :param float length_penalty: a length-based penalty applied to every token except eos. - :return: - """ - if num_beams == 1: - token_ids = _no_beam_search_generate(decoder, tokens=tokens, state=state, max_length=max_length, max_len_a=max_len_a, - temperature=1, top_k=50, top_p=1, - bos_token_id=bos_token_id, eos_token_id=eos_token_id, do_sample=False, - repetition_penalty=repetition_penalty, length_penalty=length_penalty, - pad_token_id=pad_token_id) - else: - token_ids = _beam_search_generate(decoder, tokens=tokens, state=state, max_length=max_length, max_len_a=max_len_a, - num_beams=num_beams, temperature=1, top_k=50, top_p=1, - bos_token_id=bos_token_id, eos_token_id=eos_token_id, do_sample=False, - repetition_penalty=repetition_penalty, length_penalty=length_penalty, - pad_token_id=pad_token_id) - - return token_ids - - -@torch.no_grad() -def sample_generate(decoder, tokens=None, state=None, max_length=20, max_len_a=0.0, num_beams=1, temperature=1.0, top_k=50, - top_p=1.0, bos_token_id=None, eos_token_id=None, pad_token_id=0, repetition_penalty=1.0, - length_penalty=1.0): - """ - Generate sentences by sampling - - :param Decoder decoder: the Decoder object - :param torch.LongTensor tokens: batch_size x len, input to the decoder; if None, generation starts automatically from bos_token_id - :param State state: should contain some outputs of the encoder. - :param int max_length: maximum length of generated sentences; each sentence is decoded for at most max_length + max_len_a*src_len steps - :param float max_len_a: each sentence is decoded for at most max_length + max_len_a*src_len steps. If non-zero, the State must contain encoder_mask - :param int num_beams: beam size used for decoding. - :param float temperature: annealing temperature used when sampling - :param int top_k: sample only from the top_k candidates - :param float top_p: a value between 0 and 1 (nucleus sampling). - :param int bos_token_id: if tokens is None, decoding starts from bos_token_id. - :param int eos_token_id: the end token; if None, decoding always runs up to max_length. - :param int pad_token_id: id of the pad token - :param float repetition_penalty: how strongly repeated tokens are penalized. - :param float length_penalty: a length-based penalty applied to every token except eos. - :return: - """ - # each position is generated by sampling - if num_beams == 1: - token_ids = _no_beam_search_generate(decoder, tokens=tokens, state=state, max_length=max_length, max_len_a=max_len_a, - temperature=temperature, top_k=top_k, top_p=top_p, - bos_token_id=bos_token_id, eos_token_id=eos_token_id, do_sample=True, - repetition_penalty=repetition_penalty, length_penalty=length_penalty, - pad_token_id=pad_token_id) - else: - token_ids = _beam_search_generate(decoder, tokens=tokens, state=state, max_length=max_length, max_len_a=max_len_a, - num_beams=num_beams, temperature=temperature, top_k=top_k, top_p=top_p, - bos_token_id=bos_token_id, eos_token_id=eos_token_id, do_sample=True, - repetition_penalty=repetition_penalty, length_penalty=length_penalty, - pad_token_id=pad_token_id) - return token_ids - - -def _no_beam_search_generate(decoder: Seq2SeqDecoder, state, tokens=None, max_length=20, max_len_a=0.0, temperature=1.0, top_k=50, - top_p=1.0, bos_token_id=None, eos_token_id=None, do_sample=True, - repetition_penalty=1.0, length_penalty=1.0, pad_token_id=0): - device = _get_model_device(decoder) - if tokens is None: - if bos_token_id is None: - raise RuntimeError("You have to specify either `tokens` or `bos_token_id`.") - batch_size = state.num_samples - if batch_size is None: - raise RuntimeError("Cannot infer the number of samples from `state`.") - tokens = torch.full([batch_size, 1], fill_value=bos_token_id, dtype=torch.long).to(device) - batch_size = tokens.size(0) - if state.num_samples: - assert state.num_samples == batch_size, "The number of samples in `tokens` and `state` should match."
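(Editorial example: the interplay of temperature, top_k and top_p documented above condenses into a short, self-contained sketch. The name sample_next_token is hypothetical, not fastNLP API; the same filtering is what top_k_top_p_filtering further below implements.)

Example::

    import torch
    import torch.nn.functional as F

    def sample_next_token(logits, temperature=1.0, top_k=50, top_p=0.9):
        # logits: (batch_size, vocab_size) raw decoder scores for the next position
        if temperature != 1.0:
            logits = logits / temperature  # below 1 sharpens, above 1 flattens the distribution
        if top_k > 0:  # keep only the k highest-scoring tokens
            kth_best = torch.topk(logits, min(top_k, logits.size(-1)))[0][..., -1, None]
            logits = logits.masked_fill(logits < kth_best, float("-inf"))
        if top_p < 1.0:  # nucleus: smallest set of tokens whose cumulative probability exceeds top_p
            sorted_logits, sorted_idx = torch.sort(logits, descending=True)
            cum_probs = torch.cumsum(F.softmax(sorted_logits, dim=-1), dim=-1)
            remove = cum_probs > top_p
            remove[..., 1:] = remove[..., :-1].clone()  # shift right so the first token above the cut survives
            remove[..., 0] = False
            logits = logits.masked_fill(remove.scatter(1, sorted_idx, remove), float("-inf"))
        return torch.multinomial(F.softmax(logits, dim=-1), num_samples=1)  # (batch_size, 1)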
- - if eos_token_id is None: - _eos_token_id = -1 - else: - _eos_token_id = eos_token_id - - scores = decoder.decode(tokens=tokens, state=state) # 主要是为了update state - if _eos_token_id!=-1: # 防止第一个位置为结束 - scores[:, _eos_token_id] = -1e12 - next_tokens = scores.argmax(dim=-1, keepdim=True) - token_ids = torch.cat([tokens, next_tokens], dim=1) - cur_len = token_ids.size(1) - dones = token_ids.new_zeros(batch_size).eq(1) - # tokens = tokens[:, -1:] - - if max_len_a!=0: - # (bsz x num_beams, ) - if state.encoder_mask is not None: - max_lengths = (state.encoder_mask.sum(dim=1).float()*max_len_a).long() + max_length - else: - max_lengths = tokens.new_full((tokens.size(0), ), fill_value=max_length, dtype=torch.long) - real_max_length = max_lengths.max().item() - else: - real_max_length = max_length - if state.encoder_mask is not None: - max_lengths = state.encoder_mask.new_ones(state.encoder_mask.size(0)).long()*max_length - else: - max_lengths = tokens.new_full((tokens.size(0),), fill_value=max_length, dtype=torch.long) - - while cur_len < real_max_length: - scores = decoder.decode(tokens=token_ids, state=state) # batch_size x vocab_size - - if repetition_penalty != 1.0: - token_scores = scores.gather(dim=1, index=token_ids) - lt_zero_mask = token_scores.lt(0).float() - ge_zero_mask = lt_zero_mask.eq(0).float() - token_scores = lt_zero_mask * repetition_penalty * token_scores + ge_zero_mask / repetition_penalty * token_scores - scores.scatter_(dim=1, index=token_ids, src=token_scores) - - if eos_token_id is not None and length_penalty != 1.0: - token_scores = scores / cur_len ** length_penalty # batch_size x vocab_size - eos_mask = scores.new_ones(scores.size(1)) - eos_mask[eos_token_id] = 0 - eos_mask = eos_mask.unsqueeze(0).eq(1) - scores = scores.masked_scatter(eos_mask, token_scores) # 也即除了eos,其他词的分数经过了放大/缩小 - - if do_sample: - if temperature > 0 and temperature != 1: - scores = scores / temperature - - scores = top_k_top_p_filtering(scores, top_k, top_p, min_tokens_to_keep=2) - # 加上1e-12是为了避免https://github.com/pytorch/pytorch/pull/27523 - probs = F.softmax(scores, dim=-1) + 1e-12 - - next_tokens = torch.multinomial(probs, num_samples=1).squeeze(1) # batch_size - else: - next_tokens = torch.argmax(scores, dim=-1) # batch_size - - # 如果已经达到对应的sequence长度了,就直接填为eos了 - if _eos_token_id!=-1: - next_tokens = next_tokens.masked_fill(max_lengths.eq(cur_len+1), _eos_token_id) - next_tokens = next_tokens.masked_fill(dones, pad_token_id) # 对已经搜索完成的sample做padding - tokens = next_tokens.unsqueeze(1) - - token_ids = torch.cat([token_ids, tokens], dim=-1) # batch_size x max_len - - end_mask = next_tokens.eq(_eos_token_id) - dones = dones.__or__(end_mask) - cur_len += 1 - - if dones.min() == 1: - break - - # if eos_token_id is not None: - # tokens.scatter(index=max_lengths[:, None], dim=1, value=eos_token_id) # 将最大长度位置设置为eos - # if cur_len == max_length: - # token_ids[:, -1].masked_fill_(~dones, eos_token_id) # 若到最长长度仍未到EOS,则强制将最后一个词替换成eos - return token_ids - - -def _beam_search_generate(decoder: Seq2SeqDecoder, tokens=None, state=None, max_length=20, max_len_a=0.0, num_beams=4, temperature=1.0, - top_k=50, top_p=1.0, bos_token_id=None, eos_token_id=None, do_sample=True, - repetition_penalty=1.0, length_penalty=None, pad_token_id=0) -> torch.LongTensor: - # 进行beam search - device = _get_model_device(decoder) - if tokens is None: - if bos_token_id is None: - raise RuntimeError("You have to specify either `tokens` or `bos_token_id`.") - batch_size = state.num_samples - if batch_size is None: - raise 
RuntimeError("Cannot infer the number of samples from `state`.") - tokens = torch.full([batch_size, 1], fill_value=bos_token_id, dtype=torch.long).to(device) - batch_size = tokens.size(0) - if state.num_samples: - assert state.num_samples == batch_size, "The number of samples in `tokens` and `state` should match." - - if eos_token_id is None: - _eos_token_id = -1 - else: - _eos_token_id = eos_token_id - - scores = decoder.decode(tokens=tokens, state=state) # 这里要传入的是整个句子的长度 - if _eos_token_id!=-1: # 防止第一个位置为结束 - scores[:, _eos_token_id] = -1e12 - vocab_size = scores.size(1) - assert vocab_size >= num_beams, "num_beams should be smaller than the number of vocabulary size." - - if do_sample: - probs = F.softmax(scores, dim=-1) + 1e-12 - next_tokens = torch.multinomial(probs, num_samples=num_beams) # (batch_size, num_beams) - logits = probs.log() - next_scores = logits.gather(dim=1, index=next_tokens) # (batch_size, num_beams) - else: - scores = F.log_softmax(scores, dim=-1) # (batch_size, vocab_size) - # 得到(batch_size, num_beams), (batch_size, num_beams) - next_scores, next_tokens = torch.topk(scores, num_beams, dim=1, largest=True, sorted=True) - - # 根据index来做顺序的调转 - indices = torch.arange(batch_size, dtype=torch.long).to(device) - indices = indices.repeat_interleave(num_beams) - state.reorder_state(indices) - - tokens = tokens.index_select(dim=0, index=indices) # batch_size * num_beams x length - # 记录生成好的token (batch_size', cur_len) - token_ids = torch.cat([tokens, next_tokens.view(-1, 1)], dim=-1) - dones = [False] * batch_size - - beam_scores = next_scores.view(-1) # batch_size * num_beams - - # 用来记录已经生成好的token的长度 - cur_len = token_ids.size(1) - - if max_len_a!=0: - # (bsz x num_beams, ) - if state.encoder_mask is not None: - max_lengths = (state.encoder_mask.sum(dim=1).float()*max_len_a).long() + max_length - else: - max_lengths = tokens.new_full((tokens.size(0), ), fill_value=max_length, dtype=torch.long) - real_max_length = max_lengths.max().item() - else: - real_max_length = max_length - if state.encoder_mask is not None: - max_lengths = state.encoder_mask.new_ones(state.encoder_mask.size(0)).long()*max_length - else: - max_lengths = tokens.new_full((tokens.size(0),), fill_value=max_length, dtype=torch.long) - hypos = [ - BeamHypotheses(num_beams, real_max_length, length_penalty, early_stopping=False) for _ in range(batch_size) - ] - # 0, num_beams, 2*num_beams, ... 
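# (Editorial illustration with hypothetical sizes: for batch_size=2 and num_beams=3 the offsets computed
#  below are tensor([[0], [3]]); adding a per-sample beam choice such as [[2, 0, 1], [1, 1, 0]] yields flat
#  row indices [[2, 0, 1], [4, 4, 3]] into the (batch_size*num_beams, ...) tensors, which is how surviving
#  beams are gathered and the decoder state is reordered further down.)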
- batch_inds_with_numbeams_interval = (torch.arange(batch_size) * num_beams).view(-1, 1).to(token_ids) - - while cur_len < real_max_length: - scores = decoder.decode(token_ids, state) # (bsz x num_beams, vocab_size) - if repetition_penalty != 1.0: - token_scores = scores.gather(dim=1, index=token_ids) - lt_zero_mask = token_scores.lt(0).float() - ge_zero_mask = lt_zero_mask.eq(0).float() - token_scores = lt_zero_mask * repetition_penalty * token_scores + ge_zero_mask / repetition_penalty * token_scores - scores.scatter_(dim=1, index=token_ids, src=token_scores) - - if _eos_token_id!=-1: - max_len_eos_mask = max_lengths.eq(cur_len+1) - eos_scores = scores[:, _eos_token_id] - # 如果已经达到最大长度,就把eos的分数加大 - scores[:, _eos_token_id] = torch.where(max_len_eos_mask, eos_scores+1e32, eos_scores) - - if do_sample: - if temperature > 0 and temperature != 1: - scores = scores / temperature - - # 多召回一个防止eos - scores = top_k_top_p_filtering(scores, top_k, top_p, min_tokens_to_keep=num_beams + 1) - # 加上1e-12是为了避免https://github.com/pytorch/pytorch/pull/27523 - probs = F.softmax(scores, dim=-1) + 1e-12 - - # 保证至少有一个不是eos的值 - _tokens = torch.multinomial(probs, num_samples=num_beams + 1) # batch_size' x (num_beams+1) - - logits = probs.log() - # 防止全是这个beam的被选中了,且需要考虑eos被选择的情况 - _scores = logits.gather(dim=1, index=_tokens) # batch_size' x (num_beams+1) - _scores = _scores + beam_scores[:, None] # batch_size' x (num_beams+1) - # 从这里面再选择top的2*num_beam个 - _scores = _scores.view(batch_size, num_beams * (num_beams + 1)) - next_scores, ids = _scores.topk(2 * num_beams, dim=1, largest=True, sorted=True) - _tokens = _tokens.view(batch_size, num_beams * (num_beams + 1)) - next_tokens = _tokens.gather(dim=1, index=ids) # (batch_size, 2*num_beams) - from_which_beam = ids // (num_beams + 1) # (batch_size, 2*num_beams) - else: - scores = F.log_softmax(scores, dim=-1) # (batch_size * num_beams, vocab_size) - _scores = scores + beam_scores[:, None] # (batch_size * num_beams, vocab_size) - _scores = _scores.view(batch_size, -1) # (batch_size, num_beams*vocab_size) - next_scores, ids = torch.topk(_scores, 2 * num_beams, dim=1, largest=True, sorted=True) # (bsz, 2*num_beams) - from_which_beam = ids // vocab_size # (batch_size, 2*num_beams) - next_tokens = ids % vocab_size # (batch_size, 2*num_beams) - - # 接下来需要组装下一个batch的结果。 - # 需要选定哪些留下来 - # next_scores, sorted_inds = next_scores.sort(dim=-1, descending=True) - # next_tokens = next_tokens.gather(dim=1, index=sorted_inds) - # from_which_beam = from_which_beam.gather(dim=1, index=sorted_inds) - - not_eos_mask = next_tokens.ne(_eos_token_id) # 为1的地方不是eos - keep_mask = not_eos_mask.cumsum(dim=1).le(num_beams) # 为1的地方需要保留 - keep_mask = not_eos_mask.__and__(keep_mask) # 为1的地方是需要进行下一步search的 - - _next_tokens = next_tokens.masked_select(keep_mask).view(-1, 1) - _from_which_beam = from_which_beam.masked_select(keep_mask).view(batch_size, num_beams) # 上面的token是来自哪个beam - _next_scores = next_scores.masked_select(keep_mask).view(batch_size, num_beams) - beam_scores = _next_scores.view(-1) - - flag = True - if cur_len+1 == real_max_length: - eos_batch_idx = torch.arange(batch_size).to(next_tokens).repeat_interleave(repeats=num_beams, dim=0) - eos_beam_ind = torch.arange(num_beams).to(token_ids).repeat(batch_size) # 表示的是indice - eos_beam_idx = from_which_beam[:, :num_beams].reshape(-1) # 表示的是从哪个beam获取得到的 - else: - # 将每个batch中在num_beam内的序列添加到结束中, 为1的地方需要结束了 - effective_eos_mask = next_tokens[:, :num_beams].eq(_eos_token_id) # batch_size x num_beams - if effective_eos_mask.sum().gt(0): - 
eos_batch_idx, eos_beam_ind = effective_eos_mask.nonzero(as_tuple=True) - # 是由于from_which_beam是 (batch_size, 2*num_beams)的,所以需要2*num_beams - eos_beam_idx = eos_batch_idx * num_beams * 2 + eos_beam_ind - eos_beam_idx = from_which_beam.view(-1)[eos_beam_idx] # 获取真实的从哪个beam获取的eos - else: - flag = False - - if flag: - _token_ids = torch.cat([token_ids, _next_tokens], dim=-1) - for batch_idx, beam_ind, beam_idx in zip(eos_batch_idx.tolist(), eos_beam_ind.tolist(), - eos_beam_idx.tolist()): - if not dones[batch_idx]: - score = next_scores[batch_idx, beam_ind].item() - # 之后需要在结尾新增一个eos - if _eos_token_id!=-1: - hypos[batch_idx].add(_token_ids[batch_idx * num_beams + beam_idx, :cur_len].clone(), score) - else: - hypos[batch_idx].add(_token_ids[batch_idx * num_beams + beam_idx].clone(), score) - - # 更改state状态, 重组token_ids - reorder_inds = (batch_inds_with_numbeams_interval + _from_which_beam).view(-1) # flatten成一维 - state.reorder_state(reorder_inds) - # 重新组织token_ids的状态 - token_ids = torch.cat([token_ids.index_select(index=reorder_inds, dim=0), _next_tokens], dim=-1) - - for batch_idx in range(batch_size): - dones[batch_idx] = dones[batch_idx] or hypos[batch_idx].is_done(next_scores[batch_idx, 0].item()) or \ - max_lengths[batch_idx*num_beams]==cur_len+1 - - cur_len += 1 - - if all(dones): - break - - # select the best hypotheses - tgt_len = token_ids.new_zeros(batch_size) - best = [] - - for i, hypotheses in enumerate(hypos): - best_hyp = max(hypotheses.hyp, key=lambda x: x[0])[1] - # 把上面替换为非eos的词替换回eos - if _eos_token_id!=-1: - best_hyp = torch.cat([best_hyp, best_hyp.new_ones(1)*_eos_token_id]) - tgt_len[i] = len(best_hyp) - best.append(best_hyp) - - # generate target batch - decoded = token_ids.new_zeros(batch_size, tgt_len.max().item()).fill_(pad_token_id) - for i, hypo in enumerate(best): - decoded[i, :tgt_len[i]] = hypo - - return decoded - - -class BeamHypotheses(object): - def __init__(self, num_beams, max_length, length_penalty, early_stopping): - """ - Initialize n-best list of hypotheses. - """ - self.max_length = max_length - 1 # ignoring bos_token - self.length_penalty = length_penalty - self.early_stopping = early_stopping - self.num_beams = num_beams - self.hyp = [] - self.worst_score = 1e9 - - def __len__(self): - """ - Number of hypotheses in the list. - """ - return len(self.hyp) - - def add(self, hyp, sum_logprobs): - """ - Add a new hypothesis to the list. - """ - score = sum_logprobs / len(hyp) ** self.length_penalty - if len(self) < self.num_beams or score > self.worst_score: - self.hyp.append((score, hyp)) - if len(self) > self.num_beams: - sorted_scores = sorted([(s, idx) for idx, (s, _) in enumerate(self.hyp)]) - del self.hyp[sorted_scores[0][1]] - self.worst_score = sorted_scores[1][0] - else: - self.worst_score = min(score, self.worst_score) - - def is_done(self, best_sum_logprobs): - """ - If there are enough hypotheses and that none of the hypotheses being generated - can become better than the worst one in the heap, then we are done with this sentence. 
- """ - if len(self) < self.num_beams: - return False - elif self.early_stopping: - return True - else: - return self.worst_score >= best_sum_logprobs / self.max_length ** self.length_penalty - - -def top_k_top_p_filtering(logits, top_k=0, top_p=1.0, filter_value=-float("Inf"), min_tokens_to_keep=1): - """ - 根据top_k, top_p的值,将不满足的值置为filter_value的值 - - :param torch.Tensor logits: bsz x vocab_size - :param int top_k: 如果大于0,则只保留最top_k的词汇的概率,剩下的位置被置为filter_value - :param int top_p: 根据(http://arxiv.org/abs/1904.09751)设置的筛选方式 - :param float filter_value: - :param int min_tokens_to_keep: 每个sample返回的分布中有概率的词不会低于这个值 - :return: - """ - if top_k > 0: - top_k = min(max(top_k, min_tokens_to_keep), logits.size(-1)) # Safety check - # Remove all tokens with a probability less than the last token of the top-k - indices_to_remove = logits < torch.topk(logits, top_k)[0][..., -1, None] - logits[indices_to_remove] = filter_value - - if top_p < 1.0: - sorted_logits, sorted_indices = torch.sort(logits, descending=True) - cumulative_probs = torch.cumsum(F.softmax(sorted_logits, dim=-1), dim=-1) - - # Remove tokens with cumulative probability above the threshold (token with 0 are kept) - sorted_indices_to_remove = cumulative_probs > top_p - if min_tokens_to_keep > 1: - # Keep at least min_tokens_to_keep (set to min_tokens_to_keep-1 because we add the first one below) - sorted_indices_to_remove[..., :min_tokens_to_keep] = 0 - # Shift the indices to the right to keep also the first token above the threshold - sorted_indices_to_remove[..., 1:] = sorted_indices_to_remove[..., :-1].clone() - sorted_indices_to_remove[..., 0] = 0 - - # scatter sorted tensors to original indexing - indices_to_remove = sorted_indices_to_remove.scatter(1, sorted_indices, sorted_indices_to_remove) - logits[indices_to_remove] = filter_value - return logits diff --git a/fastNLP/modules/tokenizer/__init__.py b/fastNLP/modules/tokenizer/__init__.py deleted file mode 100644 index f3c4faae..00000000 --- a/fastNLP/modules/tokenizer/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -r""" - -""" -__all__=[ - 'BertTokenizer', - - "GPT2Tokenizer", - - "RobertaTokenizer" -] - -from .bert_tokenizer import BertTokenizer -from .gpt2_tokenizer import GPT2Tokenizer -from .roberta_tokenizer import RobertaTokenizer \ No newline at end of file diff --git a/fastNLP/modules/tokenizer/bert_tokenizer.py b/fastNLP/modules/tokenizer/bert_tokenizer.py deleted file mode 100644 index 4acf042a..00000000 --- a/fastNLP/modules/tokenizer/bert_tokenizer.py +++ /dev/null @@ -1,450 +0,0 @@ -r""" - -""" - -__all__ = [ - 'BertTokenizer' -] - -import os -import collections -import unicodedata -from ...core import logger -from ...io.file_utils import _get_file_name_base_on_postfix -from ...io.file_utils import _get_bert_dir - -VOCAB_NAME = 'vocab.txt' - -PRETRAINED_INIT_CONFIGURATION = { - "en": {"do_lower_case": False}, - "en-base-uncased": {'do_lower_case': True}, - 'en-base-cased': {'do_lower_case':False}, - "en-large-cased-wwm": {"do_lower_case": False}, - 'en-large-cased': {'do_lower_case':False}, - 'en-large-uncased': {'do_lower_case':True}, - 'en-large-uncased-wwm': {'do_lower_case':True}, - 'cn': {'do_lower_case':True}, - 'cn-base': {'do_lower_case': True}, - 'cn-wwm-ext': {'do_lower_case': True}, - 'multi-base-cased': {'do_lower_case': False}, - 'multi-base-uncased': {'do_lower_case': True}, -} - -def _is_control(char): - r"""Checks whether `chars` is a control character.""" - # These are technically control characters but we count them as whitespace - # characters. 
- if char == "\t" or char == "\n" or char == "\r": - return False - cat = unicodedata.category(char) - if cat.startswith("C"): - return True - return False - - -def _is_punctuation(char): - r"""Checks whether `chars` is a punctuation character.""" - cp = ord(char) - # We treat all non-letter/number ASCII as punctuation. - # Characters such as "^", "$", and "`" are not in the Unicode - # Punctuation class but we treat them as punctuation anyways, for - # consistency. - if (((cp >= 33) and (cp <= 47)) or ((cp >= 58) and (cp <= 64)) or - ((cp >= 91) and (cp <= 96)) or ((cp >= 123) and (cp <= 126))): - return True - cat = unicodedata.category(char) - if cat.startswith("P"): - return True - return False - - -def _is_whitespace(char): - r"""Checks whether `chars` is a whitespace character.""" - # \t, \n, and \r are technically contorl characters but we treat them - # as whitespace since they are generally considered as such. - if char == " " or char == "\t" or char == "\n" or char == "\r": - return True - cat = unicodedata.category(char) - if cat == "Zs": - return True - return False - - -def whitespace_tokenize(text): - r"""Runs basic whitespace cleaning and splitting on a piece of text.""" - text = text.strip() - if not text: - return [] - tokens = text.split() - return tokens - - -class BasicTokenizer(object): - r"""Runs basic tokenization (punctuation splitting, lower casing, etc.).""" - - def __init__(self, - do_lower_case=True, - never_split=("[UNK]", "[SEP]", "[PAD]", "[CLS]", "[MASK]")): - r"""Constructs a BasicTokenizer. - - Args: - do_lower_case: Whether to lower case the input. - """ - self.do_lower_case = do_lower_case - self.never_split = never_split - - def tokenize(self, text): - r"""Tokenizes a piece of text.""" - text = self._clean_text(text) - # This was added on November 1st, 2018 for the multilingual and Chinese - # models. This is also applied to the English models now, but it doesn't - # matter since the English models were not trained on any Chinese data - # and generally don't have any Chinese data in them (there are Chinese - # characters in the vocabulary because Wikipedia does have some Chinese - # words in the English Wikipedia.). 
- text = self._tokenize_chinese_chars(text) - orig_tokens = whitespace_tokenize(text) - split_tokens = [] - for token in orig_tokens: - if self.do_lower_case and token not in self.never_split: - token = token.lower() - token = self._run_strip_accents(token) - split_tokens.extend(self._run_split_on_punc(token)) - - output_tokens = whitespace_tokenize(" ".join(split_tokens)) - return output_tokens - - def _run_strip_accents(self, text): - r"""Strips accents from a piece of text.""" - text = unicodedata.normalize("NFD", text) - output = [] - for char in text: - cat = unicodedata.category(char) - if cat == "Mn": - continue - output.append(char) - return "".join(output) - - def _run_split_on_punc(self, text): - r"""Splits punctuation on a piece of text.""" - if text in self.never_split: - return [text] - chars = list(text) - i = 0 - start_new_word = True - output = [] - while i < len(chars): - char = chars[i] - if _is_punctuation(char): - output.append([char]) - start_new_word = True - else: - if start_new_word: - output.append([]) - start_new_word = False - output[-1].append(char) - i += 1 - - return ["".join(x) for x in output] - - def _tokenize_chinese_chars(self, text): - r"""Adds whitespace around any CJK character.""" - output = [] - for char in text: - cp = ord(char) - if self._is_chinese_char(cp): - output.append(" ") - output.append(char) - output.append(" ") - else: - output.append(char) - return "".join(output) - - def _is_chinese_char(self, cp): - r"""Checks whether CP is the codepoint of a CJK character.""" - # This defines a "chinese character" as anything in the CJK Unicode block: - # https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block) - # - # Note that the CJK Unicode block is NOT all Japanese and Korean characters, - # despite its name. The modern Korean Hangul alphabet is a different block, - # as is Japanese Hiragana and Katakana. Those alphabets are used to write - # space-separated words, so they are not treated specially and handled - # like the all of the other languages. - if (((cp >= 0x4E00) and (cp <= 0x9FFF)) or # - ((cp >= 0x3400) and (cp <= 0x4DBF)) or # - ((cp >= 0x20000) and (cp <= 0x2A6DF)) or # - ((cp >= 0x2A700) and (cp <= 0x2B73F)) or # - ((cp >= 0x2B740) and (cp <= 0x2B81F)) or # - ((cp >= 0x2B820) and (cp <= 0x2CEAF)) or - ((cp >= 0xF900) and (cp <= 0xFAFF)) or # - ((cp >= 0x2F800) and (cp <= 0x2FA1F))): # - return True - - return False - - def _clean_text(self, text): - r"""Performs invalid character removal and whitespace cleanup on text.""" - output = [] - for char in text: - cp = ord(char) - if cp == 0 or cp == 0xfffd or _is_control(char): - continue - if _is_whitespace(char): - output.append(" ") - else: - output.append(char) - return "".join(output) - - -def load_vocab(vocab_file): - r"""Loads a vocabulary file into a dictionary.""" - vocab = collections.OrderedDict() - index = 0 - with open(vocab_file, "r", encoding="utf-8") as reader: - while True: - token = reader.readline() - if not token: - break - token = token.strip() - vocab[token] = index - index += 1 - return vocab - - -class WordpieceTokenizer(object): - r"""Runs WordPiece tokenization.""" - - def __init__(self, vocab, unk_token="[UNK]", max_input_chars_per_word=100): - self.vocab = vocab - self.unk_token = unk_token - self.max_input_chars_per_word = max_input_chars_per_word - - def tokenize(self, text): - r"""Tokenizes a piece of text into its word pieces. - - This uses a greedy longest-match-first algorithm to perform tokenization - using the given vocabulary. 
- - For example: - input = "unaffable" - output = ["un", "##aff", "##able"] - - Args: - text: A single token or whitespace separated tokens. This should have - already been passed through `BasicTokenizer`. - - Returns: - A list of wordpiece tokens. - """ - - output_tokens = [] - for token in whitespace_tokenize(text): - chars = list(token) - if len(chars) > self.max_input_chars_per_word: - output_tokens.append(self.unk_token) - continue - - is_bad = False - start = 0 - sub_tokens = [] - while start < len(chars): - end = len(chars) - cur_substr = None - while start < end: - substr = "".join(chars[start:end]) - if start > 0: - substr = "##" + substr - if substr in self.vocab: - cur_substr = substr - break - end -= 1 - if cur_substr is None: - is_bad = True - break - sub_tokens.append(cur_substr) - start = end - - if is_bad: - output_tokens.append(self.unk_token) - else: - output_tokens.extend(sub_tokens) - if len(output_tokens) == 0: # 防止里面全是空格或者回车符号 - return [self.unk_token] - return output_tokens - - -class BertTokenizer(object): - r"""Runs end-to-end tokenization: punctuation splitting + wordpiece""" - - def __init__(self, vocab_file, do_lower_case=True, max_len=None, do_basic_tokenize=True, - never_split=("[UNK]", "[SEP]", "[PAD]", "[CLS]", "[MASK]")): - r"""Constructs a BertTokenizer. - - Args: - vocab_file: Path to a one-wordpiece-per-line vocabulary file - do_lower_case: Whether to lower case the input - Only has an effect when do_wordpiece_only=False - do_basic_tokenize: Whether to do basic tokenization before wordpiece. - max_len: An artificial maximum length to truncate tokenized sequences to; - Effective maximum length is always the minimum of this - value (if specified) and the underlying BERT model's - sequence length. - never_split: List of tokens which will never be split during tokenization. - Only has an effect when do_wordpiece_only=False - """ - if not os.path.isfile(vocab_file): - raise ValueError( - "Can't find a vocabulary file at path '{}'. 
To load the vocabulary from a Google pretrained " - "model use `tokenizer = BertTokenizer.from_pretrained(PRETRAINED_MODEL_NAME)`".format(vocab_file)) - self.vocab = load_vocab(vocab_file) - self.ids_to_tokens = collections.OrderedDict( - [(ids, tok) for tok, ids in self.vocab.items()]) - self.do_basic_tokenize = do_basic_tokenize - if do_basic_tokenize: - self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case, - never_split=never_split) - self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab) - self.max_len = max_len if max_len is not None else int(1e12) - - @property - def unk_index(self): - return self.vocab['[UNK]'] - - @property - def pad_index(self): - return self.vocab['[PAD]'] - - @property - def cls_index(self): - return self.vocab['[CLS]'] - - @property - def sep_index(self): - return self.vocab['[SEP]'] - - def _reinit_on_new_vocab(self, vocab): - r""" - 在load bert之后,可能会对vocab进行重新排列。重新排列之后调用这个函数重新初始化与vocab相关的性质 - - :param vocab: - :return: - """ - self.vocab = vocab - self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab) - - def tokenize(self, text): - split_tokens = [] - if self.do_basic_tokenize: - for token in self.basic_tokenizer.tokenize(text): - for sub_token in self.wordpiece_tokenizer.tokenize(token): - split_tokens.append(sub_token) - else: - split_tokens = self.wordpiece_tokenizer.tokenize(text) - return split_tokens - - def convert_tokens_to_ids(self, tokens): - r"""Converts a sequence of tokens into ids using the vocab.""" - ids = [] - for token in tokens: - ids.append(self.vocab[token]) - if len(ids) > self.max_len: - logger.warning( - "Token indices sequence length is longer than the specified maximum " - " sequence length for this BERT model ({} > {}). Running this" - " sequence through BERT will result in indexing errors".format(len(ids), self.max_len) - ) - return ids - - def convert_ids_to_tokens(self, ids): - r"""将token ids转换为一句话""" - tokens = [] - for i in ids: - tokens.append(self.ids_to_tokens[i]) - return self._convert_tokens_to_string(tokens) - - def _convert_tokens_to_string(self, tokens): - """ Converts a sequence of tokens (string) in a single string. """ - out_string = " ".join(tokens).replace(" ##", "").strip() - return out_string - - def save_vocabulary(self, vocab_path): - r"""Save the tokenizer vocabulary to a directory or file.""" - index = 0 - if os.path.isdir(vocab_path): - vocab_file = os.path.join(vocab_path, VOCAB_NAME) - else: - vocab_file = vocab_path - with open(vocab_file, "w", encoding="utf-8") as writer: - for token, token_index in sorted(self.vocab.items(), key=lambda kv: kv[1]): - if index != token_index: - logger.warning("Saving vocabulary to {}: vocabulary indices are not consecutive." - " Please check that the vocabulary is not corrupted!".format(vocab_file)) - index = token_index - writer.write(token + u'\n') - index += 1 - return vocab_file - - def save_pretrained(self, save_directory): - self.save_vocabulary(save_directory) - - @classmethod - def from_pretrained(cls, model_dir_or_name, *inputs, **kwargs): - r""" - 给定模型的名字或者路径,直接读取vocab. - """ - model_dir = _get_bert_dir(model_dir_or_name) - pretrained_model_name_or_path = _get_file_name_base_on_postfix(model_dir, '.txt') - logger.info("loading vocabulary file {}".format(pretrained_model_name_or_path)) - max_len = 512 - kwargs['max_len'] = min(kwargs.get('max_position_embeddings', int(1e12)), max_len) - # Instantiate tokenizer. 
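# (Editorial sketch of the greedy longest-match-first WordPiece loop documented above; self-contained and
#  hypothetical, a real vocabulary also drives the max_input_chars_per_word and "[UNK]" handling.)
#
#     vocab = {"un", "##aff", "##able"}
#
#     def wordpiece(token):
#         pieces, start = [], 0
#         while start < len(token):
#             end = len(token)
#             while start < end:  # shrink the window until a known piece matches
#                 piece = ("##" if start else "") + token[start:end]
#                 if piece in vocab:
#                     pieces.append(piece)
#                     start = end
#                     break
#                 end -= 1
#             else:  # no piece matched at this position
#                 return ["[UNK]"]
#         return pieces
#
#     assert wordpiece("unaffable") == ["un", "##aff", "##able"]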
- if 'do_lower_case' not in kwargs: - if model_dir_or_name in PRETRAINED_INIT_CONFIGURATION: - kwargs['do_lower_case'] = PRETRAINED_INIT_CONFIGURATION[model_dir_or_name]['do_lower_case'] - else: - if 'case' in model_dir_or_name: - kwargs['do_lower_case'] = False - elif 'uncase' in model_dir_or_name: - kwargs['do_lower_case'] = True - - tokenizer = cls(pretrained_model_name_or_path, *inputs, **kwargs) - return tokenizer - - def encode(self, text, add_special_tokens=True): - """ - Encode the given text input into indices. - - Example:: - - >>> from fastNLP.modules import BertTokenizer - >>> bert_tokenizer = BertTokenizer.from_pretrained('en') - >>> print(bert_tokenizer.encode('from')) - >>> print(bert_tokenizer.encode("This is a demo sentence")) - >>> print(bert_tokenizer.encode(["This", "is", 'a'])) - - - :param List[str],str text: a single input, treated as one sentence. - :param bool add_special_tokens: whether to guarantee that the sequence starts with cls and ends with sep. - :return: - """ - - word_pieces = [] - if isinstance(text, str): - words = text.split() - elif isinstance(text, list): - words = text - else: - raise TypeError("Only support str or List[str]") - for word in words: - _words = self.basic_tokenizer._tokenize_chinese_chars(word).split() - tokens = [] - for word in _words: - tokens.extend(self.wordpiece_tokenizer.tokenize(word)) - word_piece_ids = self.convert_tokens_to_ids(tokens) - word_pieces.extend(word_piece_ids) - if add_special_tokens: - if word_pieces[0] != self.cls_index: - word_pieces.insert(0, self.cls_index) - if word_pieces[-1] != self.sep_index: - word_pieces.append(self.sep_index) - return word_pieces diff --git a/fastNLP/modules/tokenizer/gpt2_tokenizer.py b/fastNLP/modules/tokenizer/gpt2_tokenizer.py deleted file mode 100644 index 6bf6ce67..00000000 --- a/fastNLP/modules/tokenizer/gpt2_tokenizer.py +++ /dev/null @@ -1,739 +0,0 @@ -r"""undocumented -The code on this page borrows heavily from (copy-pastes) https://github.com/huggingface/pytorch-pretrained-BERT; if you find this code - useful, please cite them as well. -""" - -__all__ = [ - 'GPT2Tokenizer' -] - -from functools import lru_cache -import json -import regex as re -import itertools - - -from ...io.file_utils import _get_gpt2_dir -from ...core import logger -from fastNLP.io.file_utils import _get_file_name_base_on_postfix - -import os - -PRETRAINED_GPT2_MODEL_DIR = PRETRAINED_BERT_MODEL_DIR = { - 'en-small': 'gpt2-small.zip', - 'en-median': 'gpt2-medium.zip', - 'en': 'gpt2-medium.zip' -} - - -@lru_cache() -def bytes_to_unicode(): - """ - Returns a list of utf-8 bytes and a mapping to unicode strings. - We specifically avoid mapping to whitespace/control characters the bpe code barfs on. - - The reversible bpe codes work on unicode strings. - This means you need a large # of unicode characters in your vocab if you want to avoid UNKs. - When you're at something like a 10B token dataset you end up needing around 5K for decent coverage. - This is a significant percentage of your normal, say, 32K bpe vocab. - To avoid that, we want lookup tables between utf-8 bytes and unicode strings. - """ - bs = ( - list(range(ord("!"), ord("~") + 1)) + list(range(ord("¡"), ord("¬") + 1)) + list(range(ord("®"), ord("ÿ") + 1)) - ) - cs = bs[:] - n = 0 - for b in range(2 ** 8): - if b not in bs: - bs.append(b) - cs.append(2 ** 8 + n) - n += 1 - cs = [chr(n) for n in cs] - return dict(zip(bs, cs)) - - -def get_pairs(word): - """Return set of symbol pairs in a word. - - Word is represented as tuple of symbols (symbols being variable-length strings). - """ - pairs = set() - prev_char = word[0] - for char in word[1:]: - pairs.add((prev_char, char)) - prev_char = char - return pairs - - -VOCAB_FILES_NAMES = { - "vocab_file": "vocab.json", - "merges_file": "merges.txt", -} - - -PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = { - "en-small": 1024, - 'en': 1024, - "en-medium": 1024, - "en-large": 1024, - "en-xl": 1024, - "en-distilgpt2": 1024, -} - -PATTERN = re.compile(r"""'s|'t|'re|'ve|'m|'ll|'d| ?\p{L}+| ?\p{N}+| ?[^\s\p{L}\p{N}]+|\s+(?!\S)|\s+""") - - -def gpt2_tokenize(text, add_prefix_space=True): - """ - - :param str text: - :param bool add_prefix_space: whether to prepend a space to the sentence; only with it does tokenization match how GPT-2 was trained - :return: [] - """ - if text == '': - return [] - if add_prefix_space: - text = ' ' + text - tokens = [] - for token in re.findall(PATTERN, text): - tokens.append(token) - return tokens - - -class GPT2Tokenizer: - """ - GPT-2 BPE tokenizer. Peculiarities: - - Byte-level Byte-Pair-Encoding - - Requires a space to start the input string => the encoding and tokenize methods should be called with the - ``add_prefix_space`` flag set to ``True``. - Otherwise, this tokenizer's ``encode``, ``decode``, and ``tokenize`` methods will not conserve - the spaces at the beginning of a string: `tokenizer.decode(tokenizer.encode(" Hello")) = "Hello"` - """ - - SPECIAL_TOKENS_ATTRIBUTES = [ - "bos_token", - "eos_token", - "unk_token", - "pad_token", - "cls_token", - "mask_token", - "sep_token", - ] - - padding_side = "right" - - def __init__( - self, - vocab_file, - merges_file, - errors="replace", - unk_token="<|endoftext|>", - bos_token="<|endoftext|>", - eos_token="<|endoftext|>", - **kwargs - ): - self._bos_token = None - self._eos_token = None - self._unk_token = None - self._sep_token = None - self._pad_token = None - self._cls_token = None - self._mask_token = None - self._pad_token_type_id = 0 - - self.bos_token = bos_token - self.eos_token = eos_token - self.unk_token = unk_token - - self.max_len = int(1e12) - self.padding_side = kwargs.pop("padding_side", self.padding_side) - self.added_tokens_encoder = {} - self.unique_added_tokens_encoder = set() - self.added_tokens_decoder = {} - # inputs and kwargs for saving and re-loading (see ``from_pretrained`` and ``save_pretrained``) - self.init_inputs = () - self.init_kwargs = {} - - for key, value in kwargs.items(): - if key in self.SPECIAL_TOKENS_ATTRIBUTES: - if key == "additional_special_tokens": - assert isinstance(value, (list, tuple)) and all(isinstance(t, str) for t in value) - else: - assert isinstance(value, str) - setattr(self, key, value) - - self.max_len_single_sentence = ( - self.max_len - ) # no default special tokens - you can update this value if you add special tokens - self.max_len_sentences_pair = ( - self.max_len - ) # no default special tokens - you can update this value if you add special tokens - - with open(vocab_file, encoding="utf-8") as vocab_handle: - self.encoder = json.load(vocab_handle) - self.decoder = {v: k for k, v in self.encoder.items()} - self.errors = errors # how to handle errors in decoding - self.byte_encoder = bytes_to_unicode() - self.byte_decoder = {v: k for k, v in self.byte_encoder.items()} - with open(merges_file, encoding="utf-8") as merges_handle: - bpe_merges = merges_handle.read().split("\n")[1:-1] - bpe_merges = [tuple(merge.split()) for merge in bpe_merges] - self.bpe_ranks = dict(zip(bpe_merges, range(len(bpe_merges)))) - self.cache = {} - - def _reinit_on_new_vocab(self, vocab): - self.encoder = {k:v for k,v in vocab.items()} - self.decoder = {v:k for k,v in
vocab.items()} - self.cache = {} - - @property - def bos_token(self): - """ Beginning of sentence token (string). Log an error if used while not having been set. """ - if self._bos_token is None: - logger.error("Using bos_token, but it is not set yet.") - return self._bos_token - - @property - def eos_token(self): - """ End of sentence token (string). Log an error if used while not having been set. """ - if self._eos_token is None: - logger.error("Using eos_token, but it is not set yet.") - return self._eos_token - - @property - def unk_token(self): - """ Unknown token (string). Log an error if used while not having been set. """ - if self._unk_token is None: - logger.error("Using unk_token, but it is not set yet.") - return self._unk_token - - @property - def pad_token(self): - """ Padding token (string). Log an error if used while not having been set. """ - if self._pad_token is None: - logger.error("Using pad_token, but it is not set yet.") - return self._pad_token - - @property - def cls_token(self): - """ Classification token (string). E.g. to extract a summary of an input sequence leveraging self-attention along the full depth of the model. Log an error if used while not having been set. """ - if self._cls_token is None: - logger.error("Using cls_token, but it is not set yet.") - return self._cls_token - - @property - def sep_token(self): - if self._sep_token is None: - logger.error("Using sep_token, but it is not set yet.") - return self._sep_token - - @property - def mask_token(self): - """ Mask token (string). E.g. when training a model with masked-language modeling. Log an error if used while not having been set. """ - if self._mask_token is None: - logger.error("Using mask_token, but it is not set yet.") - return self._mask_token - - @bos_token.setter - def bos_token(self, value): - self._bos_token = value - - @eos_token.setter - def eos_token(self, value): - self._eos_token = value - - @unk_token.setter - def unk_token(self, value): - self._unk_token = value - - @pad_token.setter - def pad_token(self, value): - self._pad_token = value - - @cls_token.setter - def cls_token(self, value): - self._cls_token = value - - @sep_token.setter - def sep_token(self, value): - self._sep_token = value - - @mask_token.setter - def mask_token(self, value): - self._mask_token = value - - @property - def bos_index(self): - """ Id of the beginning of sentence token in the vocabulary. Log an error if used while not having been set. """ - return self.convert_tokens_to_ids(self.bos_token) - - @property - def sep_index(self): - return self.convert_tokens_to_ids(self.sep_token) - - @property - def eos_index(self): - """ Id of the end of sentence token in the vocabulary. Log an error if used while not having been set. """ - return self.convert_tokens_to_ids(self.eos_token) - - @property - def unk_index(self): - """ Id of the unknown token in the vocabulary. Log an error if used while not having been set. """ - return self.convert_tokens_to_ids(self.unk_token) - - @property - def pad_index(self): - """ Id of the padding token in the vocabulary. Log an error if used while not having been set. """ - return self.convert_tokens_to_ids(self.pad_token) - - @property - def pad_token_type_id(self): - """ Id of the padding token type in the vocabulary.""" - return self._pad_token_type_id - - @property - def cls_index(self): - """ Id of the classification token in the vocabulary. E.g. to extract a summary of an input sequence leveraging self-attention along the full depth of the model. 
Log an error if used while not having been set. """ - return self.convert_tokens_to_ids(self.cls_token) - - @property - def mask_index(self): - """ Id of the mask token in the vocabulary. E.g. when training a model with masked-language modeling. Log an error if used while not having been set. """ - return self.convert_tokens_to_ids(self.mask_token) - - @property - def vocab_size(self): - return len(self.encoder) - - def bpe(self, token): - # 如果token没有找到,会被拆分成字母返回 - if token in self.cache: - return self.cache[token] - word = tuple(token) - pairs = get_pairs(word) # 如果word是abcd,则((a,b), (b,c), (c, d), (e,f)) - - if not pairs: - return token - - while True: - # 首先找到最常的pair - bigram = min(pairs, key=lambda pair: self.bpe_ranks.get(pair, float("inf"))) - if bigram not in self.bpe_ranks: - break - first, second = bigram - new_word = [] - i = 0 - while i < len(word): - try: - j = word.index(first, i) - except ValueError: - new_word.extend(word[i:]) - break - else: - new_word.extend(word[i:j]) #最先找的 - i = j - - if word[i] == first and i < len(word) - 1 and word[i + 1] == second: - new_word.append(first + second) - i += 2 - else: - new_word.append(word[i]) - i += 1 - new_word = tuple(new_word) - word = new_word - if len(word) == 1: - break - else: - pairs = get_pairs(word) - word = " ".join(word) - self.cache[token] = word - return word - - def _tokenize(self, text, add_prefix_space=False): - """ Tokenize a string. - Args: - - add_prefix_space (boolean, default False): - Begin the sentence with at least one space to get invariance to word order in GPT-2 (and RoBERTa) tokenizers. - """ - bpe_tokens = [] - for token in gpt2_tokenize(text, add_prefix_space=add_prefix_space): - token = "".join( - self.byte_encoder[b] for b in token.encode("utf-8") - ) # Maps all our bytes to unicode strings, avoiding controle tokens of the BPE (spaces in our case) - bpe_tokens.extend(bpe_token for bpe_token in self.bpe(token).split(" ")) - return bpe_tokens - - def _convert_token_to_id(self, token): - """ Converts a token (str) in an id using the vocab. """ - return self.encoder.get(token, self.encoder.get(self.unk_token)) - - def _convert_id_to_token(self, index): - """Converts an index (integer) in a token (str) using the vocab.""" - return self.decoder.get(index) - - def convert_tokens_to_string(self, tokens): - """ Converts a sequence of tokens (string) in a single string. """ - text = "".join(tokens) - text = bytearray([self.byte_decoder[c] for c in text]).decode("utf-8", errors=self.errors) - return text - - def save_pretrained(self, save_directory): - return self.save_vocabulary(save_directory) - - def save_vocabulary(self, save_directory): - """Save the tokenizer vocabulary and merge files to a directory.""" - if not os.path.isdir(save_directory): - logger.error("Vocabulary path ({}) should be a directory".format(save_directory)) - return - vocab_file = os.path.join(save_directory, VOCAB_FILES_NAMES["vocab_file"]) - merge_file = os.path.join(save_directory, VOCAB_FILES_NAMES["merges_file"]) - - with open(vocab_file, "w", encoding="utf-8") as f: - f.write(json.dumps(self.encoder, ensure_ascii=False)) - - index = 0 - with open(merge_file, "w", encoding="utf-8") as writer: - writer.write("#version: 0.2\n") - for bpe_tokens, token_index in sorted(self.bpe_ranks.items(), key=lambda kv: kv[1]): - if index != token_index: - logger.warning( - "Saving vocabulary to {}: BPE merge indices are not consecutive." 
- " Please check that the tokenizer is not corrupted!".format(merge_file) - ) - index = token_index - writer.write(" ".join(bpe_tokens) + "\n") - index += 1 - - return vocab_file, merge_file - - @classmethod - def from_pretrained(cls, model_dir_or_name): - r""" - """ - return cls._from_pretrained(model_dir_or_name) - - # 将它修改一定传入文件夹 - @classmethod - def _from_pretrained(cls, model_dir_or_name): - """ - - :param str model_dir_or_name: 目录或者缩写名 - :param init_inputs: - :param kwargs: - :return: - """ - # 它需要两个文件,第一个是vocab.json,第二个是merge_file? - model_dir = _get_gpt2_dir(model_dir_or_name) - # 里面会包含四个文件vocab.json, merge.txt, config.json, model.bin - - tokenizer_config_file = _get_file_name_base_on_postfix(model_dir, 'config.json') - with open(tokenizer_config_file, encoding="utf-8") as tokenizer_config_handle: - init_kwargs = json.load(tokenizer_config_handle) - if 'max_len' not in init_kwargs: - init_kwargs['max_len'] = 1024 - # Set max length if needed - if model_dir_or_name in PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES: - # if we're using a pretrained model, ensure the tokenizer - # wont index sequences longer than the number of positional embeddings - max_len = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES[model_dir_or_name] - if max_len is not None and isinstance(max_len, (int, float)): - init_kwargs["max_len"] = min(init_kwargs.get("max_len", int(1e12)), max_len) - - # 将vocab, merge加入到init_kwargs中 - init_kwargs['vocab_file'] = _get_file_name_base_on_postfix(model_dir, 'vocab.json') - init_kwargs['merges_file'] = _get_file_name_base_on_postfix(model_dir, 'merges.txt') - - init_inputs = init_kwargs.pop("init_inputs", ()) - # Instantiate tokenizer. - try: - tokenizer = cls(*init_inputs, **init_kwargs) - except OSError: - OSError( - "Unable to load vocabulary from file. " - "Please check that the provided vocabulary is accessible and not corrupted." - ) - - return tokenizer - - def __len__(self): - """ Size of the full vocabulary with the added tokens """ - return self.vocab_size + len(self.added_tokens_encoder) - - def tokenize(self, text, add_prefix_space=True): - """ Converts a string in a sequence of tokens (string), using the tokenizer. - Split in words for word-based vocabulary or sub-words for sub-word-based - vocabularies (BPE/SentencePieces/WordPieces). - - Take care of added tokens. - Args: - - text: The sequence to be encoded. - - add_prefix_space (boolean, default True): - Begin the sentence with at least one space to get invariance to word order in GPT-2 (and RoBERTa) tokenizers. 
- """ - all_special_tokens = self.all_special_tokens - - def lowercase_text(t): - # convert non-special tokens to lowercase - escaped_special_toks = [re.escape(s_tok) for s_tok in all_special_tokens] - pattern = r'(' + r'|'.join(escaped_special_toks) + r')|' + \ - r'(.+?)' - return re.sub( - pattern, - lambda m: m.groups()[0] or m.groups()[1].lower(), - t) - - if self.init_kwargs.get('do_lower_case', False): - text = lowercase_text(text) - - def split_on_token(tok, text): - result = [] - split_text = text.split(tok) - for i, sub_text in enumerate(split_text): - sub_text = sub_text.strip() - if i == 0 and not sub_text: - result += [tok] - elif i == len(split_text) - 1: - if sub_text: - result += [sub_text] - else: - pass - else: - if sub_text: - result += [sub_text] - result += [tok] - return result - - def split_on_tokens(tok_list, text): - if not text.strip(): - return [] - if not tok_list: - return self._tokenize(text, add_prefix_space=add_prefix_space) - - tokenized_text = [] - text_list = [text] - for tok in tok_list: - tokenized_text = [] - for sub_text in text_list: - if sub_text not in self.added_tokens_encoder \ - and sub_text not in all_special_tokens: - tokenized_text += split_on_token(tok, sub_text) - else: - tokenized_text += [sub_text] - text_list = tokenized_text - - return list(itertools.chain.from_iterable((self._tokenize(token, add_prefix_space=add_prefix_space) if token not \ - in self.added_tokens_encoder and token not in all_special_tokens \ - else [token] for token in tokenized_text))) - - added_tokens = list(self.added_tokens_encoder.keys()) + all_special_tokens - tokenized_text = split_on_tokens(added_tokens, text) - return tokenized_text - - def convert_tokens_to_ids(self, tokens): - """ Converts a single token, or a sequence of tokens, (str) in a single integer id - (resp. a sequence of ids), using the vocabulary. - """ - if tokens is None: - return None - - if isinstance(tokens, str): - return self._convert_token_to_id_with_added_voc(tokens) - - ids = [] - for token in tokens: - ids.append(self._convert_token_to_id_with_added_voc(token)) - return ids - - def _convert_token_to_id_with_added_voc(self, token): - if token is None: - return None - - if token in self.added_tokens_encoder: - return self.added_tokens_encoder[token] - return self._convert_token_to_id(token) - - def convert_ids_to_tokens(self, ids, skip_special_tokens=False): - """ Converts a single index or a sequence of indices (integers) in a token " - (resp.) a sequence of tokens (str), using the vocabulary and added tokens. - - Args: - skip_special_tokens: Don't decode special tokens (self.all_special_tokens). Default: False - """ - if isinstance(ids, int): - return self._convert_id_to_token(ids) - tokens = [] - for index in ids: - index = int(index) - if skip_special_tokens and index in self.all_special_ids: - continue - tokens.append(self._convert_id_to_token(index)) - return tokens - - def convert_id_to_tokens(self, token_ids, skip_special_tokens=False, clean_up_tokenization_spaces=True): - """ - Converts a sequence of ids (integer) in a string, using the tokenizer and vocabulary - with options to remove special tokens and clean up tokenization spaces. - Similar to doing ``self.convert_tokens_to_string(self.convert_ids_to_tokens(token_ids))``. - - Args: - token_ids: list of tokenized input ids. Can be obtained using the `encode` or `encode_plus` methods. - skip_special_tokens: if set to True, will replace special tokens. 
- clean_up_tokenization_spaces: if set to True, will clean up the tokenization spaces. - """ - filtered_tokens = self.convert_ids_to_tokens(token_ids, skip_special_tokens=skip_special_tokens) - - # To avoid mixing byte-level and unicode for byte-level BPE - # we need to build the string separately for added tokens and byte-level tokens - # cf. https://github.com/huggingface/transformers/issues/1133 - sub_texts = [] - current_sub_text = [] - for token in filtered_tokens: - if skip_special_tokens and token in self.all_special_ids: - continue - if token in self.added_tokens_encoder: - if current_sub_text: - sub_texts.append(self.convert_tokens_to_string(current_sub_text)) - current_sub_text = [] - sub_texts.append(token) - else: - current_sub_text.append(token) - if current_sub_text: - sub_texts.append(self.convert_tokens_to_string(current_sub_text)) - text = " ".join(sub_texts) - - if clean_up_tokenization_spaces: - clean_text = self.clean_up_tokenization(text) - return clean_text - else: - return text - - @property - def special_tokens_map(self): - """ A dictionary mapping special token class attributes (cls_token, unk_token...) to their - values ('<unk>', '<cls>'...) - """ - set_attr = {} - for attr in self.SPECIAL_TOKENS_ATTRIBUTES: - attr_value = getattr(self, "_" + attr) - if attr_value: - set_attr[attr] = attr_value - return set_attr - - @property - def all_special_tokens(self): - """ List all the special tokens ('<unk>', '<cls>'...) mapped to class attributes - (cls_token, unk_token...). - """ - all_toks = [] - set_attr = self.special_tokens_map - for attr_value in set_attr.values(): - all_toks = all_toks + (list(attr_value) if isinstance(attr_value, (list, tuple)) else [attr_value]) - all_toks = list(set(all_toks)) - return all_toks - - @property - def all_special_ids(self): - """ List the vocabulary indices of the special tokens ('<unk>', '<cls>'...) mapped to - class attributes (cls_token, unk_token...). - """ - all_toks = self.all_special_tokens - all_ids = self.convert_tokens_to_ids(all_toks) - return all_ids - - @staticmethod - def clean_up_tokenization(out_string): - """ Clean up a list of simple English tokenization artifacts like spaces before punctuation and abbreviated forms.
- """ - out_string = ( - out_string.replace(" .", ".") - .replace(" ?", "?") - .replace(" !", "!") - .replace(" ,", ",") - .replace(" ' ", "'") - .replace(" n't", "n't") - .replace(" 'm", "'m") - .replace(" do not", " don't") - .replace(" 's", "'s") - .replace(" 've", "'ve") - .replace(" 're", "'re") - ) - return out_string - - def encode(self, text, add_special_tokens=False, add_prefix_space=True): - """ - 给定text输入将数据encode为index的形式。 - - Example:: - - >>> from fastNLP.modules import GPT2Tokenizer - >>> gpt2_tokenizer = GPT2Tokenizer.from_pretrained('en') - >>> print(gpt2_tokenizer.encode('from')) - >>> print(gpt2_tokenizer.encode("This is a demo sentence")) - >>> print(gpt2_tokenizer.encode(["This", "is", 'a'])) - - - :param List[str],str text: 输入的一条认为是一句话。 - :param bool add_special_tokens: 是否保证句首和句尾是cls和sep。GPT2没有cls和sep这一说 - :return: - """ - if isinstance(text, str): - words = text.split() - elif isinstance(text, list): - words = text - else: - raise TypeError("Only support str or List[str]") - - word_pieces = [] - for word in words: - tokens = self.tokenize(word, add_prefix_space=add_prefix_space) - word_piece_ids = self.convert_tokens_to_ids(tokens) - word_pieces.extend(word_piece_ids) - if add_special_tokens: - if self._cls_token is not None and word_pieces[0] != self.cls_index: - word_pieces.insert(0, self.cls_index) - if self._sep_token is not None and word_pieces[-1] != self.sep_index: - word_pieces.append(self.eos_index) - return word_pieces - - def get_used_merge_pair_vocab(self, token): - # 如果token没有找到,会被拆分成字母返回 TODO need comment - used_pairs = {} - word = tuple(token) - pairs = get_pairs(word) # 如果word是abcd,则((a,b), (b,c), (c, d), (e,f)) - - if not pairs: - return token, used_pairs - - while True: - # 首先找到最常的pair - bigram = min(pairs, key=lambda pair: self.bpe_ranks.get(pair, float("inf"))) - if bigram not in self.bpe_ranks: - break - used_pairs[bigram] = self.bpe_ranks[bigram] - first, second = bigram - new_word = [] - i = 0 - while i < len(word): - try: - j = word.index(first, i) - except ValueError: - new_word.extend(word[i:]) - break - else: - new_word.extend(word[i:j]) #最先找的 - i = j - - if word[i] == first and i < len(word) - 1 and word[i + 1] == second: - new_word.append(first + second) - i += 2 - else: - new_word.append(word[i]) - i += 1 - new_word = tuple(new_word) - word = new_word - if len(word) == 1: - break - else: - pairs = get_pairs(word) - word = " ".join(word) - return word, used_pairs \ No newline at end of file diff --git a/fastNLP/modules/tokenizer/roberta_tokenizer.py b/fastNLP/modules/tokenizer/roberta_tokenizer.py deleted file mode 100644 index 13a6b562..00000000 --- a/fastNLP/modules/tokenizer/roberta_tokenizer.py +++ /dev/null @@ -1,102 +0,0 @@ -r""" - -""" - -__all__ = [ - "RobertaTokenizer" -] - -import json -from .gpt2_tokenizer import GPT2Tokenizer -from fastNLP.io.file_utils import _get_file_name_base_on_postfix -from ...io.file_utils import _get_roberta_dir - -PRETRAINED_ROBERTA_POSITIONAL_EMBEDDINGS_SIZES = { - "roberta-base": 512, - "roberta-large": 512, - "roberta-large-mnli": 512, - "distilroberta-base": 512, - "roberta-base-openai-detector": 512, - "roberta-large-openai-detector": 512, -} - - -class RobertaTokenizer(GPT2Tokenizer): - - vocab_files_names = { - "vocab_file": "vocab.json", - "merges_file": "merges.txt", - } - - def __init__( - self, - vocab_file, - merges_file, - errors="replace", - bos_token="", - eos_token="", - sep_token="
", - cls_token="", - unk_token="", - pad_token="", - mask_token="", - **kwargs - ): - super().__init__( - vocab_file=vocab_file, - merges_file=merges_file, - errors=errors, - bos_token=bos_token, - eos_token=eos_token, - unk_token=unk_token, - sep_token=sep_token, - cls_token=cls_token, - pad_token=pad_token, - mask_token=mask_token, - **kwargs, - ) - self.max_len_single_sentence = self.max_len - 2 # take into account special tokens - self.max_len_sentences_pair = self.max_len - 4 # take into account special tokens - - @classmethod - def from_pretrained(cls, model_dir_or_name, *inputs, **kwargs): - """ - - :param str model_dir_or_name: 目录或者缩写名 - :param kwargs: - :return: - """ - # 它需要两个文件,第一个是vocab.json,第二个是merge_file? - model_dir = _get_roberta_dir(model_dir_or_name) - # 里面会包含四个文件vocab.json, merge.txt, config.json, model.bin - - tokenizer_config_file = _get_file_name_base_on_postfix(model_dir, 'config.json') - with open(tokenizer_config_file, encoding="utf-8") as tokenizer_config_handle: - init_kwargs = json.load(tokenizer_config_handle) - # Set max length if needed - if model_dir_or_name in PRETRAINED_ROBERTA_POSITIONAL_EMBEDDINGS_SIZES: - # if we're using a pretrained model, ensure the tokenizer - # wont index sequences longer than the number of positional embeddings - max_len = PRETRAINED_ROBERTA_POSITIONAL_EMBEDDINGS_SIZES[model_dir_or_name] - if max_len is not None and isinstance(max_len, (int, float)): - init_kwargs["max_len"] = min(init_kwargs.get("max_len", int(1e12)), max_len) - - # 将vocab, merge加入到init_kwargs中 - if 'vocab_file' in kwargs: # 如果指定了词表则用指定词表 - init_kwargs['vocab_file'] = kwargs['vocab_file'] - else: - init_kwargs['vocab_file'] = _get_file_name_base_on_postfix(model_dir, RobertaTokenizer.vocab_files_names['vocab_file']) - init_kwargs['merges_file'] = _get_file_name_base_on_postfix(model_dir, RobertaTokenizer.vocab_files_names['merges_file']) - - init_inputs = init_kwargs.pop("init_inputs", ()) - # Instantiate tokenizer. - try: - tokenizer = cls(*init_inputs, **init_kwargs) - except OSError: - OSError( - "Unable to load vocabulary from file. " - "Please check that the provided vocabulary is accessible and not corrupted." - ) - - return tokenizer - diff --git a/fastNLP/modules/utils.py b/fastNLP/modules/utils.py deleted file mode 100644 index 934b2910..00000000 --- a/fastNLP/modules/utils.py +++ /dev/null @@ -1,137 +0,0 @@ -r""" -.. todo:: - doc -""" - -__all__ = [ - "initial_parameter", - "summary" -] - -from functools import reduce - -import torch -import torch.nn as nn -import torch.nn.init as init - - -def initial_parameter(net, initial_method=None): - r"""A method used to initialize the weights of PyTorch models. - - :param net: a PyTorch model - :param str initial_method: one of the following initializations. 
diff --git a/fastNLP/modules/utils.py b/fastNLP/modules/utils.py
deleted file mode 100644
index 934b2910..00000000
--- a/fastNLP/modules/utils.py
+++ /dev/null
@@ -1,137 +0,0 @@
-r"""
-.. todo::
-    doc
-"""
-
-__all__ = [
-    "initial_parameter",
-    "summary"
-]
-
-from functools import reduce
-
-import torch
-import torch.nn as nn
-import torch.nn.init as init
-
-
-def initial_parameter(net, initial_method=None):
-    r"""A method used to initialize the weights of PyTorch models.
-
-    :param net: a PyTorch model
-    :param str initial_method: one of the following initializations.
-
-            - xavier_uniform
-            - xavier_normal (default)
-            - kaiming_normal, or msra
-            - kaiming_uniform
-            - orthogonal
-            - sparse
-            - normal
-            - uniform
-
-    """
-    if initial_method == 'xavier_uniform':
-        init_method = init.xavier_uniform_
-    elif initial_method == 'xavier_normal':
-        init_method = init.xavier_normal_
-    elif initial_method == 'kaiming_normal' or initial_method == 'msra':
-        init_method = init.kaiming_normal_
-    elif initial_method == 'kaiming_uniform':
-        init_method = init.kaiming_uniform_
-    elif initial_method == 'orthogonal':
-        init_method = init.orthogonal_
-    elif initial_method == 'sparse':
-        init_method = init.sparse_
-    elif initial_method == 'normal':
-        init_method = init.normal_
-    elif initial_method == 'uniform':
-        init_method = init.uniform_
-    else:
-        init_method = init.xavier_normal_
-
-    def weights_init(m):
-        # classname = m.__class__.__name__
-        if isinstance(m, nn.Conv2d) or isinstance(m, nn.Conv1d) or isinstance(m, nn.Conv3d):  # for all the cnn
-            if initial_method is not None:
-                init_method(m.weight.data)
-            else:
-                init.xavier_normal_(m.weight.data)
-            init.normal_(m.bias.data)
-        elif isinstance(m, nn.LSTM):
-            for w in m.parameters():
-                if len(w.data.size()) > 1:
-                    init_method(w.data)  # weight
-                else:
-                    init.normal_(w.data)  # bias
-        elif m is not None and hasattr(m, 'weight') and \
-                hasattr(m.weight, "requires_grad"):
-            if len(m.weight.size()) > 1:
-                init_method(m.weight.data)
-            else:
-                init.normal_(m.weight.data)  # batchnorm or layernorm
-        else:
-            for w in m.parameters():
-                if w.requires_grad:
-                    if len(w.data.size()) > 1:
-                        init_method(w.data)  # weight
-                    else:
-                        init.normal_(w.data)  # bias
-            # print("init else")
-
-    net.apply(weights_init)
-
-
-def summary(model: nn.Module):
-    r"""
-    Compute the number of parameters of a model.
-
-    :param model: a PyTorch model
-    :return tuple: (total params, trainable params, non-trainable params)
-    """
-    train = []
-    nontrain = []
-    buffer = []
-
-    def layer_summary(module: nn.Module):
-        def count_size(sizes):
-            return reduce(lambda x, y: x * y, sizes)
-
-        for p in module.parameters(recurse=False):
-            if p.requires_grad:
-                train.append(count_size(p.shape))
-            else:
-                nontrain.append(count_size(p.shape))
-        for p in module.buffers():
-            buffer.append(count_size(p.shape))
-        for subm in module.children():
-            layer_summary(subm)
-
-    layer_summary(model)
-    total_train = sum(train)
-    total_nontrain = sum(nontrain)
-    total = total_train + total_nontrain
-    strings = []
-    strings.append('Total params: {:,}'.format(total))
-    strings.append('Trainable params: {:,}'.format(total_train))
-    strings.append('Non-trainable params: {:,}'.format(total_nontrain))
-    strings.append("Buffer params: {:,}".format(sum(buffer)))
-    max_len = len(max(strings, key=len))
-    bar = '-' * (max_len + 3)
-    strings = [bar] + strings + [bar]
-    print('\n'.join(strings))
-    return total, total_train, total_nontrain
-
-
-def get_dropout_mask(drop_p: float, tensor: torch.Tensor):
-    r"""
-    Generate a dropout mask with the same shape as the given tensor.
-
-    :param drop_p: float, the probability of zeroing an element.
-    :param tensor: torch.Tensor
-    :return: torch.FloatTensor with the same shape as ``tensor``
-    """
-    mask_x = torch.ones_like(tensor)
-    # training=True is required here: with training=False, F.dropout is a
-    # no-op and the returned mask would always be all ones.
-    nn.functional.dropout(mask_x, p=drop_p,
-                          training=True, inplace=True)
-    return mask_x
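A short usage sketch for the three helpers above; the toy LSTM is arbitrary. get_dropout_mask is the building block for variational dropout, where one mask is sampled and reused at every time step of a sequence.

# Usage sketch for initial_parameter / summary / get_dropout_mask.
import torch
import torch.nn as nn
from fastNLP.modules.utils import initial_parameter, summary, get_dropout_mask

net = nn.LSTM(input_size=8, hidden_size=16)
initial_parameter(net, initial_method='xavier_uniform')
total, trainable, frozen = summary(net)  # also prints a parameter-count table

# sample one mask and reuse it at every step (variational dropout)
h = torch.randn(4, 16)
mask = get_dropout_mask(0.3, h)
h = h * mask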
diff --git a/readthedocs.yml b/readthedocs.yml
deleted file mode 100644
index 5ff803a0..00000000
--- a/readthedocs.yml
+++ /dev/null
@@ -1,17 +0,0 @@
-version: 2
-
-sphinx:
-    configuration: docs/source/conf.py
-
-build:
-    image: latest
-
-python:
-    version: 3.8
-    install:
-        - requirements: docs/requirements.txt
-        - method: setuptools
-          path: .
-
-formats:
-    - htmlzip
diff --git a/reproduction/README.md b/reproduction/README.md
deleted file mode 100644
index 1ddca315..00000000
--- a/reproduction/README.md
+++ /dev/null
@@ -1,31 +0,0 @@
-# Model Reproduction
-This directory reproduces the models implemented in fastNLP, aiming to match the performance reported in the original papers.
-
-Reproduced models:
-- [Star-Transformer](Star_transformer)
-- [Biaffine](https://github.com/fastnlp/fastNLP/blob/master/fastNLP/models/biaffine_parser.py)
-- [CNNText](https://github.com/fastnlp/fastNLP/blob/master/fastNLP/models/cnn_text_classification.py)
-- ...
-
-# Task Reproduction
-## Text Classification
-- [Text classification reproduction](text_classification)
-
-
-## Matching (Natural Language Inference / Sentence Matching)
-- [Matching reproduction](matching)
-
-
-## Sequence Labeling
-- [NER](sequence_labelling/ner)
-
-
-## Coreference Resolution
-- [Coreference resolution reproduction](coreference_resolution)
-
-
-## Summarization
-- [Summarization reproduction](Summarization)
-
-
-## ...
diff --git a/reproduction/Star_transformer/README.md b/reproduction/Star_transformer/README.md
deleted file mode 100644
index 071e07e8..00000000
--- a/reproduction/Star_transformer/README.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# Star-Transformer
-paper: [Star-Transformer](https://arxiv.org/abs/1902.09113)
-## Performance (still in progress)
-|Task|Dataset|SOTA|Our Model|
-|------|------|------|------|
-|POS Tagging|CTB 9.0|-|ACC 92.31|
-|POS Tagging|CONLL 2012|-|ACC 96.51|
-|Named Entity Recognition|CONLL 2012|-|F1 85.66|
-|Text Classification|SST|-|51.2|
-|Natural Language Inference|SNLI|-|83.76|
-
diff --git a/reproduction/Star_transformer/datasets.py b/reproduction/Star_transformer/datasets.py
deleted file mode 100644
index 41d3f34b..00000000
--- a/reproduction/Star_transformer/datasets.py
+++ /dev/null
@@ -1,162 +0,0 @@
-import torch
-import json
-import os
-from fastNLP import Vocabulary
-from fastNLP.io.data_loader import ConllLoader, SSTLoader, SNLILoader
-from fastNLP.core import Const as C
-import numpy as np
-
-MAX_LEN = 128
-
-def update_v(vocab, data, field):
-    data.apply(lambda x: vocab.add_word_lst(x[field]), new_field_name=None)
-
-
-def to_index(vocab, data, field, name):
-    def func(x):
-        try:
-            return [vocab.to_index(w) for w in x[field]]
-        except ValueError:
-            return [vocab.padding_idx for _ in x[field]]
-    data.apply(func, new_field_name=name)
-
-
-def load_seqtag(path, files, indexs):
-    word_h, tag_h = 'words', 'tags'
-    loader = ConllLoader(headers=[word_h, tag_h], indexes=indexs)
-    ds_list = []
-    for fn in files:
-        ds_list.append(loader.load(os.path.join(path, fn)))
-    word_v = Vocabulary(min_freq=2)
-    tag_v = Vocabulary(unknown=None)
-    update_v(word_v, ds_list[0], word_h)
-    update_v(tag_v, ds_list[0], tag_h)
-
-    def process_data(ds):
-        to_index(word_v, ds, word_h, C.INPUT)
-        to_index(tag_v, ds, tag_h, C.TARGET)
-        ds.apply(lambda x: x[C.INPUT][:MAX_LEN], new_field_name=C.INPUT)
-        ds.apply(lambda x: x[C.TARGET][:MAX_LEN], new_field_name=C.TARGET)
-        ds.apply(lambda x: len(x[word_h]), new_field_name=C.INPUT_LEN)
-        ds.set_input(C.INPUT, C.INPUT_LEN)
-        ds.set_target(C.TARGET, C.INPUT_LEN)
-    for i in range(len(ds_list)):
-        process_data(ds_list[i])
-    return ds_list, word_v, tag_v
-
-
-def load_sst(path, files):
-    loaders = [SSTLoader(subtree=sub, fine_grained=True)
-               for sub in [True, False, False]]
-    ds_list = [loader.load(os.path.join(path, fn))
-               for fn, loader in zip(files, loaders)]
-    word_v = Vocabulary(min_freq=0)
-    tag_v = Vocabulary(unknown=None, padding=None)
-    for ds in ds_list:
-        ds.apply(lambda x: [w.lower()
-                            for w in x['words']], new_field_name='words')
-    #ds_list[0].drop(lambda x:
len(x['words']) < 3) - update_v(word_v, ds_list[0], 'words') - update_v(word_v, ds_list[1], 'words') - update_v(word_v, ds_list[2], 'words') - ds_list[0].apply(lambda x: tag_v.add_word( - x['target']), new_field_name=None) - - def process_data(ds): - to_index(word_v, ds, 'words', C.INPUT) - ds.apply(lambda x: tag_v.to_index(x['target']), new_field_name=C.TARGET) - ds.apply(lambda x: x[C.INPUT][:MAX_LEN], new_field_name=C.INPUT) - ds.apply(lambda x: len(x['words']), new_field_name=C.INPUT_LEN) - ds.set_input(C.INPUT, C.INPUT_LEN) - ds.set_target(C.TARGET) - for i in range(len(ds_list)): - process_data(ds_list[i]) - return ds_list, word_v, tag_v - - -def load_snli(path, files): - loader = SNLILoader() - ds_list = [loader.load(os.path.join(path, f)) for f in files] - word_v = Vocabulary(min_freq=2) - tag_v = Vocabulary(unknown=None, padding=None) - for ds in ds_list: - ds.apply(lambda x: [w.lower() - for w in x['words1']], new_field_name='words1') - ds.apply(lambda x: [w.lower() - for w in x['words2']], new_field_name='words2') - update_v(word_v, ds_list[0], 'words1') - update_v(word_v, ds_list[0], 'words2') - ds_list[0].apply(lambda x: tag_v.add_word( - x['target']), new_field_name=None) - - def process_data(ds): - to_index(word_v, ds, 'words1', C.INPUTS(0)) - to_index(word_v, ds, 'words2', C.INPUTS(1)) - ds.apply(lambda x: tag_v.to_index(x['target']), new_field_name=C.TARGET) - ds.apply(lambda x: x[C.INPUTS(0)][:MAX_LEN], new_field_name=C.INPUTS(0)) - ds.apply(lambda x: x[C.INPUTS(1)][:MAX_LEN], new_field_name=C.INPUTS(1)) - ds.apply(lambda x: len(x[C.INPUTS(0)]), new_field_name=C.INPUT_LENS(0)) - ds.apply(lambda x: len(x[C.INPUTS(1)]), new_field_name=C.INPUT_LENS(1)) - ds.set_input(C.INPUTS(0), C.INPUTS(1), C.INPUT_LENS(0), C.INPUT_LENS(1)) - ds.set_target(C.TARGET) - for i in range(len(ds_list)): - process_data(ds_list[i]) - return ds_list, word_v, tag_v - - -class EmbedLoader: - @staticmethod - def parse_glove_line(line): - line = line.split() - if len(line) <= 2: - raise RuntimeError( - "something goes wrong in parsing glove embedding") - return line[0], line[1:] - - @staticmethod - def str_list_2_vec(line): - return torch.Tensor(list(map(float, line))) - - @staticmethod - def fast_load_embedding(emb_dim, emb_file, vocab): - """Fast load the pre-trained embedding and combine with the given dictionary. - This loading method uses line-by-line operation. - - :param int emb_dim: the dimension of the embedding. Should be the same as pre-trained embedding. - :param str emb_file: the pre-trained embedding file path. 
- :param Vocabulary vocab: a mapping from word to index, can be provided by user or built from pre-trained embedding - :return embedding_matrix: numpy.ndarray - - """ - if vocab is None: - raise RuntimeError("You must provide a vocabulary.") - embedding_matrix = np.zeros( - shape=(len(vocab), emb_dim), dtype=np.float32) - hit_flags = np.zeros(shape=(len(vocab),), dtype=int) - with open(emb_file, "r", encoding="utf-8") as f: - startline = f.readline() - if len(startline.split()) > 2: - f.seek(0) - for line in f: - word, vector = EmbedLoader.parse_glove_line(line) - try: - if word in vocab: - vector = EmbedLoader.str_list_2_vec(vector) - if emb_dim != vector.size(0): - continue - embedding_matrix[vocab[word]] = vector - hit_flags[vocab[word]] = 1 - except Exception: - continue - - if np.sum(hit_flags) < len(vocab): - # some words from vocab are missing in pre-trained embedding - # we normally sample each dimension - vocab_embed = embedding_matrix[np.where(hit_flags)] - #sampled_vectors = np.random.normal(vocab_embed.mean(axis=0), vocab_embed.std(axis=0), - # size=(len(vocab) - np.sum(hit_flags), emb_dim)) - sampled_vectors = np.random.uniform(-0.01, 0.01, - size=(len(vocab) - np.sum(hit_flags), emb_dim)) - - embedding_matrix[np.where(1 - hit_flags)] = sampled_vectors - return embedding_matrix diff --git a/reproduction/Star_transformer/modules.py b/reproduction/Star_transformer/modules.py deleted file mode 100644 index 61a61d25..00000000 --- a/reproduction/Star_transformer/modules.py +++ /dev/null @@ -1,56 +0,0 @@ -import torch -import torch.nn as nn -import torch.nn.functional as F -import numpy as np -from fastNLP.core.losses import LossBase - - -reduce_func = { - 'none': lambda x, mask: x*mask, - 'sum': lambda x, mask: (x*mask).sum(), - 'mean': lambda x, mask: (x*mask).sum() / mask.sum(), -} - - -class LabelSmoothCrossEntropy(nn.Module): - def __init__(self, smoothing=0.1, ignore_index=-100, reduction='mean'): - global reduce_func - super().__init__() - if smoothing < 0 or smoothing > 1: - raise ValueError('invalid smoothing value: {}'.format(smoothing)) - self.smoothing = smoothing - self.ignore_index = ignore_index - if reduction not in reduce_func: - raise ValueError('invalid reduce type: {}'.format(reduction)) - self.reduce_func = reduce_func[reduction] - - def forward(self, input, target): - input = F.log_softmax(input, dim=1) # [N, C, ...] - smooth_val = self.smoothing / input.size(1) # [N, C, ...] - target_logit = input.new_full(input.size(), fill_value=smooth_val) - target_logit.scatter_(1, target[:, None], 1 - self.smoothing) - result = -(target_logit * input).sum(1) # [N, ...] 
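-        # `result` is the per-position cross entropy against the smoothed target:
-        # every class holds smoothing/C probability mass and the gold class is
-        # overwritten with 1 - smoothing, so smoothing=0 recovers plain CE.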
- mask = (target != self.ignore_index).float() - return self.reduce_func(result, mask) - - -class SmoothCE(LossBase): - def __init__(self, pred=None, target=None, **kwargs): - super().__init__() - self.loss_fn = LabelSmoothCrossEntropy(**kwargs) - self._init_param_map(pred=pred, target=target) - - def get_loss(self, pred, target): - return self.loss_fn(pred, target) - - -if __name__ == '__main__': - loss_fn = nn.CrossEntropyLoss(ignore_index=0) - sm_loss_fn = LabelSmoothCrossEntropy(smoothing=0, ignore_index=0) - predict = torch.tensor([[0, 0.2, 0.7, 0.1, 0], - [0, 0.9, 0.2, 0.1, 0], - [1, 0.2, 0.7, 0.1, 0]]) - target = torch.tensor([2, 1, 0]) - loss = loss_fn(predict, target) - sm_loss = sm_loss_fn(predict, target) - print(loss, sm_loss) diff --git a/reproduction/Star_transformer/run.sh b/reproduction/Star_transformer/run.sh deleted file mode 100644 index 5cd6954b..00000000 --- a/reproduction/Star_transformer/run.sh +++ /dev/null @@ -1,5 +0,0 @@ -#python -u train.py --task pos --ds conll --mode train --gpu 1 --lr 3e-4 --w_decay 2e-5 --lr_decay .95 --drop 0.3 --ep 25 --bsz 64 > conll_pos102.log 2>&1 & -#python -u train.py --task pos --ds ctb --mode train --gpu 1 --lr 3e-4 --w_decay 2e-5 --lr_decay .95 --drop 0.3 --ep 25 --bsz 64 > ctb_pos101.log 2>&1 & -python -u train.py --task cls --ds sst --mode train --gpu 0 --lr 1e-4 --w_decay 5e-5 --lr_decay 1.0 --drop 0.4 --ep 20 --bsz 64 > sst_cls.log & -#python -u train.py --task nli --ds snli --mode train --gpu 1 --lr 1e-4 --w_decay 1e-5 --lr_decay 0.9 --drop 0.4 --ep 120 --bsz 128 > snli_nli201.log & -#python -u train.py --task ner --ds conll --mode train --gpu 0 --lr 1e-4 --w_decay 1e-5 --lr_decay 0.9 --drop 0.4 --ep 120 --bsz 64 > conll_ner201.log & diff --git a/reproduction/Star_transformer/train.py b/reproduction/Star_transformer/train.py deleted file mode 100644 index d8e2576b..00000000 --- a/reproduction/Star_transformer/train.py +++ /dev/null @@ -1,228 +0,0 @@ -from reproduction.Star_transformer.util import get_argparser, set_gpu, set_rng_seeds, add_model_args -seed = set_rng_seeds(15360) -print('RNG SEED {}'.format(seed)) -from reproduction.Star_transformer.datasets import load_seqtag, load_sst, load_snli, EmbedLoader, MAX_LEN -import torch.nn as nn -import torch -import numpy as np -import fastNLP as FN -from fastNLP.models.star_transformer import STSeqLabel, STSeqCls, STNLICls -from fastNLP.core.const import Const as C -import sys -#sys.path.append('/remote-home/yfshao/workdir/dev_fastnlp/') -import os -pre_dir = os.path.join(os.environ['HOME'], 'workdir/datasets/') - -g_model_select = { - 'pos': STSeqLabel, - 'ner': STSeqLabel, - 'cls': STSeqCls, - 'nli': STNLICls, -} - -g_emb_file_path = {'en': pre_dir + 'word_vector/glove.840B.300d.txt', - 'zh': pre_dir + 'cc.zh.300.vec'} - -g_args = None -g_model_cfg = None - - -def get_ptb_pos(): - pos_dir = '/remote-home/yfshao/workdir/datasets/pos' - pos_files = ['train.pos', 'dev.pos', 'test.pos', ] - return load_seqtag(pos_dir, pos_files, [0, 1]) - - -def get_ctb_pos(): - ctb_dir = '/remote-home/yfshao/workdir/datasets/ctb9_hy' - files = ['train.conllx', 'dev.conllx', 'test.conllx'] - return load_seqtag(ctb_dir, files, [1, 4]) - - -def get_conll2012_pos(): - path = '/remote-home/yfshao/workdir/datasets/ontonotes/pos' - files = ['ontonotes-conll.train', - 'ontonotes-conll.dev', - 'ontonotes-conll.conll-2012-test'] - return load_seqtag(path, files, [0, 1]) - - -def get_conll2012_ner(): - path = '/remote-home/yfshao/workdir/datasets/ontonotes/ner' - files = ['bieso-ontonotes-conll-ner.train', - 
'bieso-ontonotes-conll-ner.dev', - 'bieso-ontonotes-conll-ner.conll-2012-test'] - return load_seqtag(path, files, [0, 1]) - - -def get_sst(): - path = pre_dir + 'SST' - files = ['train.txt', 'dev.txt', 'test.txt'] - return load_sst(path, files) - - -def get_snli(): - path = '/remote-home/yfshao/workdir/datasets/nli-data/snli_1.0' - files = ['snli_1.0_train.jsonl', - 'snli_1.0_dev.jsonl', 'snli_1.0_test.jsonl'] - return load_snli(path, files) - - -g_datasets = { - 'ptb-pos': get_ptb_pos, - 'ctb-pos': get_ctb_pos, - 'conll-pos': get_conll2012_pos, - 'conll-ner': get_conll2012_ner, - 'sst-cls': get_sst, - 'snli-nli': get_snli, -} - - -def load_pretrain_emb(word_v, lang='en'): - print('loading pre-train embeddings') - emb = EmbedLoader.fast_load_embedding(300, g_emb_file_path[lang], word_v) - emb /= np.linalg.norm(emb, axis=1, keepdims=True) - emb = torch.tensor(emb, dtype=torch.float32) - print('embedding mean: {:.6}, std: {:.6}'.format(emb.mean(), emb.std())) - emb[word_v.padding_idx].fill_(0) - return emb - - -class MyCallback(FN.core.callback.Callback): - def on_train_begin(self): - super(MyCallback, self).on_train_begin() - self.init_lrs = [pg['lr'] for pg in self.optimizer.param_groups] - - def on_backward_end(self): - nn.utils.clip_grad.clip_grad_norm_(self.model.parameters(), 5.0) - - def on_step_end(self): - return - warm_steps = 6000 - # learning rate warm-up & decay - if self.step <= warm_steps: - for lr, pg in zip(self.init_lrs, self.optimizer.param_groups): - pg['lr'] = lr * (self.step / float(warm_steps)) - - elif self.step % 3000 == 0: - for pg in self.optimizer.param_groups: - cur_lr = pg['lr'] - pg['lr'] = max(1e-5, cur_lr*g_args.lr_decay) - - - -def train(): - print('loading data') - ds_list, word_v, tag_v = g_datasets['{}-{}'.format( - g_args.ds, g_args.task)]() - print(ds_list[0][:2]) - print(len(ds_list[0]), len(ds_list[1]), len(ds_list[2])) - embed = load_pretrain_emb(word_v, lang='zh' if g_args.ds == 'ctb' else 'en') - g_model_cfg['num_cls'] = len(tag_v) - print(g_model_cfg) - g_model_cfg['init_embed'] = embed - model = g_model_select[g_args.task.lower()](**g_model_cfg) - - def init_model(model): - for p in model.parameters(): - if p.size(0) != len(word_v): - if len(p.size())<2: - nn.init.constant_(p, 0.0) - else: - nn.init.normal_(p, 0.0, 0.05) - init_model(model) - train_data = ds_list[0] - dev_data = ds_list[1] - test_data = ds_list[2] - print(tag_v.word2idx) - - if g_args.task in ['pos', 'ner']: - padding_idx = tag_v.padding_idx - else: - padding_idx = -100 - print('padding_idx ', padding_idx) - loss = FN.CrossEntropyLoss(padding_idx=padding_idx) - metrics = { - 'pos': (None, FN.AccuracyMetric()), - 'ner': ('f', FN.core.metrics.SpanFPreRecMetric( - tag_vocab=tag_v, encoding_type='bmeso', ignore_labels=[''], )), - 'cls': (None, FN.AccuracyMetric()), - 'nli': (None, FN.AccuracyMetric()), - } - metric_key, metric = metrics[g_args.task] - device = 'cuda' if torch.cuda.is_available() else 'cpu' - - params = [(x,y) for x,y in list(model.named_parameters()) if y.requires_grad and y.size(0) != len(word_v)] - no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight'] - print([n for n,p in params]) - optim_cfg = [ - #{'params': model.enc.embedding.parameters(), 'lr': g_args.lr*0.1}, - {'params': [p for n, p in params if not any(nd in n for nd in no_decay)], 'lr': g_args.lr, 'weight_decay': 1.0*g_args.w_decay}, - {'params': [p for n, p in params if any(nd in n for nd in no_decay)], 'lr': g_args.lr, 'weight_decay': 0.0*g_args.w_decay} - ] - - print(model) - trainer = 
FN.Trainer(model=model, train_data=train_data, dev_data=dev_data,
-                       loss=loss, metrics=metric, metric_key=metric_key,
-                       optimizer=torch.optim.Adam(optim_cfg),
-                       n_epochs=g_args.ep, batch_size=g_args.bsz, print_every=100, validate_every=1000,
-                       device=device,
-                       use_tqdm=False, prefetch=False,
-                       save_path=g_args.log,
-                       sampler=FN.BucketSampler(100, g_args.bsz, C.INPUT_LEN),
-                       callbacks=[MyCallback()])
-
-    print(trainer.train())
-    tester = FN.Tester(data=test_data, model=model, metrics=metric,
-                       batch_size=128, device=device)
-    print(tester.test())
-
-
-def test():
-    pass
-
-
-def infer():
-    pass
-
-
-run_select = {
-    'train': train,
-    'test': test,
-    'infer': infer,
-}
-
-
-def main():
-    global g_args, g_model_cfg
-    import signal
-
-    def signal_handler(signal, frame):
-        raise KeyboardInterrupt
-    signal.signal(signal.SIGINT, signal_handler)
-    signal.signal(signal.SIGTERM, signal_handler)
-    parser = get_argparser()
-    parser.add_argument('--task', choices=['pos', 'ner', 'cls', 'nli'])
-    parser.add_argument('--mode', choices=['train', 'test', 'infer'])
-    parser.add_argument('--ds', type=str)
-    add_model_args(parser)
-    g_args = parser.parse_args()
-    print(g_args.__dict__)
-    set_gpu(g_args.gpu)
-    g_model_cfg = {
-        'init_embed': (None, 300),
-        'num_cls': None,
-        'hidden_size': g_args.hidden,
-        'num_layers': 2,
-        'num_head': g_args.nhead,
-        'head_dim': g_args.hdim,
-        'max_len': MAX_LEN,
-        'cls_hidden_size': 200,
-        'emb_dropout': g_args.drop,
-        'dropout': g_args.drop,
-    }
-    run_select[g_args.mode.lower()]()
-
-
-if __name__ == '__main__':
-    main()
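One detail of train() worth isolating is the optimizer configuration: parameters whose names match the no_decay substrings (biases and LayerNorm weights) go into a group with weight decay disabled. A distilled sketch, with the toy net and hyperparameters standing in for the real ones:

# Distilled version of the parameter grouping used in train(); `net`, `lr`
# and `w_decay` are placeholders.
import torch
import torch.nn as nn

net = nn.Sequential(nn.Linear(8, 8), nn.LayerNorm(8))
lr, w_decay = 1e-4, 2e-5
no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']

params = list(net.named_parameters())
optim_cfg = [
    {'params': [p for n, p in params if not any(nd in n for nd in no_decay)],
     'lr': lr, 'weight_decay': w_decay},
    {'params': [p for n, p in params if any(nd in n for nd in no_decay)],
     'lr': lr, 'weight_decay': 0.0},
]
optimizer = torch.optim.Adam(optim_cfg)
# note: the substrings must match the model's parameter names; in this toy
# net the LayerNorm parameters are named '1.weight'/'1.bias', so only the
# 'bias' pattern actually fires.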
diff --git a/reproduction/Star_transformer/util.py b/reproduction/Star_transformer/util.py
deleted file mode 100644
index ecd1e18d..00000000
--- a/reproduction/Star_transformer/util.py
+++ /dev/null
@@ -1,112 +0,0 @@
-import fastNLP as FN
-import argparse
-import os
-import random
-import numpy
-import torch
-from tensorboardX import SummaryWriter  # used by TensorboardCallback below
-
-
-def get_argparser():
-    parser = argparse.ArgumentParser()
-    parser.add_argument('--lr', type=float, required=True)
-    parser.add_argument('--w_decay', type=float, required=True)
-    parser.add_argument('--lr_decay', type=float, required=True)
-    parser.add_argument('--bsz', type=int, required=True)
-    parser.add_argument('--ep', type=int, required=True)
-    parser.add_argument('--drop', type=float, required=True)
-    parser.add_argument('--gpu', type=str, required=True)
-    parser.add_argument('--log', type=str, default=None)
-    return parser
-
-
-def add_model_args(parser):
-    parser.add_argument('--nhead', type=int, default=6)
-    parser.add_argument('--hdim', type=int, default=50)
-    parser.add_argument('--hidden', type=int, default=300)
-    return parser
-
-
-def set_gpu(gpu_str):
-    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
-    os.environ['CUDA_VISIBLE_DEVICES'] = gpu_str
-
-
-def set_rng_seeds(seed=None):
-    if seed is None:
-        seed = numpy.random.randint(0, 65536)
-    random.seed(seed)
-    numpy.random.seed(seed)
-    torch.random.manual_seed(seed)
-    torch.cuda.manual_seed_all(seed)
-    # print('RNG_SEED {}'.format(seed))
-    return seed
-
-
-class TensorboardCallback(FN.Callback):
-    """
-    Accepts one or more of the following strings as options:
-    - "model"
-    - "loss"
-    - "metric"
-    """
-
-    def __init__(self, *options):
-        super(TensorboardCallback, self).__init__()
-        args = {"model", "loss", "metric"}
-        for opt in options:
-            if opt not in args:
-                raise ValueError(
-                    "Unrecognized argument {}. Expect one of {}".format(opt, args))
-        self.options = options
-        self._summary_writer = None
-        self.graph_added = False
-
-    def on_train_begin(self):
-        save_dir = self.trainer.save_path
-        if save_dir is None:
-            path = os.path.join(
-                "./", 'tensorboard_logs_{}'.format(self.trainer.start_time))
-        else:
-            path = os.path.join(
-                save_dir, 'tensorboard_logs_{}'.format(self.trainer.start_time))
-        self._summary_writer = SummaryWriter(path)
-
-    def on_batch_begin(self, batch_x, batch_y, indices):
-        if "model" in self.options and self.graph_added is False:
-            # tensorboardX has a serious bug here, so drawing the model graph is disabled for now
-            # from fastNLP.core.utils import _build_args
-            # inputs = _build_args(self.trainer.model, **batch_x)
-            # args = tuple([value for value in inputs.values()])
-            # args = args[0] if len(args) == 1 else args
-            # self._summary_writer.add_graph(self.trainer.model, torch.zeros(32, 2))
-            self.graph_added = True
-
-    def on_backward_begin(self, loss):
-        if "loss" in self.options:
-            self._summary_writer.add_scalar(
-                "loss", loss.item(), global_step=self.trainer.step)
-
-        if "model" in self.options:
-            for name, param in self.trainer.model.named_parameters():
-                if param.requires_grad:
-                    self._summary_writer.add_scalar(
-                        name + "_mean", param.mean(), global_step=self.trainer.step)
-                    # self._summary_writer.add_scalar(name + "_std", param.std(), global_step=self.trainer.step)
-                    self._summary_writer.add_scalar(name + "_grad_mean", param.grad.mean(),
-                                                    global_step=self.trainer.step)
-
-    def on_valid_end(self, eval_result, metric_key):
-        if "metric" in self.options:
-            for name, metric in eval_result.items():
-                for metric_key, metric_val in metric.items():
-                    self._summary_writer.add_scalar("valid_{}_{}".format(name, metric_key), metric_val,
-                                                    global_step=self.trainer.step)
-
-    def on_train_end(self):
-        self._summary_writer.close()
-        del self._summary_writer
-
-    def on_exception(self, exception):
-        if hasattr(self, "_summary_writer"):
-            self._summary_writer.close()
-            del self._summary_writer
diff --git a/reproduction/Summarization/Baseline/config/deeplstm.config b/reproduction/Summarization/Baseline/config/deeplstm.config
deleted file mode 100644
index 839ab0b8..00000000
--- a/reproduction/Summarization/Baseline/config/deeplstm.config
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-    "n_layers": 16,
-    "layer_sum": false,
-    "layer_cat": false,
-    "lstm_hidden_size": 300,
-    "ffn_inner_hidden_size": 2048,
-    "n_head": 6,
-    "recurrent_dropout_prob": 0.1,
-    "atten_dropout_prob": 0.1,
-    "ffn_dropout_prob": 0.1,
-    "fix_mask": true
-}
\ No newline at end of file
diff --git a/reproduction/Summarization/Baseline/config/seqlab.config b/reproduction/Summarization/Baseline/config/seqlab.config
deleted file mode 100644
index cb92a2ed..00000000
--- a/reproduction/Summarization/Baseline/config/seqlab.config
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-
-}
\ No newline at end of file
diff --git a/reproduction/Summarization/Baseline/config/transformer.config b/reproduction/Summarization/Baseline/config/transformer.config
deleted file mode 100644
index 5ab3ed4d..00000000
--- a/reproduction/Summarization/Baseline/config/transformer.config
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-    "n_layers": 12,
-    "hidden_size": 512,
-    "ffn_inner_hidden_size": 2048,
-    "n_head": 8,
-    "recurrent_dropout_prob": 0.1,
-    "atten_dropout_prob": 0.1,
-    "ffn_dropout_prob": 0.1
-}
\ No newline at end of file
diff --git a/reproduction/Summarization/Baseline/model/DeepLSTM.py b/reproduction/Summarization/Baseline/model/DeepLSTM.py
deleted file mode 100644
index 80842bf7..00000000
--- 
a/reproduction/Summarization/Baseline/model/DeepLSTM.py +++ /dev/null @@ -1,136 +0,0 @@ -import numpy as np - - -import torch -import torch.nn as nn -import torch.nn.init as init -import torch.nn.functional as F -from torch.autograd import Variable -from torch.distributions import Bernoulli - -class DeepLSTM(nn.Module): - def __init__(self, input_size, hidden_size, num_layers, recurrent_dropout, use_orthnormal_init=True, fix_mask=True, use_cuda=True): - super(DeepLSTM, self).__init__() - - self.fix_mask = fix_mask - self.use_cuda = use_cuda - self.input_size = input_size - self.num_layers = num_layers - self.hidden_size = hidden_size - self.recurrent_dropout = recurrent_dropout - - self.lstms = nn.ModuleList([None] * self.num_layers) - self.highway_gate_input = nn.ModuleList([None] * self.num_layers) - self.highway_gate_state = nn.ModuleList([nn.Linear(hidden_size, hidden_size)] * self.num_layers) - self.highway_linear_input = nn.ModuleList([None] * self.num_layers) - - # self._input_w = nn.Parameter(torch.Tensor(input_size, hidden_size)) - # init.xavier_normal_(self._input_w) - - for l in range(self.num_layers): - input_dim = input_size if l == 0 else hidden_size - - self.lstms[l] = nn.LSTMCell(input_size=input_dim, hidden_size=hidden_size) - self.highway_gate_input[l] = nn.Linear(input_dim, hidden_size) - self.highway_linear_input[l] = nn.Linear(input_dim, hidden_size, bias=False) - - # logger.info("[INFO] Initing W for LSTM .......") - for l in range(self.num_layers): - if use_orthnormal_init: - # logger.info("[INFO] Initing W using orthnormal init .......") - init.orthogonal_(self.lstms[l].weight_ih) - init.orthogonal_(self.lstms[l].weight_hh) - init.orthogonal_(self.highway_gate_input[l].weight.data) - init.orthogonal_(self.highway_gate_state[l].weight.data) - init.orthogonal_(self.highway_linear_input[l].weight.data) - else: - # logger.info("[INFO] Initing W using xavier_normal .......") - init_weight_value = 6.0 - init.xavier_normal_(self.lstms[l].weight_ih, gain=np.sqrt(init_weight_value)) - init.xavier_normal_(self.lstms[l].weight_hh, gain=np.sqrt(init_weight_value)) - init.xavier_normal_(self.highway_gate_input[l].weight.data, gain=np.sqrt(init_weight_value)) - init.xavier_normal_(self.highway_gate_state[l].weight.data, gain=np.sqrt(init_weight_value)) - init.xavier_normal_(self.highway_linear_input[l].weight.data, gain=np.sqrt(init_weight_value)) - - def init_hidden(self, batch_size, hidden_size): - # the first is the hidden h - # the second is the cell c - if self.use_cuda: - return (torch.zeros(batch_size, hidden_size).cuda(), - torch.zeros(batch_size, hidden_size).cuda()) - else: - return (torch.zeros(batch_size, hidden_size), - torch.zeros(batch_size, hidden_size)) - - def forward(self, inputs, input_masks, Train): - - ''' - inputs: [[seq_len, batch, Co * kernel_sizes], n_layer * [None]] (list) - input_masks: [[seq_len, batch, Co * kernel_sizes], n_layer * [None]] (list) - ''' - - batch_size, seq_len = inputs[0].size(1), inputs[0].size(0) - - # inputs[0] = torch.matmul(inputs[0], self._input_w) - # input_masks[0] = input_masks[0].unsqueeze(-1).expand(seq_len, batch_size, self.hidden_size) - - self.inputs = inputs - self.input_masks = input_masks - - if self.fix_mask: - self.output_dropout_layers = [None] * self.num_layers - for l in range(self.num_layers): - binary_mask = torch.rand((batch_size, self.hidden_size)) > self.recurrent_dropout - # This scaling ensures expected values and variances of the output of applying this mask and the original tensor are the same. 
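-                # Sampling the mask once per layer and reusing it at every time
-                # step makes this "variational" dropout, as opposed to
-                # resampling a fresh mask per step with F.dropout.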
- # from allennlp.nn.util.py - self.output_dropout_layers[l] = binary_mask.float().div(1.0 - self.recurrent_dropout) - if self.use_cuda: - self.output_dropout_layers[l] = self.output_dropout_layers[l].cuda() - - for l in range(self.num_layers): - h, c = self.init_hidden(batch_size, self.hidden_size) - outputs_list = [] - for t in range(len(self.inputs[l])): - x = self.inputs[l][t] - m = self.input_masks[l][t].float() - h_temp, c_temp = self.lstms[l].forward(x, (h, c)) # [batch, hidden_size] - r = torch.sigmoid(self.highway_gate_input[l](x) + self.highway_gate_state[l](h)) - lx = self.highway_linear_input[l](x) # [batch, hidden_size] - h_temp = r * h_temp + (1 - r) * lx - - if Train: - if self.fix_mask: - h_temp = self.output_dropout_layers[l] * h_temp - else: - h_temp = F.dropout(h_temp, p=self.recurrent_dropout) - - h = m * h_temp + (1 - m) * h - c = m * c_temp + (1 - m) * c - outputs_list.append(h) - outputs = torch.stack(outputs_list, 0) # [seq_len, batch, hidden_size] - self.inputs[l + 1] = DeepLSTM.flip(outputs, 0) # reverse [seq_len, batch, hidden_size] - self.input_masks[l + 1] = DeepLSTM.flip(self.input_masks[l], 0) - - self.output_state = self.inputs # num_layers * [seq_len, batch, hidden_size] - - # flip -2 layer - # self.output_state[-2] = DeepLSTM.flip(self.output_state[-2], 0) - - # concat last two layer - # self.output_state = torch.cat([self.output_state[-1], self.output_state[-2]], dim=-1).transpose(0, 1) - - self.output_state = self.output_state[-1].transpose(0, 1) - - assert self.output_state.size() == (batch_size, seq_len, self.hidden_size) - - return self.output_state - - @staticmethod - def flip(x, dim): - xsize = x.size() - dim = x.dim() + dim if dim < 0 else dim - x = x.contiguous() - x = x.view(-1, *xsize[dim:]).contiguous() - x = x.view(x.size(0), x.size(1), -1)[:, getattr(torch.arange(x.size(1) - 1, - -1, -1), ('cpu','cuda')[x.is_cuda])().long(), :] - return x.view(xsize) diff --git a/reproduction/Summarization/Baseline/model/Encoder.py b/reproduction/Summarization/Baseline/model/Encoder.py deleted file mode 100644 index 271270b3..00000000 --- a/reproduction/Summarization/Baseline/model/Encoder.py +++ /dev/null @@ -1,568 +0,0 @@ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import numpy as np - -import torch -import torch.nn as nn -import torch.nn.functional as F -import torch.nn.init as init - -from fastNLP.core.vocabulary import Vocabulary -from fastNLP.io.embed_loader import EmbedLoader - -# from tools.logger import * -from tools.PositionEmbedding import get_sinusoid_encoding_table - -WORD_PAD = "[PAD]" - -class Encoder(nn.Module): - def __init__(self, hps, embed): - """ - - :param hps: - word_emb_dim: word embedding dimension - sent_max_len: max token number in the sentence - output_channel: output channel for cnn - min_kernel_size: min kernel size for cnn - max_kernel_size: max kernel size for cnn - word_embedding: bool, use word embedding or not - embedding_path: word embedding path - embed_train: bool, whether to train word embedding - cuda: bool, use cuda or not - :param vocab: FastNLP.Vocabulary - """ - super(Encoder, self).__init__() - - self._hps = hps - self.sent_max_len = hps.sent_max_len - embed_size = hps.word_emb_dim - - sent_max_len = hps.sent_max_len - - input_channels = 1 - out_channels = hps.output_channel - min_kernel_size = hps.min_kernel_size - max_kernel_size = hps.max_kernel_size - width = embed_size - - # word embedding - self.embed = embed - - # position embedding - 
self.position_embedding = nn.Embedding.from_pretrained(get_sinusoid_encoding_table(sent_max_len + 1, embed_size, padding_idx=0), freeze=True) - - # cnn - self.convs = nn.ModuleList([nn.Conv2d(input_channels, out_channels, kernel_size = (height, width)) for height in range(min_kernel_size, max_kernel_size+1)]) - print("[INFO] Initing W for CNN.......") - for conv in self.convs: - init_weight_value = 6.0 - init.xavier_normal_(conv.weight.data, gain=np.sqrt(init_weight_value)) - fan_in, fan_out = Encoder.calculate_fan_in_and_fan_out(conv.weight.data) - std = np.sqrt(init_weight_value) * np.sqrt(2.0 / (fan_in + fan_out)) - - def calculate_fan_in_and_fan_out(tensor): - dimensions = tensor.ndimension() - if dimensions < 2: - print("[Error] Fan in and fan out can not be computed for tensor with less than 2 dimensions") - raise ValueError("[Error] Fan in and fan out can not be computed for tensor with less than 2 dimensions") - - if dimensions == 2: # Linear - fan_in = tensor.size(1) - fan_out = tensor.size(0) - else: - num_input_fmaps = tensor.size(1) - num_output_fmaps = tensor.size(0) - receptive_field_size = 1 - if tensor.dim() > 2: - receptive_field_size = tensor[0][0].numel() - fan_in = num_input_fmaps * receptive_field_size - fan_out = num_output_fmaps * receptive_field_size - - return fan_in, fan_out - - def forward(self, input): - # input: a batch of Example object [batch_size, N, seq_len] - - batch_size, N, _ = input.size() - input = input.view(-1, input.size(2)) # [batch_size*N, L] - input_sent_len = ((input!=0).sum(dim=1)).int() # [batch_size*N, 1] - enc_embed_input = self.embed(input) # [batch_size*N, L, D] - - input_pos = torch.Tensor([np.hstack((np.arange(1, sentlen + 1), np.zeros(self.sent_max_len - sentlen))) for sentlen in input_sent_len]) - if self._hps.cuda: - input_pos = input_pos.cuda() - enc_pos_embed_input = self.position_embedding(input_pos.long()) # [batch_size*N, D] - # print(enc_embed_input.size()) - # print(enc_pos_embed_input.size()) - enc_conv_input = enc_embed_input + enc_pos_embed_input - enc_conv_input = enc_conv_input.unsqueeze(1) # (batch * N,Ci,L,D) - enc_conv_output = [F.relu(conv(enc_conv_input)).squeeze(3) for conv in self.convs] # kernel_sizes * (batch*N, Co, W) - enc_maxpool_output = [F.max_pool1d(x, x.size(2)).squeeze(2) for x in enc_conv_output] # kernel_sizes * (batch*N, Co) - sent_embedding = torch.cat(enc_maxpool_output, 1) # (batch*N, Co * kernel_sizes) - sent_embedding = sent_embedding.view(batch_size, N, -1) - return sent_embedding - -class DomainEncoder(Encoder): - def __init__(self, hps, vocab, domaindict): - super(DomainEncoder, self).__init__(hps, vocab) - - # domain embedding - self.domain_embedding = nn.Embedding(domaindict.size(), hps.domain_emb_dim) - self.domain_embedding.weight.requires_grad = True - - def forward(self, input, domain): - """ - :param input: [batch_size, N, seq_len], N sentence number, seq_len token number - :param domain: [batch_size] - :return: sent_embedding: [batch_size, N, Co * kernel_sizes] - """ - - batch_size, N, _ = input.size() - - sent_embedding = super().forward(input) - enc_domain_input = self.domain_embedding(domain) # [batch, D] - enc_domain_input = enc_domain_input.unsqueeze(1).expand(batch_size, N, -1) # [batch, N, D] - sent_embedding = torch.cat((sent_embedding, enc_domain_input), dim=2) - return sent_embedding - -class MultiDomainEncoder(Encoder): - def __init__(self, hps, vocab, domaindict): - super(MultiDomainEncoder, self).__init__(hps, vocab) - - self.domain_size = domaindict.size() - - # domain 
embedding - self.domain_embedding = nn.Embedding(self.domain_size, hps.domain_emb_dim) - self.domain_embedding.weight.requires_grad = True - - def forward(self, input, domain): - """ - :param input: [batch_size, N, seq_len], N sentence number, seq_len token number - :param domain: [batch_size, domain_size] - :return: sent_embedding: [batch_size, N, Co * kernel_sizes] - """ - - batch_size, N, _ = input.size() - - # logger.info(domain[:5, :]) - - sent_embedding = super().forward(input) - domain_padding = torch.arange(self.domain_size).unsqueeze(0).expand(batch_size, -1) - domain_padding = domain_padding.cuda().view(-1) if self._hps.cuda else domain_padding.view(-1) # [batch * domain_size] - - enc_domain_input = self.domain_embedding(domain_padding) # [batch * domain_size, D] - enc_domain_input = enc_domain_input.view(batch_size, self.domain_size, -1) * domain.unsqueeze(-1).float() # [batch, domain_size, D] - - # logger.info(enc_domain_input[:5,:]) # [batch, domain_size, D] - - enc_domain_input = enc_domain_input.sum(1) / domain.sum(1).float().unsqueeze(-1) # [batch, D] - enc_domain_input = enc_domain_input.unsqueeze(1).expand(batch_size, N, -1) # [batch, N, D] - sent_embedding = torch.cat((sent_embedding, enc_domain_input), dim=2) - return sent_embedding - - -class BertEncoder(nn.Module): - def __init__(self, hps): - super(BertEncoder, self).__init__() - - from pytorch_pretrained_bert.modeling import BertModel - - self._hps = hps - self.sent_max_len = hps.sent_max_len - self._cuda = hps.cuda - - embed_size = hps.word_emb_dim - sent_max_len = hps.sent_max_len - - input_channels = 1 - out_channels = hps.output_channel - min_kernel_size = hps.min_kernel_size - max_kernel_size = hps.max_kernel_size - width = embed_size - - # word embedding - self._bert = BertModel.from_pretrained("/remote-home/dqwang/BERT/pre-train/uncased_L-24_H-1024_A-16") - self._bert.eval() - for p in self._bert.parameters(): - p.requires_grad = False - - self.word_embedding_proj = nn.Linear(4096, embed_size) - - # position embedding - self.position_embedding = nn.Embedding.from_pretrained(get_sinusoid_encoding_table(sent_max_len + 1, embed_size, padding_idx=0), freeze=True) - - # cnn - self.convs = nn.ModuleList([nn.Conv2d(input_channels, out_channels, kernel_size = (height, width)) for height in range(min_kernel_size, max_kernel_size+1)]) - logger.info("[INFO] Initing W for CNN.......") - for conv in self.convs: - init_weight_value = 6.0 - init.xavier_normal_(conv.weight.data, gain=np.sqrt(init_weight_value)) - fan_in, fan_out = Encoder.calculate_fan_in_and_fan_out(conv.weight.data) - std = np.sqrt(init_weight_value) * np.sqrt(2.0 / (fan_in + fan_out)) - - def calculate_fan_in_and_fan_out(tensor): - dimensions = tensor.ndimension() - if dimensions < 2: - logger.error("[Error] Fan in and fan out can not be computed for tensor with less than 2 dimensions") - raise ValueError("[Error] Fan in and fan out can not be computed for tensor with less than 2 dimensions") - - if dimensions == 2: # Linear - fan_in = tensor.size(1) - fan_out = tensor.size(0) - else: - num_input_fmaps = tensor.size(1) - num_output_fmaps = tensor.size(0) - receptive_field_size = 1 - if tensor.dim() > 2: - receptive_field_size = tensor[0][0].numel() - fan_in = num_input_fmaps * receptive_field_size - fan_out = num_output_fmaps * receptive_field_size - - return fan_in, fan_out - - def pad_encoder_input(self, input_list): - """ - :param input_list: N [seq_len, hidden_state] - :return: enc_sent_input_pad: list, N [max_len, hidden_state] - """ - max_len = 
self.sent_max_len - enc_sent_input_pad = [] - _, hidden_size = input_list[0].size() - for i in range(len(input_list)): - article_words = input_list[i] # [seq_len, hidden_size] - seq_len = article_words.size(0) - if seq_len > max_len: - pad_words = article_words[:max_len, :] - else: - pad_tensor = torch.zeros(max_len - seq_len, hidden_size).cuda() if self._cuda else torch.zeros(max_len - seq_len, hidden_size) - pad_words = torch.cat([article_words, pad_tensor], dim=0) - enc_sent_input_pad.append(pad_words) - return enc_sent_input_pad - - def forward(self, inputs, input_masks, enc_sent_len): - """ - - :param inputs: a batch of Example object [batch_size, doc_len=512] - :param input_masks: 0 or 1, [batch, doc_len=512] - :param enc_sent_len: sentence original length [batch, N] - :return: - """ - - - # Use Bert to get word embedding - batch_size, N = enc_sent_len.size() - input_pad_list = [] - for i in range(batch_size): - tokens_id = inputs[i] - input_mask = input_masks[i] - sent_len = enc_sent_len[i] - input_ids = tokens_id.unsqueeze(0) - input_mask = input_mask.unsqueeze(0) - - out, _ = self._bert(input_ids, token_type_ids=None, attention_mask=input_mask) - out = torch.cat(out[-4:], dim=-1).squeeze(0) # [doc_len=512, hidden_state=4096] - - _, hidden_size = out.size() - - # restore the sentence - last_end = 1 - enc_sent_input = [] - for length in sent_len: - if length != 0 and last_end < 511: - enc_sent_input.append(out[last_end: min(511, last_end + length), :]) - last_end += length - else: - pad_tensor = torch.zeros(self.sent_max_len, hidden_size).cuda() if self._hps.cuda else torch.zeros(self.sent_max_len, hidden_size) - enc_sent_input.append(pad_tensor) - - - # pad the sentence - enc_sent_input_pad = self.pad_encoder_input(enc_sent_input) # [N, seq_len, hidden_state=4096] - input_pad_list.append(torch.stack(enc_sent_input_pad)) - - input_pad = torch.stack(input_pad_list) - - input_pad = input_pad.view(batch_size*N, self.sent_max_len, -1) - enc_sent_len = enc_sent_len.view(-1) # [batch_size*N] - enc_embed_input = self.word_embedding_proj(input_pad) # [batch_size * N, L, D] - - sent_pos_list = [] - for sentlen in enc_sent_len: - sent_pos = list(range(1, min(self.sent_max_len, sentlen) + 1)) - for k in range(self.sent_max_len - sentlen): - sent_pos.append(0) - sent_pos_list.append(sent_pos) - input_pos = torch.Tensor(sent_pos_list).long() - - if self._hps.cuda: - input_pos = input_pos.cuda() - enc_pos_embed_input = self.position_embedding(input_pos.long()) # [batch_size*N, D] - enc_conv_input = enc_embed_input + enc_pos_embed_input - enc_conv_input = enc_conv_input.unsqueeze(1) # (batch * N,Ci,L,D) - enc_conv_output = [F.relu(conv(enc_conv_input)).squeeze(3) for conv in self.convs] # kernel_sizes * (batch*N, Co, W) - enc_maxpool_output = [F.max_pool1d(x, x.size(2)).squeeze(2) for x in enc_conv_output] # kernel_sizes * (batch*N, Co) - sent_embedding = torch.cat(enc_maxpool_output, 1) # (batch*N, Co * kernel_sizes) - sent_embedding = sent_embedding.view(batch_size, N, -1) - return sent_embedding - -class BertTagEncoder(BertEncoder): - def __init__(self, hps, domaindict): - super(BertTagEncoder, self).__init__(hps) - - # domain embedding - self.domain_embedding = nn.Embedding(domaindict.size(), hps.domain_emb_dim) - self.domain_embedding.weight.requires_grad = True - - def forward(self, inputs, input_masks, enc_sent_len, domain): - sent_embedding = super().forward(inputs, input_masks, enc_sent_len) - - batch_size, N = enc_sent_len.size() - - enc_domain_input = self.domain_embedding(domain) # 
[batch, D] - enc_domain_input = enc_domain_input.unsqueeze(1).expand(batch_size, N, -1) # [batch, N, D] - sent_embedding = torch.cat((sent_embedding, enc_domain_input), dim=2) - - return sent_embedding - -class ELMoEndoer(nn.Module): - def __init__(self, hps): - super(ELMoEndoer, self).__init__() - - self._hps = hps - self.sent_max_len = hps.sent_max_len - - from allennlp.modules.elmo import Elmo - - elmo_dim = 1024 - options_file = "/remote-home/dqwang/ELMo/elmo_2x4096_512_2048cnn_2xhighway_5.5B_options.json" - weight_file = "/remote-home/dqwang/ELMo/elmo_2x4096_512_2048cnn_2xhighway_5.5B_weights.hdf5" - - # elmo_dim = 512 - # options_file = "/remote-home/dqwang/ELMo/elmo_2x2048_256_2048cnn_1xhighway_options.json" - # weight_file = "/remote-home/dqwang/ELMo/elmo_2x2048_256_2048cnn_1xhighway_weights.hdf5" - - embed_size = hps.word_emb_dim - sent_max_len = hps.sent_max_len - - input_channels = 1 - out_channels = hps.output_channel - min_kernel_size = hps.min_kernel_size - max_kernel_size = hps.max_kernel_size - width = embed_size - - # elmo embedding - self.elmo = Elmo(options_file, weight_file, 1, dropout=0) - self.embed_proj = nn.Linear(elmo_dim, embed_size) - - # position embedding - self.position_embedding = nn.Embedding.from_pretrained(get_sinusoid_encoding_table(sent_max_len + 1, embed_size, padding_idx=0), freeze=True) - - # cnn - self.convs = nn.ModuleList([nn.Conv2d(input_channels, out_channels, kernel_size = (height, width)) for height in range(min_kernel_size, max_kernel_size+1)]) - logger.info("[INFO] Initing W for CNN.......") - for conv in self.convs: - init_weight_value = 6.0 - init.xavier_normal_(conv.weight.data, gain=np.sqrt(init_weight_value)) - fan_in, fan_out = Encoder.calculate_fan_in_and_fan_out(conv.weight.data) - std = np.sqrt(init_weight_value) * np.sqrt(2.0 / (fan_in + fan_out)) - - def calculate_fan_in_and_fan_out(tensor): - dimensions = tensor.ndimension() - if dimensions < 2: - logger.error("[Error] Fan in and fan out can not be computed for tensor with less than 2 dimensions") - raise ValueError("[Error] Fan in and fan out can not be computed for tensor with less than 2 dimensions") - - if dimensions == 2: # Linear - fan_in = tensor.size(1) - fan_out = tensor.size(0) - else: - num_input_fmaps = tensor.size(1) - num_output_fmaps = tensor.size(0) - receptive_field_size = 1 - if tensor.dim() > 2: - receptive_field_size = tensor[0][0].numel() - fan_in = num_input_fmaps * receptive_field_size - fan_out = num_output_fmaps * receptive_field_size - - return fan_in, fan_out - - def forward(self, input): - # input: a batch of Example object [batch_size, N, seq_len, character_len] - - batch_size, N, seq_len, _ = input.size() - input = input.view(batch_size * N, seq_len, -1) # [batch_size*N, seq_len, character_len] - input_sent_len = ((input.sum(-1)!=0).sum(dim=1)).int() # [batch_size*N, 1] - # logger.debug(input_sent_len.view(batch_size, -1)) - enc_embed_input = self.elmo(input)['elmo_representations'][0] # [batch_size*N, L, D] - enc_embed_input = self.embed_proj(enc_embed_input) - - # input_pos = torch.Tensor([np.hstack((np.arange(1, sentlen + 1), np.zeros(self.sent_max_len - sentlen))) for sentlen in input_sent_len]) - - sent_pos_list = [] - for sentlen in input_sent_len: - sent_pos = list(range(1, min(self.sent_max_len, sentlen) + 1)) - for k in range(self.sent_max_len - sentlen): - sent_pos.append(0) - sent_pos_list.append(sent_pos) - input_pos = torch.Tensor(sent_pos_list).long() - - if self._hps.cuda: - input_pos = input_pos.cuda() - enc_pos_embed_input = 
self.position_embedding(input_pos.long()) # [batch_size*N, D] - enc_conv_input = enc_embed_input + enc_pos_embed_input - enc_conv_input = enc_conv_input.unsqueeze(1) # (batch * N,Ci,L,D) - enc_conv_output = [F.relu(conv(enc_conv_input)).squeeze(3) for conv in self.convs] # kernel_sizes * (batch*N, Co, W) - enc_maxpool_output = [F.max_pool1d(x, x.size(2)).squeeze(2) for x in enc_conv_output] # kernel_sizes * (batch*N, Co) - sent_embedding = torch.cat(enc_maxpool_output, 1) # (batch*N, Co * kernel_sizes) - sent_embedding = sent_embedding.view(batch_size, N, -1) - return sent_embedding - -class ELMoEndoer2(nn.Module): - def __init__(self, hps): - super(ELMoEndoer2, self).__init__() - - self._hps = hps - self._cuda = hps.cuda - self.sent_max_len = hps.sent_max_len - - from allennlp.modules.elmo import Elmo - - elmo_dim = 1024 - options_file = "/remote-home/dqwang/ELMo/elmo_2x4096_512_2048cnn_2xhighway_5.5B_options.json" - weight_file = "/remote-home/dqwang/ELMo/elmo_2x4096_512_2048cnn_2xhighway_5.5B_weights.hdf5" - - # elmo_dim = 512 - # options_file = "/remote-home/dqwang/ELMo/elmo_2x2048_256_2048cnn_1xhighway_options.json" - # weight_file = "/remote-home/dqwang/ELMo/elmo_2x2048_256_2048cnn_1xhighway_weights.hdf5" - - embed_size = hps.word_emb_dim - sent_max_len = hps.sent_max_len - - input_channels = 1 - out_channels = hps.output_channel - min_kernel_size = hps.min_kernel_size - max_kernel_size = hps.max_kernel_size - width = embed_size - - # elmo embedding - self.elmo = Elmo(options_file, weight_file, 1, dropout=0) - self.embed_proj = nn.Linear(elmo_dim, embed_size) - - # position embedding - self.position_embedding = nn.Embedding.from_pretrained(get_sinusoid_encoding_table(sent_max_len + 1, embed_size, padding_idx=0), freeze=True) - - # cnn - self.convs = nn.ModuleList([nn.Conv2d(input_channels, out_channels, kernel_size = (height, width)) for height in range(min_kernel_size, max_kernel_size+1)]) - logger.info("[INFO] Initing W for CNN.......") - for conv in self.convs: - init_weight_value = 6.0 - init.xavier_normal_(conv.weight.data, gain=np.sqrt(init_weight_value)) - fan_in, fan_out = Encoder.calculate_fan_in_and_fan_out(conv.weight.data) - std = np.sqrt(init_weight_value) * np.sqrt(2.0 / (fan_in + fan_out)) - - def calculate_fan_in_and_fan_out(tensor): - dimensions = tensor.ndimension() - if dimensions < 2: - logger.error("[Error] Fan in and fan out can not be computed for tensor with less than 2 dimensions") - raise ValueError("[Error] Fan in and fan out can not be computed for tensor with less than 2 dimensions") - - if dimensions == 2: # Linear - fan_in = tensor.size(1) - fan_out = tensor.size(0) - else: - num_input_fmaps = tensor.size(1) - num_output_fmaps = tensor.size(0) - receptive_field_size = 1 - if tensor.dim() > 2: - receptive_field_size = tensor[0][0].numel() - fan_in = num_input_fmaps * receptive_field_size - fan_out = num_output_fmaps * receptive_field_size - - return fan_in, fan_out - - def pad_encoder_input(self, input_list): - """ - :param input_list: N [seq_len, hidden_state] - :return: enc_sent_input_pad: list, N [max_len, hidden_state] - """ - max_len = self.sent_max_len - enc_sent_input_pad = [] - _, hidden_size = input_list[0].size() - for i in range(len(input_list)): - article_words = input_list[i] # [seq_len, hidden_size] - seq_len = article_words.size(0) - if seq_len > max_len: - pad_words = article_words[:max_len, :] - else: - pad_tensor = torch.zeros(max_len - seq_len, hidden_size).cuda() if self._cuda else torch.zeros(max_len - seq_len, hidden_size) - 
pad_words = torch.cat([article_words, pad_tensor], dim=0) - enc_sent_input_pad.append(pad_words) - return enc_sent_input_pad - - def forward(self, inputs, input_masks, enc_sent_len): - """ - - :param inputs: a batch of Example object [batch_size, doc_len=512, character_len=50] - :param input_masks: 0 or 1, [batch, doc_len=512] - :param enc_sent_len: sentence original length [batch, N] - :return: - sent_embedding: [batch, N, D] - """ - - # Use Bert to get word embedding - batch_size, N = enc_sent_len.size() - input_pad_list = [] - - elmo_output = self.elmo(inputs)['elmo_representations'][0] # [batch_size, 512, D] - elmo_output = elmo_output * input_masks.unsqueeze(-1).float() - # print("END elmo") - - for i in range(batch_size): - sent_len = enc_sent_len[i] # [1, N] - out = elmo_output[i] - - _, hidden_size = out.size() - - # restore the sentence - last_end = 0 - enc_sent_input = [] - for length in sent_len: - if length != 0 and last_end < 512: - enc_sent_input.append(out[last_end : min(512, last_end + length), :]) - last_end += length - else: - pad_tensor = torch.zeros(self.sent_max_len, hidden_size).cuda() if self._hps.cuda else torch.zeros(self.sent_max_len, hidden_size) - enc_sent_input.append(pad_tensor) - - # pad the sentence - enc_sent_input_pad = self.pad_encoder_input(enc_sent_input) # [N, seq_len, hidden_state=4096] - input_pad_list.append(torch.stack(enc_sent_input_pad)) # batch * [N, max_len, hidden_state] - - input_pad = torch.stack(input_pad_list) - - input_pad = input_pad.view(batch_size * N, self.sent_max_len, -1) - enc_sent_len = enc_sent_len.view(-1) # [batch_size*N] - enc_embed_input = self.embed_proj(input_pad) # [batch_size * N, L, D] - - # input_pos = torch.Tensor([np.hstack((np.arange(1, sentlen + 1), np.zeros(self.sent_max_len - sentlen))) for sentlen in input_sent_len]) - - sent_pos_list = [] - for sentlen in enc_sent_len: - sent_pos = list(range(1, min(self.sent_max_len, sentlen) + 1)) - for k in range(self.sent_max_len - sentlen): - sent_pos.append(0) - sent_pos_list.append(sent_pos) - input_pos = torch.Tensor(sent_pos_list).long() - - if self._hps.cuda: - input_pos = input_pos.cuda() - enc_pos_embed_input = self.position_embedding(input_pos.long()) # [batch_size*N, D] - enc_conv_input = enc_embed_input + enc_pos_embed_input - enc_conv_input = enc_conv_input.unsqueeze(1) # (batch * N,Ci,L,D) - enc_conv_output = [F.relu(conv(enc_conv_input)).squeeze(3) for conv in self.convs] # kernel_sizes * (batch*N, Co, W) - enc_maxpool_output = [F.max_pool1d(x, x.size(2)).squeeze(2) for x in enc_conv_output] # kernel_sizes * (batch*N, Co) - sent_embedding = torch.cat(enc_maxpool_output, 1) # (batch*N, Co * kernel_sizes) - sent_embedding = sent_embedding.view(batch_size, N, -1) - return sent_embedding \ No newline at end of file diff --git a/reproduction/Summarization/Baseline/model/LSTMModel.py b/reproduction/Summarization/Baseline/model/LSTMModel.py deleted file mode 100644 index 3dfbf6ba..00000000 --- a/reproduction/Summarization/Baseline/model/LSTMModel.py +++ /dev/null @@ -1,105 +0,0 @@ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import torch -import torch.nn as nn -from torch.autograd import * -from torch.distributions import * - -from .Encoder import Encoder -from .DeepLSTM import DeepLSTM - -from transformer.SubLayers import MultiHeadAttention,PositionwiseFeedForward - -class SummarizationModel(nn.Module): - def __init__(self, hps, embed): - """ - - :param hps: hyperparameters for the model - :param 
embed: word embedding - """ - super(SummarizationModel, self).__init__() - - self._hps = hps - self.Train = (hps.mode == 'train') - - # sentence encoder - self.encoder = Encoder(hps, embed) - - # Multi-layer highway lstm - self.num_layers = hps.n_layers - self.sent_embedding_size = (hps.max_kernel_size - hps.min_kernel_size + 1) * hps.output_channel - self.lstm_hidden_size = hps.lstm_hidden_size - self.recurrent_dropout = hps.recurrent_dropout_prob - - self.deep_lstm = DeepLSTM(self.sent_embedding_size, self.lstm_hidden_size, self.num_layers, self.recurrent_dropout, - hps.use_orthnormal_init, hps.fix_mask, hps.cuda) - - # Multi-head attention - self.n_head = hps.n_head - self.d_v = self.d_k = int(self.lstm_hidden_size / hps.n_head) - self.d_inner = hps.ffn_inner_hidden_size - self.slf_attn = MultiHeadAttention(hps.n_head, self.lstm_hidden_size , self.d_k, self.d_v, dropout=hps.atten_dropout_prob) - self.pos_ffn = PositionwiseFeedForward(self.d_v, self.d_inner, dropout = hps.ffn_dropout_prob) - - self.wh = nn.Linear(self.d_v, 2) - - - def forward(self, words, seq_len): - """ - - :param input: [batch_size, N, seq_len], word idx long tensor - :param input_len: [batch_size, N], 1 for sentence and 0 for padding - :return: - p_sent: [batch_size, N, 2] - output_slf_attn: (option) [n_head, batch_size, N, N] - """ - - input = words - input_len = seq_len - - # -- Sentence Encoder - self.sent_embedding = self.encoder(input) # [batch, N, Co * kernel_sizes] - - # -- Multi-layer highway lstm - input_len = input_len.float() # [batch, N] - self.inputs = [None] * (self.num_layers + 1) - self.input_masks = [None] * (self.num_layers + 1) - self.inputs[0] = self.sent_embedding.permute(1, 0, 2) # [N, batch, Co * kernel_sizes] - self.input_masks[0] = input_len.permute(1, 0).unsqueeze(2) - - self.lstm_output_state = self.deep_lstm(self.inputs, self.input_masks, Train=self.train) # [batch, N, hidden_size] - - # -- Prepare masks - batch_size, N = input_len.size() - slf_attn_mask = input_len.eq(0.0) # [batch, N], 1 for padding - slf_attn_mask = slf_attn_mask.unsqueeze(1).expand(-1, N, -1) # [batch, N, N] - - # -- Multi-head attention - self.atten_output, self.output_slf_attn = self.slf_attn(self.lstm_output_state, self.lstm_output_state, self.lstm_output_state, mask=slf_attn_mask) - self.atten_output *= input_len.unsqueeze(2) # [batch_size, N, lstm_hidden_size = (n_head * d_v)] - self.multi_atten_output = self.atten_output.view(batch_size, N, self.n_head, self.d_v) # [batch_size, N, n_head, d_v] - self.multi_atten_context = self.multi_atten_output[:, :, 0::2, :].sum(2) - self.multi_atten_output[:, :, 1::2, :].sum(2) # [batch_size, N, d_v] - - # -- Position-wise Feed-Forward Networks - self.output_state = self.pos_ffn(self.multi_atten_context) - self.output_state = self.output_state * input_len.unsqueeze(2) # [batch_size, N, d_v] - - p_sent = self.wh(self.output_state) # [batch, N, 2] - - idx = None - if self._hps.m == 0: - prediction = p_sent.view(-1, 2).max(1)[1] - prediction = prediction.view(batch_size, -1) - else: - mask_output = torch.exp(p_sent[:, :, 1]) # # [batch, N] - mask_output = mask_output.masked_fill(input_len.eq(0), 0) - topk, idx = torch.topk(mask_output, self._hps.m) - prediction = torch.zeros(batch_size, N).scatter_(1, idx.data.cpu(), 1) - prediction = prediction.long().view(batch_size, -1) - - if self._hps.cuda: - prediction = prediction.cuda() - - return {"p_sent": p_sent, "prediction": prediction, "pred_idx": idx} diff --git a/reproduction/Summarization/Baseline/model/Loss.py 
b/reproduction/Summarization/Baseline/model/Loss.py deleted file mode 100644 index 6ff6f0b9..00000000 --- a/reproduction/Summarization/Baseline/model/Loss.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- - -# __author__="Danqing Wang" - -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -import torch -import torch.nn.functional as F - -from fastNLP.core.losses import LossBase -from fastNLP.core._logger import logger - -class MyCrossEntropyLoss(LossBase): - def __init__(self, pred=None, target=None, mask=None, padding_idx=-100, reduce='mean'): - super().__init__() - self._init_param_map(pred=pred, target=target, mask=mask) - self.padding_idx = padding_idx - self.reduce = reduce - - def get_loss(self, pred, target, mask): - """ - - :param pred: [batch, N, 2] - :param target: [batch, N] - :param mask: [batch, N] - :return: - """ - # logger.debug(pred[0:5, :, :]) - # logger.debug(target[0:5, :]) - - batch, N, _ = pred.size() - pred = pred.view(-1, 2) - target = target.view(-1) - loss = F.cross_entropy(input=pred, target=target, - ignore_index=self.padding_idx, reduction=self.reduce) - loss = loss.view(batch, -1) - loss = loss.masked_fill(mask.eq(False), 0) - loss = loss.sum(1).mean() - logger.debug("loss %f", loss) - return loss - - diff --git a/reproduction/Summarization/Baseline/model/Metric.py b/reproduction/Summarization/Baseline/model/Metric.py deleted file mode 100644 index 91c25184..00000000 --- a/reproduction/Summarization/Baseline/model/Metric.py +++ /dev/null @@ -1,217 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- - -# __author__="Danqing Wang" - -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# ============================================================================== -from __future__ import division - - -import torch -import torch.nn.functional as F - -from rouge import Rouge - -from fastNLP.core.const import Const -from fastNLP.core.metrics import MetricBase - -# from tools.logger import * -from fastNLP.core._logger import logger -from tools.utils import pyrouge_score_all, pyrouge_score_all_multi - - -class LossMetric(MetricBase): - def __init__(self, pred=None, target=None, mask=None, padding_idx=-100, reduce='mean'): - super().__init__() - - self._init_param_map(pred=pred, target=target, mask=mask) - self.padding_idx = padding_idx - self.reduce = reduce - self.loss = 0.0 - self.iteration = 0 - - def evaluate(self, pred, target, mask): - """ - - :param pred: [batch, N, 2] - :param target: [batch, N] - :param mask: [batch, N] - :return: - """ - - batch, N, _ = pred.size() - pred = pred.view(-1, 2) - target = target.view(-1) - loss = F.cross_entropy(input=pred, target=target, - ignore_index=self.padding_idx, reduction=self.reduce) - loss = loss.view(batch, -1) - loss = loss.masked_fill(mask.eq(False), 0) - loss = loss.sum(1).mean() - self.loss += loss - self.iteration += 1 - - def get_metric(self, reset=True): - epoch_avg_loss = self.loss / self.iteration - if reset: - self.loss = 0.0 - self.iteration = 0 - metric = {"loss": -epoch_avg_loss} - logger.info(metric) - return metric - - - - -class LabelFMetric(MetricBase): - def __init__(self, pred=None, target=None): - super().__init__() - - self._init_param_map(pred=pred, target=target) - - self.match = 0.0 - self.pred = 0.0 - self.true = 0.0 - self.match_true = 0.0 - self.total = 0.0 - - - def evaluate(self, pred, target): - """ - - :param pred: [batch, N] int - :param target: [batch, N] int - :return: - """ - target = target.data - pred = pred.data - # logger.debug(pred.size()) - # logger.debug(pred[:5,:]) - batch, N = pred.size() - self.pred += pred.sum() - self.true += target.sum() - self.match += (pred == target).sum() - self.match_true += ((pred == target) & (pred == 1)).sum() - self.total += batch * N - - def get_metric(self, reset=True): - self.match, self.pred, self.true, self.match_true = self.match.float(), self.pred.float(), self.true.float(), self.match_true.float() - logger.debug((self.match, self.pred, self.true, self.match_true, self.total)) - try: - accu = self.match / self.total - precision = self.match_true / self.pred - recall = self.match_true / self.true - F = 2 * precision * recall / (precision + recall) - except ZeroDivisionError: - accu = precision = recall = F = torch.tensor(0.0) - logger.error("[Error] float division by zero") - if reset: - self.pred, self.true, self.match_true, self.match, self.total = 0, 0, 0, 0, 0 - ret = {"accu": accu.cpu(), "p": precision.cpu(), "r": recall.cpu(), "f": F.cpu()} - logger.info(ret) - return ret - - -class RougeMetric(MetricBase): - def __init__(self, hps, pred=None, text=None, refer=None): - super().__init__() - - self._hps = hps - self._init_param_map(pred=pred, text=text, summary=refer) - - self.hyps = [] - self.refers = [] - - def evaluate(self, pred, text, summary): - """ - - :param pred: [batch, N] - :param text: [batch, N] - :param summary: [batch, N] - :return: - """ - - batch_size, N = pred.size() - for j in range(batch_size): - original_article_sents = text[j] - sent_max_number = len(original_article_sents) - refer = "\n".join(summary[j]) - hyps = "\n".join(original_article_sents[id] for id in range(len(pred[j])) if - pred[j][id] == 1 and id < sent_max_number) -
if sent_max_number < self._hps.m and len(hyps) <= 1: - logger.error("sent_max_number is too short %d, Skip!", sent_max_number) - continue - - if len(hyps) >= 1 and hyps != '.': - self.hyps.append(hyps) - self.refers.append(refer) - elif refer == "." or refer == "": - logger.error("Refer is None!") - logger.debug(refer) - elif hyps == "." or hyps == "": - logger.error("hyps is None!") - logger.debug("sent_max_number:%d", sent_max_number) - logger.debug("pred:") - logger.debug(pred[j]) - logger.debug(hyps) - else: - logger.error("No sentences were selected!") - logger.debug("sent_max_number:%d", sent_max_number) - logger.debug(original_article_sents) - logger.debug(refer) - continue - - def get_metric(self, reset=True): - pass - -class FastRougeMetric(RougeMetric): - def __init__(self, hps, pred=None, text=None, refer=None): - super().__init__(hps, pred, text, refer) - - def get_metric(self, reset=True): - logger.info("[INFO] Hyps and Refer number is %d, %d", len(self.hyps), len(self.refers)) - if len(self.hyps) == 0 or len(self.refers) == 0: - logger.error("During testing, no hyps or refers were selected!") - return - rouge = Rouge() - scores_all = rouge.get_scores(self.hyps, self.refers, avg=True) - if reset: - self.hyps = [] - self.refers = [] - logger.info(scores_all) - return scores_all - - -class PyRougeMetric(RougeMetric): - def __init__(self, hps, pred=None, text=None, refer=None): - super().__init__(hps, pred, text, refer) - - def get_metric(self, reset=True): - logger.info("[INFO] Hyps and Refer number is %d, %d", len(self.hyps), len(self.refers)) - if len(self.hyps) == 0 or len(self.refers) == 0: - logger.error("During testing, no hyps or refers were selected!") - return - if isinstance(self.refers[0], list): - logger.info("Multi Reference summaries!") - scores_all = pyrouge_score_all_multi(self.hyps, self.refers) - else: - scores_all = pyrouge_score_all(self.hyps, self.refers) - if reset: - self.hyps = [] - self.refers = [] - logger.info(scores_all) - return scores_all - - - diff --git a/reproduction/Summarization/Baseline/model/TForiginal.py b/reproduction/Summarization/Baseline/model/TForiginal.py deleted file mode 100644 index d1444150..00000000 --- a/reproduction/Summarization/Baseline/model/TForiginal.py +++ /dev/null @@ -1,142 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- - -# __author__="Danqing Wang" - -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# ============================================================================== - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import numpy as np - -import torch -import torch.nn as nn - -from .Encoder import Encoder -# from tools.Encoder import Encoder -from tools.PositionEmbedding import get_sinusoid_encoding_table -from tools.logger import * - -from fastNLP.core.const import Const - -from transformer.Layers import EncoderLayer - - -class TransformerModel(nn.Module): - def __init__(self, hps, embed): - """ - - :param hps: - min_kernel_size: min kernel size for cnn encoder - max_kernel_size: max kernel size for cnn encoder - output_channel: output channel number for cnn encoder - hidden_size: hidden size for transformer - n_layers: number of transformer encoder layers - n_head: number of heads for multi-head attention - ffn_inner_hidden_size: FFN inner hidden size - atten_dropout_prob: attention dropout probability - doc_max_timesteps: max sentence number of the document - :param embed: word embedding - """ - super(TransformerModel, self).__init__() - - self._hps = hps - - self.encoder = Encoder(hps, embed) - - self.sent_embedding_size = (hps.max_kernel_size - hps.min_kernel_size + 1) * hps.output_channel - self.hidden_size = hps.hidden_size - - self.n_head = hps.n_head - self.d_v = self.d_k = int(self.hidden_size / self.n_head) - self.d_inner = hps.ffn_inner_hidden_size - self.num_layers = hps.n_layers - - self.projection = nn.Linear(self.sent_embedding_size, self.hidden_size) - self.sent_pos_embed = nn.Embedding.from_pretrained( - get_sinusoid_encoding_table(hps.doc_max_timesteps + 1, self.hidden_size, padding_idx=0), freeze=True) - - self.layer_stack = nn.ModuleList([ - EncoderLayer(self.hidden_size, self.d_inner, self.n_head, self.d_k, self.d_v, - dropout=hps.atten_dropout_prob) - for _ in range(self.num_layers)]) - - self.wh = nn.Linear(self.hidden_size, 2) - - def forward(self, words, seq_len): - """ - - :param words: [batch_size, N, seq_len] - :param seq_len: [batch_size, N] - :return: - """ - # Sentence Encoder - - input = words - input_len = seq_len - - self.sent_embedding = self.encoder(input) # [batch, N, Co * kernel_sizes] - - input_len = input_len.float() # [batch, N] - - # -- Prepare masks - batch_size, N = input_len.size() - self.slf_attn_mask = input_len.eq(0.0) # [batch, N] - self.slf_attn_mask = self.slf_attn_mask.unsqueeze(1).expand(-1, N, -1) # [batch, N, N] - self.non_pad_mask = input_len.unsqueeze(-1) # [batch, N, 1] - - input_doc_len = input_len.sum(dim=1).int() # [batch] - sent_pos = torch.Tensor( - [np.hstack((np.arange(1, doclen + 1), np.zeros(N - doclen))) for doclen in input_doc_len]) - sent_pos = sent_pos.long().cuda() if self._hps.cuda else sent_pos.long() - - enc_output_state = self.projection(self.sent_embedding) - enc_input = enc_output_state + self.sent_pos_embed(sent_pos) - - # self.enc_slf_attn = self.enc_slf_attn * self.non_pad_mask - enc_input_list = [] - for enc_layer in self.layer_stack: - # enc_output = [batch_size, N, hidden_size = n_head * d_v] - # enc_slf_attn = [n_head * batch_size, N, N] - enc_input, enc_slf_atten = enc_layer(enc_input, non_pad_mask=self.non_pad_mask, - slf_attn_mask=self.slf_attn_mask) - enc_input_list += [enc_input] - - self.dec_output_state = torch.cat(enc_input_list[-4:]) # [4, batch_size, N, hidden_state] - self.dec_output_state = self.dec_output_state.view(4, batch_size, N, -1) - self.dec_output_state = self.dec_output_state.sum(0) - - p_sent = self.wh(self.dec_output_state) # [batch,
N, 2] - - idx = None - if self._hps.m == 0: - prediction = p_sent.view(-1, 2).max(1)[1] - prediction = prediction.view(batch_size, -1) - else: - mask_output = torch.exp(p_sent[:, :, 1]) # [batch, N] - mask_output = mask_output.masked_fill(input_len.eq(0), 0) - topk, idx = torch.topk(mask_output, self._hps.m) - prediction = torch.zeros(batch_size, N).scatter_(1, idx.data.cpu(), 1) - prediction = prediction.long().view(batch_size, -1) - - if self._hps.cuda: - prediction = prediction.cuda() - - # logger.debug(((p_sent.size(), prediction.size(), idx.size()))) - - return {"p_sent": p_sent, "prediction": prediction, "pred_idx": idx} - diff --git a/reproduction/Summarization/Baseline/model/TransformerModel.py b/reproduction/Summarization/Baseline/model/TransformerModel.py deleted file mode 100644 index 4d314f84..00000000 --- a/reproduction/Summarization/Baseline/model/TransformerModel.py +++ /dev/null @@ -1,139 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- - -# __author__="Danqing Wang" - -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import numpy as np - -import torch -import torch.nn as nn - -from .Encoder import Encoder -from tools.PositionEmbedding import get_sinusoid_encoding_table - -from fastNLP.core.const import Const -from fastNLP.modules.encoder.seq2seq_encoder import TransformerSeq2SeqEncoderLayer - -class TransformerModel(nn.Module): - def __init__(self, hps, vocab): - """ - - :param hps: - min_kernel_size: min kernel size for cnn encoder - max_kernel_size: max kernel size for cnn encoder - output_channel: output channel number for cnn encoder - hidden_size: hidden size for transformer - n_layers: number of transformer encoder layers - n_head: number of heads for multi-head attention - ffn_inner_hidden_size: FFN inner hidden size - atten_dropout_prob: attention dropout probability - doc_max_timesteps: max sentence number of the document - :param vocab: - """ - super(TransformerModel, self).__init__() - - self._hps = hps - self._vocab = vocab - - self.encoder = Encoder(hps, vocab) - - self.sent_embedding_size = (hps.max_kernel_size - hps.min_kernel_size + 1) * hps.output_channel - self.hidden_size = hps.hidden_size - - self.n_head = hps.n_head - self.d_v = self.d_k = int(self.hidden_size / self.n_head) - self.d_inner = hps.ffn_inner_hidden_size - self.num_layers = hps.n_layers - - self.projection = nn.Linear(self.sent_embedding_size, self.hidden_size) - self.sent_pos_embed = nn.Embedding.from_pretrained( - get_sinusoid_encoding_table(hps.doc_max_timesteps + 1, self.hidden_size, padding_idx=0), freeze=True) - - self.layer_stack = nn.ModuleList([ - TransformerSeq2SeqEncoderLayer(d_model=self.hidden_size, n_head=self.n_head, dim_ff=self.d_inner, - dropout=hps.atten_dropout_prob) - for _ in range(self.num_layers)]) - - self.wh = nn.Linear(self.hidden_size, 2) - - - def forward(self, words, seq_len): - """ - - :param
words: [batch_size, N, seq_len] - :param seq_len: [batch_size, N] - :return: - """ - # Sentence Encoder - - input = words - input_len = seq_len - - self.sent_embedding = self.encoder(input) # [batch, N, Co * kernel_sizes] - - input_len = input_len.float() # [batch, N] - - # -- Prepare masks - batch_size, N = input_len.size() - self.slf_attn_mask = input_len.eq(0.0) # [batch, N] - self.slf_attn_mask = self.slf_attn_mask.unsqueeze(1).expand(-1, N, -1) # [batch, N, N] - self.non_pad_mask = input_len.unsqueeze(-1) # [batch, N, 1] - - input_doc_len = input_len.sum(dim=1).int() # [batch] - sent_pos = torch.Tensor([np.hstack((np.arange(1, doclen + 1), np.zeros(N - doclen))) for doclen in input_doc_len]) - sent_pos = sent_pos.long().cuda() if self._hps.cuda else sent_pos.long() - - enc_output_state = self.projection(self.sent_embedding) - enc_input = enc_output_state + self.sent_pos_embed(sent_pos) - - # self.enc_slf_attn = self.enc_slf_attn * self.non_pad_mask - enc_input_list = [] - for enc_layer in self.layer_stack: - # enc_output = [batch_size, N, hidden_size = n_head * d_v] - # enc_slf_attn = [n_head * batch_size, N, N] - enc_input = enc_layer(enc_input, encoder_mask=self.slf_attn_mask) - enc_input_list += [enc_input] - - self.dec_output_state = torch.cat(enc_input_list[-4:]) # [4, batch_size, N, hidden_state] - self.dec_output_state = self.dec_output_state.view(4, batch_size, N, -1) - self.dec_output_state = self.dec_output_state.sum(0) - - p_sent = self.wh(self.dec_output_state) # [batch, N, 2] - - idx = None - if self._hps.m == 0: - prediction = p_sent.view(-1, 2).max(1)[1] - prediction = prediction.view(batch_size, -1) - else: - mask_output = torch.exp(p_sent[:, :, 1]) # [batch, N] - mask_output = mask_output * input_len.float() - topk, idx = torch.topk(mask_output, self._hps.m) - prediction = torch.zeros(batch_size, N).scatter_(1, idx.data.cpu(), 1) - prediction = prediction.long().view(batch_size, -1) - - if self._hps.cuda: - prediction = prediction.cuda() - - # print((p_sent.size(), prediction.size(), idx.size())) - # [batch, N, 2], [batch, N], [batch, hps.m] - return {"pred": p_sent, "prediction": prediction, "pred_idx": idx} - diff --git a/reproduction/Summarization/Baseline/model/__init__.py b/reproduction/Summarization/Baseline/model/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/reproduction/Summarization/Baseline/test/__init__.py b/reproduction/Summarization/Baseline/test/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/reproduction/Summarization/Baseline/test/test_dataLoader.py b/reproduction/Summarization/Baseline/test/test_dataLoader.py deleted file mode 100644 index 53aab547..00000000 --- a/reproduction/Summarization/Baseline/test/test_dataLoader.py +++ /dev/null @@ -1,36 +0,0 @@ - -import unittest - -import sys -sys.path.append('..') - -from data.dataloader import SummarizationLoader - -vocab_size = 100000 -vocab_path = "testdata/vocab" -sent_max_len = 100 -doc_max_timesteps = 50 - -class TestSummarizationLoader(unittest.TestCase): - - def test_case1(self): - sum_loader = SummarizationLoader() - paths = {"train":"testdata/train.jsonl", "valid":"testdata/val.jsonl", "test":"testdata/test.jsonl"} - data = sum_loader.process(paths=paths, vocab_size=vocab_size, vocab_path=vocab_path, sent_max_len=sent_max_len, doc_max_timesteps=doc_max_timesteps) - print(data.datasets) - - def test_case2(self): - sum_loader = SummarizationLoader() - paths = {"train": "testdata/train.jsonl", "valid":
"testdata/val.jsonl", "test": "testdata/test.jsonl"} - data = sum_loader.process(paths=paths, vocab_size=vocab_size, vocab_path=vocab_path, sent_max_len=sent_max_len, doc_max_timesteps=doc_max_timesteps, domain=True) - print(data.datasets, data.vocabs) - - def test_case3(self): - sum_loader = SummarizationLoader() - paths = {"train": "testdata/train.jsonl", "valid": "testdata/val.jsonl", "test": "testdata/test.jsonl"} - data = sum_loader.process(paths=paths, vocab_size=vocab_size, vocab_path=vocab_path, sent_max_len=sent_max_len, doc_max_timesteps=doc_max_timesteps, tag=True) - print(data.datasets, data.vocabs) - - - - diff --git a/reproduction/Summarization/Baseline/test/test_dataloader.py b/reproduction/Summarization/Baseline/test/test_dataloader.py deleted file mode 100644 index 53aab547..00000000 --- a/reproduction/Summarization/Baseline/test/test_dataloader.py +++ /dev/null @@ -1,36 +0,0 @@ - -import unittest - -import sys -sys.path.append('..') - -from data.dataloader import SummarizationLoader - -vocab_size = 100000 -vocab_path = "testdata/vocab" -sent_max_len = 100 -doc_max_timesteps = 50 - -class TestSummarizationLoader(unittest.TestCase): - - def test_case1(self): - sum_loader = SummarizationLoader() - paths = {"train":"testdata/train.jsonl", "valid":"testdata/val.jsonl", "test":"testdata/test.jsonl"} - data = sum_loader.process(paths=paths, vocab_size=vocab_size, vocab_path=vocab_path, sent_max_len=sent_max_len, doc_max_timesteps=doc_max_timesteps) - print(data.datasets) - - def test_case2(self): - sum_loader = SummarizationLoader() - paths = {"train": "testdata/train.jsonl", "valid": "testdata/val.jsonl", "test": "testdata/test.jsonl"} - data = sum_loader.process(paths=paths, vocab_size=vocab_size, vocab_path=vocab_path, sent_max_len=sent_max_len, doc_max_timesteps=doc_max_timesteps, domain=True) - print(data.datasets, data.vocabs) - - def test_case3(self): - sum_loader = SummarizationLoader() - paths = {"train": "testdata/train.jsonl", "valid": "testdata/val.jsonl", "test": "testdata/test.jsonl"} - data = sum_loader.process(paths=paths, vocab_size=vocab_size, vocab_path=vocab_path, sent_max_len=sent_max_len, doc_max_timesteps=doc_max_timesteps, tag=True) - print(data.datasets, data.vocabs) - - - - diff --git a/reproduction/Summarization/Baseline/test/testdata/test.jsonl b/reproduction/Summarization/Baseline/test/testdata/test.jsonl deleted file mode 100644 index 42ad6a6f..00000000 --- a/reproduction/Summarization/Baseline/test/testdata/test.jsonl +++ /dev/null @@ -1,100 +0,0 @@ -{"id": "1815744", "text": ["Even before the Metropolitan Opera 's Saturday matinee of Mozart 's `` Magic Flute '' began , this family-friendly version of Julie Taymor 's 2004 production looked to be a huge success .", "Children were everywhere , a rare sight at the venerable institution .", "They were having pictures taken in front of the house , dashing up and down the stairs of the Grand Promenade and , before long , sitting up in their seats all over the auditorium .", "Peter Gelb , the Met 's new general manager , whose multifaceted outreach efforts have already become a model for opera companies everywhere , has rightly stated that the major impediment to making this art form accessible to children is that most operas are simply too long .", "So besides translating the text from German into English , the solution here was to cut the production , which normally lasts 3 hours 10 minutes , down to 100 minutes without an intermission .", "Actually the matinee clocked in at close to two hours , but few 
of the children seemed to mind .", "The audience was remarkably attentive and well behaved .", "Of course one strict Met protocol -- if you leave the auditorium , you are not allowed re-entry until intermission -- was wisely ditched for the day , so children could take restroom breaks .", "Shortening the score involved what must have been painstaking decisions .", "The overture and several entire arias and ensembles were cut .", "Other arias were abridged through some very deft trims .", "Otherwise the Met went all out .", "The cast was excellent , and James Levine conducted .", "The very free English translation by the poet J . D . McClatchy was clever and singable .", "Papageno , still without a girlfriend and miserable , asks forlornly : `` Is my face just one big puddle .", "Are n't I cute enough to cuddle .", "`` The Papageno , Nathan Gunn , was certainly cute enough .", "This dynamic baritone exuded charm and cavorted about the stage like an acrobat .", "At one point he tried to flee danger by scurrying up the side of a huge plastic tube he was trapped in , only to slide back down , landing with the floppy-limbed aplomb of a Charlie Chaplin .", "He seemed the darling of every child in attendance -LRB- and the audience included Mr. Gunn 's five -RRB- .", "The stupendous bass Ren\u00e9 Pape was Sarastro .", "A lovely , clear-voiced lyric soprano , Ying Huang , in her debut role at the Met , was an alluring Pamina .", "Matthew Polenzani brought his sweet tenor voice and wholesome appeal to Prince Tamino .", "The agile coloratura soprano Erika Miklosa was a vocally fearless and aptly chilling Queen of the Night .", "As the wicked Monostatos , the trim tenor Greg Fedderly was unrecognizable with his flabby , fake pot belly , which induced giggles every time he exposed it .", "I am on record as being no fan of Ms. Taymor 's production , which to me is a mishmash of imagery , so cluttered with puppets , flying objects and fire-breathing statues that it overwhelms Mozart 's music .", "But this show was not presented with me in mind .", "So let me offer the reactions of three young attendees .", "Amitav Mitra , my neighbor , who is 8 , came as my guest .", "And Kira and Jonah Newmark , 9 and 7 , the children of friends , were also glad to share their critiques afterwards .", "For Amitav , this was his first opera .", "Though Jonah had seen opera videos at home with his sister , he too was trying the real thing for the first time .", "Kira , a burgeoning opera buff , has attended , as she put it , `` real three-hour operas , '' most recently `` The Barber of Seville '' at the New York City Opera .", "Not surprisingly Ms. Taymor 's fanciful sets , costumes and puppets won raves from this trio of critics .", "But their most revealing comments were about the singing and the story .", "The singing `` was loud , '' Amitav said .", "Jonah added , `` It was too loud . ''", "Kira more or less agreed .", "I pressed them about this .", "Today , when children hear amplified music everywhere , often channeled right into their ears through headphones , how could unamplified singing seem too loud .", "Amitav clarified their reactions when he said that the singing was `` too loud for human voices , '' adding , `` I never thought voices could do that . ''", "So their reaction was not a complaint about excessive volume , but rather an attempt to explain the awesome impression made by Ms. Miklosa 's dazzlingly high vocal flights as the Queen of the Night , or Mr. 
Pape 's unearthly powerful bass voice , or the amassed chorus in the temple scenes .", "It takes a while for young opera neophytes to adjust to such mind-boggling voices , to realize that this strange , unamplified `` loudness '' is actually amazing .", "The other common reaction concerned the story , which all three children enjoyed .", "Kira , though , was struck by the gravity of Prince Tamino 's dilemma .", "`` Tamino was a little too serious for me , '' she said , adding : `` He never does anything that 's funny .", "He takes things seriously . ``", "I think Mr. Levine , who conducted a glowing and elegant performance , would be pleased by Kira 's reaction .", "Mr. Levine made certain that some of the opera 's most somber episodes were included , like the long scene in which the confused Tamino is confronted by the austere Speaker -LRB- David Pittsinger -RRB- , a stalwart member of Sarastro 's brotherhood , at the entrance to the temple .", "Like most fairy tales `` The Magic Flute '' is a mysterious story of good and evil .", "Naturally , Ms. Taymor 's production makes the opera 's monsters quite charming , like the puppet bears who are enchanted by Tamino 's magic flute .", "And the boys singing the kindly Three Spirits -LRB- Bennett Kosma , Jesse Burnside Murray and Jacob A . Wade -RRB- are turned spectral and eerie , with their bodies painted white and Methuselah beards .", "This `` Magic Flute '' was the first Met opera that was transmitted live in high-definition video to some 100 movie theaters around the world .", "Ultimately the point of this technological outreach is to entice newcomers into attending opera performances .", "The children I spoke with are likely to be back .", "Summarizing his reactions to `` The Magic Flute , '' Jonah said , `` I do n't think it 's going to be the best opera I 'm going to go to in my life . ``", "What he meant , explaining further , was , `` I 'm , like , going to go to others that will be even better . ``", "MUSIC ."], "summary": ["Julie Taymor family friendly production of Mozart 's The Magic Flute at Metropolitan Opera features libretto translated into English and has been cut from nearly four hours to less than two .", "Is filled with puppets and elaborate costumes that appeal to children .", "Production is part of Met general manager Peter Gelb 's initiative to expand opera audience .", "Photos ."], "publication": "nyt50", "label": [0, 3, 6, 33], "tag": ["Arts"]} -{"id": "1815747", "text": ["When it comes to spectacle , Hollywood enjoys nothing better than a nasty legal battle between two determined and egotistical adversaries : Bette Davis meets Joan Crawford , in a courtroom .", "In the past such face-offs have featured prominent figures like the former Disney chairman Michael D . Eisner , the DreamWorks co-founder Jeffrey Katzenberg , the superagent Michael S . Ovitz , the humorist Art Buchwald and the actress-writer Joan Collins .", "Many of them cringed to hear their private comments and inner thoughts offered up for public consumption .", "And so may it go if the headline-making book publisher Judith Regan proceeds with a lawsuit her lawyers have threatened against the News Corporation , which owns HarperCollins , the publishing house that fired her in December after the O . J . Simpson book and television project imploded .", "In an interview last week Ms. 
Regan 's lawyer , Bert Fields , said he was preparing to file a lawsuit for libel and wrongful termination against the News Corporation shortly after the New Year because of allegations that she had been fired after making anti-Semitic remarks during a heated phone conversation with a company lawyer .", "`` My present thought is that we would sue not just for breach of contract but for libel , '' he said .", "`` They issued false and defamatory statements about Judith .", "This has been terribly destructive to her career , and I think the damages could be huge . ``", "A News Corporation spokesman declined to comment officially because the matter involved an imminent lawsuit , but he provided what he said was the company view : the News Corporation was confident about its case and not worried that a third party , a temporary worker who was briefly Ms. Regan 's assistant , had emerged to support Ms. Regan 's account of the conversation that led to the dismissal .", "`` We 're very confident that what we said is not false , `` said the company executive , who added that the News Corporation chairman , Rupert Murdoch , was taking an active interest in the matter .", "`` But this seems to be heading more toward a public relations war than a litigation . ''", "Well , naturally so .", "In Hollywood battles a war of words often precedes legal formalities , and Mr. Fields has plenty of experience .", "He has played legal counselor in headline-grabbing cases like Mr. Katzenberg versus the Walt Disney Company in the mid-1990s , and two Tom Cruise defamation cases , in Britain in 1998 and in California in 2003 , both over allegations that he was homosexual .", "In those instances Mr. Katzenberg and Mr. Cruise got satisfaction , either financial or legal , although they had to suffer through headlines that , in Mr. Cruise 's case , repeated the unsupported allegations and , in Mr. Katzenberg 's case , unearthed a nasty remark about his diminutive height .", "But a case involving Ms. Regan may well prove to be uglier and more prurient .", "Legal scholars and prominent litigators said that proving libel is extremely difficult , but it opens the door to a public airing of the litigants ' private affairs .", "`` Libel is a very , very high mountain of proof to climb , and you can get destroyed in the process , '' said Pierce O'Donnell , a leading litigator who successfully sued Paramount in the 1980s for Mr. Buchwald , who contended that the studio had stolen his screenplay idea in its movie `` Coming to America . ''", "Ms. Regan , whose lively personal life is already well-worn fodder for tabloid gossip , will find lawyers poring over every off-color remark she may have made , Mr. O'Donnell said .", "Former colleagues have already emerged to confirm that she was reprimanded in the past for making an anti-Semitic remark at work .", "Mr. O'Donnell said : `` She will open herself up to every scurrilous allegation .", "She will not enjoy one minute of this litigation .", "They 'll hire a bulldog , and it 'll be a bloodletting . ``", "Meanwhile HarperCollins , which owns ReganBooks , would probably face uncomfortable questions about why it tolerated Ms. Regan for so long if the company found her behavior so objectionable .", "And executives would also have to submit to a detailed examination of their decision-making process in the Simpson project , a book titled `` If I Did It '' and a television interview conducted by Ms. 
Regan , which unleashed such a cascade of public outrage that both were canceled .", "`` Everything that went on will get into evidence , '' Mr. Fields promised .", "`` What really happened with that interview , what Jane Friedman , '' the president of HarperCollins and Ms. Regan 's former boss , `` is really like . ''", "A significant issue for a jury , Mr. O'Donnell said , would be whether the accusations of anti-Semitism were merely a pretext for getting rid of the controversial publisher .", "HarperCollins `` may say that this was the straw that broke the camel 's back , but the other side may say the straw was really the embarrassment over O . J . , `` he said .", "The breach-of-contract matter might be considered a garden-variety case , except for the accusation of anti-Semitism .", "With four years remaining on her contract , Ms. Regan would have been paid several million dollars had she not been fired for cause , a general term that usually includes behavior like dishonesty , failure to carry out orders or sexual harassment .", "But the charge of anti-Semitism itself is not a clear-cut one for either side , legal experts said .", "In its termination letter News Corporation did not specify the reason for Ms. Regan 's dismissal , though the company did later release its account of the allegedly anti-Semitic remarks to the news media .", "Mr. Fields said he planned to argue that a HarperCollins lawyer , Mark Jackson , knew that Ms. Regan never said that a `` Jewish cabal '' was in league against her during that phone conversation .", "`` He stated it , knowing it was false , '' Mr. Fields said .", "`` These days people do n't want much to do with an anti-Semite or an anti-black person . ``", "Others said it was more complicated .", "`` It really is a he said-she said , '' said Alan R . Friedman , a leading New York entertainment lawyer .", "`` Both sides have their reasons .", "Both witnesses have reason to be less than objective .", "And you have to show that it has an impact on her reputation .", "Will this really change anyone 's opinion of Judith Regan , a ` Jewish cabal ' .", "This is n't like denying that the Holocaust occurred .", "That 's just a burden . ``", "David R . Ginsburg , who is executive director of the entertainment law program at the University of California , Los Angeles , said that if a jury believed she made the remark , it would be a basis for firing .", "`` Taking it in the abstract , it is damaging for a company to take no action while a senior executive purportedly makes anti-Semitic remarks , '' he said .", "Several lawyers said they imagined that the matter would never have received this much attention if not for the furor over O . J . Simpson .", "And they said they believed it would be settled quietly once the media noise died down because the negative implications for both sides were so great .", "On the eve of the new year there were indications that a window might open toward conciliation .", "Mr. Fields suggested an apology : `` In my view they should retract what they said .", "If they did , it might limit the damages .", "We 'd still sue for breach of contract , but we might not sue for libel . ``", "The News Corporation executive said the company would prefer to settle the matter quietly .", "`` We 'd like this to go away quickly , amicably and professionally , `` he said .", "`` We 're not going to settle at any price .", "We have a very strong case .", "But having the winning case does n't mean you want to go to court . 
`` ."], "summary": ["Judith Regan says she is preparing to file lawsuit against News Corp , Rupert Murdoch-owned media giant that owns Harper Collins publishing house , which fired her after O J Simpson book project imploded .", "Regan 's lawyer Bert Fields says libel and wrongful termination suit is related to allegations that Regan had been fired after making anti-Semitic remarks during heated phone conversation with company lawyer .", "News Corp declines to comment officially .", "Photos .", "Chart of other face-offs between famous names and large corporations ."], "publication": "nyt50", "label": [4, 3], "tag": ["Arts", "Books"]} -{"id": "1815756", "text": ["Last year 's debut of Howard Stern 's radio show on Sirius Satellite Radio put the technology on the map , raising the public 's awareness of satellite radio and helping to boost significantly subscriber totals for Sirius and its larger rival , XM Satellite Radio .", "Today , thanks in part to the outsize radio personality , the Stern Effect has increased Sirius 's base to about six million subscribers , up 80 percent from one year ago .", "XM has increased its numbers by more than 30 percent , ending 2006 with 7.7 to 7.9 million customers .", "`` There is a tendency to view satellite radio as if the glass is half empty , and that it is a failure or disappointment , '' said Craig Moffett , senior cable analyst for Sanford C . Bernstein .", "`` In fact , nothing could be further from the truth , '' he said .", "`` Satellite radio is growing faster than any consumer product except for the iPod . ''", "But Sirius and XM shares have taken a battering on Wall Street , with prices for both off about 50 percent from their year-ago levels .", "On Friday , Sirius closed at $ 3.54 , while XM ended the year at $ 14.45.", "And now , the industry may be getting ready to try an even more dramatic third act -- a possible attempt to merge the two services .", "The benefits of a merger have been promoted by the chief executive of Sirius , Mel Karmazin , for a number of months , and Sirius officials continue to say that a merger would be a good thing .", "XM has not commented on the possibility , and neither company has said whether they have actually discussed the issue .", "`` When you have two companies in the same industry , we have a similar cost structure .", "Clearly , a merger makes sense from an investor 's point of view to reduce costs , and to have a better return , `` said David Frear , the chief financial officer for Sirius .", "Both companies have continued to lose hundreds of millions of dollars because of marketing and other subscriber acquisition expenses .", "During the year , XM sharply lowered its expectations for 2006 subscriber levels , from January 's predicted end-of-year total of 9 million to a maximum of 7.9 million .", "-LRB- Sirius reduced its subscription projection by about 100,000 . 
-RRB-", "Nate Davis , XM 's president , said his company believed that the slower-than-expected growth rate was of its own making and not a result of any market indifference .", "`` We did not stimulate the market with new products , '' he said .", "XM 's most talked-about receivers , the Pioneer Inno and Samsung Helix , were first announced one year ago .", "Several new receiver models will be introduced later in 2007 .", "In addition , production of some receivers was temporarily halted to stop a condition that was allowing satellite signals to be picked up by neighboring vehicles .", "The hiccups typical of fledgling industries appear to be over .", "Both companies have their programming lineups largely in place and a wide range of receivers available in retail stores .", "In addition to Howard Stern , Sirius features personalities like Deepak Chopra , Judith Regan , Richard Simmons and Martha Stewart .", "Sports programming includes N.B.A. , N.F.L. , and N.H.L. games .", "Nascar programming begins this year .", "XM has shows with hosts including Bob Dylan , Ellen Degeneres , `` Good Morning America '' personalities , and Oprah Winfrey .", "XM broadcasts every Major League Baseball game as well as P.G.A. golf .", "Yet the vast majority of programming remains duplicative .", "Each company offers a wide variety of rock , pop , folk , and other musical genres , as well as the same news channels , which include the BBC , CNN , Fox , and MSNBC .", "Sirius and XM each claim that their music channels are more compelling than the competition 's , but most casual listeners would be hard-pressed to tell the difference .", "`` The services mirror each other tremendously , '' said Richard Doherty , an analyst with the Envisioneering Group , a research firm .", "`` More people know that one service has Howard Stern than know which one has him . ''", "Except for a relatively small handful of viewers looking for particular programs , consumers searching for a satellite service in a retail store often make their decision not on the merits of one over the other , but which one is more convenient to buy .", "`` For the subscriber , it all comes down to which one of the two is closer to the cash register .", "Customers can not tell the difference between the two services , `` Mr. Moffett said .", "Customer choice will play an even smaller role in the coming years as both companies come to rely more on selling satellite radio as a factory-installed option on new cars , and less on receivers sold at retail stores .", "Both companies have exclusive agreements with the automobile companies .", "Customers typically get free service for a number of months , and then must pay $ 12.95 a month to continue listening .", "XM has exclusive arrangements with General Motors , Honda , Hyundai , Nissan and Porsche .", "Sirius has similar alliances with BMW , DaimlerChrysler , Ford , Kia and VW-Audi .", "Today , about 63 percent of XM 's subscribers are buyers of new cars , and Sirius 's new subscribers are derived equally from new car and after-market sales .", "As more cars are equipped with satellite radios , the new car market could grow to as high as 70 percent of sales in the next few years , Mr. Moffett said .", "`` We see greater and greater demand in the car market , '' said Mr. Davis of XM .", "`` And we think the used car market will be an opportunity to sell to new subscribers . 
''", "Used car subscribers incur no additional hardware costs if the receiver is already in place .", "And if the companies were to merge and effectively double their subscriber base , the new company could reduce programming costs through increased negotiating clout , removal of duplicative channels and elimination of redundant employees .", "Whether Sirius and XM attempt to merge , a number of variables that will determine the size of the industry 's success remain unknown .", "They include the number of new cars that will be equipped with satellite radio receivers .", "The percentage of new car owners who will subscribe after the free trial period ends .", "And whether purchasers of used cars equipped with satellite radio will be more or less likely to subscribe than new car owners .", "The business may also be vulnerable to subscription overload , Mr. Doherty said , if consumers find that monthly recurring expenses from cellphone bills , cable TV , and other services are too high .", "Yet even if that is true , there is little doubt that the concept of satellite radio is no longer alien to consumers .", "According to Sirius , 83 percent of consumers aged 18 to 55 are now aware of the technology .", "Mr. Frear became personally cognizant of that when he tried to rent a car with a Sirius radio recently but found they were all taken .", "`` Every year , satellite radio just sinks deeper and deeper into the public consciousness , '' he said .", "Correction : January 16 , 2007 , Tuesday An article in Business Day on Jan . 1 about the increase in subscribers for Sirius Satellite Radio and its rival , XM Satellite Radio , misstated one news channel that is offered by both companies .", "It is CNBC , not MSNBC ."], "summary": ["Sirius Satellite Radio and XM Satellite Radio may try merger .", "Benefits of merger have been promoted by Sirius chief executive Mel Karmazin and Sirius officials continue to say that merger would be good thing .", "XM has not commented on possibility , and neither company has said whether they have actually discussed issue .", "Both companies have continued to lose hundreds of millions of dollars because of marketing and other subscriber acquisition expenses .", "Graphs .", "Photos ."], "publication": "nyt50", "label": [9, 13, 10], "tag": ["Business"]} -{"id": "1815757", "text": ["The business of grain marketing has not been its usual sleepy self lately in Western Canada .", "Plans by the government to strip the Canadian Wheat Board of its monopoly control over most of the country 's wheat and barley exports have provoked a fight that is pitting farmer against farmer and the agency against the government .", "The wheat board , founded 75 years ago as part of a wave of cooperative ventures for improving farmers ' lives , is now one of the world 's largest grain traders , with annual sales of $ 4 billion to $ 6 billion .", "The board is , in many respects , among the last of the cooperative projects that remains true to its original goals .", "But those goals now have little in common with the open market philosophy of the minority Conservative government that came to power just under a year ago .", "Shortly before Christmas , that clash resulted in an unusual cabinet order to fire the wheat board 's president , Adrian Measner .", "The fate of the board will be eagerly awaited in the United States , where farm groups have unsuccessfully challenged the Canadian board 's monopoly .", "Among the companies likely to move into Canada 's export market , if it is opened , are 
commercial grain traders like Cargill of Minneapolis and Archer Daniels Midland of Decatur , Ill .", "`` There is absolutely no doubt that part of the reason the Conservative government is pushing as hard as it is pushing is , I suspect , that they are feeling pressure from the Americans , '' said Murray Fulton , an agriculture economist who directs a center for the study of cooperatives at the University of Saskatchewan in Saskatoon .", "The wheat board concept is simple .", "In exchange for its monopoly over wheat destined for export from Canada 's three prairie provinces , as well as a small part of British Columbia , it pays every farmer the same average sale price .", "Overall , Professor Fulton said , price averaging has provided most farmers with greater stability and higher prices than they would have obtained in an open market .", "A study commissioned by the wheat board on barley prices released in December gives more specifics .", "The report , by Richard S . Gray of the University of Saskatchewan , Andrew Schmitz of the University of Florida and Troy G . Schmitz at Arizona State University , concluded that farmers ' barley revenue from the wheat board was 59 million Canadian dollars higher from 1995 to 2004 than it would have been in an open market system .", "Wheat board supporters argue that the board also helps farmers by negotiating terms with railways and ports .", "The system seems to have supporters on the buyers ' side as well .", "It provides a more uniform grading of grains than is available in the United States , for instance .", "But averaging prices has a significant drawback for some farmers .", "By definition , an average price is often lower than what individual farmers who live near the United States border could obtain by directly trucking their harvest south .", "The province of Alberta , home to Prime Minister Stephen Harper , has long opposed the board for that reason , among others .", "Chuck Strahl , the minister of agriculture , has repeatedly said that all he wants is to give farmers the choice of pooling their risk through the wheat board or going out on their own .", "`` We are trying to get more marketing choice for farmers , '' he told the House of Commons in December .", "`` We want to put more money in their pockets .", "We want them to take advantage of their own expertise . ``", "Professor Fulton argued in a separate study published last month that the wheat board can not survive in an open market .", "Indeed , its history before it was granted a monopoly supports that idea .", "The operation floundered as farmers chose the open market when prices were high , returning to the board only during hard times .", "Mr. Strahl has not found it easy to end the board 's monopoly .", "A 1998 law gave farmers control of the board by letting them directly elect 10 of its 15 directors , with the balance , including the president , being appointed by the government .", "-LRB- Because the government guarantees the board 's finances , however , it retained the power to issue cabinet orders . 
-RRB-", "The Conservatives , in the minority , have been unable to get any of the other parties to support changes in the wheat board .", "An attempt earlier this year to amend the board 's legislation was resoundingly defeated by opposition parties .", "And when five wheat board positions came up for election this fall , the government heavily promoted candidates who backed its plan .", "Farmers , however , responded by electing four candidates who favor the current monopoly .", "Now Mr. Strahl plans a vote on creating an open barley market early in 2007 .", "But he has also made it clear that the government will not be bound by its results .", "The level of support for the government 's open market plan is unclear .", "Even Mr. Strahl said during recent committee testimony : `` I think I 've had 4,500 letters on wheat board issues since I 've been in office .", "They are almost equally divided . ``", "Mr. Measner , a 34-year veteran of the wheat board , had become something of a minor celebrity by rebuffing orders from Mr. Strahl and his staff to stop promoting the monopoly system and start backing the government 's plan .", "Before his dismissal on Dec . 22 , Mr. Measner began a court challenge of a cabinet order requiring the board to support the government , arguing that it is unconstitutional and outside the government 's power over the board .", "`` The government has a direction they 're taking the wheat board , and it does n't matter what people say or what people want , `` Mr. Measner said in an interview from his home after his firing .", "`` My position is that it should be farmers making the decision . ''", "Ken Ritter , a farmer from Saskatchewan who is the board 's chairman , said the legal challenge of the government 's orders will continue as will the board 's campaign to keep the current system .", "`` This is very , very divisive , '' Mr. Ritter said .", "`` This has gone on for 40 years . ''", "Professor Fulton said that by firing Mr. Measner and attempting to gag the board , the government runs the danger of alienating even some of the farmers who back its open market position .", "`` The view seems to be shifting , '' Professor Fulton said .", "`` Even farmers who might support a change in the wheat board are saying , why do it this way . 
'' ."], "summary": ["Plans by Canada to strip Canadian Wheat Board of its strict control of wheat and barley exports provokes fight between farmers and government .", "Board , founded 75 years ago , pays all farmers average sale price over market value in exchange for monopoly .", "Supporters say system helps farmers negotiate terms with buyers while critics say some living near United States border could potentially generate profits higher than average price .", "Vote is planned on creating open market .", "Photo ."], "publication": "nyt50", "label": [1, 18, 34, 14], "tag": ["Business"]} -{"id": "1815760", "text": ["In brand-new offices with a still-empty game room and enough space to triple their staff of nearly 30 , a trio of entrepreneurs is leading an Internet start-up with an improbable mission : to out-Google Google .", "The three started Powerset , a company whose aim is to deliver better answers than any other search engine -- including Google -- by letting users type questions in plain English .", "And they have made believers of Silicon Valley investors whose fortunes turn on identifying the next big thing .", "`` There 's definitely a segment of the market that thinks we are crazy , `` said Charles Moldow , a partner at Foundation Capital , a venture capital firm that is Powerset 's principal financial backer .", "`` In 2000 , some people thought Google was crazy . ''", "Powerset is hardly alone .", "Even as Google continues to outmaneuver its main search rivals , Yahoo and Microsoft , plenty of newcomers -- with names like hakia , ChaCha and Snap -- are trying to beat the company at its own game .", "And Wikia Inc . , a company started by a founder of Wikipedia , plans to develop a search engine that , like the popular Web-based encyclopedia , would be built by a community of programmers and users .", "These ambitious quests reflect the renewed optimism sweeping technology centers like Silicon Valley and fueling a nascent Internet boom .", "It also shows how much the new Internet economy resembles a planetary system where everything and everyone orbits around search in general , and around Google in particular .", "Silicon Valley is filled with start-ups whose main business proposition is to be bought by Google , or for that matter by Yahoo or Microsoft .", "Countless other start-ups rely on Google as their primary driver of traffic or on Google 's powerful advertising system as their primary source of income .", "Virtually all new companies compete with Google for scarce engineering talent .", "And divining Google 's next move has become a fixation for scores of technology blogs and a favorite parlor game among technology investors .", "`` There is way too much obsession with search , as if it were the end of the world , '' said Esther Dyson , a well-known technology investor and forecaster .", "`` Google equals money equals search equals search advertising .", "It all gets combined as if this is the last great business model . ``", "It may not be the last great business model , but Google has proved that search linked to advertising is a very large and lucrative business , and everyone -- including Ms. 
Dyson , who invested a small sum in Powerset -- seems to want a piece of it .", "Since the beginning of 2004 , venture capitalists have put nearly $ 350 million into no fewer than 79 start-ups that had something to do with Internet search , according to the National Venture Capital Association , an industry group .", "An overwhelming majority are not trying to take Google head on , but rather are focusing on specialized slices of the search world , like searching for videos , blog postings or medical information .", "Since Google 's stated mission is to organize all of the world 's information , they may still find themselves in the search giant 's cross hairs .", "That is not necessarily bad , as being acquired by Google could be a financial bonanza for some of these entrepreneurs and investors .", "But in the current boom , there is money even for those with the audacious goal of becoming a better Google .", "Powerset recently received $ 12.5 million in financing .", "Hakia , which like Powerset is trying to create a `` natural language '' search engine , got $ 16 million .", "Another $ 16 million went to Snap , which has focused on presenting search results in a more compelling way and is experimenting with a new advertising model .", "And ChaCha , which uses paid researchers that act as virtual reference librarians to provide answers to users ' queries , got $ 6.1 million .", "Still , recent history suggests that gaining traction is going to be difficult .", "Of dozens of search start-ups that were introduced in recent years , none had more than a 1 percent share of the United States search market in November , according to Nielsen NetRatings , a research firm that measures Internet traffic .", "Amassing a large audience has proved to be a challenge even for those with a track record and resources .", "Consider A9 , a search engine owned by Amazon.com that received positive reviews when it began in 2004 and was run by Udi Manber , a widely recognized search specialist .", "Despite some innovative features and early successes , A9 has captured only a tiny share of the market .", "Mr. Manber now works for Google , where he is vice president of engineering .", "The setback apparently has not stopped Amazon or its chief executive , Jeffrey P . Bezos , from pursuing profits in search .", "ChaCha said it counts an investment company owned by Mr. Bezos among its backers , and Amazon is an investor in Wikia .", "An Amazon spokeswoman said Mr. Bezos does not comment about his personal investments .", "Some start-ups are similarly bullish .", "`` We expect to be one of the top three search engines , '' said Riza C . Berkan , the chief executive of hakia .", "It is a bold claim , given that hakia 's technology is not yet ready for prime time , and Mr. Berkan readily concedes it will take time to perfect it .", "The dream , however , is quintessential Silicon Valley .", "`` It is hard for me to believe that anybody thinks they can take Google 's business from Google , `` said Randy Komisar , a venture capitalist who was once known as Silicon Valley 's `` virtual C.E.O. '' for his role as a mentor to scores of technology firms .", "`` But to call the game over because Google has been such a success would be to deny history . 
''", "In some ways , the willingness of so many to make multimillion-dollar investments to take on Google and other search companies represents a startling change .", "In the late 1990s , when Microsoft dominated the technology world , inventors and investors did everything they could to avoid competing with the software company .", "Yet many of today 's search start-ups are putting themselves squarely in the path of the Google steamroller .", "Most explain that decision in similar ways .", "They say that Google 's dominance today is different from Microsoft 's in the late 90s when its operating system was a virtual monopoly and nearly impossible to break .", "In the Internet search industry , `` you earn your right to be in business every day , page view after page view , click after click , '' said Barney Pell , a founder and the chief executive of Powerset , whose search service is not yet available .", "They also say that the market for search simply is too large to resist .", "Google , which , according to Nielsen , handles about half of all Internet searches in the United States , is valued at an astonishing $ 141 billion .", "So , the reasoning goes , anyone who can grab even a small slice of the search market could be well rewarded .", "`` You do n't need to be No . 1 to be worth billions of dollars , `` said Allen Morgan , a partner at Mayfield Fund , a venture capital firm that invested $ 10 million in Snap .", "The company is also backed by Bill Gross , an Internet financier who pioneered the idea of linking ads and search results , only to see Google seize the powerful business model and improve on it .", "Almost all of today 's search entrepreneurs also say that Google 's success lends credibility to their own long-shot quest .", "When Lawrence Page and Sergey Brin first started tinkering with what would become Google , other search engines like AltaVista and Lycos and Excite were dominant .", "But the companies that owned them were distracted by efforts to diversify their businesses , and they took their eye off the ball of Internet search and stopped innovating .", "Some now say that search has not evolved much in years , and that Google is similarly distracted as it introduces new products like word processors , spreadsheets and online payment systems and expands into online video , social networking and other businesses .", "`` The more Google starts to think about taking on Microsoft , the less it is a pure search play , and the more it opens the door for new innovations , '' said Mr. Moldow , the Foundation Capital partner .", "`` That 's great for us . 
``", "But at the same time , Google , Yahoo and Microsoft have thousands of engineers , including some of the world 's top search specialists , working on improving their search results .", "And they have spent billions building vast computer networks so they can respond instantly to the endless stream of queries from around the world .", "Search `` is becoming an increasingly capital-intensive business , '' said Marissa Mayer , Google 's vice president for search .", "That makes it harder for start-ups to catch up to the giants , she said .", "That is not stopping entrepreneurs from betting that they can .", "Powerset has search and natural-language experts among its two dozen employees , including former top engineers from Yahoo and a former chief linguist from Ask Jeeves , Ask.com' s predecessor .", "They are the kind of people who could easily land jobs at Google or Microsoft or Yahoo .", "Steve Newcomb , a Powerset founder and veteran of several successful start-ups , said his company could become the next Google .", "Or , he said , if Google or someone else perfected natural-language search before Powerset , then his company would make a great acquisition for one of the other search companies .", "`` We are a huge story no matter what , '' he said .", "Ms. Dyson , the technology commentator and Powerset investor , captured the optimism more concisely and with less swagger .", "`` I love Google , '' she said , `` but I love the march of history . '' ."], "summary": ["Search-engine companies such as Powerset , hakia , ChaCha and Snap are trying to be next Google .", "Wikia Inc , company started by founder of Wikipedia , plans to develop search engine that , like popular Web-based encyclopedia , would be built by community of programmers and users .", "These ambitious quests reflect renewed optimism sweeping technology centers like Silicon Valley and fueling nascent Internet boom .", "It also shows how much new Internet economy resembles planetary system where everything and everyone orbits around search in general , and around Google in particular .", "Photo ."], "publication": "nyt50", "label": [7, 9, 8], "tag": ["Technology", "Business"]} -{"id": "1815761", "text": ["As the Jewel of the Seas , a luxury cruise ship owned by Royal Caribbean , steamed down the coast toward Bermuda in late October , there were plenty of distractions , including two Olympic-size pools , Latin dance lessons and Boozer Bingo .", "But Birdie Jaworski was having none of it .", "Ms. Jaworski , a single mother who sells Avon products in tiny Las Vegas , N.M. , instead spent much of her time inside the ship 's lounges and meeting rooms with other book enthusiasts , listening to authors like Elinor Lipman -LRB- `` My Latest Grievance '' -RRB- , Terri Jentz -LRB- `` Strange Piece of Paradise '' -RRB- , and Lynnette Khalfani -LRB- `` The Money Coach 's Guide to Your First Million `` -RRB- .", "`` I was having so much fun hanging out with people with the same literary interests as me , '' said Ms. Jaworski , who runs a book club in her hometown and is herself an aspiring author .", "Ms. Jaworski was one of about 150 book lovers aboard the ship for a five-day literary-themed cruise out of Boston .", "Known as Book It to Bermuda , it is just the latest example of a growing genre of cruises that could be called Ship Lit .", "Often sponsored by publishers , the cruises , aboard commercial liners , feature popular authors who give readings and seminars -- even knitting lessons -- to boatfuls of book lovers .", "Ms. 
Jaworski was particularly impressed with a presentation by Ms. Jentz , whose book recounts a brutal attack she survived in Oregon 30 years ago , and subsequent journey back to the scene to investigate .", "The seas were rough that day , according to Ms. Jaworski , so Ms. Jentz delivered her presentation sitting down .", "`` I enjoyed it , even though I was feeling a little queasy , '' said Ms. Jaworski .", "For authors and their publishers , the cruises offer an opportunity to promote their books to a captive audience of hundreds of enthusiastic readers .", "People tend to read a lot on cruises , and the profile of a typical Ship Lit cruise customer -- older and female -- is an especially good match for romance , health and fitness books , publishers say .", "`` It 's become increasingly difficult to create and build a book and make it successful , `` says Keith Fox , president of McGraw-Hill Professional , the division of the McGraw-Hill Companies that published Ms. Khalfani 's financial advice book .", "Literary cruises , he said , are `` an opportunity for us to get our authors in front of a demographic that loves books . ''", "Authors say the environment makes for a special experience .", "`` You get to connect with people in a way that you never would at a bookshop , '' Ms. Khalfani said .", "`` I had people stopping me in the bathroom , in the spa .", "I probably gave another three or four minisessions just sitting around talking shop . ``", "The feeling was mutual .", "`` Bermuda was stunning , but the authors made the cruise , '' says Julie Rogers , a self-described `` crazed reader '' from San Jose , Calif . , who was on the cruise .", "The company that has done the most to promote Ship Lit , however , is Levy Home Entertainment , a book distributor with thousands of retail accounts , including Wal-Mart Stores , Target , and Kmart .", "With the support of major publishers , Levy has organized two Authors at Sea cruises that each featured more than two dozen authors .", "Mary Higgins Clark , Paul Levine , a mystery writer , and Arthur Frommer , a travel expert , have been headliners on the cruises , while Jackie Collins and Dean Koontz have lent their star power to bon voyage parties .", "The next cruise is tentatively scheduled for 2008 .", "Levy has long sought novel ways to spur sales of mass market paperbacks , which the company says account for a third of its sales , such as organizing bus tours and driving groups of authors from city to city to do signings .", "The idea for a literary cruise grew out of that experience , says Pam Nelson , director of promotions for Levy .", "Publishers say it is difficult to measure the direct impact of the cruises , but they say participating authors see an increase in sales .", "Harlequin Enterprises sent a half dozen of its writers on the Authors at Sea cruise last spring .", "Craig Swinwood , executive vice president for sales and marketing at Harlequin , notes that those authors , including Debbie Macomber and Carla Neggers , had record years .", "Still , `` you could do five cities in that same week , '' on a regular book tour , he said .", "Books by participating authors are sold onboard .", "Levy sold about 2,500 books on its last cruise .", "And the marketing starts months beforehand .", "In Levy 's case , featured books are distributed with an Authors at Sea logo on the covers , and tucked inside each book is a coupon for $ 250 toward the price of the cruise .", "But the real payoff may come from the word-of-mouth after vacationers 
return home , telling their friends and posting their thoughts on one of the many book-oriented Web sites and blogs .", "`` These are not just readers .", "These are power readers that can really drive trends in the book business , `` says John Lindsay , a vice president at Levy .", "Indeed , the Internet has changed the dynamics of the book business in profound ways .", "Ms. Macomber , who has written dozens of mass market novels , keeps in touch with her fans through her own Web site .", "`` We 're used to being in that celestial cloud , `` she said .", "`` Now authors are out there .", "You have to be . ``", "As a featured author on the Levy cruise , Ms. Macomber gave knitting lessons to attendees -LRB- knitting features prominently in her 2005 novel , `` A Good Yarn '' -RRB- .", "After the cruise , she added the readers she had met to her mailing list , and many have left messages on her online guest book , she said .", "Kate Duffy , editorial director of the Kensington Publishing Corporation -LRB- whose author , Beverly Barton , was on the last Levy cruise -RRB- , courts these readers , whom she affectionately calls `` the big mouths , '' sending them manuscripts and soliciting their opinions .", "After attending the Authors at Sea trip , she said , `` I added 10 more big mouths to my list . ''", "MEDIA ."], "summary": ["Literary cruises give authors and their publishers opportunity to promote books to captive audience of enthusiastic readers .", "Environment provides connection with readers that authors can not achieve through bookstore appearances or other events .", "Cruises are becoming popular as it becomes increasingly difficult to successfully market new books .", "Photos ."], "publication": "nyt50", "label": [10, 35], "tag": ["Business", "Books"]} -{"id": "1815789", "text": ["When Ban Ki-moon takes over as secretary general of the United Nations on Monday , he may quickly have to grapple with the crisis in Darfur , fighting in Somalia and the continuing strife in the Middle East .", "But he will have to wait to enjoy one of the more glamorous trappings of his new post : the secretary general 's official residence on Sutton Place , the exclusive Manhattan enclave off the East River .", "The General Assembly recently approved a $ 4.5 million renovation of the residence , a 14,000-square - foot neo-Georgian attached town house with four floors and a basement .", "Until October , when the work is expected to be completed , Mr. Ban and his wife will sleep and entertain in a suite at the Waldorf-Astoria , where they have been living since November .", "Choi Soung-ah , who has been serving as a spokeswoman during Mr. Ban 's transition , said that while he would have preferred to move directly into the Sutton Place residence , `` it 's just essential refurbishment that 's been pushed off for years . ``", "`` He would rather , of course , be in the official residence and not living in a hotel where he ca n't really unpack his own things , his own belongings , `` Ms. Choi said .", "`` There is such a thing as the official residence of the secretary general .", "Of course that would be better , and it would be very convenient . ``", "The residence was once home to Anne Morgan , the daughter of J . P . 
Morgan , the financier .", "It was donated to the United Nations in 1972 and has not been significantly refurbished since 1950 , according to a September report prepared by the secretary general 's office .", "The report estimated that the renovation effort would cost $ 4.49 million .", "Of that , $ 650,900 would go toward security upgrades , including $ 137,700 for a digital video-recording system and additional cameras .", "The price includes $ 200,000 to upgrade the kitchen , $ 100,000 to redo the restrooms and $ 2.1 million for a central heating , ventilation and air-conditioning system .", "The upgrades that are not related to security total $ 255 per square foot .", "The General Assembly also approved $ 202,500 for temporary accommodations for the incoming secretary general and his family .", "Details in the report hint at the sort of domestic annoyances the departing secretary general , Kofi Annan , has dealt with since he took up the post in 1997 .", "The house currently `` poses safety hazards '' and is prone to `` severe malfunctions , requiring increasingly frequent emergency repairs , '' the report says .", "Specifically , it describes a `` technologically obsolete '' telecommunications system , an 85-year-old electrical wiring system unequipped to handle the phalanx of communications and security technology required by a modern world leader and an elevator that violates New York City 's safety code .", "The building is heated with steam , which frequently leaks from `` heavily corroded '' pipes and fittings , the secretary general 's report says , damaging walls and furniture .", "The high-powered fans that are used to address the leaks sometimes make matters worse , by overloading the electrical circuit and causing power failures .", "The General Assembly has also approved a separate , extensive renovation of the United Nations headquarters , which is expected to cost $ 1.9 billion and take seven years .", "In the meantime , Mr. Ban , the former South Korean foreign minister , will not have to wait to move into his new office , on the 38th floor of the gleaming Secretariat building .", "He will do so starting Monday , but not a moment sooner , Ms. Choi said , noting , `` Kofi Annan is still the secretary general until the 31st of December . 
'' ."], "summary": ["Ban Ki-moon , new secretary general of UN , will have to wait to enjoy one of more glamorous trappings of his new post : secretary general 's official residence on Sutton Place .", "General Assembly recently approved $ 4.5 million renovation of residence .", "Ban and his wife are staying at Waldorf-Astoria until Oct , when work is expected to be completed ."], "publication": "nyt50", "label": [1, 3, 2], "tag": ["World"]} -{"id": "1815792", "text": ["As a New Year 's deadline arrived , Russia 's natural gas monopoly , Gazprom , struck a deal early Monday to supply gas to Belarus for the next five years , averting a price dispute that threatened to disrupt supplies to Europe , the company said in a statement .", "The agreement , reached as the Russian capital celebrated the New Year with rolling displays of fireworks , more than doubled the price that Belarus will pay for natural gas this year and raised it significantly in the years to come .", "For Belarus , a close ally of Russia , the price of gas would rise to $ 100 per thousand cubic meters in 2007 , from $ 46 now , and increase steadily to the level paid by European countries by 2011 , the company said .", "Gazprom , Russia 's largest company , succeeded in achieving , at least in part , what its officials had described as a central goal : ending subsidized supplies of energy to the countries of the former Soviet Union .", "Belarus , led by its autocratic president , Aleksandr G . Lukashenko , has for years benefited from the comparatively inexpensive supply of natural gas , a vital part of its sclerotic economy , still mostly managed by the state .", "The agreement headed off a shutdown of supplies to Belarus and beyond , avoiding the disruptions of last year , when the effects of a price dispute with Ukraine rippled throughout Europe and raised concerns about Russia 's reliability as an energy supplier .", "Gazprom , closely allied to the Kremlin , threatened to cut off gas supplies again beginning Monday at 10 a.m. if Belarus did not agree to the higher prices .", "At least 20 percent of Russian natural gas destined for Europe passes through Belarus , less than the amount that transits Ukraine but enough to raise new concerns in Europe .", "The agreement was reached after months of negotiations -- and a final week of threats and counterthreats .", "Belarus 's prime minister , Sergei S . Sidorsky , arrived in Moscow on Sunday for a last round of negotiations and announced the deal with Gazprom 's chairman , Aleksei B . Miller .", "Mr. Miller had suggested that Belarus should ultimately pay the going market price , now roughly $ 260 per thousand cubic meters of gas .", "The agreed price , $ 100 per thousand cubic meters , was less than the $ 105 that Gazprom had demanded in the past few days .", "But under the deal announced on Monday , Gazprom , in keeping with its stated goals of expanding its export empire , will acquire 50 percent of Beltranzgaz , the Belarussian gas-transit monopoly that distributes gas through the country .", "Mr. Lukashenko , whose rule has been described as the last dictatorship in Europe , had previously vowed never to give up control of those pipelines .", "On Friday , he vowed that Belarussians would rather live in unheated dugouts than pay the higher prices that Gazprom was demanding .", "`` All this means destruction of our relations , '' he said .", "A statement by Mr. 
Sidorsky early Monday appeared to reflect his government 's unease with the agreement .", "`` The Belarussian side , in a difficult atmosphere on the eve of the new year , signed an agreement on unfortunate terms , '' he said , according to Agence France-Presse .", "Russia has long been the country 's most reliable partner , shielding it from efforts by Europe and the United States to isolate Mr. Lukashenko , who won re-election to a third term as president in balloting in March that was denounced as unfair .", "But the negotiations suggested that Russia 's political priorities had been surpassed by Gazprom 's economic ones ."], "summary": ["Russia 's natural gas monopoly , Gazprom , strikes deal to supply gas to Belarus for next five years , averting price dispute that threatened to disrupt supplies to Europe .", "Agreement more than doubled price that Belarus will pay for natural gas in 2007 and raised it significantly in years to come ."], "publication": "nyt50", "label": [0, 1], "tag": ["World"]} -{"id": "1815795", "text": ["City , state and federal agencies granted final approvals last month to a half-dozen wide-ranging projects in a political aligning of the stars that will promote New York City 's most ambitious economic development agenda in decades .", "Approval or financing was given to a Second Avenue subway .", "An extension of the Flushing Line to the Far West Side .", "A spur to connect the Long Island Rail Road to Grand Central Terminal .", "Financing for tens of thousands of apartments for low - and moderate-income residents .", "The Atlantic Yards complex near Downtown Brooklyn , which includes a new home for the basketball Nets .", "And even the bus-stop shelters and public toilets that New Yorkers and visitors have demanded for years .", "Some of the approvals were prompted by legal deadlines and last-minute efforts by departing Pataki administration officials -- including Charles A . Gargano , the chairman of the Empire State Development Corporation .", "Peter S . Kalikow , the chairman of the Metropolitan Transportation Authority .", "And the governor himself -- to stake out their legacy .", "But two more enduring forces also converged : the beginning of the last 1,000 days of the Bloomberg administration , and a climate that some urban planners suggest signals at least a lull in the nearly half-century backlash against the bulldozer diplomacy of Robert Moses .", "`` It 's a pretty amazing list , `` said Robert D . Yaro , the president of the Regional Plan Association , a group that studies transportation and development issues .", "`` It 's the Bloomberg administration pushing hard .", "There 's a pro-growth , long-range theme behind all this . ``", "Kenneth T . Jackson , the Columbia University urban historian , said that Mr. Bloomberg 's speech in December outlining the challenges posed by a growing population `` signaled that New York had to fight for its place at the table , that real estate and commercial rents and housing prices are getting out of hand .", "The only way the city can prosper is to make that more reasonable and the only way to do that is to increase the supply .", "`` I think they 're beginning to move , `` Professor Jackson said .", "In the months ahead , the Bloomberg administration 's development agenda includes rezoning in Harlem as well as in Jamaica and Willets Point in Queens .", "The administration also wants to make another effort to gain approval for the transformation of the James A . 
Farley general post office building in Midtown Manhattan into a commuter rail hub called Moynihan Station -- one proposal that appears to be in political limbo .", "`` After 9/11 , a spirit of cooperation -- not perfect -- prevailed that enabled people to aim farther than they had in decades , '' said Daniel L . Doctoroff , the deputy mayor for economic development , who was instrumental in winning many of the approvals .", "`` The mayor really encouraged that kind of thinking -- repositioning New York City 's economy to compete with other cities in the 21st century . ``", "Several of the approved projects still face court challenges .", "Some others are bound to raise concerns over displacement and congestion .", "And the promise of a Second Avenue subway has been dangled before skeptical New Yorkers for nearly 80 years .", "But even if some projects are delayed , the others would change the city 's face and , arguably , help fend off competition from New Jersey and from other world capitals .", "`` I think you probably would have to go back to the late 1930s to see anything like that , '' Mr. Doctoroff said .", "`` I do n't think any mayor has had an agenda like this , not since La Guardia . ``", "On Dec . 6 , the city sold $ 2 billion in bonds to extend the No . 7 subway line 1.1 miles west and then south from Times Square to 11th Avenue and 34th Street to help transform the largely fallow Far West Side .", "On Dec . 18 , the federal government agreed to grant $ 2.6 billion to link Long Island Rail Road commuters directly to the East Side of Manhattan and $ 693 million for the Second Avenue subway from 96th to 63rd Streets .", "On Dec . 19 , Mr. Pataki presided over the installation of the first two huge steel columns to mark the perimeter of the Freedom Tower at the World Trade Center site .", "The same day , Mr. Bloomberg announced the installation of the first 24 of 3,300 bus-stop shelters by a company that will also replace 330 newsstands and install and operate 20 public toilets .", "The `` street furniture '' will help pay for the city 's tourism campaign .", "On Dec . 20 , the City Council , in a compromise supported by the mayor , voted to overhaul a tax break to induce developers to build tens of thousands of apartments for New Yorkers making less than 80 percent of the median household income , or $ 56,720 for a family of four .", "That day , a state oversight board gave final approval to the $ 4 billion Atlantic Yards project , a mostly residential complex with a basketball arena , offices and retail space near Flatbush and Atlantic Avenues .", "In addition , new stadiums are being built for the Yankees and the Mets .", "Brad Lander , the director of the Pratt Center for Community Development , a planning group , said : `` My guess is , we are only just now settling into the general sense that the city 's growth and development are long-term trends , not a short-time , business-cycle flash . ``", "Dick Dadey , the executive director of Citizens Union , said that the city , bolstered by a robust economy , was trying to meet pent-up demand .", "`` Say what you want about the scope and size of development , '' he added , `` the projects that are being approved are more sensitive to the current communities and neighborhoods or to creating new ones -- like the new Downtown Brooklyn -- than Moses ever was . ''", "Moreover , Mr. 
Dadey said , `` the community boards no longer have the sway they once did over stopping local projects , '' and some local groups are even supporting development -- `` trying to encourage it responsibly in ways that benefit a greater number of people . ''", "In `` The Power Broker , '' Robert A . Caro in 1974 wrote that without the approval of Robert Moses , who oversaw virtually all public works in New York until he was eased out in 1968 , the city was `` utterly unable '' to build anything .", "Mr. Caro said in an interview that he , too , was struck by the plethora of projects approved in December .", "`` Does this alignment of stars show that this is maybe a problem that democracy can solve .", "`` he said .", "`` For the first time in 40 years , I 'm hopeful . `` ."], "summary": ["New York City , New York State and federal agencies grant final approval to half-dozen large projects that will transform some areas of city and help bring it competitively into 21st-Century .", "Wide-ranging projects signal lull in climate of nearly 50-year backlash against ` bulldozer diplomacy ' of Robert Moses .", "Developers are more sensitive to needs of communities and some communities welcome changes that can improve neighborhoods .", "Photos ."], "publication": "nyt50", "label": [10, 0], "tag": ["New York and Region"]} -{"id": "1815801", "text": ["For a half-century , he has been the center of a heartbreaking mystery , this boy of 4 or 5 who lies buried beneath a black granite stone in the shade of rhododendrons .", "Visitors to his grave in Ivy Hill Cemetery here pray and leave toys , perhaps bestowing more love on this child than he ever had in his life , which was marked by sickness and hunger and ended in a beating .", "The boy 's body , bruised and naked , was found in a cardboard box in a patch of woods off a dirt road on the city 's outskirts in February 1957 .", "He was a symbol of child abuse at its worst .", "Yet all these years later , he has no name .", "William H . Kelly , a retired Philadelphia detective , and his friend Joseph McGillen , a retired investigator for the medical examiner 's office , visit the boy 's grave often .", "`` My little friend , '' Mr. Kelly calls him .", "The men , both 79 , dream of giving the boy an identity before they die .", "Still officially an open homicide investigation , the case is `` one of the very few in which we ca n't say who it is , `` said Capt . Benjamin Naish , a Philadelphia police spokesman .", "Elmer Palmer was the first officer on the scene that drizzly Feb . 26 , 1957 .", "`` It looked like a doll , '' he recalled recently .", "`` Then I saw it was n't a doll . ``", "The boy 's hair had been cut , crudely , either just before or just after death , so his body was flecked with hairs .", "Mr. Palmer , now 79 , was a young husband and father then .", "He recalled shivering in his raincoat , thinking , `` What a shame . ''", "At least this will be solved quickly , he thought .", "`` They had so many leads . ''", "But there were problems .", "A college student had spotted the body on Feb . 
25 , but did not call the police until the next day , after confiding in a priest .", "Cold slows decomposition , so it was impossible to tell how long the boy had been dead .", "An autopsy showed that the child had been beaten to death and that he had been ill and undernourished .", "His baby teeth were intact , and he had apparently never been to a dentist .", "His body bore several small scars that looked like surgical incisions .", "Yet a survey of local doctors and hospitals turned up nothing .", "Photographs of the boy 's face were printed in the newspapers , hung on storefronts and mailed with utility bills throughout Philadelphia and beyond .", "Orphanages and other child-care institutions were checked .", "Still nothing .", "Detectives even dressed the corpse and photographed it in a sitting position , then distributed the pictures in the hope that the more `` lifelike '' appearance would jog someone 's memory .", "A man 's corduroy cap found near the body was traced to an area store .", "The owner recognized it from the strap the buyer had her sew on .", "She recalled him as a man in his 20s who had come into the store alone .", "There was nothing special about him .", "He was never found .", "The police traced the cardboard box to another store .", "It was one of a dozen that had held bassinets sold from Dec . 3 , 1956 , to Feb . 16 , 1957 .", "Investigators tracked down all but one buyer -- quite a feat , considering the store 's cash-only policy -- but found no link to the boy .", "Among the many tips and theories : the boy was a refugee who came to America after the Hungarian Revolution of 1956 .", "He was the son of wandering carnival workers , several of whose children had died under odd circumstances .", "He was the son of an itinerant roofer who had worked in the Philadelphia area .", "More than 11,000 Hungarian passports were checked .", "The carnival workers were cleared .", "The roofer was found , along with his son , safe and sound .", "Finally , the boy was buried in a potter 's field , with detectives as pallbearers .", "His grave was the only one with a stone , donated by a local monument maker .", "Years went by .", "The patch of woods was bulldozed for houses .", "The dirt road became a busy street .", "Investigators who had worked on the case acquired paunches and pensions .", "But for all the big-city death and mayhem they had seen , they could not forget the little boy .", "The Vidocq Society , a Philadelphia group composed largely of law enforcement professionals who investigate long-unsolved crimes , adopted the case .", "Mr. Kelly and Mr. McGillen are members .", "-LRB- The society is named after a famed 18th-century French detective , Eug\u00e8ne Fran\u00e7ois Vidocq . -RRB-", "Another member was Remington Bristow , an investigator in the medical examiner 's office who had been deeply affected by the case .", "His own son had died in early childhood .", "Mr. Bristow worked on the case practically full time , even in his retirement , spending thousands of dollars of his own to chase leads across the country .", "He carried a death mask of the child in his briefcase .", "Until his death in 1993 , Mr. Bristow theorized that the child was the son of an unmarried daughter of a couple who ran a foster home in an old mansion .", "He even suggested that the child might have died accidentally .", "The boy 's D.N.A. 
was obtained when the body was exhumed in 1998 and reburied in Ivy Hill , in a plot donated by the cemetery .", "Those close to the case hold out hope that a match will turn up one day .", "Mr. Kelly and Mr. McGillen say the key to the mystery may lie in the memory of a woman who grew up in Philadelphia and says that when she was a child her parents brought a boy home and kept him in the basement .", "One day , the woman says , her mother battered the boy to death , then drove with her to the patch of woods to dispose of the body .", "The woman told her story to Mr. Kelly and Mr. McGillen several years ago , in the presence of her psychiatrist .", "She said she decided to come forward after a television reprise of the case , one of several in recent years .", "`` We think she 's the real deal , `` Mr. McGillen said .", "But William Fleisher , a former Philadelphia police officer and F.B.I. agent who is the president of the Vidocq Society , is not so sure .", "`` Nothing she says has been proved , nothing she says has been disproved , '' said Mr. Fleisher , now a private investigator .", "And if the boy remains without a name and the crime goes unpunished .", "Sooner or later the killer will be `` in a place where there 's no appeal , `` Mr. McGillen said .", "`` And I feel good about that . '' ."], "summary": ["Bruised and naked body of unidentified boy , 4 or 5 , was found in patch of woods on outskirts of Philadelphia in February 1957 .", "Autopsy showed that child had been beaten to death and that he had been ill and undernourished .", "Vidocq Society , Philadelphia group composed largely of law enforcement professionals who investigate long-unsolved crimes , adopted case .", "Yet all these years later , boy has no name .", "William H Kelly , retired Philadelphia detective , and Joseph McGillen , retired investigator for medical examiner 's office , visit boy 's grave often .", "They dream of giving him identity before they die .", "Photos ."], "publication": "nyt50", "label": [5, 49, 20, 4, 2, 7], "tag": ["U.S."]} -{"id": "1815802", "text": ["Darlene Bishop , the nationally renowned evangelical preacher , begins her book about how God cured the cancer afflicting one of her brothers with a Biblical verse : `` And the prayer of faith shall save the sick , and the Lord shall raise him up . ''", "The book , `` Your Life Follows Your Words , '' is sold in the gift shop of Solid Rock Church , the 4,000-member congregation in Monroe , Ohio , where Ms. Bishop is a co-pastor .", "She has promoted it on her television show , `` Sisters , '' which is modeled after ABC 's `` The View '' and is broadcast on four cable networks nationwide .", "On her Web site , Ms. Bishop promises that the book reveals `` how God healed her of breast cancer '' and a brother of throat cancer .", "Nowhere , though , does she mention , that the brother , Darrell Perry , a successful country music songwriter whom everyone called Wayne , died from the cancer a year and a half ago .", "In a sworn deposition responding to two lawsuits filed by Mr. Perry 's four children , Ms. Bishop stated that no doctor ever diagnosed the breast cancer she referred to prominently in her book .", "Instead , Ms. Bishop testified , she thought that she had cancer in 1986 and that it was cured .", "`` She 's lying to people and exploiting my father for her own financial gain , `` Mr. Perry 's eldest son , Bryan Perry , 36 , said in an interview .", "One lawsuit accuses Ms. Bishop of wrongful death because , it says , she convinced Mr. 
Perry to pray rather than to seek medical care .", "The other accuses her of mismanaging and misusing his estate , which the Perry children say could be worth millions .", "The estate case is to be argued in Butler County Probate Court on Friday .", "Mr. Perry 's death at age 55 left some of country music 's most popular performers , including Toby Keith and Tim McGraw , without one of their most trusted and prolific writers .", "Now the battle over who caused his death , who owns his assets and how best to interpret his legacy is dividing a once-close family whose members climbed from Appalachian poverty to prominence in the music industry and the evangelical movement .", "Ms. Bishop would not answer questions about the suits .", "On her Web site , she says that the allegations `` are complete lies '' and that she never discourages anyone from seeing doctors .", "She also says she is a trustworthy steward of Mr. Perry 's estate , which , she said in the deposition , could be worth nothing after his many debts are paid .", "Long before she gained fame as a preacher , Ms. Bishop was her family 's spiritual leader , Bryan Perry said .", "One of Mr. Perry 's two former wives , Janet Perry-McCormick , said that he often sought the religious counsel of his older sister , whom he called Sissy , and that his children grew up attending her church .", "`` I put my faith in Darlene , '' Bryan Perry said .", "`` We all did .", "We thought she was a holy , pure woman . ``", "Wayne Perry fathered four children with three women , two of whom he married , his sons said .", "He abandoned his family when Bryan was 2 to pursue his songwriting career , which produced such hits as `` A Woman 's Touch , `` recorded by Mr. Keith , and '' Not a Moment Too Soon , `` by Mr. McGraw .", "He earned millions of dollars , said a music industry lawyer , Rush Hicks , who is advising the children .", "After doctors diagnosed his throat cancer in December 2002 , Mr. Perry moved into Ms. Bishop 's mansion on her $ 2.6 million horse farm in Monroe to re-commit his life to God , his sons said .", "According to Ms. Bishop 's book , when her brother arrived at her front door , he confirmed that he had cancer , and she replied , `` Let that be the last time those words ever come from your mouth . ''", "In her deposition , Ms. Bishop said Mr. Perry had decided on his own to disregard doctors ' advice that he immediately begin chemotherapy and radiation treatments .", "But Mr. Perry 's children contend that their aunt persuaded him to forgo medical treatment and rely on a process of faith healing that , Ms. Bishop wrote in her book , God had explained to her in a revelation .", "`` He was laying in bed dying , and she had him convinced that he was healed , '' said Mr. Perry 's son Justin Jones , 28 , who lived in Ms. Bishop 's house for a year caring for his father .", "As his throat tumors swelled to the size of tennis balls , Mr. Perry stopped eating , Mr. Jones said .", "His weight dropped to 84 pounds .", "He did consent to chemotherapy , Mr. Jones said , but only after the tumors had restricted his breathing to the point that he collapsed .", "The chemotherapy shrank the tumors , Mr. Jones said , and his father began eating again .", "In her book , Ms. Bishop describes her brother 's spiritual awakening and the improvement in his condition , but she does not mention his chemotherapy .", "As Mr. Perry regained strength , he and Ms. Bishop went on a nationwide tour of evangelical churches , promoting Ms. 
Bishop 's book about his miraculous recovery , his children said .", "Against his doctor 's advice , Mr. Perry stopped chemotherapy , Mr. Jones said .", "On Oct . 13 , 2004 , an oncologist , Dr. Albert Malcolm , wrote a letter telling Mr. Perry that his cancer was terminal .", "Mr. Perry forwarded the letter to Janet Perry-McCormick , his former wife , after writing across the top , `` Destroy this letter after you read it , '' and , `` Only you and Darlene know this . ''", "The note is proof that Ms. Bishop knew her brother was dying but concealed it from the public while continuing to promote her book , Mr. Perry 's children said in interviews , but in her deposition , Ms. Bishop said she learned of Dr. Malcolm 's diagnosis after Mr. Perry died in May 2005 .", "Mr. Perry 's death raised questions about the ownership of his royalties , his catalogs of songs and his `` hook book , '' which his children describe as a loose-leaf notebook stuffed with lyrics and musical riffs , most of which had not been recorded .", "The children accused Ms. Bishop 's son Lawrence Bishop II , a musician , of recording two albums that contained a total of five songs copyrighted by Mr. Perry without paying royalties to his estate .", "Copyrights are not strictly followed in the Christian country music business , Ms. Bishop said in her deposition .", "She also said Mr. Perry 's notebook was missing .", "Their father 's songs could be worth millions of dollars , the children said , but only if they can be marketed , an impossibility given no hook book and a dispute over song rights .", "Ms. Bishop said Mr. Perry 's catalogs of songs belonged to the record companies that recorded and promoted them , not the family .", "Also in dispute is Mr. Perry 's life insurance policy , worth $ 260,000 .", "Ms. Bishop was named the policy 's sole beneficiary , but the children claim it was meant for them .", "One point on which both sides agree is that Mr. Perry died believing he had been healed by God .", "`` The only thing he told me , '' Ms. Perry-McCormick said , `` was , ' I 'm going to show Sissy that I can be healed just like she was . 
' `` ."], "summary": ["Darrell Perry , successful country music songwriter died from cancer year and half ago .", "Perry 's four children have filed two lawsuits against their aunt , Darlene Bishop , renowned evangelical preacher .", "One lawsuit accuses her of wrongful death , claiming that she convinced Perry to pray rather than to seek medical care .", "Other accuses her of mismanaging and misusing his estate , which Perry children say could be worth millions .", "Photos ."], "publication": "nyt50", "label": [9, 8, 4], "tag": ["U.S."]} -{"id": "1815811", "text": ["This college town received what it wanted when , during the 1980s and 90s , it sought to reverse the decline of its downtown and to create a more vibrant civic center that would draw people at night and on weekends .", "Since then , thousands of young professionals , retirees and former suburbanites have moved to glistening condominium buildings in the shadow of the state Capitol 's dome and only a few blocks from the University of Wisconsin 's main campus .", "And there is hardly a bad night for business near State Street , where university students and tourists pack restaurants and bars to capacity even on freezing weeknights .", "But as downtown 's population and revelry have grown , so have overcrowding on the streets , vandalism and , most significantly , the police say , alcohol-related crime .", "Mayor Dave Cieslewicz and other officials find themselves grappling with a problem that is a direct result of Madison 's successful transformation : how to tone down downtown .", "As an urban issue , the downsizing of downtowns has little precedent because many cities , particularly in the Midwest , are struggling mightily to bring people back to their cores , not send them away .", "Of course , many college towns deal with problems related to drinking .", "In the Midwest alone , La Crosse , Wis . , and East Lansing and Ann Arbor , Mich . 
, are struggling with how to cope with the public mayhem often fueled by inebriated students .", "In Madison , two Common Council members , convinced that much of what ails downtown can be traced to the proliferation of bars and restaurants known more for drinking than dining , introduced a plan intended to reduce the number of such establishments , and to restrict the approval of new liquor licenses .", "The plan , which has the support of Mayor Cieslewicz -LRB- pronounced chess-LEV-ich -RRB- , is preliminary and does not detail , for example , how many or which places may be closed .", "A final plan is expected to be ready for a Council vote in the spring .", "That area of nearly one square mile -- between Lake Mendota , Lake Monona and Blair and Lake Streets -- has 120 places that serve only or mostly alcohol .", "They have a capacity of more than 11,000 people , city officials said .", "The proposal has its critics , many of whom call it nothing less than modern-day Prohibition , and an assault on personal freedom and the free market that flies in the face of Madison 's traditional liberalism and Wisconsin 's entrenched drinking culture .", "Some Council members say they worry that limiting the number of bars will only increase the number of drinkers who turn to house parties and makeshift taverns , where binge drinking and bad behavior often go together but behind closed doors .", "`` A lot of the activists on this issue revile alcohol , and their logic is equally fallacious as the original Prohibitionists ' , `` said Austin King , the president of the Common Council and a member of its Progressive caucus .", "`` From a safety perspective , '' Mr. King said , `` I would much , much rather have young people drinking in the regulated environment of bars . ''", "College students , not surprisingly , also oppose the plan .", "`` A proportion of students drink irresponsibly , but the majority do n't , `` said Katrina Schleitwiler , 21 , a political science major at the University of Wisconsin .", "`` This would just drive students into other places to drink and not affect the problem at all . ''", "Although Ms. 
Schleitwiler acknowledged a spate of crimes around campus and downtown , she said she did not think alcohol abuse by students or anyone else was at its root .", "`` Madison is becoming a big city with more crime , '' she said .", "`` How different is that from any other city .", "`` The police see things differently .", "According to a recent police department analysis of attacks in which someone was injured downtown , about 75 percent of the victims and perpetrators were intoxicated .", "The analysis also found that after midnight on Thursdays , Fridays and Saturdays , police officers , paramedics and firefighters often spent half to all of their working hours responding to alcohol-fueled fights and disorderly conduct .", "Noise , public urination and vandalism are constant concerns .", "The University of Wisconsin has tried many initiatives to curtail under-age drinking and older students ' overconsumption .", "Most recently , the city and the college jointly paid for a municipal alcohol policy coordinator -- referred to as the `` bar czar '' -- to redouble those efforts .", "`` Frankly , nothing has worked very well , and there 's still a culture of binge drinking , `` Mayor Cieslewicz said .", "For the last five years , Madison , formerly a sleepy college town , has grown by about 2,500 people a year into a medium-size city with a population of about 230,000 .", "In the last decade , 1,950 apartments -- rental and condominium units -- have been built downtown .", "But the arrival of so many newcomers has produced a culture clash .", "Stefanie Moritz , a retired librarian , moved with her husband from Phoenix into a downtown condominium about three years ago , drawn by pedestrian-friendly streets , a university job for her husband and the community 's progressive politics .", "`` We decided that we definitely wanted to live downtown , so we could get rid of one of our cars , my husband could walk to work and we could enjoy the downtown experience , '' Ms. Moritz said .", "`` The reality is a little bit different . ''", "She said she quickly grew irritated at being awakened at 2:30 a.m. , when the noisy bar crowd usually begins to make its way home , dropping empty beer cans and other trash along the way .", "One morning she woke to find that garbage had been torched and the flames had charred a tree .", "`` I want to live downtown , but I also want a decent quality of life , '' Ms. Moritz said .", "`` And I feel that that is being denied by the present level of alcohol use . ''", "About 18 months ago , Ms. Moritz became active in a relatively new residents ' group , Capitol Neighborhoods , which is at the forefront of the push for stricter drinking rules .", "Hawk Schenkel , the owner of one of the biggest restaurants on State Street , Hawk 's Bar and Grill , pointed out that residents who criticized the downtown scene had `` moved downtown in a university town . ''", "`` Do they know where all the revenue comes from downtown , why we have a downtown that 's alive and worth being in .", "`` Mr. Schenkel asked .", "`` All that could change . ''", "`` If the ordinance as written were to pass , my bar would automatically be worth at least $ 100,000 more , overnight , and it 's clear that I personally stand to gain financially , `` he said .", "`` But I 'm against this on principle , and I do n't think it helps the problem .", "My argument is that there are n't enough bars .", "It 's the overcrowding that leads to violence . 
`` ."], "summary": ["Madison , Wis , college town that sought to reverse decline of its downtown in 1980s and 1990s to draw people at night and on weekends , is now facing overcrowding on streets , vandalism and , most significantly , alcohol-related crime .", "Officials are grappling with how to scale down downtown .", "Map . Photo ."], "publication": "nyt50", "label": [0, 3], "tag": ["U.S."]} -{"id": "1815830", "text": ["For Sunni Arabs here , the ugly reality of the new Iraq seemed to crystallize in a two-minute segment of Saddam Hussein 's hanging , filmed surreptitiously on a cellphone .", "The video featured excited taunting of Mr. Hussein by hooded Shiite guards .", "Passed around from cellphone to cellphone on Sunday , the images had echoes of the videos Sunni militants take of beheadings .", "`` Yes , he was a dictator , but he was killed by a death squad , '' said a Sunni Arab woman in western Baghdad who was too afraid to give her name .", "`` What 's the difference between him and them .", "`` There was , of course , a difference .", "Mr. Hussein was a brutal dictator , while the Shiite organizers of the execution are members of the popularly elected Iraqi government that the United States helped put in place as an attempt to implant a democracy .", "It was supposed to be a formal and solemn proceeding carried out by a dispassionate state .", "But the grainy recording of the execution 's cruel theater summed up what has become increasingly clear on the streets of the capital : that the Shiite-led government that assumed power in the American effort here is running the state under an undisguised sectarian banner .", "The hanging was hasty .", "Laws governing its timing were bypassed , and the guards charged with keeping order in the chamber instead disrupted it , shouting Shiite militia slogans .", "It was a degrading end for a vicious leader , and an ominous beginning for the new Iraq .", "The Bush administration has already scaled back its hopes for a democracy here .", "But as the Iraqi government has become ever more set on protecting its Shiite constituency , often at the expense of the Sunni minority , the goal of stopping the sectarian war seems to be slipping out of reach .", "`` We speak about the crimes of Saddam Hussein , but now here we are behaving in the same way , '' said Alaa Makki , a prominent Sunni politician .", "`` We fear that nothing has been changed .", "On the contrary , we feel it is going in a worse direction . 
``", "After the invasion , Sunni Arabs , bitter at losing their place , refused to take part in Iraq 's first elections , allowing Shiites and Kurds to sweep to power .", "Americans here spent the following months persuading the Shiites to let the Sunnis back in .", "The idea , at the time , was that involving Sunnis in politics would drain the insurgency of its violence .", "Instead , the violence got worse , and in February , the long-abused Shiites struck back , using the force of the state ministries and agencies that they now control .", "Now , American officials are pressing Iraqi leaders , both Sunni and Shiite , to reconcile and have made it a central demand for continued support of the Iraqi government .", "But the prospects for mutual agreement seem ever more distant .", "`` I ca n't think of any good reason for any level-minded person to be interested in reconciliation , `` one secular Sunni politician said .", "That unwillingness , shared by most of the Shiite political elite , is a serious challenge to any new American strategy proposal that President Bush may announce soon .", "Indeed , the Sunni political class is getting smaller .", "Many of the Sunni politicians once ubiquitous during the broad discussions of the Iraqi Constitution two years ago are now gone .", "Virtually none of the members of the Association of Muslim Scholars , a hard-line Sunni Arab religious group , are left in Iraq -- most of them have gone to Jordan and Syria .", "Out of more than 50 members of the Baghdad council that runs the city , only one is Sunni .", "The reason is that Shiites , who had been driven from their homes and relentlessly slaughtered by Sunni suicide bombers , are now pushing back .", "The taunting during Mr. Hussein 's execution capped months of advances by Shiite militias , which have forced Sunnis farther back into western Baghdad .", "But as the Shiites gain the upper hand , they also seem to be abandoning any hint of compromise .", "The video , Sunnis said , was a startling symbol of that .", "In the images , the guards taunt Mr. Hussein .", "They damn him .", "They cheer their Shiite heroes so persistently that one observer makes a remark about how the effort to rein in militias does not seem to be going well .", "Immediately after they let him drop , in the midst of repeating a prayer , the voices rise in urgency and begin talking excitedly .", "Then several others chime in , telling those present to step back from the body and to wait three minutes before touching it .", "The video was particularly disturbing for Sunni Arabs , who accuse the government of willfully allowing militias to remain in the ranks of its security forces .", "It left the impression that the government cared more for revenge than for justice , Sunnis said .", "`` Either it 's terrible incompetence or it 's an act of revenge -- a vendetta , `` said Adnan Pachachi , a respected Sunni whose political career began long before Mr. Hussein took power .", "`` That was the impression people had . ''", "One of the problems was the timing .", "The execution was rescheduled a number of times , as Iraqi officials raced through a checklist of requirements put forth by the Americans .", "Two legal conditions -- that it not be held on a holiday and that the Iraqi president and his two deputies be given 30 days to sign off on the sentence first -- were ignored .", "The fact was not lost on Sunni political leaders , including Mr. 
Makki , who said the execution was a step backward for the country .", "`` This is a political mistake , '' he said .", "`` We lost a lot with this . ''", "To make matters worse , it fell just as the first day of the Id al-Adha holiday dawned for Sunnis -- a day before the Shiites ' observance was to begin .", "Shiite politicians did not apologize and some even reveled in the timing .", "That did a major disservice to reconciliation , many argued .", "`` Why could n't they have waited for a few more days .", "`` Mr. Pachachi said .", "`` It was a deliberate insult to so many people .", "It helped Saddam 's friends . ``", "Yusra Abdul Aziz , a Sunni teacher in Mansour , had a blunter analysis : `` They changed him from a criminal into a martyr . ''", "In a strange twist , Sunni insurgents did not seem to care .", "Sunni Jihadist Web sites had virtually no messages about Mr. Hussein 's death , aside from two re-released statements , old debates by militant sheiks over whether he should be considered a martyr .", "`` The feeling is that they do n't care about him , `` said Rita Katz , who runs the SITE Institute , a group that tracks militant Islamist Web sites .", "For the more hard-line Sunni Arabs , the execution simply confirmed their view that joining the Shiite government could never work .", "Sheik Hakam Abdullah al-Shahiri from the Obeid tribe in Kirkuk is an example .", "`` Iraq is occupied now by the U.S. and Iran and a puppet government for both sides , '' he said .", "`` With the execution of Saddam the Arab identity of Iraq and its unity have ended . ''", "That has left moderate Sunnis -- those who still seek reconciliation -- to ponder the danger of a Shiite hegemony that seems too scarred from past abuses to govern lightly .", "`` Governing a country should not be done by reflexes , '' Mr. Makki said .", "`` It should be wisdom first .", "A panoramic view . ``", "`` Not behaving from one side , '' he added , `` like what we saw here . ''", "THE STRUGGLE FOR IRAQ : NEWS ANALYSIS ."], "summary": ["Saddam Hussein 's hanging was supposed to be formal and solemn proceeding carried out by dispassionate state , but recording of execution 's cruel theater summed up what has become increasingly clear on streets of Baghdad : that Shiite-led government that assumed power in US effort is running state under undisguised sectarian banner .", "It was degrading end for vicious leader , and ominous beginning for new Iraq .", "Photo ."], "publication": "nyt50", "label": [8, 7, 11], "tag": ["World", "Washington"]} -{"id": "1815834", "text": ["He drew pictures of himself with angel wings .", "He left a set of his dog tags on a nightstand in my Manhattan apartment .", "He bought a tiny blue sweat suit for our baby to wear home from the hospital .", "Then he began to write what would become a 200-page journal for our son , in case he did not make it back from the desert in Iraq .", "For months before my fianc\u00e9 , First Sgt . 
Charles Monroe King , kissed my swollen stomach and said goodbye , he had been preparing for the beginning of the life we had created and for the end of his own .", "He boarded a plane in December 2005 with two missions , really -- to lead his young soldiers in combat and to prepare our boy for a life without him .", "Dear son , Charles wrote on the last page of the journal , `` I hope this book is somewhat helpful to you .", "Please forgive me for the poor handwriting and grammar .", "I tried to finish this book before I was deployed to Iraq .", "It has to be something special to you .", "I 've been writing it in the states , Kuwait and Iraq .", "The journal will have to speak for Charles now .", "He was killed Oct . 14 when an improvised explosive device detonated near his armored vehicle in Baghdad .", "Charles , 48 , had been assigned to the Army 's First Battalion , 67th Armored Regiment , Fourth Infantry Division , based in Fort Hood , Tex .", "He was a month from completing his tour of duty .", "For our son 's first Christmas , Charles had hoped to take him on a carriage ride through Central Park .", "Instead , Jordan , now 9 months old , and I snuggled under a blanket in a horse-drawn buggy .", "The driver seemed puzzled about why I was riding alone with a baby and crying on Christmas Day .", "I told him .", "`` No charge , '' he said at the end of the ride , an act of kindness in a city that can magnify loneliness .", "On paper , Charles revealed himself in a way he rarely did in person .", "He thought hard about what to say to a son who would have no memory of him .", "Even if Jordan will never hear the cadence of his father 's voice , he will know the wisdom of his words .", "Never be ashamed to cry .", "No man is too good to get on his knee and humble himself to God .", "Follow your heart and look for the strength of a woman .", "Charles tried to anticipate questions in the years to come .", "Favorite team .", "I am a diehard Cleveland Browns fan .", "Favorite meal .", "Chicken , fried or baked , candied yams , collard greens and cornbread .", "Childhood chores .", "Shoveling snow and cutting grass .", "First kiss .", "Eighth grade .", "In neat block letters , he wrote about faith and failure , heartache and hope .", "He offered tips on how to behave on a date and where to hide money on vacation .", "Rainy days have their pleasures , he noted : Every now and then you get lucky and catch a rainbow .", "Charles mailed the book to me in July , after one of his soldiers was killed and he had recovered the body from a tank .", "The journal was incomplete , but the horror of the young man 's death shook Charles so deeply that he wanted to send it even though he had more to say .", "He finished it when he came home on a two-week leave in August to meet Jordan , then 5 months old .", "He was so intoxicated by love for his son that he barely slept , instead keeping vigil over the baby .", "I can fill in some of the blanks left for Jordan about his father .", "When we met in my hometown of Radcliff , Ky . 
, near Fort Knox , I did not consider Charles my type at first .", "He was bashful , a homebody and got his news from television rather than newspapers -LRB- heresy , since I 'm a New York Times editor -RRB- .", "But he won me over .", "One day a couple of years ago , I pulled out a list of the traits I wanted in a husband and realized that Charles had almost all of them .", "He rose early to begin each day with prayers and a list of goals that he ticked off as he accomplished them .", "He was meticulous , even insisting on doing my ironing because he deemed my wrinkle-removing skills deficient .", "His rock-hard warrior 's body made him appear tough , but he had a tender heart .", "He doted on Christina , now 16 , his daughter from a marriage that ended in divorce .", "He made her blush when he showed her a tattoo with her name on his arm .", "Toward women , he displayed an old-fashioned chivalry , something he expected of our son .", "Remember who taught you to speak , to walk and to be a gentleman , he wrote to Jordan in his journal .", "These are your first teachers , my little prince .", "Protect them , embrace them and always treat them like a queen .", "Though as a black man he sometimes felt the sting of discrimination , Charles betrayed no bitterness .", "It 's not fair to judge someone by the color of their skin , where they 're raised or their religious beliefs , he wrote .", "Appreciate people for who they are and learn from their differences .", "He had his faults , of course .", "Charles could be moody , easily wounded and infuriatingly quiet , especially during an argument .", "And at times , I felt , he put the military ahead of family .", "He had enlisted in 1987 , drawn by the discipline and challenges .", "Charles had other options -- he was a gifted artist who had trained at the Art Institute of Chicago -- but felt fulfilled as a soldier , something I respected but never really understood .", "He had a chest full of medals and a fierce devotion to his men .", "He taught the youngest , barely out of high school , to balance their checkbooks , counseled them about girlfriends and sometimes bailed them out of jail .", "When he was home in August , I had a baby shower for him .", "One guest recently reminded me that he had spent much of the evening worrying about his troops back in Iraq .", "Charles knew the perils of war .", "During the months before he went away and the days he returned on leave , we talked often about what might happen .", "In his journal , he wrote about the loss of fellow soldiers .", "Still , I could not bear to answer when Charles turned to me one day and asked , `` You do n't think I 'm coming back , do you .", "`` We never said aloud that the fear that he might not return was why we decided to have a child before we planned a wedding , rather than risk never having the chance .", "But Charles missed Jordan 's birth because he refused to take a leave from Iraq until all of his soldiers had gone home first , a decision that hurt me at first .", "And he volunteered for the mission on which he died , a military official told his sister , Gail T . King .", "Although he was not required to join the resupply convoy in Baghdad , he believed that his soldiers needed someone experienced with them .", "`` He would say , ' My boys are out there , I 've got to go check on my boys , ' `` said First Sgt . Arenteanis A . 
Jenkins , Charles 's roommate in Iraq .", "In my grief , that decision haunts me .", "Charles 's father faults himself for not begging his son to avoid taking unnecessary risks .", "But he acknowledges that it would not have made a difference .", "`` He was a born leader , '' said his father , Charlie J . King .", "`` And he believed what he was doing was right . ''", "Back in April , after a roadside bombing remarkably similar to that which would claim him , Charles wrote about death and duty .", "The 18th was a long , solemn night , he wrote in Jordan 's journal .", "We had a memorial for two soldiers who were killed by an improvised explosive device .", "None of my soldiers went to the memorial .", "Their excuse was that they did n't want to go because it was depressing .", "I told them it was selfish of them not to pay their respects to two men who were selfless in giving their lives for their country .", "Things may not always be easy or pleasant for you , that 's life , but always pay your respects for the way people lived and what they stood for .", "It 's the honorable thing to do .", "When Jordan is old enough to ask how his father died , I will tell him of Charles 's courage and assure him of Charles 's love .", "And I will try to comfort him with his father 's words .", "God blessed me above all I could imagine , Charles wrote in the journal .", "I have no regrets , serving your country is great .", "He had tucked a message to me in the front of Jordan 's journal .", "This is the letter every soldier should write , he said .", "For us , life will move on through Jordan .", "He will be an extension of us and hopefully everything that we stand for .", "I would like to see him grow up to be a man , but only God knows what the future holds .", "THE CONFLICT IN IRAQ : AN APPRECIATION ."], "summary": ["Dana Canedy , in appreciation of her late fiance , First Sgt Charles Monroe King , holds that he boarded plane for Iraq in Dec 2005 with two missions -- to lead his soldiers in combat and to prepare their boy for life without him .", "Notes that he wrote what would become 200-page journal for their son in case he did not make it back .", "Says that Charles was killed on Oct 14 , and journal will have to speak for him now .", "Photo shows Charles with their young son , Jordan ."], "publication": "nyt50", "label": [5, 3, 11, 33], "tag": ["Front Page", "U.S."]} -{"id": "1815835", "text": ["Jordan W . Hess was the unlikeliest of soldiers .", "He could bench-press 300 pounds and then go home and write poetry .", "He learned the art of glass blowing because it seemed interesting and built a computer with only a magazine as his guide .", "Most recently , he fell in love with a woman from Brazil and took up digital photography , letting both sweep his heart away .", "Specialist Hess , the seventh of eight children , was never keen on premonitions , but on Christmas of 2005 , as his tight-knit family gathered on a beach for the weekend , he told each sibling and parent privately that he did not expect to come home from Iraq .", "On Nov . 11 , Specialist Hess , 26 , freshly arrived in Iraq , was conducting a mission as the driver of an Abrams tank when an improvised explosive device , or I.E.D. 
, blew up with brain-rattling force .", "The blast was so potent it penetrated the 67-ton tank , flinging him against the top and critically injuring his spine .", "His four crewmates survived .", "For three weeks , he hung on at Brooke Army Medical Center in San Antonio , long enough to utter a few words to his loved ones and absorb their kindness .", "On Dec . 4 , Specialist Hess slipped onto the ever-expanding list of American military fatalities in Iraq , one that has increased by an average of more than three a day since Oct . 1 , the highest three-month toll in two years .", "On Sunday , with the announcement of the death in Baghdad of Specialist Dustin R . Donica , 22 , of Spring , Tex . , the list reached the somber milestone of at least 3,000 deaths since the March 2003 invasion .", "The landmark reflects how much more dangerous and muddled a soldier 's job in Iraq has become in the face of a growing and increasingly sophisticated insurgency .", "Violence in the country is at an all-time high , according to a Pentagon report released last month .", "December was the third deadliest month for American troops since the start of the war , with insurgents claiming 111 soldiers ' lives .", "October and November also witnessed a high number of casualties , 106 and 68 respectively , as American forces stepped up combat operations to try to stabilize Baghdad .", "`` It escalated while I was there , '' said Capt . Scott Stanford , a National Guard officer who was a commander of a headquarters company in Ramadi for a year , arriving in June 2005 .", "`` When we left this June , it was completely unhinged .", "There was a huge increase in the suicide car bombs we had .", "The I.E.D. ` s were bigger and more complex . ''", "`` And it was very tense before we left in terms of snipers , '' said Captain Stanford , a member of the Iraq and Afghanistan Veterans of America .", "`` I do n't know if there were more of them , or if they were getting better . ``", "This spike in violence , which has been felt most profoundly by Iraqi civilians , who are dying by the thousands , has stoked feverish debate about the nation 's presence in Iraq .", "Many Democrats in Congress are urging a phased withdrawal from the country , and the Bush administration is leaning toward deploying additional troops in 2007 .", "If the conflict continues into March , the Iraq war will be the third longest in American history , ranked behind the Vietnam War and the American Revolution .", "President Bush did not specifically acknowledge reaching the milestone of 3,000 American deaths , but a White House spokesman , Scott Stanzel , said the president `` grieves for each one that is lost '' and would ensure that their sacrifices were not made in vain .", "The campaign against terrorism , Mr. Stanzel said , will be a long struggle .", "Specialist Hess had volunteered for his mission to spare another soldier the danger of going outside the wire that day .", "Like so many of his fallen comrades , he had become the victim of an inescapably dangerous roadside landscape .", "`` It was the type of injury you rarely recover from .", "In past wars you would n't have gotten out of theater , `` said his father , Bill Hess , a Boeing engineer and retired Air Force man .", "`` So that was a blessing , that he could talk to us .", "He mouthed words and we were able to say we loved him .", "There is a lot to be said for that . 
``", "A Steady Toll of Deaths In many ways , the third 1,000 men and women to die in Iraq faced the same unflinching challenge as the second 1,000 soldiers to die there -- a dedicated and ruthless Iraqi insurgency that has exploited the power of roadside bombs to chilling effect .", "These bombs now cause about half of all American combat deaths and injuries in Iraq .", "Over all , the casualty rate has remained relatively steady since 2005 , dipping only slightly .", "It took 14 months for the death toll to jump to 2,000 soldiers from 1,000 .", "It took about two weeks longer for it to rise to 3,000 from 2,000 , during the period covering Oct . 25 , 2005 , to this week .", "`` It is hugely frustrating , tragic and disappointing that we ca n't reduce the fatality rate , `` said Michael O'Hanlon , a military analyst for the Brookings Institution .", "The service members who died during this latest period fit an unchanging profile .", "They were mostly white men from rural areas , soldiers so young they still held fresh memories of high school football heroics and teenage escapades .", "Many men and women were in Iraq for the second or third time .", "Some were going on their fourth , fifth or sixth deployment .", "But in other ways , the situation has changed in the past year .", "Improvised explosive devices -- the kind that killed Specialist Hess -- have grown deadlier , despite concerted Pentagon efforts and billions of dollars spent trying to counteract them .", "Insurgents are now more adept at concealing bombs , booby-trapping them and powering them to penetrate well-armored vehicles .", "They are also scattering more of them along countless roads using myriad triggers and hiding spots -- under garbage and tires , behind guardrails , inside craters .", "At the same time , Iraqi citizens have grown less inclined to tip off soldiers to the presence of these bombs .", "About 1,200 roadside bombs were detonated in August .", "The toll of war has fallen most heavily this year on regular Army soldiers , at least 544 of whom died in this group of 1,000 , compared with 405 in the last group .", "This increase was the result of fewer National Guard soldiers and reservists being deployed to Iraq in 2006 .", "Considering the intensity of the violence in Iraq this year , it is remarkable that the casualty rate did not climb higher , analysts and officers say .", "Long-awaited improvements in body and vehicle armor have helped protect soldiers , and advances in battlefield medicine have saved many lives .", "New procedures , like leaving wounds open to prevent infection , and relaying soldiers to hospitals faster than ever , have kept more service members alive .", "Troops now carry their own tourniquets .", "During World War II , 30 percent of all wounded soldiers died of their injuries , a number that dipped to 24 percent during the Vietnam War and then to 9 percent for the Iraq conflict .", "Though this is a positive development , it also means that more soldiers are coming home with life-changing injuries , including amputations and brain trauma .", "More than 22,000 soldiers have been wounded in Iraq .", "`` There is no question that the number of dead should have been far higher , '' said Dr. William Winkenwerder , the assistant secretary of defense for health affairs , referring to the Iraqi conflict .", "`` Some of these blast injuries are very powerful . 
''", "Bombs and bullets are not the only things that can kill soldiers .", "Nearly 20 percent of those who die in Iraq do so outside of combat operations .", "Sometimes it is the hazard of driving too quickly on badly rutted roads to avoid danger .", "Humvees , weighted down with armor , can easily flip if maneuvered too quickly .", "Many of Iraq 's roads are not built to hold heavy vehicles , and the ground can give way , tossing multi-ton machines into narrow canals where soldiers have drowned .", "Helicopters are sometimes strafed by sandstorms or crippled by mechanical malfunctions .", "Accidents make up two-thirds of the nonhostile deaths .", "With so many soldiers carrying so many weapons , unintentional casualties occur , sometimes while handling firearms .", "Fire from one 's own side is another inevitability of war , as is suicide .", "Since March 2003 , 93 soldiers have died from self-inflicted wounds in Iraq .", "In a way , these deaths , coming not at the hands of the enemy , but as a consequence of inferior roads and turbulent weather , can be even more difficult for parents to accept .", "Sometimes they wait months for official reports , since all noncombat deaths must be investigated .", "`` I do n't think I ever thought something like this could happen , `` said Shelley Burnett , whose son , Lance Cpl . Jason K . Burnett , 20 , died in May after his tank toppled into a canal .", "`` We talked a lot about the I.E.D. ' s and the dangers out there , but Jason kept saying , ' There is not a whole lot they can do to a tank . '", "`` Death at Roadside Over the last two years , the Pentagon has worked frantically to harden body armor and the armor on its Humvees and other vehicles .", "And the insurgents in Iraq have responded just as forcefully with deadly innovations in roadside bombs , and a fury of sniper bullets .", "The most lethal development is the use of the `` explosively formed penetrators , '' which pierce armor and stay intact when they explode .", "Roadside bombs are often detonated from a distance -- with garage door openers , for example -- or automatically , from pressure-sensitive devices , like a simple rubber air hose .", "Motion detectors and infrared devices are also used .", "The vast majority of these bombs do not kill soldiers , or even injure them seriously .", "Four out of five I.E.D. ` s that detonate do not cause casualties , an improvement over previous years , the Pentagon says .", "But those devices that do cause casualties are killing more soldiers .", "An analysis by The New York Times of military records found that in 2003 , the devices accounted for 16 percent of troop fatalities .", "This year , they accounted for 43 percent .", "And an increasing number are killing more than one soldier .", "`` Unfortunately , when there is a fatal I.E.D. attack , there often are multiple wounded and casualties , '' said Christine DeVries , a spokeswoman for the Pentagon 's Joint I.E.D. Defeat Organization .", "`` The enemy has had some success in adapting to what we are doing . ''", "Lance Cpl . Jon Eric Bowman , 21 , affectionate and angel-faced , was typical of many of the soldiers and marines who found their calling in the military .", "He was raised in rural Dubach , La . , far from the razzmatazz of New Orleans , and could not wait to join after the Sept .
11 attacks .", "He was first sent to Iraq early in 2005 .", "When he came home later that year , he had changed .", "Three days before he was set to redeploy this September , he sat with his wife in their truck and talked for six hours .", "`` He was crying , he was so scared , '' said his wife , Dawn Bowman , 26 .", "`` He was having dreams that he was n't coming back . ``", "In fact , Corporal Bowman had been having blackouts , migraines and a tic , new ailments from his time in Iraq , his wife said .", "The diagnosis was Tourette 's syndrome , and he was then told by doctors in Louisiana that fluid had built up in his brain .", "He wound up back in Iraq , anyway .", "`` They felt he was just trying to get out of Iraq , '' said Johnny Bowman , the corporal 's father , of his son 's superiors .", "`` That there was really nothing wrong with him .", "That 's what he told me on the phone . ``", "Corporal Bowman did not push the issue , feeling guilty about abandoning his fellow marines .", "On Oct . 9 , his Humvee ran across a roadside bomb , killing him instantly .", "He had been manning the machine gun .", "`` Jon Eric was not just my only son , '' his father said .", "`` He was my best friend . ''", "Lance Cpl . Jeromy D . West , 20 , a mortar man who loved to fish as much as he hated to study , was killed on Nov . 25 by a sniper bullet as he stood guard on a roof in Haditha .", "It was his second deployment .", "In December , shortly after word of his death , his family honored his wishes and held a memorial for him on the football field at Hamilton High School , near San Diego , where he had been a star player .", "A thousand people showed up .", "`` Everybody liked him , '' his stepfather , Ron Klopf , said .", "`` People would say , ' God , your son is polite . '", "And I would say , ` My kid .", "` I called him Eddie Haskell -- so polite at everybody else 's house . ``", "Corporal West was goofy in the best way .", "Not long before he joined the Marines , he and his friend would compete to see who could get a bigger freeze headache from eating too much ice cream .", "They would writhe in pain .", "Then they would do it again .", "He was 17 when he decided to get serious and join the corps , something his parents tried to talk him out of .", "`` ` You can get killed doing this , ' '' Mr. Klopf remembers saying .", "`` And he said , ' Should we send some other parent 's kid out there .", "` And that 's how he was . ``", "For Corporal Burnett , death came not from bullets or bombs but from riding in a tank in a country crisscrossed with irrigation canals and crumbly roads .", "Just two years after graduating from high school in St . Cloud , Fla . , where he spent his summers building houses for the poor and four-wheeling on back-country roads , Corporal Burnett 's tank fell off a bridge and plunged into a canal , in which he drowned .", "His mother can not forget the day Jason and his younger brother tossed her back and forth in the yard to make her scream with laughter .", "`` He was a fun-loving kid , '' Mrs. Burnett said .", "`` If you heard laughter , you knew Jason was around . ''", "Optimism was Specialist Robert F .
Weber 's indelible quality .", "A gunner from Cincinnati , he had warned his mother , Cathy , that the roads in Iraq were wretched .", "She worried a lot during his first deployment , particularly after he sent home a roll of film to develop .", "The first print she saw was of a missile hitting a barracks .", "But he made it back to America and bought a blue Kia , the color of his eyes , before redeploying three weeks later .", "The Army had been a good fit .", "`` He was proud of himself , '' she said of Bobby , her only child .", "`` I was very proud .", "It was like he found his niche . ``", "On his second deployment , though , the situation in Iraq had become grimmer .", "`` Mom , things are getting worse over here , more dangerous , '' he said , from his base near Mosul the Saturday before he died .", "`` The roads are bad .", "You do n't run over anything even if it looks like a piece of paper . ``", "But the lumbering armored Humvee he was on never hit a bomb on Sept . 30 .", "It swerved somehow and flipped , killing him .", "Mrs. Weber said she can not imagine seeing the troops walk away from Iraq now , when democracy seems as unattainable as ever .", "`` For what did all these guys get killed over there .", "`` she asked , incredulously .", "`` What for .", "`` Seven Days from Home Back in America , countless families and friends have waited and worried and tried their best these past years to keep themselves busy until their husbands , sons , wives , daughters , fathers , mothers or buddies returned home safely .", "For 3,000 of them , the reunion never came .", "In too many cases , the homecoming was tantalizingly near , a few more X 's on the calendar and the vigil would be over .", "A number of soldiers were killed just days and weeks from the end of their deployment , a date close enough to allow those back home to lower their guard a trifle , making the deaths all the more devastating .", "`` It 's almost like Christmas is here , and you wake up Christmas morning and there is no Christmas , `` said Col . Bill Rochelle , a retired National Guard commander of the 42nd Division support command .", "Gunnery Sgt . John D . Fry , a 28-year-old marine from Lorena , Tex . , was seven days from scooping up his wife , Malia , and his three kids into a group hug back in America .", "`` My plans , '' Sergeant Fry told his commander , `` are to go home and wrestle with my kids . ''", "He and Mrs. Fry were only 15 when they went on their first date , to see `` A League of Their Own , '' and then to eat ice cream at the mall .", "Mom and Dad drove them home .", "A year later , he plopped her on his lap and proposed .", "They kept their engagement a secret .", "Not long after , he was named salutatorian at Heritage Christian Academy .", "Another student bested him for the top title .", "It was the future Mrs. Fry , the valedictorian .", "`` We were soul mates , '' Mrs. Fry said .", "On Nov . 15 , 1995 , five days after he graduated from boot camp , they were married .", "Mr. Fry , who liked a challenge , specialized in defusing explosive devices , a nerve-racking skill he brought with him to Iraq .", "`` Babe , '' Mrs. Fry recalled his saying when he chose the specialty , `` it 's dangerous , but I want to do it .", "And I said , ` Let 's go .
'", "`` A team leader , Sergeant Fry , who shipped out to Iraq in September 2005 , disarmed 73 bombs , including one of the biggest car bombs found in Falluja .", "Once he helped defuse a suicide vest that insurgents had belted to a mentally handicapped Iraqi teenage boy .", "The boy had been beaten and chained to a wall .", "Another time , he spotted a bomb from the roof of a house .", "A little boy popped into the yard , hovering dangerously close to it .", "Sergeant Fry won his confidence by playing peekaboo , then got him to move away .", "He was in `` very high spirits '' in March , calling his wife to say that his duties were done , his paperwork filed and his anticipation impossible to stifle .", "`` He had made it , '' she said .", "Then a mission came down , and commanders were preparing to send a team of mostly inexperienced men to defuse bombs along a road in Al Anbar province .", "He volunteered for the job , instead .", "`` That is how he led , '' Mrs. Fry said .", "Sergeant Fry found three bombs that night and defused them .", "But the insurgents had hidden a fourth bomb under the third one , a booby-trap .", "It blew up and killed him .", "An Army team stayed with his body for six hours , fending off enemy fire in the dark until soldiers with mortuary affairs arrived to take his body away .", "The war never scared him , Mrs. Fry said .", "`` It was hard , but he felt he was making a difference , '' she said .", "`` He believed truly , that if he was n't over there , they would be trying to harm us here . ``", "Estimates of Iraqi Civilian Deaths Unlike the tally for American service members , no official count is available for the number of civilians killed in Iraq .", "Estimates based on mortuary tallies , news reports and household surveys vary widely .", "Using figures provided by the Iraq Ministry of Health , which counts violent deaths at hospitals across the country , and Baghdad 's central morgue , the United Nations Assistance Mission for Iraq has estimated that more than 28,000 Iraqi civilians were killed during the first 10 months of 2006 , a number 40 times higher than the number of American service members killed during that time .", "The American military has criticized the civilian count as high , but it has not released statistics of its own .", "In its quarterly reports to Congress , the Pentagon has provided a rough estimate of the number of Iraqi civilians and security forces killed or wounded by insurgents .", "That number has risen sharply , to an average of more than 90 a day since last May .", "In an off-the-cuff estimate during an official visit to Vienna in November , Iraq 's health minister , Ali al-Shimari , said that 150,000 Iraqis had been killed in violence since the war began in 2003 , the Associated Press has reported .", "Iraq Body Count , an independent group that monitors news reports of deaths , has recorded the deaths of more than 52,000 Iraqi civilians .", "The highest estimates of the civilian toll come from a team of researchers from the Johns Hopkins Bloomberg School of Public Health .", "In a study published in The Lancet , a British medical journal , they estimated 600,000 Iraqis died from violence between March 2003 and July 2006 , basing their analysis on a survey of 1,849 households in 47 neighborhoods across Iraq .", "That study was widely criticized -- the sample interviewed may not accurately represent the entire country -- but it emphasized both the difficulty of tracking deaths in a war zone and the need for a reliable count 
.", "There were three , not four ."], "summary": ["List of US military fatalities in Iraq has reached milestone of 3,000 deaths since March 2003 invasion .", "Number reflects how much more dangerous soldier 's job has become in face of growing and increasingly sophisticated insurgency .", "Pentagon reports that violence in country is at all-time high .", "No official count is available for number of civilians killed .", "Estimates based on mortality tallies , news reports and household surveys vary widely .", "Brief sketches of several US servicemen killed in Iraq .", "Graphs .", "Photos ."], "publication": "nyt50", "label": [11, 187, 186, 12], "tag": ["World", "Front Page"]} -{"id": "1815836", "text": ["Eliot Laurence Spitzer takes the oath of office as New York 's 54th governor in a capital desperately in need of a new moral authority figure .", "Whether or not he succeeds in fulfilling the soaring expectations that led to his landslide victory , his political timing is fortunate .", "After making ethical reform one of the central pledges of his campaign , Mr. Spitzer has watched from the sidelines in recent weeks as scandal has engulfed the upper reaches of state government .", "`` This is a crisis that I do n't want to waste , `` Mr. Spitzer said in an interview before he was sworn in early today .", "-LSB- Page B4 . -RSB-", "`` If there is a result of this momentary concentration on ethical dilemmas and failures , an opportunity to galvanize support for reform , we better seize it . ''", "Since the November election , Comptroller Alan G . Hevesi , a Democrat , has resigned and pleaded guilty to a felony .", "The Senate majority leader , Joseph L . Bruno , a Republican , has revealed that federal authorities were investigating his outside consulting work .", "State Senator Efrain Gonz\u00e1lez Jr . , a Democrat , has been charged with stealing more than $ 400,000 in state money .", "And a state court judge has forced a recalcitrant Legislature to reveal how it spends hundreds of millions of dollars worth of taxpayer funds on pet projects .", "In his effort to change the culture of Albany , Mr. Spitzer said that upon taking office , he would issue `` a slew '' of executive orders `` relating to ethics , procurement and the behavior of the leadership in the agencies where a governor has unilateral control . ''", "The orders will mirror those he recently outlined for his own staff , which included a ban on gifts of more than nominal value and a prohibition on members of the executive branch from lobbying any part of the executive branch for two years after they leave their posts .", "More daunting will be his efforts to push lawmakers to pass a number of measures early in the legislative session as he seizes on his landslide victory and the recent corruption scandals to push his agenda .", "`` He has talked about redistricting reform , he 's talked about campaign finance reform , he 's talked about ending pay to play and he 's talked overall about a transparent and accountable state government , `` said Blair Horner , the legislative director of the New York Public Interest Research Group .", "`` If he accomplishes all of that , that 's probably more reform than we 've seen in New York for the last 200 years . ``", "Mr. Spitzer is trying to reshape a state government notoriously resistant to change .", "Voters re-elected Mr. 
Hevesi as their comptroller after he admitted using state employees to chauffeur his wife .", "And while nearly a dozen lawmakers faced criminal charges in the last few years , many were subsequently re-elected .", "Mr. Spitzer will probably gain more traction from voters by delivering on his other priorities , including lowering property taxes and revitalizing the upstate economy .", "As attorney general , Mr. Spitzer made his name by taking on Wall Street corruption as the bull market of the late 1990s unraveled and the Securities and Exchange Commission had a limited appetite for enforcement , giving him an opening to redefine his job and make himself a nationally known figure .", "But he now must learn to work by consensus instead of by subpoena , and close watchers of the Albany scene say Mr. Spitzer must act fast so as not to lose momentum .", "Still , while reform proposals have stalled for years in the legislature , lawmakers are aware that Mr. Spitzer was elected by a record margin .", "`` Right now the Legislature is on its heels , '' Mr. Horner said .", "`` They know the public and the new governor wants action , and I think the Legislature will want to accommodate him , but if he gets bogged down in the first session , it makes it harder to achieve the things he wants to do . ''", "Already , Senate Republicans have embraced Mr. Spitzer 's proposal to delineate all spending items in the state budget , ending the practice by the Legislature and Gov . George E . Pataki of writing large blank checks into the budget that they could later spend , largely in secret .", "Certainly , the new governor has a much more caffeinated style than the old one .", "There were titters in political circles when Mr. Pataki recently escorted a group of former Republican governors to the Broadway show `` The Drowsy Chaperone . ''", "The play 's title echoed the criticisms frequently lobbed at Mr. Pataki -- that he has been rarely seen or heard in Albany as its problems have festered .", "Mr. Spitzer has the on-the-offensive style of a former prosecutor and is well known for rattling rich and powerful cages .", "He made a campaign slogan of changing Albany `` on Day 1 '' of his administration , but sought to set a new tone even earlier .", "Shortly before the election , he withdrew his endorsement of Mr. Hevesi , even though the two had been close political allies .", "Mr. Spitzer said that after the election he would begin adhering to a set of campaign finance guidelines far stricter than those required by state law .", "There have been , however , some steps that have raised eyebrows , including Mr. Spitzer 's appointment of a Cablevision lobbyist to be his new secretary of state .", "But many of his closest advisers will be veterans of the attorney general 's office or of his campaign .", "In his inaugural address , Mr. Spitzer will strive to flower his litigator 's prose with rhetorical flourishes .", "Along those lines , he said he would `` hearken back to the great transformations New York has gone through -- the Erie Canal is everybody 's favorite metaphor , and there will be some passing reference to it -- and Teddy Roosevelt 's effort to bring a new ethic of governance to the capital . 
``", "`` The theme of the inaugural speech is clearly going to be that we are turning a corner , '' he said .", "`` We have to think of ourselves through a different prism , both in terms of ethics and as an entire state .", "We have to think of ourselves as one New York and not a series of interests that are spoken to , appealed to and mollified . ``", "Brass tacks will probably wait until Wednesday , when Mr. Spitzer delivers his first address to the Legislature , and until Feb . 1 , when his first executive budget is due .", "`` Somebody told me an inaugural is values , a State of the State is principles and the budget is the bad news , '' he said .", "The bad news starts with the multibillion-dollar deficits projected for the state in future years .", "`` The surplus we had at the end of the year is no different than an advance on a credit card bill that will come due on our next statement , '' he said .", "`` There is an increasing series of deficits as we proceed to Year 2 , 3 and 4 .", "The deficit increases by several billion dollars a year thereafter , and this is a consequence of prior budgets having pushed costs into the out years .", "That will make it tough , and tough choices have to be made . ``", "But Mr. Spitzer has some costly promises to keep , and conversely , some politically risky cuts to make .", "During the campaign , he outlined a complicated set of priorities , including reining in state spending to offset escalating future deficits , but also increasing spending on education and cutting by half the number of New Yorkers without health insurance .", "In his speech , he said , he will touch on his three overarching goals .", "At the forefront are his plans for ethical reforms , which include exploring changes to the quasi-independent public authorities that control much of the state 's spending but act , as he has said , as a `` shadow government '' with little accountability .", "A second focus will be investing in the state 's infrastructure , including large transportation projects like replacing the Tappan Zee Bridge and building the Second Avenue subway .", "He also wants the state to take a much more assertive role in creating lower-cost housing .", "`` We have n't had an aggressive state housing policy for how long .", "`` he said .", "`` The state has to participate if we 're going to build the housing stock we need . ``", "Perhaps the most complicated part of his three-pronged agenda is his plan for economic revitalization .", "Reviving the economy of upstate New York , and particularly the Buffalo area , is of such concern that he split the leadership of the Empire State Development Corporation in half , with leaders for upstate and downstate .", "He also wants to rein in state spending overall , in part by overhauling the Medicaid system , but also by lowering property taxes and providing relief to businesses on issues like workers ' compensation policies .", "At the same time , he wants to increase education spending by billions of dollars .", "With so much to do , he hopes to move quickly .", "`` Delay is the enemy of progress , '' he said .", "`` If you do not lay out the agenda early on and lay out the framework for change , we wo n't get there . 
`` ."], "summary": ["Eliot Spitzer is sworn in as New York 's 54th governor , after making ethics reform central pledge of landslide victory in his election campaign and watching in recent weeks as scandal reached upper reaches of state government .", "Spitzer says that he will issue slew of executive orders relating to ethics , procurement and behavior of leadership in agencies where governor has unilateral control .", "Orders will mirror those he recently outlined for his own staff , which included ban on gifts of more than nominal value and prohibition on members of executive branch from lobbying any part of executive branch for two years after they leave their posts .", "Photo ."], "publication": "nyt50", "label": [11, 10, 2], "tag": ["Front Page", "New York and Region"]} -{"id": "1815837", "text": ["The Democrats taking over Congress this week are promising sweeping changes to ethics and lobbying laws , pledging to clean up after a spate of corruption scandals under Republican rules .", "So far , however , their proposals are not as comprehensive or far-reaching as changes already adopted by many state legislatures .", "Democrats in both chambers are proposing new restrictions on gifts , meals or trips paid for by lobbyists .", "They say they plan for the first time to require lawmakers to disclose their sponsorship of the pet items known as earmarks that they insert into major bills .", "House Democrats also want to require members to certify that they will not personally profit from the projects .", "Several states , responding to the federal scandals as well as their own statehouse imbroglios , have already adopted more sweeping gift and travel bans , broader measures to end the central role of lobbyists or government contractors in financing campaigns and new public campaign financing intended to reduce lawmakers ' dependence on big donors .", "To enforce their rules , about half the states have also created independent ethics watchdogs , outside the control of the lawmakers they police -- something federal lawmakers have so far resisted .", "House Democrats recently said they would create a panel to study the idea .", "John Hurson , a former member of the Maryland General Assembly and president of the National Council of State Legislatures , remembers marveling at the goings -on just a few miles away in the United States Capitol .", "He was barred from letting a lobbyist buy him a cup of coffee under rules enforced by the Maryland Ethics Commission .", "Meanwhile , congressmen were flying across the country for golf trips with lobbyists and enlisting them as major fund-raisers for their re-election campaigns .", "`` It was amusing in a sad kind of way , '' said Mr. Hurson , who now works as a Washington lobbyist himself , for a cosmetics industry trade group .", "`` At the state level in Maryland a lobbyist ca n't even have his name on a campaign flier .", "And at the federal level some of these guys are basically running campaigns . 
``", "Democrats say their proposals are significant first steps , especially given the customary opposition of most incumbents toward rules that would restrict their fund-raising edge .", "The Democrats argue that their proposals go further than anything Republicans managed to pass .", "`` It is an important step forward from where we have been , let 's put it that way , `` said Representative Chris Van Hollen , the Maryland Democrat who is taking over the Democratic Congressional Campaign Committee and is a proponent of several more drastic changes .", "Still , some Democrats say they hope the Congress will go beyond the party leaders ' current proposals .", "They argue that their party took control of Congress in part because of backlash against the corruption scandals under the Republicans , that many new members campaigned on ethics reform and that a failure to deliver meaningful changes could hurt the party in the 2008 elections .", "Lawmakers say the Supreme Court made it difficult to regulate campaign spending by ruling in 1976 , in Buckley v. Valeo , that it is a protected form of free expression .", "States , however , are testing the limits of that decision .", "More than a dozen states , including New Jersey and Connecticut , have enacted so-called pay-to-play laws that block contractors or executives of their companies from making campaign contributions to officials who could influence state contracts .", "Connecticut , reeling from a payoff scandal that unseated its governor , recently passed a pay-to-play law that takes aim at a time-tested tactic for evading contribution limits : funneling money through dependents .", "The law bans campaign contributions not only from lobbyists and contracting executives but also from their children and spouses .", "To make enforcement easier , lobbyists and contractors would be required to disclose the names of their family members on a public Web site .", "-LRB- No Congressional proposal does the same . -RRB-", "On Tuesday , a federal district court judge in Connecticut will hear a challenge to the law .", "Connecticut has also borrowed some aspects of a decade-old Maryland law that seeks to restrict the most valuable gift that lobbyists give lawmakers : campaign fund-raising .", "At the federal level , caps on individual or corporate campaign contributions have placed a premium on `` bundlers , '' who solicit and collect donations to turn over in bulk to a candidate 's campaign .", "Many Washington lobbyists are among the biggest bundlers , and even help run re-election campaigns .", "Across the District of Columbia border in Maryland , state law bars lobbyists from soliciting contributions for candidates or playing any roles in the campaigns .", "`` Lobbyists can no longer be the center of the fund-raising process , '' Mr. Van Hollen of Maryland said .", "Mr. 
Van Hollen said he planned to introduce a measure requiring federal lobbyists to disclose whom they ask for the contributions that they bundle and how much those people give .", "A similar measure was deleted from a bill by the Republican leadership before it reached the floor .", "States are also adopting new forms of public campaign financing .", "Congressional candidates receive no public financing , and there is no limit on what they can spend .", "And the public financing system adopted for presidential campaigns after the Watergate scandal is on the brink of obsolescence .", "For the first time in three decades , the major 2008 presidential candidates are expected to reject the system in favor of raising unlimited private funds .", "Several states , however , are expanding the idea .", "Maryland and New Jersey are among those considering a system enacted in Arizona and Maine .", "The new Connecticut law includes a modified form of the idea , known as `` clean elections . ''", "Participating candidates who get a certain amount of small contributions -- as low as $ 5 in some places -- receive large lump sums of public campaign money early in the race if they agree not to raise or spend private funds .", "And , up to a limit , the state pledges to give participating candidates enough money to match the campaign spending of any rival candidate outside the system .", "No state , of course , has eradicated the influence of money .", "In Maryland , for example , lobbyists can not take individual lawmakers to dinner but can treat whole legislative committees , a rule lobbyists say favors the well-financed .", "Even so , some Annapolis lobbyists appreciate the fund-raising ban .", "`` Legislators can call and say they need your help , '' said Minor Carter , a Republican lobbyist .", "`` And you have the absolute defense of saying , ' I 'm sorry , I ca n't . ' `` ."], "summary": ["Democrats taking over Congress are promising sweeping changes to ethics and lobbying laws , pledging to clean up after spate of corruption scandals under Republican rules .", "Their proposals are not as comprehensive or far-reaching as changes already adopted by many state legislatures .", "Several states have adopted more sweeping gift and travel bans , broader measures to end central role of lobbyists or government contractors in financing campaigns and new public campaign financing intended to reduce lawmakers ' dependence on big donors ."], "publication": "nyt50", "label": [5, 0, 1], "tag": ["Front Page", "U.S.", "Washington"]} -{"id": "1815838", "text": ["With his plain pine coffin strapped into an American military helicopter for a predawn journey across the desert , Saddam Hussein , the executed dictator who built a legend with his defiance of America , completed a turbulent passage into history on Sunday .", "Like the helicopter trip , just about everything in the 24 hours that began with Mr. Hussein 's being taken to his execution from his cell in an American military detention center in the postmidnight chill of Saturday had a surreal and even cinematic quality .", "Part of it was that the Americans , who turned him into a pariah and drove him from power , proved to be his unlikely benefactors in the face of Iraq 's new Shiite rulers who seemed bent on turning the execution and its aftermath into a new nightmare for the Sunni minority privileged under Mr. Hussein .", "-LSB- Page A7 . -RSB-", "The 110-mile journey aboard a Black Hawk helicopter carried Mr. 
Hussein 's body to an American military base north of Tikrit , Camp Speicher , named for an American Navy pilot lost over Iraq in the first hours of the Persian Gulf war in 1991 .", "From there , an Iraqi convoy carried him to Awja , the humble town beside the Tigris River that Mr. Hussein , in the chandeliered palaces that became his habitat as ruler , spoke of as emblematic of the miseries of his lonely and impoverished youth .", "The American role extended beyond providing the helicopter that carried Mr. Hussein home .", "Iraqi and American officials who have discussed the intrigue and confusion that preceded the decision late on Friday to rush Mr. Hussein to the gallows have said that it was the Americans who questioned the political wisdom -- and justice -- of expediting the execution , in ways that required Prime Minister Nuri Kamal al-Maliki to override constitutional and religious precepts that might have assured Mr. Hussein a more dignified passage to his end .", "The Americans ' concerns seem certain to have been heightened by what happened at the hanging , as evidenced in video recordings made just before Mr. Hussein fell through the gallows trapdoor at 6:10 a.m. on Saturday .", "A new video that appeared on the Internet late Saturday , apparently made by a witness with a camera cellphone , underscored the unruly , mocking atmosphere in the execution chamber .", "This continued , on the video , through the actual hanging itself , with a shout of `` The tyrant has fallen ! May God curse him ! '' as Mr. Hussein hung lifeless , his neck snapped back and his glassy eyes open .", "The cacophony from those gathered before the gallows included a shout of `` Go to hell ! '' as the former ruler stood with the noose around his neck in the final moments , and his riposte , barely audible above the bedlam , which included the words `` gallows of shame . ''", "It continued despite appeals from an official-sounding voice , possibly Munir Haddad , the judge who presided at the hanging , saying , `` Please no ! The man is about to die . ''", "The Shiites who predominated at the hanging began a refrain at one point of `` Moktada ! Moktada ! Moktada ! '' -- the name of a volatile cleric whose private militia has spawned death squads that have made an indiscriminate industry of killing Sunnis -- appending it to a Muslim imprecation for blessings on the Prophet Muhammad .", "`` Moktada , '' Mr. Hussein replied , smiling contemptuously .", "`` Is this how real men behave .", "`` American officials in Iraq have been reluctant to say much publicly about the pell-mell nature of the hanging , apparently fearful of provoking recriminations in Washington , where the Bush administration adopted a hands-off posture , saying the timing of the execution was Iraq 's to decide .", "While privately incensed at the dead-of-night rush to the gallows , the Americans here have been caught in the double bind that has ensnared them over much else about the Maliki government -- frustrated at what they call the government 's failure to recognize its destructive behavior , but reluctant to speak out , or sometimes to act , for fear of undermining Mr. 
Maliki and worsening the situation .", "But a narrative assembled from accounts by various American officials , and by Iraqis present at some of the crucial meetings between the two sides , shows that it was the Americans who counseled caution in the way the Iraqis carried out the hanging .", "The issues uppermost in the Americans ' minds , these officials said , were a provision in Iraq 's new Constitution that required the three-man presidency council to approve hangings , and a stipulation in a longstanding Iraqi law that no executions can be carried out during the Id al-Adha holiday , which began for Iraqi Sunnis on Saturday and Shiites on Sunday .", "A senior Iraqi official said the Americans staked out their ground at a meeting on Thursday , 48 hours after an appeals court had upheld the death sentence passed on Mr. Hussein and two associates .", "They were convicted in November of crimes against humanity for the persecution of the Shiite townspeople of Dujail , north of Baghdad , in 1982 .", "Mr. Hussein , as president , signed a decree to hang 148 men and teenage boys .", "Told that Mr. Maliki wanted to carry out the death sentence on Mr. Hussein almost immediately , and not wait further into the 30-day deadline set by the appeals court , American officers at the Thursday meeting said that they would accept any decision but needed assurance that due process had been followed before relinquishing physical custody of Mr. Hussein .", "`` The Americans said that we have no issue in handing him over , but we need everything to be in accordance with the law , '' the Iraqi official said .", "`` We do not want to break the law . ''", "The American pressure sent Mr. Maliki and his aides into a frantic quest for legal workarounds , the Iraqi official said .", "The Americans told them they needed a decree from President Jalal Talabani , signed jointly by his two vice presidents , upholding the death sentence , and a letter from the chief judge of the Iraqi High Tribunal , the court that tried Mr. Hussein , certifying the verdict .", "But Mr. Talabani , a Kurd , made it known that he objected to the death penalty on principle .", "The Maliki government spent much of Friday working on legal mechanisms to meet the American demands .", "From Mr. Talabani , they obtained a letter saying that while he would not sign a decree approving the hanging , he had no objections .", "The Iraqi official said Mr. Talabani first asked the tribunal 's judges for an opinion on whether the constitutional requirement for presidential approval applied to a death sentence handed down by the tribunal , a special court operating outside Iraq 's main judicial system .", "The judges said the requirement was void .", "Mr. Maliki had one major obstacle : the Hussein-era law proscribing executions during the Id holiday .", "This remained unresolved until late Friday , the Iraqi official said .", "He said he attended a late-night dinner at the prime minister 's office at which American officers and Mr. Maliki 's officials debated the issue .", "One participant described the meeting this way : `` The Iraqis seemed quite frustrated , saying , ' Who is going to execute him , anyway , you or us .", "` The Americans replied by saying that obviously , it was the Iraqis who would carry out the hanging .", "So the Iraqis said , ` This is our problem and we will handle the consequences .", "If there is any damage done , it is we who will be damaged , not you . 
'", "`` To this , the Iraqis added what has often been their trump card in tricky political situations : they telephoned officials of the marjaiya , the supreme religious body in Iraqi Shiism , composed of ayatollahs in the holy city of Najaf .", "The ayatollahs approved .", "Mr. Maliki , at a few minutes before midnight on Friday , then signed a letter to the justice minister , `` to carry out the hanging until death . ''", "The Maliki letter sent Iraqi and American officials into a frenzy of activity .", "Fourteen Iraqi officials , including senior members of the Maliki government , were called at 1:30 a.m. on Saturday and told to gather at the prime minister 's office .", "At 3:30 a.m. , they were driven to the helicopter pad beside Mr. Hussein 's old Republican Palace , and taken to the prison in the northern suburb of Khadimiya where the hanging took place .", "At about the same time , American and Iraqi officials said , Mr. Hussein was roused at his Camp Cropper cell 10 miles away , and taken to a Black Hawk helicopter for his journey to Khadimiya .", "None of the Iraqi officials were able to explain why Mr. Maliki had been unwilling to allow the execution to wait .", "Nor would any explain why those who conducted it had allowed it to deteriorate into a sectarian free-for-all that had the effect , on the video recordings , of making Mr. Hussein , a mass murderer , appear dignified and restrained , and his executioners , representing Shiites who were his principal victims , seem like bullying street thugs .", "But the explanation may have lain in something that Bassam al-Husseini , a Maliki aide closely involved in arrangements for the hanging , said to the BBC later .", "Mr. Husseini , who has American citizenship , described the hanging as `` an Id gift to the Iraqi people . ''", "The weekend 's final disorderly chapter came with the tensions over Mr. Hussein 's body .", "For nearly 18 hours on Saturday , Mr. Maliki 's officials insisted that his corpse would be kept in secret government custody until circumstances allowed interment without his grave becoming a shrine or a target .", "Once again , the Americans intervened .", "The leader of Mr. Hussein 's Albu-Nasir tribe , Sheik Ali al-Nida , said that before flying to Baghdad on an American helicopter , he had been so fearful for his safety that he had written a will .", "Bizarrely , Sheik Nida and others were shown on Iraqi television collecting the coffin from the courtyard in front of Mr. Maliki 's office , where it sat unceremoniously in a police pickup .", "After the helicopter trip to Camp Speicher , the American base outside Tikrit , the coffin was taken in an Iraqi convoy to Awja , and laid to rest in the ornate visitors ' center that Mr. Hussein ordered built for the townspeople in the 1990s .", "Local officials and members of Mr. Hussein 's tribe had broken open the marbled floor in the main reception hall , and cleared what they said would be a temporary burial place until he could be moved to a permanent grave outside Awja where his two sons , Uday and Qusay , are buried .", "At the burial , several mourners threw themselves on the closed casket .", "One , a young man convulsed with sobs , cried : `` He has not died .", "I can hear him speaking to me . ``", "Another shouted , `` Saddam is dead !
Instead of weeping for him , think of ways we can take revenge on the Iranian enemy , '' Sunni parlance for the Shiites now in power .", "THE STRUGGLE FOR IRAQ ."], "summary": ["US and Iraqi officials who have discussed intrigue and confusion that preceded decision to rush Saddam Hussein to gallows have said that it was Americans who questioned political wisdom and justice of expediting execution , in ways that required Prime Min Nuri Kamal al-Maliki to override constitutional and religious precepts that might have assured Hussein more dignified passage to his end .", "New video that has appeared on Internet underscores unruly , mocking atmosphere in execution chamber .", "Photo ."], "publication": "nyt50", "label": [7], "tag": ["Technology", "World", "Front Page", "Washington"]} -{"id": "1815839", "text": ["It may fall short of a feel-good sequel to `` Chinatown , '' the movie based on the notorious , somewhat shady water grab by Los Angeles that allowed the city to bloom from a semi-arid desert .", "But in one of the largest river restoration efforts in the West , water is again flowing along a 62-mile stretch of the Owens River after a dry spell of nearly a century .", "That part of the river had been left mostly drained when upstream water , fed by snowmelt from the towering Sierra Nevada , was channeled 233 miles south to fill swimming pools and bathtubs throughout Los Angeles .", "The restored flow is among several long-awaited steps the city is taking to help make amends for the environmental consequences of its water maneuvering , most notably the drying up of Owens Lake , an area more than three times the size of Manhattan , here in the Owens Valley .", "Los Angeles agreed in December to expand efforts to control toxic dust storms that erupt from what is left of the lake , a 110-square-mile body that emptied when the river was diverted to Los Angeles through an aqueduct opened in 1913 .", "The lake 's salty , mineral-laced basin has been the largest single source of particulate pollution in the country .", "It looks so otherworldly that it doubled as a desolate planet in the movie `` Star Trek V : The Final Frontier . ''", "To restore the river , Los Angeles built automated gates at the point where the river veers into the aqueduct .", "The gates steer some water into the original riverbed , setting the stage for the growth of cottonwood trees and other plants and the return of waterfowl and other animals .", "Much of the water eventually returns to the aqueduct , though some of it is being used for lake irrigation and other projects .", "Environmentalists here say they are keeping an eye on Los Angeles for backsliding , but they acknowledge that the new efforts will make a significant difference .", "As winds whipped across Owens Lake on a recent afternoon , Mike Prather of the Owens Valley Committee , which along with the Sierra Club took Los Angeles to court over the environmental fallout of its water policies , marveled at sandpipers , American avocets and other birds frolicking in the shallow pools created by the irrigation .", "`` This work will bring back more and more of them , '' Mr. Prather said , savoring the twist in the battle that means water once intended for Los Angeles will feed the lake .", "`` It 's Owens Valley 's turn to stick its straw in L.A. 
` s water , '' he said .", "Court rulings and the threat of legal action have largely forced Los Angeles 's hand in dealing with its past water moves , but city leaders say they are also intent on doing the right thing in keeping up a vital source of water while avoiding further damage to the Owens Valley .", "H . David Nahai , president of the board that oversees the Los Angeles Department of Water and Power , said Los Angeles was looking for less adversarial ways to resolve differences over the valley , which provides 40 percent to 60 percent of the city 's water supply , depending on the snowfall in the mountains .", "`` We ca n't change the past , but we can shape the future , `` said Mr. Nahai , one of five board members appointed by Mayor Antonio R . Villaraigosa , who promised a friendlier approach to the valley when he took office in July 2005 .", "Susan Cash , the chairwoman of the Board of Supervisors of Inyo County , where the Owens Valley is located , said animosity toward Los Angeles had lessened since the early 20th century , when the water diversion was made possible by the purchase of much of the valley by less-than-forthcoming city operatives .", "The underhanded moves , as chronicled by historians , included city representatives posing as ranchers as they bought up property .", "The questionable land dealing provided the inspiration for `` Chinatown , '' the 1974 movie starring Jack Nicholson as a private detective who stumbles across corruption on a Los Angeles water project .", "Water from the valley made possible the growth of what became the nation 's second-largest city .", "But people in the valley have long regarded the water dealings as a double-edged sword .", "Officials here have argued that the water diversion undercut the potential for growth .", "But others say that such prospects were dim anyway in such a dry and remote valley , and that Los Angeles 's keeping the water clean and the land relatively untouched has been a boon .", "Los Angeles 's policy of allowing public access to much of its land and the fact that many people here have worked for the Los Angeles Department of Water and Power , one of the valley 's largest employers , or have friends or relatives there , have contributed to improved relations .", "The godfathers of Ms. Cash 's children worked for the department .", "`` The fact is , '' she said , `` we are in a marriage with no annulment in the near future , so we have to find a way to work together . 
''", "Inyo officials said the city 's projects could inspire more tourism , the only real economic activity in this dry , high-desert valley .", "`` We have recreational users now but not to the extent it can be once the river is flowing and there is sufficient water for fish and wildlife , '' said Arlene Grider , president of the chamber of commerce here .", "The long-promised river restoration is a $ 24 million project , compensation won from a lawsuit by environmental groups over excessive groundwater pumping .", "It came after delays that prompted a county judge in September 2005 to impose daily fines of $ 5,000 on Los Angeles .", "The penalty has so far cost the city $ 2.3 million and will continue until a large volume of water flows through the river in the coming months .", "The work on the lake , scheduled to be completed by 2010 , will irrigate or otherwise control dust over 43 square miles .", "The improvements result from an agreement the city signed with the local air pollution control regulator in 1998 that sets a timetable to comply with federal requirements to control dust on the lake .", "The city has spent $ 400 million on dust control for just under 30 square miles of the worst pockets , and in December , through a mediator , it agreed to do 12.7 more square miles by 2010 at a cost of $ 105 million .", "A water department spokeswoman in Los Angeles , Carol Tucker , said ratepayers would see relatively modest increases in their monthly bills .", "The river restoration , for example , would amount to an increase of about 26 cents .", "Los Angeles has one of the country 's more intensive conservation programs , allowing it to use roughly the same amount of water even as it has grown by 750,000 residents in the past two decades .", "But environmentalists say they doubt the city can grow much more without finding more water .", "Mr. Nahai said the Department of Water and Power was already studying other possibilities , like using groundwater from within Los Angeles , buying water from other places and desalinating ocean water .", "But one thing is certain , he said : `` Are we going to get to a place where we are going to pump all the water out .", "No . ``", "Still , most everyone suggests there could be rough going ahead .", "Ms. Cash , the Inyo County supervisor , said officials were only `` cautiously optimistic '' about a changed relationship with Los Angeles because they had heard nice words from the city before , only to end up in court .", "Mr. Nahai acknowledged that the litigious nature of the relationship would be difficult to break .", "`` Nobody can guarantee there wo n't be litigation in the future , and litigation has its uses , `` he said .", "`` There is no denying what the City of Los Angeles has done far too often has been because of court order . ''", "He added , `` It 's like what Mark Twain said : ` Whiskey is for drinking , and water is for fighting over . 
' '' ."], "summary": ["Water is again flowing along 62-mile stretch of Owens River in eastern California , in one of largest river restoration efforts in West , and after dry spell of nearly century .", "That part of river had been left mostly drained when upstream water , fed by snowmelt from Sierra Nevada Mountains , was channeled 233 miles south to Los Angeles .", "Restored flow is among several long-awaited steps city is taking to help make amends for environmental consequences of its water maneuvering , most notably drying up of Owens Lake , area more than three times size of Manhattan .", "Diagram .", "Map . Photos ."], "publication": "nyt50", "label": [3, 2, 1], "tag": ["Front Page", "U.S."]} -{"id": "1815841", "text": ["With his wife , his children and his parents looking on , Eliot Laurence Spitzer was officially sworn in at midnight as the 54th governor of New York State during a private ceremony here at the governor 's mansion .", "A cheer rose from the guests : `` Go get ' em , Eliot ! '' And with those words , New York 's Democrats reclaimed the governor 's office after 12 years of Republican rule , and Mr. Spitzer , 47 , ascended to a historic office that has been held by Franklin and Theodore Roosevelt , Alfred E . Smith , Thomas E . Dewey and Hugh L . Carey , who was present .", "A more festive public inauguration ceremony with pomp , circumstance , food and music is scheduled to be held Monday afternoon , but Mr. Spitzer needed to take the oath of office by midnight in order to ensure the smooth transition of government as he took over from Gov . George E . Pataki , whose 12-year term ended at midnight .", "Mr. Spitzer was sworn in by Judge Robert W . Sweet , the Federal District Court judge who gave Mr. Spitzer his start in public service , hiring him as a clerk in 1984 after his graduation from Harvard Law School .", "With the new governor were his wife , Silda Wall Spitzer .", "His daughters , Elyssa , 17 , Sarabeth , 14 , and Jenna , 12 .", "His parents , Bernard and Anne Spitzer .", "And dozens of political figures and family friends .", "The party was not without drama .", "Lloyd Constantine -- Mr. Spitzer 's friend , tennis partner , onetime law partner and co-chairman of his transition team -- continued his tradition of buying Champagne for a Spitzer oath of office .", "The bottles seem to grow in size with the job .", "As Ms. Wall Spitzer put it , `` The bottles just keep getting bigger . ''", "This time Mr. Constantine went for a 12-liter bottle of Veuve Clicquot called a balthazar .", "But opening it did not prove easy -- the Champagne did not flow until Mr. Constantine attacked the giant bottle with a wrench , enlisted the help of another guest , and eventually broke the lip of the bottle , cutting his hand , and waving the bloody hand in triumph .", "Asked if it was worth it , he said , `` Yeah , sure . ''", "The public festivities later Monday were to feature New York State wines from Long Island , the Hudson Valley and the Finger Lakes , as well as regional specialties like Guss 's pickles , Junior 's cheesecake and Buffalo wings from the Anchor Bar in Buffalo .", "There was some trepidation about the weather forecast , though .", "Mr. Spitzer , who spoke about the value of optimism during the campaign , planned an outdoor inaugural for New Year 's Day in Albany -LRB- and a 6 a.m. 
run through Washington Park -RRB- .", "But with light rain falling , and freezing rain forecast , most of the inaugural events were pushed back an hour in the hopes that the bad weather would move through ."], "summary": ["Eliot Spitzer is sworn in as 54th governor of New York State by Federal Judge Robert W Sweet at governor 's mansion in Albany .", "Ceremony is small and private and is attended by family , friends and dignitaries .", "Larger celebration is planned for afternoon of January 2 .", "12-year regime of Gov George E Pataki ended at midnight .", "Photo ."], "publication": "nyt50", "label": [0, 7], "tag": ["New York and Region"]} -{"id": "1815842", "text": ["As she wrote her first inaugural speech , M . Jodi Rell turned to the remarks of Gerald R . Ford .", "Like the former president who took over after the resignation of Richard M . Nixon , Governor Rell was looking for a way to soothe nerves , not make waves .", "She was sworn in on a hot day in July 2004 after the abrupt resignation of Gov . John G . Rowland , a Republican who would be sentenced to a year in federal prison on corruption charges .", "`` People were so disappointed , they were so disillusioned , they had just lost faith , '' Ms. Rell said , apparently referring to both her own situation and President Ford 's .", "`` They just needed somebody to say it 's going to be O.K. and we are going to get past this . ``", "Now , with Mr. Ford 's death coinciding with Ms. Rell 's preparation for her first full term , she said her role as the state 's leader had evolved .", "She was no longer the accidental governor .", "`` After the election , it just felt like coming back to work .", "Nothing really changed , `` Ms. Rell , a Republican who swept 63 percent of the vote in November , said in a recent interview at the Capitol here .", "`` But it 's only in the last week or so where it 's hit me -- this is different .", "The expectations and the new session gives you pause to say all right , this is what we are going to do .", "This is my agenda . ``", "Extremely popular and perceived as honest and calm , Ms. Rell is just now beginning to form her own cabinet , dismissing nearly all of Mr. Rowland 's top assistants .", "After two years largely characterized by initiatives to `` right the ship '' in a state crippled by corrupt politicians , Ms. Rell , who will be sworn in on Wednesday , now must attack more complex problems , such as soaring health care and energy costs .", "And though she was one of only a handful of Republicans in the Northeast who managed to retain a top office this election season , Ms. Rell faces a veto-proof Democratic majority in both chambers of the General Assembly .", "The dominance of Democrats could certainly hobble Ms. Rell 's plans to cut taxes .", "Already , she has compromised one of her signature ideas : after legislative leaders indicated they would block her efforts to cut the car tax , Ms. Rell dropped the subject , saying she would choose her battles carefully .", "`` I think we are approaching a time where we have to come up with some really creative solutions , and if the governor ca n't get there it is possible the honeymoon would be over , `` said Donald E . Williams Jr . , leader of the Democrats in the State Senate .", "But for all the chatter in political circles of their party 's new power , Mr.
Williams and other Democrats speak of the Republican governor in tones usually reserved for one of their own .", "`` I do n't think anyone is looking for unnecessary fights , `` Senator Williams said .", "`` The governor has sealed her popularity among voters and made people feel extremely comfortable . ''", "Confrontation and flashy speech are not Ms. Rell 's style .", "Elected to the State House of Representatives in 1984 , she earned a reputation for dealing with mundane details , picking apart legislation and knowing precisely where there was room for compromise .", "That approach helped lead to a sense of Ms. Rell , 60 , as a maternal figure , something that has stuck through her term in executive office .", "Some Democrats remark privately that when the governor admonishes them to kill or push through a bill , it is like being scolded by their mother .", "Ms. Rell cultivates that image : In a season of brutal campaign attack advertisements , one of her commercials featured the governor sitting in a chair with her grandson .", "A public service announcement encouraging drivers not to drink and drive has Ms. Rell on a playground surrounded by children .", "And when she released a health care proposal last week , half a dozen children sat on either side waving to the television cameras .", "Peering out at the audience over her bifocals , Ms. Rell resembled a teacher reading the lesson of the day .", "Ms. Rell and her supporters have always been happy to acknowledge that she was an unlikely candidate to become the state 's top political figure .", "A former homemaker who dropped out of college , Ms. Rell earned her political education on the job , working her way up to become minority leader of the House before being tapped by Mr. Rowland to run for lieutenant governor .", "The two were never particularly close , and Ms. Rell maintains that she had no clue about Mr. Rowland 's wrongdoings , something that infuriates opponents trying to tie her to the corrupt administration .", "Such criticisms have somehow yet to stick .", "Even when Ms. Rell 's chief of staff became embroiled in controversy for distributing invitations to a political fund-raiser on state time last year , voters consistently told pollsters they trusted the Rell administration .", "Some people in both parties complain that Governor Rell has done little to articulate or push her own agenda , instead favoring policies typically backed by Democrats such as stem-cell research or civil unions for same-sex couples .", "Others see that as her strength .", "`` You hear Democrats claim : ' She has n't done anything , she has n't done anything , ' `` said Chris Barnes , a pollster at the University of Connecticut .", "`` Well , the last governor did something and the something was lining his pockets .", "People are quite happy this governor is not doing that something . ``", "Instead , Ms. Rell has wisely been `` adjusting the course rather than setting it , '' Mr. Barnes said .", "`` People were not begging for grandiose plans .", "They just want the government not to mess things up . ``", "Such a modest goal aptly sums up the governing philosophy of Ms. Rell , who describes herself as a steadfast moderate who believes in `` less government , less intrusive government , and less spending whenever possible . ''", "Her biggest fights with the Legislature will likely be over the budget , which Ms. Rell will present in February , and taxes .", "Also , Democrats plan to make universal health care a top priority , while Ms.
Rell last week offered a far more limited proposal than what Democrats envision .", "Ms. Rell opened the health care bidding last week with a plan to let uninsured adult residents buy into a pool with a premium of $ 250 a month .", "Emphasizing that she would not support a `` big government '' subsidy , Ms. Rell said the cost of the plan to the state would be minimal .", "After decades in public office , Ms. Rell can still appear wary of the spotlight , blushing at the slightest embarrassment or standing at the back of a crowd -- and she guards her privacy .", "When she had a mastectomy to remove breast cancer two days after Christmas in 2004 , her assistants did not announce the operation until she was already at the hospital and asked well-wishers not to send flowers .", "There are signs of increasing comfort with the pageantry .", "On Wednesday , Ms. Rell will lead the first inaugural parade and ball the state has had in more than a decade -LRB- though she turned down a society reporter 's request to photograph her off-white ball gown last week -RRB- .", "`` I think you 're going to see more and more of her , `` said Robert Farr , an ally of Ms. Rell who recently retired from the General Assembly .", "`` A year ago she was the governor by accident , but she can clearly be the real thing now .", "`` She is not going to be one rushing for press conferences , '' Mr. Farr added .", "`` She is not one to go out and state a political position every day .", "But when she stakes something out , she is going to present something that can give measurable results . ``", "For her part , Ms. Rell said she is still drawing inspiration from the former president she looked to the first time she took the oath of office .", "`` Gerald Ford was not flashy , '' she said , her voice softening .", "`` He wanted to talk to people .", "And I know that 's what I want .", "I want people to feel comfortable .", "I want them to see a governor up close . `` ."], "summary": ["Gov M Jodi Rell is set to begin full term at helm of Connecticut after taking two years to right ship .", "Took office in 2004 following resignation of her predecessor John G Rowland .", "Plans to use full term to attack serious problems , such as soaring health care and energy costs .", "Dominance of Democrats in State Legislature will hinder her plans to cut taxes .", "Photo ."], "publication": "nyt50", "label": [13, 15, 2], "tag": ["Health", "New York and Region"]} -{"id": "1815843", "text": ["In New York City , rapes , robberies and assaults , among other crimes , continued to decline last year , prompting the Police Department 's top official to herald 2006 as a very good year .", "Homicides , however , climbed 10 percent in the city , reversing a much-hailed decrease .", "`` We 'd like to see no homicides .", "The reality is we 're going to have them , `` said Police Commissioner Raymond W . Kelly .", "`` I think this is a very good year . ''", "He called the increase in killings in 2006 all but negligible compared with 1990 , a year with one of the highest homicide rates in recent history .", "That year , crack-fueled violence soared , and 2,262 people were killed .", "Last year there were 579 killings citywide as of Dec . 
24 , an increase of 52 homicides over the same period in 2005 .", "Yet overall crime last year was down 5 percent .", "The number of reported rapes declined by 7.4 percent to 1,486 , subway crime plunged 13 percent and auto theft fell 11.4 percent .", "The police said the year 's jump in homicides was rooted largely in an unusually high number of `` reclassified '' deaths , deaths linked to injuries incurred in months or years past .", "There were 38 reclassified homicides in New York last year , compared with 21 in 2005 .", "New York 's overall fall in crime also contrasts with an increase nationwide .", "According to a report by the F.B.I. , violent crime across the country rose 3.7 percent during the first half of the year .", "Dallas remained the country 's most violent city , with 3,985 crimes per 100,000 people , according to the midyear report , while New York ranked 10th with 1,187 per 100,000 .", "Myriad factors account for New York 's continuing decline in crime overall , the police and criminologists say .", "They cited more effective policing , shifting drug patterns and the lowest unemployment rate in 30 years .", "Yet what is puzzling , one expert said , is that overall crime is dropping even as New York becomes an increasingly polarized city , with haves and have-nots often living side by side in luxury condominiums and public housing .", "`` Within a few blocks , people are living worlds apart , '' said Andrew Karmen , a sociology professor at John Jay College of Criminal Justice and the author of `` New York Murder Mystery : The True Story Behind the Crime Crash of the 1990s . ''", "`` In theory , that should make the poor more dissatisfied and drive people to commit crimes , '' he said .", "`` But that does n't seem to be happening in New York . ``", "One possible explanation , Dr. Karmen said , is that the city is largely populated by immigrants , many of whom are driven by a determination to succeed .", "`` I think they still maintain a positive outlook and faith in the American dream , '' he said .", "`` But if it does n't deliver , attitudes could change . ``", "It has proven difficult to root out violent crime in the city 's toughest corners .", "One of the city 's most perilous neighborhoods is in the 75th Precinct in Brooklyn , which includes East New York .", "There the number of slayings last year was virtually unchanged from 2005 at 28 , and 3,239 crimes were reported .", "Public housing projects have a disproportionate number of crimes .", "While 5 percent of New York 's eight million residents live in public housing , Commissioner Kelly said , 16 percent of the city 's homicides take place there .", "The Police Department plans to tackle that seemingly intractable problem by redeploying personnel from other areas and opening police substations in the most troubled housing complexes , Commissioner Kelly said .", "Both homicide victims and suspects tend to have links to crime already , he said .", "Of those arrested last year in homicides , 95 percent had criminal histories .", "75 percent of the people killed did .", "Such figures point to the department 's need to continue expanding its homicide , shooting and crime databases , Mr.
Kelly said .", "`` The more information we have , the greater the potential we have to prevent crimes , '' he said .", "New York 's rebirth as a safer-than-average large city since the 1990s has coincided with an increase in tourism here .", "According to city officials , about 44 million people visited last year on business or pleasure .", "`` One overarching reason why people are coming here , in a city that was attacked five years ago , it 's because they have a sense of comfort as far as security is concerned , `` Mr. Kelly said .", "The department is continuing to pour more resources into community policing , forging bonds between its officials and local leaders , especially those from new immigrant communities .", "Yet in neighborhoods where homicide rates climbed , residents seem divided about whether this recent and heightened focus was improving their lives .", "Allah B , the director of the Allah in Mecca Youth Center in Harlem , commended officers in its precinct , the 28th , for holding local forums and listening to community concerns .", "Even though slayings in the precinct more than doubled , to 11 , last year , he said the increased connection between residents and the police fostered a greater sense of ease .", "Yet a longtime resident of the St . Nicholas Homes in Harlem said police efforts to rout out criminals were having a divisive effect in her neighborhood .", "`` They caused a lot of trouble trying to play one person against the other , '' said the woman , who would give only her first name , Keisha .", "She said people in the neighborhood also felt harassed and persecuted by the police , and that such impressions heightened the tension and stress in the community .", "Still , despite a slight increase in the number of homicides in her neighborhood , she said the streets felt far less violent than in previous years .", "The sense that New Yorkers are increasingly inhabiting two different realities seems particularly strong in places like Fort Greene , Brooklyn , home to a thriving cafe scene and crime-plagued public housing complexes .", "Eleven homicides were recorded in the neighborhood last year , compared with none in 2005 .", "Strolling along a stretch of DeKalb Avenue by Fort Greene Park late last week , Cheryl Pickett , 36 , said she had no idea that murders had risen so sharply in the area .", "Ms. Pickett , who has lived in Fort Greene for five years , said her perception of the neighborhood had not changed .", "She still thinks of it as a safe , child-friendly place with charming shops and bars .", "`` When things happen , it 's really surprising , `` Ms. Pickett said .", "`` This year seems no different than last . '' ."], "summary": ["Murders in New York City rose by 10 percent in 2006 over 2005 to 579 , even as overall crime continued to decline .", "Homicides were well below 2,262 in 1990 .", "Rise reversed declining trend .", "Overall crime fell 5 percent in 2006 compared to 2005 .", "Reported rapes were down 7.4 to 1,486 .", "Subway crime plunged 13 percent and auto crime fell 11.4 percent ."], "publication": "nyt50", "label": [9, 11, 8], "tag": ["New York and Region"]} -{"id": "1815882", "text": ["Standing before a row of enlarged photographic slides of deadly viruses like Ebola and Hantavirus that decorate the new lunchroom at his office , Dr. Sherif Zaki professed himself to be uplifted .", "`` I ca n't tell you how much this has done for our morale , `` Dr. 
Zaki said .", "As a leader of an 11-year-old program at the Centers for Disease Control and Prevention here that tries to ferret out the cause of 700 or so unexplained deaths across the United States each year , Dr. Zaki spends his days on matters that could test the morale of any scientist : a boy in Mississippi who died 17 days after developing a fever and headache .", "A football player at the University of Missouri who died hours after collapsing on the field .", "A skateboarder who scraped her knee and died a few days later .", "These are among the mysteries for which Dr. Zaki and his colleagues at the Unexplained Deaths Project , or UNEX , serve as the medical detectives of last resort .", "Now , after years of toiling in the subbasement of a 1950s-era building on the C.D.C. ` s campus , Dr. Zaki 's team has moved to a futuristic-looking building nearby where the window shades automatically rise or fall depending on the amount of sunshine , a transmission electron microscope stands ready to magnify bacteria and viruses up to 740,000 times , and images of deadly pathogens pass for d\u00e9cor .", "Started in 1995 as an informal collaboration among a handful of C.D.C. scientists determined to identify outbreaks of new infectious diseases before they reached epidemic proportions , UNEX distinguished itself as an interdisciplinary group that brought together the expertise of virologists , bacteriologists , epidemiologists , veterinarians and clinicians .", "As enthusiasm for the program grew , four affiliates in state health departments opened in California , Connecticut , Minnesota and Oregon .", "Despite their success and the continuing threat of emerging infections , the state programs recently lost their financing , and enthusiasm for UNEX even within the C.D.C. was dwindling , to the point where its very future appeared to be in doubt until late December , when another year 's financing was finally approved .", "The problem , Dr. Zaki said , is that the program 's interdisciplinary nature clashes with the trend , at C.D.C. and in science generally , toward specialization .", "In fact , each researcher involved with UNEX has another position within one of C.D.C. ` s specialized departments .", "Dr. Zaki , for instance , is chief of infectious disease pathology activity .", "The hundreds of cases referred to UNEX each year by state health authorities , medical examiners and the occasional private physician represent a fraction of the true number of unexplained deaths across the country .", "Dr. Zaki estimates that there are `` tens of thousands '' of such cases each year .", "Most are presumed to be caused by infectious agents , usually carried by animals or insects , which is why UNEX is housed in the C.D.C. ` s National Center for Zoonotic , Vector-Borne and Enteric Diseases .", "`` There are so many cases where we say , ' We know this is infectious , ' where I 'd bet you anything the death was caused by a virus we ca n't find , `` Dr. 
Zaki said .", "In fact , UNEX is able to find the particular killer pathogen in only about 15 percent of the cases referred to the office , he said .", "On July 12 , 2005 , for instance , 19-year-old Aaron O'Neal , a reserve linebacker for the University of Missouri Tigers , collapsed on the field during a preseason workout and died soon after at a hospital .", "An autopsy found that the lining of his brain had been inflamed , a possible sign of viral meningitis .", "But even when UNEX received brain tissue samples , no virus or any other clear sign of what caused the inflammation could be detected .", "Aside from storing the remaining tissue sample on the chance that a new test might one day solve the mystery , the case was closed .", "Still , UNEX collars its share of microbial culprits .", "On Sept . 13 , 2005 , a 10-year-old Mississippi boy went to his pediatrician with a fever , headache and an itchy scalp .", "Within days he became so disoriented and agitated that he bit a family member .", "Admitted to the hospital , he grew sicker , but all tests came back negative .", "After he died on Sept . 27 , it took UNEX just eight days to detect the rabies virus in serum samples .", "They later learned , by speaking with friends and family , that dead bats had been previously found inside the boy 's home and garage , and that he had removed a live bat from his bedroom and released it outdoors in spring 2005 .", "Sometimes , finding the cause of a death means discovering a pathogen previously unknown to science .", "During the 2003 outbreak of Severe Acute Respiratory Syndrome , or SARS , it was a colleague of Dr. Zaki 's , Cynthia Goldsmith , who first identified the SARS coronavirus using an older-generation electron microscope at their old lab .", "`` It took my breath away when I first recognized it , '' Ms. Goldsmith said .", "Then there are the times when a probable cause of death is suspected , but local health officials are reluctant to perform an autopsy .", "On Oct . 5 , 2001 , minutes after a Florida photo retoucher for The National Enquirer died of what appeared to be inhalation anthrax , a doctor who treated the man called Dr. Zaki .", "`` The medical examiner was n't inclined to do the autopsy , `` Dr. Zaki recalled .", "`` Finally they said , ' O.K. , we 'll do it , but only if you come down . '", "`` When Dr. Zaki and a small group of colleagues flew to Florida the next morning on a private chartered jet , they learned that some of the staff at the Palm Beach County medical examiner 's office had had second thoughts about the autopsy .", "`` When we got there , '' Dr. Zaki said , `` the people in the facility told us , ' If you do that autopsy , we 're all going to leave and never come back .", "You 're going to leave spores , you 're going to contaminate the facility . '", "`` Dr. Zaki 's group explained that the spores become dangerous only if allowed to dry out -- something they would prevent through meticulous cleaning -- and that antibiotics given as prophylaxis would eliminate the slim remaining chance of infection .", "Even so , they performed the autopsy in a small room used for storage .", "`` We left that room much cleaner than when we found it , '' Dr. Zaki said .", "His own laboratory has an unequalled collection of automated testing equipment and a trove of tissue samples dating back decades .", "`` I do n't think there 's any laboratory like this in the world , `` Dr. 
Zaki said .", "`` And there 's nowhere else where people from so many different fields are together in such close proximity .", "It makes one very proud . ``", "Egyptian by birth , Dr. Zaki graduated third in his class of 1,200 at the University of Alexandria medical school before coming to the United States to study pathology in 1983 .", "Asked why he chose pathology , he replied : `` My mom asked me the same question .", "She said , ` Be a real doctor . '", "But pathology explains how and why a disease happens . ``", "Yet UNEX , as sophisticated as its equipment may be , is still routinely outfoxed by nature 's most archaic life forms , the viruses and bacteria whose roles in human disease , and hiding places in blood and tissue samples , continue to defy detection .", "`` We think we know everything , '' Dr. Zaki said , `` but we do n't know the tip of the iceberg .", "`` There are so many viruses and bacteria we do n't know anything about , that we do n't have tests for , `` he said .", "`` A hundred years from now , people will not believe the number of pathogens we did n't even know existed . `` ."], "summary": ["Dr Sherif Zaki , leader of 11-year-old Centers for Disease Control and Prevention Unexplained Deaths Project , describes challenges of being medical sleuth .", "Says there are about 700 unexplained deaths in US every year and it is his mission of project to try and find out why each person died .", "Program files tissue samples so they can be tested again if new information emerges .", "Case examples described .", "Photos ."], "publication": "nyt50", "label": [2, 5, 12], "tag": ["Health"]} -{"id": "1815893", "text": ["People in the United States have gotten used to the repulsive fact that raw chicken , meat and eggs are often contaminated with dangerous bacteria .", "Scrub the cutting board , we are warned , do n't nibble the cookie dough , do n't eat burgers rare .", "In other words , handle meat like a biohazard -- and then eat it .", "But until recently , getting sick from salad was something that most Americans did n't even think about unless they were traveling to a poor country .", "At home , fruits and vegetables have been regarded as clean and safe for as long as most people can remember .", "Lately , though , produce has caused a disturbing number of disease outbreaks .", "Just since September , bacteria-tainted tomatoes , spinach and lettuce have made hundreds of people sick , and killed three .", "There have been 20 serious outbreaks in the past decade or so , and many have come from crops grown in California , not from imports .", "Fruit juices , alfalfa sprouts and almonds have also been involved -- all of them supposedly health foods , like salad , the things we feel most virtuous about eating .", "The known outbreaks are just the tip of the iceberg , health officials say .", "Far more illness is never reported .", "Most people do n't call the health department about a few days of gut trouble .", "The government estimates that over all , food-borne microbes -- not just the ones on produce -- make 76 million people a year sick , put 325,000 in the hospital and kill 5,000 .", "In a modern country , a rise in disease caused by tainted food seems like a giant step backward in public health .", "But there has n't been much public outrage or even disgust at the notion of filth seeping into the food supply .", "Among the nastiest bacteria is E . 
coli 0157 : H7 , which makes a powerful toxin that can cause severe illness and sometimes even kidney failure .", "This is the germ found on spinach a few months ago , and more recently on iceberg lettuce served at Taco Bell restaurants .", "It comes from cow feces and was first identified in 1982 .", "Feeding the animals grain instead of hay seems to promote its growth .", "The strain is harmless to cows , but in people it is so dangerous , according to the Food and Drug Administration , that swallowing as few as 10 organisms may be enough to cause an infection .", "About 73,000 people a year get sick from this type of bacteria , and 61 die , the Centers for Disease Control and Prevention reports .", "`` It 's gotten more attention this fall , but we 've seen these outbreaks due to lettuce and other leafy greens for a long time , `` said Dr. Christopher Braden , chief of the outbreak response and surveillance team for enteric diseases at the disease centers .", "`` We are seeing this on an ongoing basis .", "That 's not an acceptable outcome .", "We need to find ways to interrupt that contamination . ``", "Last August , the F.D.A. announced a `` lettuce safety initiative '' in response to recurring E . coli outbreaks .", "It began with last fall 's lettuce harvest and included visits by inspectors to farms and cooling and packing facilities .", "But the spinach and Taco Bell outbreaks happened anyway .", "There are several ways that bacteria can contaminate lettuce .", "Water is an obvious route , whether from unsanitary irrigation or spraying , or from flooding .", "Animals can carry bacteria onto farmland , which is apparently how the spinach outbreak occurred -- feral pigs wandered from cow pastures to spinach fields , taking E . coli with them .", "Sick workers who handle produce can also contaminate it , and so can dust blowing off pastures .", "One bad batch can spoil others when they are mixed for chopping and bagging .", "Scientists think most contamination lies on the surface of crops , but studies have shown that it is possible for bacteria to be taken up through root systems and actually wind up inside the plants , where no amount of washing could get rid of it .", "In any case , E . coli 0157 : H7 tends to be sticky and is difficult or impossible to wash off , even when it 's only on the surface of produce .", "Over the past 30 years , diseases linked to produce have increased , Dr. Braden said .", "Increased ability to detect outbreaks may explain part of the increase , but not all of it , he added .", "`` We 're convinced it 's real in large part , `` he said .", "`` We 're seeing an increased number of outbreaks , an increased number of cases in outbreaks , and an increase in the number of types of produce involved . ``", "The reason is not known for sure .", "But , Dr. Braden said : `` The way produce is farmed and processed has changed .", "It 's become more centralized , and you have these huge processors and distributors that produce tens of thousands of pounds of a particular produce in a particular day .", "If something goes wrong with that produce you 've got a big problem , whereas with small farmers , if there is a problem it 's much more limited . ``", "In addition , he said , bagged and prewashed produce did n't exist 25 years ago , and people today eat more raw vegetables than in the past .", "`` There 's probably more susceptible people eating those things , `` Dr. 
Braden said .", "`` We have an aging population , and more people with chronic medical conditions that might make them more susceptible . ''", "The F.D.A. is responsible for produce safety , while the Agriculture Department oversees meat , poultry and eggs .", "Some politicians have urged that a single new agency be formed to take charge of all food safety , but even if that is done , it still may not answer basic questions about how to clean up produce .", "Dr. David W . K . Acheson , chief medical officer at the center for food safety and applied nutrition at the F.D.A. , said the agency was trying to find ways to prevent outbreaks .", "But , Dr. Acheson said , it has nowhere near the resources to inspect the hundreds of thousands of facilities that handle fresh produce in the United States .", "The Agriculture Department has far more inspectors and is required by law to have one in every major meat processing plant .", "One question the drug agency is trying to figure out , he said , is how close is too close when it comes to cattle and produce .", "`` We know that 0157 is a natural contaminant of cow feces , '' Dr. Acheson said .", "`` Cow feces , if it gets on fresh produce , is not good .", "Should there be some limitation as to how close cattle should be to a leafy-greens field .", "Fifty feet , 5 miles , 50 miles .", "What 's the science .", "`` Fifty feet may be plenty if the cows are downhill and downstream of the farm , he said -- but if it 's the other way around , five miles may not be enough .", "`` What 's really going to work .", "`` Dr. Acheson asked .", "`` At this point , there are a lot of unknowns . ''", "Another approach , instead of trying to prevent contamination , is to get rid of it after the fact .", "Nuts can be heat-treated and juices can be pasteurized .", "Some experts have recommended irradiating lettuce .", "`` People in the agency are looking at the impact of that , '' Dr. Acheson said .", "`` There are two pieces : does it work , and what dose do you need .", "Then , what 's the impact of that dose on the quality of the product .", "You could irradiate anything and sterilize it , but you may end up with mush .", "It 's not quite that easy . ``", "Dr. Braden said that so far , scientists had not found any way to prevent outbreaks .", "`` Not that people are n't working on it hard , `` he said , adding that the food industry itself is under pressure .", "`` There may be some self-regulation from the industry , the growers themselves , '' he said .", "`` They have to do something themselves , or else they 're going to lose their market . 
``", "SECOND OPINION ."], "summary": ["Centers for Disease Control and Prevention reports that E coli strain recently found on spinach from California farm is virulent enough that swallowing only 10 organisms could be enough to cause infection .", "Increased number of health alerts and outbreaks related to fruits and vegetables has generated many new guidelines , such as Food and Drug Administration 's lettuce safety initiative , but researchers point to variety of causes and methods of contamination and say they are difficult to track and stop .", "Some experts hold that bagged and prewashed produce is fairly new to marketplace and increased consumption of raw produce make outbreaks more widespread than anytime in past .", "Many researchers are working on prevention measures , but few changes have been implemented .", "Drawings ."], "publication": "nyt50", "label": [19, 20, 43, 63, 25, 4, 60, 56], "tag": ["Health"]} -{"id": "1815895", "text": ["The day 's coppery last light reflects off the backs of sea bass swimming in fish ponds lined in neat rows on this desert farm .", "Fish farming in the desert may at first sound like an anomaly , but in Israel over the last decade a scientific hunch has turned into a bustling business .", "Scientists here say they realized they were on to something when they found that brackish water drilled from underground desert aquifers hundreds of feet deep could be used to raise warm-water fish .", "The geothermal water , less than one-tenth as saline as sea water , free of pollutants and a toasty 98 degrees on average , proved an ideal match .", "`` It was not simple to convince people that growing fish in the desert makes sense , '' said Samuel Appelbaum , a professor and fish biologist at the Jacob Blaustein Institutes for Desert Research at the Sede Boqer campus of Ben-Gurion University of the Negev .", "`` It is important to stop with the reputation that arid land is nonfertile , useless land , '' said Professor Appelbaum , who pioneered the concept of desert aquaculture in Israel in the late 1980s .", "`` We should consider arid land where subsurface water exists as land that has great opportunities , especially in food production because of the low level of competition on the land itself and because it gives opportunities to its inhabitants . 
''", "The next step in this country , where water is scarce and expensive , was to show farmers that they could later use the water in which the fish are raised to irrigate their crops in a system called double usage .", "The organic waste produced by the cultured fish makes the water especially useful , because it acts as fertilizer for the crops .", "Fields watered by brackish water dot Israel 's Negev and Arava Deserts in the south of the country , where they spread out like green blankets against a landscape of sand dunes and rocky outcrops .", "At Kibbutz Mashabbe Sade in the Negev , the recycled water from the fish ponds is used to irrigate acres of olive and jojoba groves .", "Elsewhere it is also used for irrigating date palms and alfalfa .", "The chain of multiple users for the water is potentially a model that can be copied , especially in arid third world countries where farmers struggle to produce crops , and Israeli scientists have recently been peddling their ideas abroad .", "Dry lands cover about 40 percent of the planet , and the people who live on them are often among the poorest in the world .", "Scientists are working to share the desert aquaculture technology they fine-tuned here with Tanzania , India , Australia and China , among others .", "-LRB- Similar methods of fish farming are also being used in the Sonoran Desert of Arizona . -RRB-", "`` Each farm could run itself , which is important in the developing world , '' said Alon Tal , a leading Israeli environmental activist who recently organized a conference on desertification , with the United Nations Convention to Combat Desertification and Ben-Gurion University , that brought policy makers and scientists from 30 countries to Israel .", "`` A whole village could adopt such a system , '' Dr. Tal added .", "At the conference , Gregoire de Kalbermatten , deputy secretary general of the antidesertification group at the United Nations , said , `` We need to learn from the resilience of Israel in developing dry lands . ''", "Israel , long heralded for its agricultural success in the desert through innovative technologies like drip irrigation , has found ways to use low-quality water and what is considered terrible soil to grow produce like sweet cherry tomatoes , peppers , asparagus and melon , marketing much of it abroad to Europe , especially during winter .", "`` Most development is still driven by the Zionist ethos that the desert was some mistake of God that we have to correct and make the desert bloom , '' said Uriel Safriel , an ecology professor at the Hebrew University of Jerusalem .", "The history of fish-farming in nondesert areas here , mostly in the Galilee region near the sea , dates back to the late 1920s , before Israel was established as a state .", "At the time , the country was extremely poor and meat was considered a luxury .", "But fish was a cheap food source , so fish farms were set up on several kibbutzim in the Galilee .", "The early Jewish farmers were mostly Eastern European , and , Professor Safriel said , `` they only knew gefilte fish , so they grew carp . 
''", "Eventually they expanded to other varieties of fish including tilapia , striped bass and mullet , as well as ornamental fish .", "The past decade has seen the establishment of about 15 fish farms producing both edible and ornamental fish in the Negev and Arava Deserts .", "Fish farming , meanwhile , has become more lucrative worldwide as people seek more fish in their diet for better health , and ocean fisheries increasingly are being depleted .", "The practice is not without critics , who say it can harm the environment and the fish .", "In Israel there was a decision by the government to stop fish farming in the Red Sea near the southern city of Eilat by 2008 because it was deemed damaging to nearby coral reefs .", "Some also argue that the industry is not sustainable in the long term because most of the fish that are farmed are carnivorous and must be fed a protein-rich diet of other fish , usually caught in the wild .", "Another criticism is that large numbers of fish are kept in relatively small areas , leading to a higher risk of disease .", "Professor Appelbaum said the controversy surrounding fish farming in ocean areas does not apply to desert aquaculture , which is in an isolated , controlled area , with much less competition for resources .", "On Kibbutz Mashabbe Sade , Amit Ziv runs a fish farm , raising about 15,000 fish at a time .", "Up to 500,000 cubic meters of water from the fish ponds is recycled for irrigation every year .", "`` It 's a matter of better efficiency , `` said Mr. Ziv , who pays about 24 cents a cubic meter for water , a government-subsidized rate .", "`` In an area where there is lack of water , being able to use it twice over is a huge advantage . ''", "Mr. Ziv , 39 , said there are benefits to raising fish in the desert : the dryness translates to fewer insects and less mold and disease .", "He also said the warm air makes it easier to keep the pools temperate .", "He remembers the stories his parents , who , along with other founders of the kibbutz in 1948 , would tell of having to travel long days to get to the fields of the communal farm .", "They then tilled closer to central Israel , because at the time the local arid ground was thought to be impossible to farm .", "`` Now , '' he said , pointing toward the desert-grown crops , `` the fields are all here . ''", "Mr. Ziv and his dog turned back toward the fish ponds stretched out under green plastic hothouse canopies .", "It was time to prepare for a shipment of hatchlings that was to arrive the next day ."], "summary": ["Desert aquaculture , which was first used in Israel , has become increasingly popular way to farm crops and fish in otherwise arid and unfertile ground .", "Professor Samuel Appelbaum , Jacob Blaustein Institutes for Desert Research fish biologist and pioneer of process that pumps subsurface water up to tanks for use in fish ponds and irrigation , describes how aquaculture can transform communities .", "Double usage of water is ideal as farmers use water from fish farms , which is high in organic matter , to irrigate and fertilize fields .", "Kibbutz Mashabbe Sade , Israel , aquaculture discusssed .", "Map . Photos ."], "publication": "nyt50", "label": [4, 10, 40], "tag": ["Science", "Health"]} -{"id": "1815906", "text": ["A year after Hollywood rediscovered weighty political and social issues in movies like `` Syriana , '' `` Crash '' and `` Brokeback Mountain , '' the box office story of 2006 was that moviegoers finally said , `` Enough . 
''", "They showed no appetite for a critique of their eating habits in `` Fast Food Nation . ''", "They were n't ready to fly along on `` United 93 , '' no matter how skilled its expos\u00e9 of homeland insecurity .", "They did n't care to see combat or suffer its after-effects in `` Flags of Our Fathers . ''", "And even Leonardo DiCaprio could n't interest them in touring the ravaged Africa of `` Blood Diamond . ''", "While Al Gore 's prophecies in `` An Inconvenient Truth '' produced a respectable $ 24 million for Paramount , it was the message-movie exception that proved the rule .", "The big money was to be made making people laugh , cry and squeeze their dates ' arms -- not think .", "`` What worked was classic , get-away-from-it-all entertainment , '' said Rob Moore , Paramount 's marketing and distribution chief .", "`` What did n't was things that were more challenging and esoteric . ``", "Comedy , animation and adventure , all with a PG-13 rating or tamer -- and for young adults , R-rated horror flicks -- were the escapist recipe for success .", "Reminding moviegoers of what was on the news , and in an election year at that , only turned them off .", "-LRB- Unless it was on the news nine years ago , as in `` The Queen . '' -RRB-", "While Disney 's `` Pirates of the Caribbean : Dead Man 's Chest `` set a new opening-weekend record and topped the box office tables with $ 423 million , the winner among studios was Sony Pictures , which said it would end the year with nearly $ 1.7 billion domestically -- besting its own industry record -- and $ 3.3 billion overseas .", "In an off year for its Spider-Man franchise , Sony managed to win a record 13 weekends , led by Adam Sandler -LRB- `` Click '' -RRB- .", "Will Ferrell -LRB- `` Talladega Nights : The Ballad of Ricky Bobby '' -RRB- .", "An animated hit -LRB- `` Open Season '' -RRB- .", "James Bond -LRB- `` Casino Royale , '' which has grossed $ 155 million , a franchise record -RRB- .", "And Will Smith -LRB- `` The Pursuit of Happyness '' -RRB- .", "Mr. Smith 's film broke $ 100 million , and he appears to have bolstered his stature as Hollywood 's man who can do no wrong , a bankable star in dramatic , romantic , comedic or action roles .", "-LRB- When actors play against type , however , it can be deadly , as Russell Crowe showed in Ridley Scott 's film `` A Good Year , '' for 20th Century Fox .", "Coming after his nose dive in `` Cinderella Man , '' Mr. Crowe 's belly-flop raised questions about his status as a top box office draw . -RRB-", "Then there was what Jeff Blake , Sony 's marketing and distribution czar , called `` that rare adult blockbuster , '' Ron Howard 's `` Da Vinci Code . ''", "Fans of the book ignored the film 's reviews , and it grossed $ 218 million .", "`` Really , we brought the adults back to the movies this year , which is part of the reason why we 're doing so much better , `` Mr. Blake said of the industry , tipping his hat to Warner Brothers ' `` Departed '' and 20th Century Fox 's `` Devil Wears Prada . ''", "Sony also got a boost from its Screen Gems unit .", "Four of its horror films opened at No . 
1 .", "Typical was `` When a Stranger Calls , '' made for just $ 15 million , which grossed $ 48 million domestically .", "Over all , the top tier of the box office held its usual contours : 5 blockbusters exceeded $ 200 million , and 12 fell in the $ 100 million to $ 200 million zone .", "In addition , 39 exceeded $ 50 million , 7 more than in 2005 .", "Total domestic box office reached $ 9.4 billion , a shade shy of the 2004 record but 5 percent more than in 2005 , said Paul Dergarabedian , president of Media by Numbers , which tracks box office results .", "Attendance was up 3.3 percent .", "No . 2 Disney had its second-best year ever worldwide , with more than $ 3.27 billion internationally , and exceeded $ 1 billion domestically for the 10th time , thanks largely to `` Pirates '' and the year 's No . 2 movie , Pixar 's `` Cars , '' with $ 244 million .", "Mark Zoradi , who runs marketing and distribution for Walt Disney Motion Pictures Group , said basic entertainment had proved to be the cure for the industry 's woes .", "`` People love to go to the movies to laugh , to feel emotion and cry , '' he said .", "`` That 's why ` Cars ' is so big .", "It was n't a straight-out slapstick comedy .", "At its core , it was an emotional movie with comedy in it . ``", "The slate of movies at year 's end was much stronger than on the same weekend a year earlier : up 10 percent in the aggregate , and 12 percent when comparing just the top 12 grosses .", "Fox 's `` Night at the Museum , '' the Ben Stiller comedy , led the field , raking in $ 38 million for a total so far of $ 117 million .", "Among animated films , Fox 's `` Ice Age : The Meltdown '' came in at No . 2 , nearly hitting $ 200 million .", "Bruce Snyder , president for domestic distribution , said Fox had been wise to get its movie into theaters well before the deluge of more than a dozen other computer-animated movies about animals .", "One that suffered was Warner 's `` Ant Bully , '' which was sandwiched between Sony 's `` Monster House '' and Paramount 's `` Barnyard '' and came away with just $ 28 million in sales .", "Paramount , too , might have regretted the title of its `` Flushed Away , '' which cost $ 150 million but grossed only $ 62 million .", "`` Happy Feet '' was a much-needed big hit for Warner , which had been less than overjoyed by the $ 200 million gross of `` Superman Returns . ''", "Despite the animation glut , the potential payoffs -- Paramount 's `` Over the Hedge '' grossed $ 155 million , and `` Happy Feet '' reached $ 176 million on Sunday -- are huge enough to make this a recurring phenomenon .", "For Fox 2005 was a strong year .", "`` X-Men : The Last Stand '' was the No . 3 movie , at $ 234 million , and Meryl Streep 's performance turned a formulaic comedy into a worldwide hit in `` Prada . ''", "Fox also had the year 's most original film , `` Borat : Cultural Learnings of America for Make Benefit Glorious Nation of Kazakhstan , '' which was made for less than $ 20 million and grossed more than $ 125 million .", "Among thought-provoking movies , `` Flags of Our Fathers '' showed how treacherous it can be to open an Oscar contender in September or October .", "While `` The Departed '' was a hit , `` All the King 's Men , `` `` Hollywoodland '' and `` Running With Scissors '' all bombed .", "Back-to-school audiences much preferred Lions Gate 's `` Saw III . 
''", "Warner missed , meanwhile , with `` Blood Diamond , '' a big action movie that also had something to say .", "Alan Horn , the studio 's president , said he thought the film had managed the feat , but audiences did n't , and the film has grossed $ 36 million so far .", "`` The audience is telling us that either they want lighter fare , and they just do n't want to go there and have a movie as thematically heavy as ` Blood Diamond ' is , or it 's the quality of the movie , `` he said .", "Audiences apparently were n't eager to read , either .", "With directors like Clint Eastwood , Alejandro Gonz\u00e1lez I\u00f1\u00e1rritu and Mel Gibson pushing for authenticity , the studios wound up releasing subtitled movies that were shot largely or entirely in Japanese , Moroccan , Mexican , Mayan and Russian .", "But even Brad Pitt could n't draw big crowds for `` Babel , '' and the Fox Searchlight release of the Russian blockbuster `` Night Watch '' proved that some cultural exchanges will remain a one-way street .", "It remains to be seen whether `` Letters From Iwo Jima , '' Mr. Eastwood 's critically adored Japanese companion piece to `` Flags , '' could lure sizable audiences once it expands from a micro-release .", "Fifth-place Paramount was cheered by the low-budget comedies `` Jackass Number Two '' and `` Nacho Libre , '' but was counting for redemption on `` Dreamgirls , '' which opened to packed houses on Christmas Day .", "In just 852 theaters , the movie grossed $ 38.5 million through New Year 's weekend , and the studio was counting on Oscar attention to make it a megahit .", "Universal , in a leadership transition , struggled to fill a gaping hole in its slate .", "The studio has n't released a movie that it made since August , and wo n't have one till April .", "-LRB- `` The Good Shepherd , '' its lone prestige release at year 's end , was financed by Morgan Creek . -RRB-", "Its biggest movie was `` The Break-Up , '' at $ 118 million , but more typical were duds like `` Miami Vice , '' `` Man of the Year , '' `` Let 's Go to Prison , `` and '' The Black Dahlia . ``", "New Line 's year , finally , was summed up by `` Snakes on a Plane , '' a trip you 'd want to forget , as long as you could survive it .", "The studio 's standout performers were `` Final Destination 3 '' and `` The Texas Chainsaw Massacre : The Beginning . ''", "New Line 's stab at exploiting the religious Christian market , `` The Nativity Story , '' cost $ 35 million , but grossed just $ 37 million .", "By comparison , a tiny proselytizing football movie called `` Facing the Giants , '' made for just $ 100,000 by a Southern Baptist congregation in Georgia , grossed $ 10 million in a limited release .", "Correction : January 4 , 2007 , Thursday An article in The Arts on Tuesday about the most popular movies of 2006 and others that did not do as well at the box office referred incorrectly to two languages spoken in `` Babel , '' one of the films with subtitles that did not draw big crowds .", "They are Spanish and Berber , not `` Mexican '' and `` Moroccan . 
'' ."], "summary": ["Moviegoers favored films that were escapist fantasies in 2006 .", "Sony Pictures was winner among studios , earning nearly $ 1.7 billion domestically and $ 3.3 billion overseas .", "Pirates of the Caribbean : Dead Man 's Chest set new opening-weekend record and topped box office tables with $ 423 million .", "Other high grossing films included Click , Talladega Nights : The Ballad of Ricky Bobby , Open Season , Casino Royale and The Pursuit of Happyness .", "Photos ."], "publication": "nyt50", "label": [12, 14, 17], "tag": ["Movies", "Arts"]} -{"id": "1815915", "text": ["Within days of convening , the new Congress will return to some of the biggest battles of the last decade as House Democrats try to rush through legislation requiring the government to negotiate lower drug prices for Medicare beneficiaries and overturning President Bush 's restrictions on embryonic stem cell research .", "The Medicare proposal highlights the profound differences between Democrats and Republicans over the future of the nation 's health care system , the proper role of government and the role of private markets in securing the best value for the huge sums spent on health care .", "State officials say they wish Congress would focus on a more immediate problem : money for the Children 's Health Insurance Program , which provides coverage for four million low-income children , is running out in more than a dozen states .", "Dr. Rhonda M . Medows , commissioner of the Georgia Department of Community Health , said , `` Our program will run out of federal money in March , and all 260,000 children in the program will lose their health care coverage if Congress fails to act . ''", "In debating the future of the children 's health program , which has broad bipartisan support , Congress will take up proposals to cover some of the 46 million people who have no health insurance .", "Dr. Mark B . McClellan , former administrator of the Centers for Medicare and Medicaid Services , said , `` Congress should consider expanding the Children 's Health Insurance Program to low-income adults . ``", "Many Democrats agree .", "But even modest proposals may collide with Democratic efforts to restore fiscal discipline and to reduce the federal budget deficit .", "Congress convenes on Thursday .", "Representative Nancy Pelosi , the California Democrat who is in line to be speaker , has said the House will , in its first 100 hours , vote on bills to authorize drug price negotiations under Medicare and to expand federal financing of stem cell research .", "Representative Tom Allen , Democrat of Maine , said he was `` giddy '' at the prospect of being able to legislate on health care after toiling for 10 years in the minority .", "`` People in Maine find it incomprehensible that the Medicare law has a provision that forbids negotiation of lower prices , '' said Mr. 
Allen , who introduced a bill to give beneficiaries access to drug discounts negotiated by the government in 1998 .", "By adding a drug benefit to Medicare in 2003 , Congress authorized the biggest expansion of the program since its creation in 1965 .", "The drug benefit , unlike most Medicare benefits , is delivered entirely by private insurers subsidized by the government .", "The insurers negotiate with drug manufacturers to obtain discounts , often in return for promoting the use of particular drugs .", "The 2003 law prohibits the government from interfering in those negotiations and stipulates that Medicare can not establish a list of preferred drugs .", "Most Democrats want to repeal the ban on price negotiations .", "Wendell E . Primus , an aide to Ms. Pelosi , said the Democratic proposal would require the secretary of health and human services to negotiate , but would not specify how .", "`` It will be very simple language , '' Mr. Primus said .", "`` We do not think that Congress needs to hammer out all the details .", "There are a lot of smart people in the administration , including the secretary , who can look at how we 're buying drugs -- the Medicaid program , the Department of Defense , vaccines , et cetera -- and figure out the best way of negotiating better prices with drug companies . ``", "Republicans said they welcomed the opportunity to debate the issue .", "The House Republican leader , Representative John A . Boehner of Ohio , said the Democrats ' proposal would `` take a wrecking ball to a popular program that has cut drug costs for consumers through competition . ''", "Such competition , he said , `` has kept prices lower than anyone expected . ''", "Administration officials suggested that Mr. Bush would veto a bill calling for price negotiations .", "Democrats could then exploit the issue in the 2008 campaign , as they did in the midterm elections .", "Under a bill introduced in 2005 by several House Democrats , Medicare would offer a government-run drug plan , in addition to all the plans offered by private insurance companies , and federal officials would negotiate with drug manufacturers on the prices of drugs covered by the government plan .", "But aides to Ms. Pelosi said House Democratic leaders now wanted to go further .", "Under their proposal , the government would negotiate on behalf of all people in Medicare drug plans , more than 22.5 million people .", "House Democrats assume that if the government negotiates lower drug prices , the savings will automatically be passed on to beneficiaries in the form of lower premiums .", "But they could not immediately say how they would guarantee that result .", "In the absence of detailed instructions from Congress , lower drug prices could mean lower costs and higher profits for the insurers that operate Medicare drug plans .", "Michael O . Leavitt , the secretary of health and human services , said he did not want the power to negotiate .", "`` I do n't believe I can do a better job than an efficient market , `` Mr. Leavitt said in an interview .", "Leslie V . Norwalk , acting administrator of the Centers for Medicare and Medicaid Services , said that under the Democrats ' proposal her agency would have to `` hire hundreds of people to negotiate prices for 4,500 different drugs . ''", "And Ms. Norwalk said the agency would be besieged by lobbyists seeking higher Medicare payments for specific drugs .", "That , she said , is `` how Washington really works . 
''", "Senator Max Baucus , the Montana Democrat who is in line to become chairman of the Finance Committee , helped shape the 2003 law .", "In March , he voted against a proposal to authorize drug price negotiations .", "But Mr. Baucus , who is up for re-election in 2008 , said he had an open mind and would hold hearings on the idea .", "Kate Leone , an aide to the Senate Democratic leader , Harry Reid of Nevada , said Senate Democrats had the same legislative priorities as House Democrats , but were not committed to the 100-hour schedule .", "Mr. Primus said the new Congress would also `` give the president another chance to veto a stem cell bill , '' like the one he vetoed in July .", "In campaign commercials in 2006 , Democrats and some Republicans boasted of their support for embryonic stem cell research as a way to find treatments for a wide range of diseases .", "Advocates of such research say that , despite gains in the elections , they still do not have the votes to override a veto .", "They are working with Senate allies on a plan to attach the stem cell bill to unrelated legislation that Mr. Bush would feel obliged to sign .", "Lawmakers are also likely to wrestle with these issues : Many Democrats will try to reduce Medicare payments to managed care plans .", "They contend such plans are overpaid by about 10 percent .", "Insurers intend to fight back , with support from the Bush administration , Republican lawmakers and beneficiaries who see the plans as a way to obtain extra benefits at an affordable cost .", "Congress faces a huge challenge in devising a new formula to pay doctors for treating Medicare patients .", "Under the current formula , doctors ' fees would be cut more than 4 percent a year for the next decade .", "Lawmakers are determined to avert such cuts , but see no easy way to pay the cost .", "Democrats have drafted legislation to speed the approval of safe , low-cost versions of expensive biotechnology drugs , which account for a growing share of spending on pharmaceuticals .", "People who pay for health care , including state officials , employers and insurers , support such legislationas a way to slow spending on biotech drugs , which can cost more than $ 10,000 a year .", "Biotech companies argue that their products , made from living organisms , are so complex that they can not be exactly duplicated by generic drug manufacturers .", "As a result , they say , a `` copy '' would rarely be interchangeable with the original .", "The Food and Drug Administration has approved thousands of generic drugs deemed equivalent to traditional brand-name medicines .", "But the agency is unsure of its legal authority to approve such versions of biotech drugs ."], "summary": ["There are profound differences between Democrats and Republicans over future of nation 's health care system , proper role of government and role of private markets in securing best value for huge sums spent on health care .", "House Democrats , now in majority , will try to rush through legislation requiring government to negotiate lower drug prices for Medicare beneficiaries and overturn Pres Bush 's restrictions on embryonic stem cell research .", "Photo ."], "publication": "nyt50", "label": [1, 0], "tag": ["Health", "U.S.", "Washington"]} -{"id": "1815916", "text": ["When the same old irksome question popped up recently at one of his final public events here , Gov . 
Jeb Bush , addressing Spanish-speaking reporters , gave an atypically dramatic answer : `` Yo no tengo futuro , '' or `` I have no future . ''", "His words set off round-the-world buzz , with The Daily Telegraph of London going so far as to call them `` a recognition by the Bush family that their dynastic reign in American politics is drawing to a close . ''", "But in fact , the question lives on .", "Mr. Bush 's spokeswoman said last week that he made the comment jokingly , and when asked about it later in an e-mail message , Mr. Bush himself replied , `` I was misunderstood by a reporter . ''", "He did not elaborate , leaving the world to know only this much : Half his life after he arrived in Miami as a 27-year-old real estate salesman , Governor Bush returns here this week without the title before his name and , he insists , without knowing what his future holds .", "`` We 're in the preface of the new book in my life and I just do n't know yet , `` he told reporters last month in Tallahassee , a day after his official portrait , with a Bible and a BlackBerry in the background , was unveiled at the Governor 's Mansion .", "`` I 'm going to take some time off , hopefully do a little fishing , golfing , resting , reading , exercising .", "And I 've got to make a living , so I 'll figure it out probably in January . ``", "Florida , too , has some readjusting to do .", "After eight years in office , Mr. Bush , 53 , is leaving as one of the most popular and prominent governors in state history , not least because of his relationship to President Bush -LRB- brother -RRB- and former President George Bush -LRB- son -RRB- .", "Succeeding him is Attorney General Charlie Crist , who is Republican like Mr. Bush but otherwise starkly different .", "Despite the wishful prodding of admirers , Mr. Bush has adamantly ruled out a presidential campaign of his own next year , saying that he wants only to return to Miami with his wife , Columba , and their cat , Sugar .", "Yet rumors about his future have burst forth as regularly as exotic species in the Everglades -- among them that he would be the next commissioner of the National Football League , run for Senate or become Senator John McCain 's running mate if Mr. McCain won the Republican nomination for president in 2008 .", "`` The presidency is out of the question at this point because of Bush fatigue , '' said Peter Schweizer , a fellow at the Hoover Institution at Stanford who wrote `` The Bushes : A Dynasty '' with his wife , Rochelle .", "`` But the vice presidential slot is something that 's very much in play .", "He 's a successful governor of an important state , he helps shore up relations with the social conservatives and he has the Bush money machine . ``", "One of Mr. Bush 's former chiefs of staff has gone to work for Mr. McCain 's exploratory committee , but several other former aides have signed up with Gov . Mitt Romney of Massachusetts , another probable Republican contender .", "`` Jeb is a policy-driven guy , '' Mr. Schweizer said .", "`` If he can be a vice president that plays some kind of a policy role as Cheney has , as Gore did in the Clinton administration , then Jeb Bush will be interested . ''", "Many assume that for now -- at least partly at the urging of his wife , described as shy and eager to be out of the public eye -- Mr. 
Bush will return to the private sector .", "He reported a net worth of $ 1.4 million in 2005 , down from $ 2.4 million in 1998 .", "He was a partner in a major real estate development firm here until his first , unsuccessful run for governor in 1994 , but Mr. Schweizer predicted that Mr. Bush might now seek out work involving the bioscience industry or the Latin American economy , both of which `` he seems particularly animated by . ''", "All indications notwithstanding , ardent admirers like Grover Norquist , the president of Americans for Tax Reform , are not giving up on the prospect of Mr. Bush jumping into the presidential race next year , especially if Senator Hillary Rodham Clinton of New York becomes the Democratic candidate .", "`` He could step in later than anybody else , '' Mr. Norquist said .", "`` You can run for president with the last name of Bush , even though there is and will be Bush fatigue , in a year that you 're likely to be running against someone whose last name is Clinton . ``", "For the time being , Mr. Bush bought a car , a Chrysler 300C , and rented a $ 5,500-per - month , 3,949-square - foot condominium in Segovia Tower , a luxury building overlooking a golf course in lush Coral Gables .", "`` I have no idea what I will be doing next , '' he wrote by e-mail from Boca Chica , Fla . , where he was vacationing with his parents .", "`` My priorities are to hang out with my beloved wife -LRB- until she ca n't take it anymore ! :", "As for the continued speculation , he wrote : `` I am flattered that all sorts of people are interested in what I am going to do and many have offered advice as well .", "That will all subside soon . ``", "Small signs suggest , however , that he will have a hard time giving up executive powers .", "He told reporters that while buying furniture recently , he had to stifle the urge to tell the store owner a better way of doing business -- a trait his adversaries say they will not miss .", "`` Bush was the type that if you did not agree with him , he really did n't have time for you , `` said State Senator Frederica Wilson , Democrat of Miami .", "`` He wanted you to rubber stamp every idea he had , and he would n't listen to reason . ``", "While Mr. Bush is internationally famous , Mr. Crist , who will be sworn in as governor on Tuesday , is a stranger to all outside Florida and , but for his native Tampa Bay region , not particularly well known within the state either .", "While Mr. Bush was ideologically driven , often making enemies in pursuit of `` big , hairy , audacious goals '' and divisive social policies , Mr. Crist seems above all a pleaser , avoiding firm opinions and promising to be `` the people 's governor . ``", "Yet despite Mr. Bush 's abrasiveness and the plunging popularity of his brother the president , he has remained well liked -- or at least respected -- to the end , a feat in a state as ethnically and politically divided as Florida .", "A poll last month by Quinnipiac University found that 57 percent of Floridians feel he did a `` good '' or `` great '' job as governor , compared with only 10 percent who said he had done a `` bad '' job .", "Howard Simon , executive director of the American Civil Liberties Union of Florida , said the poll results reflected approval of Mr. Bush 's persona more than of his policies .", "Mr. 
Simon pointed out that two major education initiatives during the governor 's tenure -- a costly effort to lower class size and another to provide universal prekindergarten classes -- were passed by public referendum , over the governor 's objections .", "`` It needs to be said that the personal appeal and likeability of Jeb Bush has led the press and the public to overlook the extremism of many of his policies , '' he said .", "Several of Mr. Bush 's pet initiatives in fact failed , including a school voucher program that the Florida Supreme Court found unconstitutional .", "But Mr. Bush pushed through $ 19.3 billion in tax cuts , put an unprecedented emphasis on standardized testing in public schools , privatized thousands of government jobs and ended affirmative action in public university admissions .", "He also persuaded the Scripps Research Institute and other bioscience research groups to open laboratories in Florida , which he says will makethe state economy less dependent on tourism and create more high-paying jobs .", "And he has appointed more than a third of the state 's judges , assuring that his socially and fiscally conservative beliefs will continue to hold some sway .", "While others have emoted about Mr. Bush 's departure -- including his father , who wept as he described his second son 's `` decency '' and `` honor '' in a speech in Tallahassee last month -- he has characteristically avoided introspection .", "Asked last month what he would miss most about the Governor 's Mansion , he cited its beauty , its staff -- and its towels .", "`` Fresh towels -- all you want , '' he said .", "`` Here , although I 've been trained to do otherwise , it 's just any time I want I can have many towels . `` ."], "summary": ["Jeb Bush is leaving as one of most popular and prominent governors in Florida 's history after eight years .", "Despite wishful prodding , he has adamantly ruled out presidential campaign of his own next year , but rumors about his future have burst forth .", "One is that he would become Sen John McCain 's running mate if McCain wins Republican nomination for president .", "One of Bush 's former chiefs of staff has gone to work for McCain 's exploratory committee , but several other former aides have signed up with Gov Mitt Romney , another probable Republican contender .", "Photo ."], "publication": "nyt50", "label": [16, 12, 11], "tag": ["U.S.", "Washington"]} -{"id": "1815929", "text": ["Representative John D . Dingell , a Michigan Democrat who with more than 50 years ' tenure is the senior member of the House , is not so sure about the idea of creating an independent group to enforce ethics rules .", "But Gabrielle Giffords , a brand-new House Democrat from Arizona , considers it a no-brainer .", "Of the longstanding approach in which lawmakers are seated on the ethics committee to police their peers , Representative-elect Giffords said , `` It is like having the fox guard the henhouse . 
''", "Those divergent outlooks over how best to fulfill the Democratic promise to clean up the House are just one illustration of a friction that could develop in the new Congress as the party takes control after 12 years in exile .", "While most attention will be focused on the divide between Republicans and Democrats , members of the new majority have their own differing perspectives , corresponding largely to length of service , that could ultimately prove more crucial to their success or failure .", "Of 233 Democrats who will be sworn in on Thursday , 147 -- 63 percent -- have been elected since Republicans won control of the House in 1994 , and have never served in the majority .", "Those whose service predates the 1994 revolution , on the other hand , number only 86 , or 37 percent .", "But it is this core of senior Democrats , Mr. Dingell among them , who will lead 20 of the 21 major committees and so exercise concentrated legislative power .", "The differences in tenure tend to manifest themselves geographically as well .", "The makeup of the senior membership has a more urban flavor , while those more recently elected tend to come from the suburbs and exurbs .", "These newer members have faced tougher electoral opposition than their older counterparts , who in many cases represent overwhelmingly safe Democratic districts .", "A majority of new chairmen have traditional liberal roots .", "Lawmakers , senior aides and analysts say the institutional gulf is not necessarily problematic and could even prove beneficial if Democratic leaders are able to harness the experience and skill of the chairmen to the enthusiasm and drive for reform exhibited by the newcomers .", "But they worry that it could become a distraction if the `` old bulls , '' as they are sometimes called , believe that less seasoned lawmakers are demanding too much too fast or if the newer members see the veterans as representative of Congressional inertia .", "`` The guys who have been there for a while and built up seniority saw an abuse of the system , so they know firsthand why it has to change , '' said Representative Rahm Emanuel of Illinois , who will be chairman of the Democratic caucus .", "`` The new members ran on an agenda of why it has to be changed .", "`` If managed correctly , you have the experience and energy to make changes .", "If not managed correctly , it has the potential to be a fault line . 
``", "Leading House Democrats say the long-tenured members and those sent to Congress in recent elections broadly agree on a desire to move ahead on social programs , ethics , energy , national security and fiscal responsibility .", "The differences , they say , are subtler .", "Do issues studied in the previous Congress , for instance , need a full further examination in committee , in deference to the new chairmen .", "Is there need for a separate commission to scrutinize war contracting , or should this too be the province of the committees .", "In any event , it is clear that the lower classmen , particularly the large and celebrated group of 30 freshmen , want to move quickly .", "`` The new class coming in and some of the other newer members are absolutely committed to delivering on the agenda we talked about during the election , '' said Representative Chris Van Hollen of Maryland , who will lead the House Democratic campaign effort for 2008 .", "`` Now that we are in power , we want to make sure that we are changing direction in Washington , and that means following through not just on the big print but the fine print , a break with business as usual . ''", "Representative-elect Ed Perlmutter , Democrat of Colorado , agreed .", "`` I do n't think the chairs are not looking to be aggressive , `` Mr. Perlmutter said , '' but I do see in this freshman class a real desire to make changes and move things along quickly , because I think that was the direction we were given by our voters . ``", "Senior Democrats say that as the lawmakers who endured minority status for so long , they are the ones most painfully aware of a need for new direction .", "Their stake in holding on to a majority , they say , means protecting and advancing the careers of new lawmakers , who in many cases were elected with fund-raising and other help from senior Democrats .", "`` It is not like we are just now meeting for the first time , '' said Representative Barney Frank , Democrat of Massachusetts , who will be chairman of the House Financial Services Committee and can tick off the names of junior committee members he has worked with , as well as several freshmen he personally supported .", "`` We all campaigned for these guys . ''", "Mr. Dingell said an urge to remake the House was hardly anything new .", "`` This is a normal phenomenon which occurs in this place every two years , '' he said , `` and I have seen no class come in that could be described as different .", "We all come here just dying to do something .", "But the smart ones of us know how to get it done by working within the system .", "I have no fears they are out to burn the place down . ``", "But Norman Ornstein , a longtime Congressional observer at the American Enterprise Institute , said he saw competing drives among members of the new majority .", "`` You have a significant number of Democrats who think the major change is that the whip is now in their hands and it is the Republicans taking the lash , '' Mr. Ornstein said .", "`` A number of others want to keep the spigots running , but just into their own pockets .", "Those who genuinely want to change the House -- the way it operates , the culture of Washington -- have their work cut out for them . 
``", "The Democrats ' leader , Representative Nancy Pelosi of California , the incoming speaker , has been in the House since 1987 but has never been chairwoman of a committee , and so in some respects her role may be that of an outsider free to pursue transformation .", "In one of her first tests , however , some seven weeks ago , many of those headed for chairmanships opposed her push to install Representative John P . Murtha of Pennsylvania as majority leader .", "Instead , they backed Representative Steny H . Hoyer of Maryland , at least partly because they saw him as a check on Mrs. Pelosi 's power .", "Mr. Hoyer went on to win .", "Mrs. Pelosi has told allies that while she respects the authority of the new chairmen , she will not allow them to dominate the party agenda or stall legislative initiatives that have broad support .", "And she has already indicated that she does not intend to send the party 's early legislative initiatives back through the committee process , but will instead bring a minimum-wage increase and energy and health bills , among others , straight to the floor .", "Democrats senior and junior say they are watching to see whether their ideals merge or collide .", "But they view party differences as just another factor they will have to take into account as they assume control .", "`` I think people understand it is a little bit of a bump and we have to deal with it , '' Mr. Frank said .", "Michael Arcuri , a moderate Democrat from upstate New York who won a Republican-held seat in November , said the combination of exuberance and experience should prove an advantage for the party .", "`` If we strike the balance between the two , '' Mr. Arcuri said , `` we are going to accomplish some pretty incredible things . ''", "Correction : January 9 , 2007 , Tuesday An article last Tuesday about possible divisions among Democrats in Congress misstated the number of Democratic freshmen in the House .", "It is 42 , not 30 .", "-LRB- Democrats took over 30 Republican-held seats and 12 seats that had been vacated by Democrats . 
-RRB- ."], "summary": ["Divergent outlooks are part of friction that may develop in new Congress as Democratic party takes control after 12 years .", "Democrats have differing perspectives , corresponding largely to length of service , that could ultimately prove crucial to their success or failure .", "Photos .", "Graph shows composition of House Democrats ."], "publication": "nyt50", "label": [4, 3], "tag": ["U.S.", "Washington"]} -{"id": "1815930", "text": ["A FEW years back , while traveling in the Sierra Madre Occidental of northern Mexico , I came upon a canyon packed with cliff dwellings no one had lived in since before the time of Christopher Columbus .", "On the ground were discarded artifacts , pieces of frayed baskets , broken pottery and hundreds of desiccated corn cobs -- the ruins of an ancient civilization .", "I reached down to pick up what I thought was a dry gourd , and instead found myself cradling the skull of a human child .", "As I turned it in my hands , I noticed a deliberate hole in the back of the skull , directly above the spine .", "The skull was not cracked around the hole , which means the child had most likely been alive when a spike or some other implement had been slammed into his or her head from behind .", "This is not the only skull like this .", "Excavations from elsewhere in northern Mexico have turned up other children killed the same way , human sacrifices to an ancient water deity , their bodies buried under pre-Columbian ball courts or at the foot of pillars in important rooms .", "With knowledge of such widespread ferocity , I recently saw Mel Gibson 's movie `` Apocalypto , '' which deals with the gore of the Mayan civilization .", "I had heard that the movie 's violence was wildly out of control .", "But even as I winced at many of the scenes , as a writer and researcher in ancient American archaeology , I found little technical fault with the film other than ridiculous Hollywood ploys and niggling archaeological details .", "Indeed , parts of the archaeological record of the Americas read like a war-crimes indictment , with charred skeletons stacked like cordwood and innumerable human remains missing heads , legs and arms .", "In the American Southwest , which is my area of research , human tissue has been found cooked to the insides of kitchen jars and stained into a ceramic serving ladle .", "A grinding stone was found full of crushed human finger bones .", "A sample of human feces came up containing the remains of a cannibal 's meal .", "It could be argued that `` Apocalypto '' dehumanizes Native Americans , turning their ancestors into savage monsters , but I think it does the opposite .", "Oppressed hunter-gatherers in the movie are presented as people with the same , universal emotions all humans share .", "And urban Mayans are portrayed as politically and religiously savvy , having made of themselves a monumental , Neolithic empire , something more akin to ancient Egypt than the trouble-free agrarians who come to most people 's minds when they think of native America .", "To further shatter that popular notion of Native Americans , there 's the scene in which a turquoise-jeweled priest stands atop a staggering temple yanking out one beating human heart after the next .", "That 's an image that nearly every archaeologist working in Central America has played in his or her head many times , only now it 's on the big screen for everyone to see .", "Being told by screenwriters and archaeologists that their ancestors engaged in death cults 
tends to make many Native Americans uneasy .", "In Arizona , Hopi elders turn their eyes to the ground when they hear about their own past stained with overt brutality .", "The name Hopi means people of peace , which is what they strive to be .", "Meanwhile , excavators keep digging up evidence of cannibalism and ritualized violence among their ancestors .", "How do we rectify the age-old perception of noble and peaceful native America with the reality that at times violence was coordinated on a scale never before witnessed by humanity .", "The answer is simple .", "We do n't .", "Prior to 1492 it was a complex cultural landscape with civilization ebbing and flowing , the spaces in between traversed by ancient lineages of hunters and gatherers .", "To the religious core of pre-Columbian Mayans , a beating heart ripped from someone 's chest was a thing of supreme sacredness and not prosaic violence .", "If `` Apocalypto '' has a fault , it is not with its brutality , but with us in the audience who cringe , thinking the Mayans little more than a barbaric people .", "The fault lies in our misunderstanding of a complicated history , thinking we can lump a whole civilization into a single response and walk out of the movie saying , `` That was disgusting . ''", "Op-Ed Contributor Craig Childs is the author of the forthcoming `` House of Rain : Tracking a Vanished Civilization Across the American Southwest . '' ."], "summary": ["Craig Childs Op-Ed disagrees with those who argue that Mel Gibson movie Apocalypto dehumanizes Native Americans , turning their Mayan ancestors into savagely violent people .", "Says oppressed Mayans in movie are presented as people with universal emotions shared by all humans .", "Says problem with movie is with audience who finds Mayans barbaric .", "Says fault lies in misunderstanding of complicated history that lumps entire civilization into simplistic response ."], "publication": "nyt50", "label": [15, 14, 29], "tag": ["Opinion"]} -{"id": "1815938", "text": ["After Somalia 's Islamist forces abandoned their final outpost on Monday , the transitional government moved aggressively to assert control , setting a three-day deadline for all weapons to be turned in and calling for international peacekeeping troops to be sent immediately .", "Somalia was already a place where military-grade weaponry was casually flaunted on its streets , but the Islamists ' swift collapse has created such a surplus of guns that the average price of a Kalashnikov assault rifle , one of the world 's most popular killing machines , has dropped to $ 15 .", "Ali Mohammed Gedi , the former veterinarian who is the transitional prime minister , said at his daily news conference that he would not tolerate the situation and gave instructions for turning in the weapons .", "`` Individuals or groups of people who have trucks mounted with antiaircraft guns , known as ' technicals , ' should bring those battlewagons to Mogadishu 's old port , `` he said .", "Clan leaders were skeptical about whether he would succeed , and many Somalis seemed dead set against it .", "`` They 're trying to neuter us , `` said Muhammad Duudo , an unemployed car mechanic .", "`` And it 's not going to happen .", "Just wait until the full moon passes and the darkness comes . 
``", "Whatever lies ahead , encouraging or ominous , most Somalis seemed to agree that after a week of fast-moving events , the rough outlines of a new reality were emerging .", "For the first time since the former dictator , Mohamed Siad Barre , fled the country in 1991 , casting Somalia into 15 years of anarchy , there is a credible government based in Mogadishu , the capital , with serious outside support and no organized military threat from within .", "True , countless gunmen still roam the streets , heavily armed warlords still command authority and the seeds of a possible guerrilla movement may be taking root .", "But so far no major force has emerged to challenge the authority of the transitional government .", "The only rival , the Islamists , lost their last conventional military battle on Monday .", "The Islamists had steadily lost ground since Dec . 24 , when Ethiopia unleashed a punishing series of airstrikes and pushed ground troops deep into Somali territory .", "Ethiopian officials justified their intervention in Somalia 's messy , violent internal politics by saying the Islamist movement was a regional threat with terrorist connections and ambitions to invade their country .", "Ethiopia commands one of the most powerful militaries in Africa , and within days of its entrance into the war , Burhakaba , a pivotal inland town , fell from Islamist control -- then Jowhar , another key town , and then Mogadishu , the Islamists ' former stronghold .", "By Sunday , the last remnants of the Islamist forces , which just a few weeks ago controlled a large swath of Somalia , were cornered in Kismayo , a port city on the south Somali coast .", "Thousands of Ethiopian and transitional government troops were closing in on them , and on Sunday night , the Ethiopians began pounding away with heavy artillery .", "At the same time , Kismayo clan elders were pleading with the Islamists to leave .", "The elders said the Islamists did not stand a chance , and they were worried that their city was about to be flattened .", "Clan elders in Mogadishu similarly decided last week that the Islamists were a losing cause and pulled their troops and weapons out of the movement .", "As in Mogadishu , the Islamists in Kismayo , after many fiery speeches about fighting to the death , simply fled .", "By Monday morning , many of the Islamists ' fighters in Kismayo had shed their uniforms and melted back into the population while others headed south toward a thickly forested area along the Kenyan border .", "`` I ca n't tell you how happy people were that they disappeared , `` said Adam Ragay , a businessman in Kismayo .", "As soon as the Islamists left , looters rushed into the streets and smashed up stores and ran away with televisions and cellphones .", "But by midafternoon , the brief burst of anarchy was over and transitional government troops had arrived on the outskirts of town .", "Residents of Kismayo said the remaining Islamists were heading toward Ras Kamboni , a small town in an isolated area on the Kenyan border that the Islamists had used before as a hide-out .", "Ethiopian intelligence officials say operatives of Al Qaeda , invited by the Islamists , planned the 1998 attacks on American Embassies in Kenya and Tanzania from Ras Kamboni .", "Kenyan officials say they have increased border security to keep the Islamists from escaping .", "`` Anyone who ventures to enter Kenya will have to go through a very serious vetting process , '' Alfred N . 
Mutua , a Kenyan spokesman , said in a statement on Monday .", "Mr. Gedi has acknowledged that he needs a lot of outside help .", "Security in Mogadishu is still uncertain , with just a light presence of soldiers .", "Mogadishu 's ports and airport remain closed , strangling the flow of goods and sending prices for rice , sugar , oil and gasoline through the roof .", "The schools remain closed .", "One of the Islamists ' first steps after beginning their ill-fated attack on the transitional government was to close all schools to funnel more teenagers to the front .", "Their move backfired both militarily and politically after countless teenage soldiers were summarily mowed down by better-trained Ethiopian troops .", "Abdi Sallah , a pharmacist in Mogadishu , said he thought that the new government was heading in the right direction but that it needed to step gingerly .", "`` So , we ask all the militia to turn in their weapons and then what happens .", "`` he said .", "`` Do they become part of a national army or do they become unemployed .", "Have these guys thought all this through .", "`` ."], "summary": ["Transitional government in Somalia moves aggressively to assert control after Islamist forces abandon their final outpost .", "Transitional prime minister Ali Mohammed Gedi sets three-day deadline for all weapons to be turned in and calls for international peacekeeping troops to be sent immediately .", "Clan leaders are skeptical about whether he would succeed , and many Somalis are dead set against order to surrender firearms .", "Photo ."], "publication": "nyt50", "label": [0, 4], "tag": ["World"]} -{"id": "1815939", "text": ["Having enjoyed a year that was better than average in the stock market and a much weaker one in housing , home owners and investors appear neither exuberant nor glum about 2007 .", "In fact , they are decidedly ambivalent , according to a recent New York Times / CBS News poll .", "They are split , for instance , almost evenly on whether it is a good time to buy a new home or better to wait , even though a sizable majority expects home prices to stay steady or rise , especially in their neighborhoods .", "The poll also showed that Americans had regained some faith in stocks as a safe investment since the market 's crash in 2000 , but they were less confident that stocks would rise next year than they were during the depths of the last bear market , in 2002 .", "The telephone survey was conducted from Dec . 
8 to 10 and included 922 adults nationwide and has a sampling error of plus or minus three percentage points .", "The seeming confusion and anxiety is not entirely new or limited to average Americans .", "Similar worries abounded in late 2005 , when some economists and market experts predicted the nation 's long housing boom would come to a screeching halt and inflict damage on the economy and the stock market .", "While home sales did plummet from record levels , the stock market rebounded to new heights after a brief stumble last summer .", "The Standard & Poor 's 500-stock index posted a 14 percent gain last year , its best showing since 2003 .", "The American economy also put up stronger-than-expected performance as job growth and business investment made up for the faltering real estate market .", "Still , in follow-up interviews , several of those polled last month said that while they were confident about their personal financial positions , they were preparing themselves for tougher times by either changing their spending and investment patterns or by not taking as many risks .", "Christopher J . Pujol , an account manager at a pharmacy benefits company in Texas , said he planned to shift some of the assets in his 401 -LRB- k -RRB- account to cash from stock-based mutual funds because he did not think the market would match its 2006 performance .", "`` The good times ca n't go on forever , `` he said .", "`` I take a conservative approach and take money off the table . ''", "Mr. Pujol is not alone .", "Skeptical market experts have raised concerns about the durability of the recent rally .", "More broadly , economists are significantly divided about the outlook for the year , from the most pessimistic among them predicting a recession and the most optimistic saying economic growth could be so strong that it may force policy makers to resume raising interest rates to fight inflation .", "`` People seem to have a fairly balanced view about things , '' said Robert T . McGee , chief economist at U.S. Trust , who reviewed results of the poll .", "`` We have had a relatively strong housing boom and people recognize that is over , but at the same time the disappointment in stocks that occurred after 2000 and 2001 is dissipating some . ''", "The poll is in line with other recent surveys that show Americans are slightly more cautious , even though most of them have stable employment and are seeing their paychecks increase , said Lynn Franco , director of the consumer research center at the Conference Board , which produces a widely followed consumer confidence index .", "`` Over all , it 's this glass half-full / half-empty scenario , `` she said .", "Mr. Pujol , who lives in Keller , Tex . , near Fort Worth , with his wife and their two young sons , thinks that the economy will moderate in 2007 .", "The family bought a new home four years ago and is not planning to move but Mr. Pujol and his wife have other real estate investments in Texas that he thinks will fare better than property on the coasts .", "Mr. 
Pujol reflected the view of most Southerners , 57 percent of whom said local housing prices would increase in the coming year and the same number said it was a good time to buy a house .", "Only 47 percent of people surveyed nationally said it was a good time to buy a house and 45 percent said local prices would increase .", "The difference may reflect the relative strength of housing in Texas , on the Gulf Coast and in the Carolinas .", "Not surprisingly , home owners were also more upbeat on housing -- 52 percent said it was a good time to buy a home -- than renters , 60 percent of whom felt it would be better to wait .", "-LRB- Similarly , 65 percent of people who owned stocks or mutual funds said the stock market would go up next year compared with 39 percent of people who did not have investments . -RRB-", "Laura Koepnick , a lawyer for the state of Massachusetts , said she and her partner would like to move to a bigger home from their condo in Boston but think home prices may fall further still .", "`` It 's more of a buyers market , but it has been so up and down that I am not comfortable making a huge investment at this time , `` said Ms. Koepnick , 36 , who is still paying off law school loans and owns a mutual fund in a retirement account .", "Darla K . Bundy , who works for the state of Pennsylvania , would also like to move but she faces an entirely different set of challenges .", "So many homes are on sale in Ridgway , the small industrial town two and a half hours northeast of Pittsburgh where she lives , that Ms. Bundy said she could not make as much as she and her husband , a welder , owe on the property .", "In 2001 , Ms. Bundy lost an $ 18-an-hour job making specialty light bulbs for Sylvania when the company moved some production to Mexico .", "After taking a two-year computer training course paid for by the government , she has a $ 12-an-hour clerical job at the state transportation department .", "`` My husband and I would love to make financial investments to prepare for retirement , '' Ms. Bundy , 42 , said .", "`` But to even take $ 20 out of our budget and put it somewhere else is unthinkable . ''", "Like several other people , Ms. Bundy pointed to Sept . 11 , 2001 , as a critical turning point for her family -- she lost her job shortly before the attacks of that day and her husband has had a hard time finding work ever since .", "The brief recession that followed the technology bust in 2000 ended before the attacks of 2001 , but many Americans view the attacks as an important historical marker in the nation 's and their personal economic lives .", "That may be a function of the weak employment and wage growth that characterized the economic recovery from 2002 to 2004 .", "The economy was `` pretty good until 9/11 , and then it took a major dive , '' said Dawn E . Owsley , a 33-year-old who owns a spa business in Olympia , Wash .", "`` And then it picked up a little .", "But I feel that we have not yet caught up . 
``", "Over all , a majority , 52 percent , said they were making enough to pay bills and obligations and a sizable minority , 35 percent , said they earned enough to save and buy extras , while 12 percent said they did not make enough to meet their household expenses .", "By comparison , 17 percent of those who answered that question in early 2005 said they did not make enough , 48 percent said they did and 33 percent said they could save and buy extras .", "Most Americans , 71 percent , said if they had extra money to invest they would put it in real estate rather than stocks , which were favored by 22 percent of the respondents .", "By comparison , during the peak of the housing boom in the spring of 2005 , an NBC News / Wall Street Journal poll found that 80 percent preferred real estate and 13 percent stocks .", "Of those polled , 77 percent said they owned their own home and 21 percent said they rented .", "But only 44 percent said they owned stocks or mutual funds , down from 56 percent in the first half of 2000 .", "That shift away from stock ownership is not surprising given that many investors are still smarting from the technology-led crash of 2000 and the two-year bear market that followed .", "And in spite of the recent rally , mutual fund flows indicate that investors remain skeptical of American stocks .", "As of Dec . 27 , investors poured $ 15 billion into domestic equity funds in 2006 , compared with $ 134 billion in nondomestic funds and $ 305 billion in money market funds , according to AMG Data Services .", "Edward A . Kellerhals , a retired municipal electrical inspector who lives near Fresno , Calif . , said his investments in stocks and real estate had provided a comfortable retirement for him and his wife .", "Though he has long thought that land is a better investment than stocks , he became convinced that real estate prices in California had become outlandish when he and his wife were looking to buy a home at the end of 2005 .", "`` This piece of junk we saw was selling for $ 300,000 , '' Mr. Kellerhals , 76 , said .", "`` The roof was leaking all over the place and there was a trailer with a caved-in roof in the back . ''", "He has become more bullish on stocks , but says that at their age he and his wife `` do n't buy much stock anymore . ``", "`` We are just slowly going to sell it all , '' he said .", "How the Poll Was Conducted The latest New York Times / CBS News poll is based on telephone interviews conducted Dec . 8 through Dec . 
10 with 922 adults throughout the United States .", "The sample of telephone exchanges called was randomly selected by a computer from a complete list of more than 42,000 active residential exchanges across the country .", "Within each exchange , random digits were added to form a complete telephone number , thus permitting access to listed and unlisted numbers alike .", "Within each household , one adult was designated by a random procedure to be the respondent for the survey .", "The results have been weighted to take account of household size and number of telephone lines into the residence and to adjust for variation in the sample relating to geographic region , sex , race , marital status , age and education .", "In theory , in 19 cases out of 20 , overall results based on such samples will differ by no more than three percentage points in either direction from what would have been obtained by seeking out all American adults .", "For smaller subgroups , the margin of sampling error is larger .", "Shifts in results between polls also have a larger sampling error .", "In addition to sampling error , the practical difficulties of conducting any survey of public opinion may introduce other sources of error into the poll .", "Variation in the wording and order of questions , for example , may lead to somewhat different results .", "Complete questions and results are available at nytimes.com/ polls .", "THE YEAR AHEAD IN MARKETS ."], "summary": ["New York Times / CBS News poll shows that home owners and investors are ambivalent about 2007 .", "They are split almost evenly on whether it is good time to buy new home or better to wait , even though sizable majority expects home prices to stay or rise .", "Poll shows also that Americans had regained some faith in stocks as safe investment since market 's crash in 2000 , but they are less confident that stocks will rise in 2007 than they were during last bear market in 2002 .", "Also shows that those interviewed are preparing themselves for tougher times by either changing their spending and investment patterns or by not taking as many risks .", "52 percent say they are making enough to pay bills and obligations and 35 percent say they earn enough to save and buy extras , while 12 percent say they do not make enough to meet household expenses .", "Graphs .", "Photo ."], "publication": "nyt50", "label": [3, 2, 42, 10, 1], "tag": ["Business"]} -{"id": "1815941", "text": ["Five years ago , the United States economy went through a recession that did virtually no damage to the housing market .", "In 2007 , the question is whether the economy can emerge unscathed from a housing recession .", "As 2006 ended , the opinion of stock market investors could not have been clearer : there are blue skies ahead .", "The Dow Jones industrial average , which was up 16.3 percent for 2006 , was setting new highs in the final week of the year , and even the shares of home builders had rallied sharply from their midyear lows .", "In the bond market , however , the outlook was cloudy at best .", "Prices in the futures market showed that investors expected that the next move by the Federal Reserve would be to reduce the interest rate it has raised 17 times since mid-2004 .", "That indicates worry about a slowing economy .", "And another traditional indicator -- the yield curve -- says the same thing .", "When short-term interest rates exceed long-term rates , as they do now , a recession often follows .", "Nonetheless , there are other indicators in the bond market to 
show a complete lack of worry .", "In a recession , less creditworthy borrowers are more likely to default as business turns bad , so investors who fear a downturn are expected to demand higher interest rates from them .", "But investors are not doing that .", "The spread between Treasury bonds and BB-rated bonds -- the best level of junk -- has shrunk to historic lows of less than two percentage points .", "And while very bad junk bonds -- those rated CCC or lower -- pay much more , spreads there fell to their lowest ever in 2006 , although they drifted up a bit late in the year .", "So the stock market says a boom is here and is going to stay , housing notwithstanding .", "And the bond market expects a recession -- but one that does not damage those who are financially stretched before it begins .", "`` We think of markets as forecasters , '' said Robert J . Barbera , the chief economist of ITG .", "`` But it is very hard to come up with a model '' that makes sense of the current forecasts .", "The answer to that conundrum is probably to look at markets from a different perspective .", "They move because money comes in or goes out , and money now is easier and more plentiful than ever .", "So most asset prices have risen , whether stocks or long-term bonds or commodities .", "An exception is short-term debt .", "There is no upside to a Treasury bill that matures in a few months , and thus little attraction to it .", "There is a perception that central banks tightened in 2006 , and that was true of the Federal Reserve and the European Central Bank .", "But it is anything but accurate about the central banks that matter the most now .", "`` We are in a global market , with the central banks of Japan and China very , very easy , '' Mr. Barbera said .", "`` Two central banks are pumping it out hand over fist . ''", "Where is the money going .", "Into consumption in many countries , and into a bidding war for assets as well .", "The Standard & Poor 's 500-stock index , which generally contains the largest public companies in the United States , now has eight companies in it that have announced plans to go private , often with a lot of borrowed money involved .", "And companies that are awash with cash are putting a lot of that money into dividends and even more into stock buybacks , at least among S . & P . 500 companies , reports Howard Silverblatt , an analyst with Standard & Poor 's .", "He estimates that those 500 companies spent $ 425 billion on capital in 2006 , up 3 percent from the prior year , and spent $ 437 billion on stock buybacks , up 25 percent .", "It was the first year ever that more money was spent on buybacks than capital spending .", "Dividends climbed 11 percent , to $ 224 billion .", "Stock of Exxon Mobil , the largest company in the country by market value , rose 36 percent , but because of share buybacks the company 's market value climbed only 28 percent , leaving it at $ 447 billion .", "That is still well below the record year-end value for an American company , the $ 603 billion valuation for Microsoft at the end of 1999 .", "Microsoft , its stock up 14 percent in 2006 , ended the year as the fourth-largest company , with a market value of $ 258 billion , trailing General Electric and Citigroup .", "Among all companies , not just those in the index , S . & P . counted 221 special dividends in December , the most for a single month since 1978 .", "Some stocks went down , of course , but not that many .", "All of the 10 sectors in the S . & P . 
500 showed gains , with the best performance coming from telecommunications services , which rose 32 percent , and the worst from health care , which managed a gain of 6 percent .", "The overall index managed a gain in 11 of the 12 months , the exception being May .", "The last time the index managed that was in 1958 .", "For what it 's worth , the year that followed , 1959 , was a respectable year with a rise of more than 8 percent .", "But as good as the market was in the United States , it was better almost everywhere else .", "The S . & P . 500 rose 13.6 percent .", "But in dollar terms , virtually every major market in Europe was up at least 20 percent , and some grew much more .", "The Asian performance was more mixed , with Japan 's leading index rising just 5.7 percent , but many of those markets did much better than the United States did .", "The vast majority of American stocks have surpassed the highs they set in the great bull market that ended in 2000 , but that is not true of many of the most prominent stocks .", "That bull market was kindest to technology stocks .", "Many of them have not come close to recovering to their old peaks , and may never do so .", "It was also kind to large stocks in general , as a lot of money poured into index funds that tracked the S . & P . 500 index , and some of the largest , such as General Electric and Merck , are well below where they were .", "That index , calculated the traditional way that gives the highest weight to the largest companies , ended the year at 1,418.30 , still 7.1 percent below its 2000 high of 1,527.48 .", "But the index is also calculated on an equal-weighted basis , in which each of the 500 stocks counts as much as any other .", "For the year , that index did only a little better than the regular S . & P . 
, but it ended the year 52 percent above its 2000 high .", "The Dow , of course , set records in the year .", "But just 8 of the 30 Dow stocks -- Boeing , Exxon Mobil , Altria , American Express , Procter & Gamble , United Technologies , Caterpillar and Citigroup -- set new highs in 2006 .", "Two of them , Eastman Kodak and International Paper , have never risen above the highs they set in 1997 .", "The dollar , widely forecast to weaken , did so against the euro , and the Chinese yuan was allowed to float upward against the dollar , gaining 3.4 percent during the year .", "But the dollar actually showed a small gain against the Japanese yen .", "That the dollar held up as well as it did in the face of growing trade deficits reflects that people around the world continue to be happy to hold dollar-denominated assets , including Treasury securities .", "That willingness has also helped to hold down interest rates in this country .", "Those low interest rates helped support home prices .", "As home prices rose , lenders became more willing to lend on homes with little equity , or to people whose ability to repay seemed suspect , and that lending also provided money to keep consumption strong .", "Mortgage defaults were few and far between , which was hardly a surprise given that virtually any home could be sold for more than had been borrowed against it .", "But that started to change in 2006 .", "Some subprime loans -- a polite way of saying loans to people with poor credit -- defaulted soon after they were issued .", "The banking regulators , who had watched without action as the mortgage market heated up , finally put out guidelines aimed at restricting risky borrowing just as the market was turning lower .", "That could make it hard for homeowners under financial stress to refinance their existing home loans .", "With prices falling in some regions , home builders reported a surge of cancellations of purchase contracts .", "Housing starts plunged , and although starts showed a reassuring increase in November , newly issued permits to build new homes continued to decline .", "Oddly enough , rising home sales could be a bad sign in 2007 , particularly if prices continue to sag .", "A surge in sales of existing homes could be an indication that people were being forced to sell , and that could have a depressing effect on purchases of other things .", "In the past , when home prices fell , the number of homes sold also fell , as homeowners hung on to wait for good times to return .", "That could be harder this time , at least for those with mortgages that allow large increases in monthly payments .", "But even with housing in many areas of the United States on the ropes , much of the rest of the world seems to be humming along .", "There is strong economic growth in most of the developing world , and many commodities prices continue to be strong .", "Still , it was in commodities that the perils of a liquidity-driven market were exposed .", "Crude oil ended the year at virtually the same level as a year earlier , but spot natural gas was down 44 percent .", "It is no coincidence that a bet on natural gas led to the most notable hedge fund blowup of the year .", "Reality , in the form of warmer weather and higher inventories than expected , sent prices plunging .", "No such disasters have befallen most other markets , and with earnings strong , perhaps they will be averted .", "Easy money is driving up asset prices , and that could continue in 2007 .", "THE YEAR AHEAD IN MARKETS ."], 
"summary": ["Whether economy can emerge unscathed from housing recession in 2007 is unclear .", "Investors expect stock market to continue to rally as it did in 2006 .", "Outlook for bond market is cloudy because investors expect Federal Reserve to reduce interest rates as economy slows .", "Spread between Treasury bonds and BB-rated bonds -- best level of junk -- has shrunk to historic lows of less than two percentage points .", "Stock market says boom is going to stay , housing notwithstading .", "Graphs ."], "publication": "nyt50", "label": [12, 14, 1, 64], "tag": ["Business"]} -{"id": "1815943", "text": ["The United States and its allies in Europe , in a tacit acknowledgment that sanctions imposed by the United Nations Security Council in late December are too weak to force Iran to abandon its nuclear ambitions , have embarked on a new strategy to increase the financial and psychological pressure .", "The plan is to use the language of the resolution to help persuade foreign governments and financial institutions to cut ties with Iranian businesses , individuals in its nuclear and missile programs and , by extension , the Iranian Revolutionary Guard Corps , said Stuart Levey , under secretary of the treasury for terrorism and financial intelligence .", "The Guard and its military wing are identified as a power base for President Mahmoud Ahmadinejad .", "Under his administration , American officials said , the Guard has moved increasingly into commercial operations , earning profits and extending its influence in Iran in areas involving big government contracts -- including building airports and other infrastructure , oil production and the providing of cellphones .", "Bush administration officials , who asked not to be identified because they were discussing diplomatic plans , said envoys would soon head abroad to press officials of foreign governments and banks to interpret the Security Council resolution equally aggressively .", "The new strategy builds on the Treasury Department 's efforts over the past few months to get Western banks to scale back business with Iran or risk running afoul of American laws .", "In 2006 , the European banks Credit Suisse First Boston and UBS said they would not do any new business with Iran .", "It is hard to assess how deeply the financial actions may cut , since the most willing parties to the effort -- the United States and Europe -- have few business dealings with Iran .", "The United States does have laws that give it considerable leeway to impose financial restrictions on banks and companies doing business in Iran , while European law does not .", "That said , Britain is also backing the new push , as is France , although to a lesser extent .", "Germany , with far more business interests in Iran , is not quite as eager .", "Japan is not a member of the Security Council , and the country is heavily dependent on the Persian Gulf for oil .", "But Japanese government officials have recently indicated their willingness to limit some of their business dealings with Iran .", "Last month , the Japan Bank for International Cooperation announced that it would not issue any new loans for Iranian projects until Iran resolved the nuclear impasse with the West .", "In addition , Japan has reduced its stake in an initial $ 2 billion deal to develop Iran 's largest onshore oil field at Azadegan to 10 percent from the originally agreed 75 percent , citing concern about Iran 's nuclear program .", "While United States officials have discussed what they are trying 
to do with their Russian and Chinese counterparts , the belief is that they have gone about as far as they are willing to go with the Security Council resolution that passed Dec . 23 .", "Russia fought to keep certain entities off the list and to keep the list as narrow as possible .", "Mr. Levey noted that the resolution cited three people as off limits to outside commercial transactions , and , in another section , prohibited transactions with agencies `` owned or controlled '' by them .", "The three , he said , are Maj . Gen . Yahya Rahim Safavi , commander of the Iranian Revolutionary Guard Corps .", "Gen . Hosein Salimi , who is in charge of the air force branch of the corps .", "And Ahmad Vahid Dastjerdi , who runs the Aerospace Industries Organization .", "Thus , an effort to bar future foreign commercial or government involvement , including bank transactions , affecting missile programs and the Iranian Revolutionary Guard is authorized by the resolution , Mr. Levey said .", "`` This resolution will be a big step forward in getting governments and financial institutions to pay more attention to Iran 's use of deceptive financial practices to facilitate its dangerous conduct and to stop doing business with the I.R.G.C. , `` Mr. Levey said , referring to the Revolutionary Guard .", "The resolution says that `` all states '' will `` take the necessary measures '' to bar `` financial assistance '' and `` financial resources or services '' related to nuclear and ballistic missile programs .", "The resolution 's appendix cites several government and private groups and 12 people as involved in those programs .", "Interrupting foreign involvement with those groups and individuals is also part of the new campaign .", "But American officials have no figures on the value of international business done with those cited in the resolution .", "The United States and European officials said they had also begun trying maneuvers aimed at undermining the self-assurance of Iranian officials , especially those who travel abroad .", "The recent arrests of four Iranian diplomats by American troops in Iraq , the officials said , played into that strategy .", "Pentagon officials said the Iranians were suspected of transferring improvised explosive devices from Iran to Iraq .", "Iran complained loudly that the men were diplomats and that their arrest violated accepted diplomatic rules .", "The diplomats , two of whom American officials said were probably members of the Revolutionary Guard , were eventually released .", "But their arrests are `` precisely the type of thing that will chip away at their confidence , '' one European official said .", "Most of the Western officials spoke on the condition of anonymity because they were not authorized to speak publicly about the issue .", "Even before the new effort began , the slowdown in international business was already emerging as a problem for Iran , which has vast oil fields but relatively little refining capacity .", "It imports 43 percent of its gasoline , according to the Institute for the Analysis of Global Security , a Washington-based nonprofit group that follows energy issues .", "In a rare acknowledgment of difficulty , the Iranian oil minister , Kazem Vaziri-Hamaneh , told the ministry 's news agency , Shana , recently that Iran was encountering obstacles in financing oil projects .", "`` Currently , overseas banks and financiers have decreased their cooperation , '' Mr. 
Vaziri-Hamaneh told the agency .", "Iran is already seeking to secure gasoline imports from its allies , including Venezuela , and shifting some dependency from gasoline to natural gas .", "`` Definitely , the Iranian economy is suffering a great deal as a result of the economic punishment , '' said Gal Luft , the executive director of the Institute for the Analysis of Global Security .", "But he added that Mr. Ahmadinejad `` is not just sitting on his hands and waiting . ''", "The new strategy comes in part because few believe that the sanctions resolution that passed Dec . 23 has the muscle to sway Iran to abandon its nuclear ambitions , which it insists are focused on energy production , not weapons .", "The road to sanctions was a tortuous one , filled with wrangling between the United States , which pushed for tough measures , and Russia , which advocated weaker measures .", "United States and European officials said they might still try to include tougher sanctions through the United Nations in the months ahead .", "But they say the West will need to use other measures as well .", "Specifically , the United States will press France , Germany , Italy and other European countries to halt credits that encourage doing business in Iran .", "The German Ministry of Economics , in a credit program called Hermes , says on a Web site that Iran is among `` risky markets , which are also growth markets , '' identified for such credits ."], "summary": ["United States and allies in Europe embark on new strategy to increase financial and psychological pressure on Iran , in tacit acknowledgment that sanctions imposed by United Nations Security Council in late Dec are too weak to force Iran to abandon its nuclear ambitions .", "Plan is to use language of resolution to help persuade foreign governments and financial institutions to cut ties with Iranian businesses , individuals in its nuclear and missile programs and , by extension , Iranian Revolutionary Guard , which is identified as power base for Pres Mahmoud Ahmadinejad .", "It is hard to assess how deeply financial actions may cut , since most willing parties to effort -- US and Europe -- have few business dealings with Iran .", "Photo ."], "publication": "nyt50", "label": [0, 7, 1, 2], "tag": ["World", "Washington"]} -{"id": "1815949", "text": ["By the end of 2006 , Wall Street had every reason to celebrate .", "A second-half rally propelled the stock market to its best year since 2003 .", "Companies reported strong profits , and some , like oil producers and investment banks , had record-shattering earnings .", "Yet the year had a more precarious beginning .", "Worries were rampant that a slowdown in housing would hurt the overall economy and that the Federal Reserve would raise interest rates and stall growth .", "But the economy proved resilient , withstanding a housing slump .", "The Fed halted its campaign for higher interest rates .", "Profits accelerated , and there was a huge wave of mergers and acquisitions .", "Nonetheless , Wall Street economists are looking at 2007 with some caution .", "They expect at least some slowdown in economic growth .", "And many worry that housing 's slump will spread to other corners of the economy .", "The analysts surveyed predicted that the Standard & Poor 's 500-stock index , which closed 2006 at 1,418.30 , would end 2007 at 1,440 to 1,570 .", "Richard Bernstein Chief Investment Strategist Merrill Lynch Though concerns mount that the world is suffering from global warming , Richard Bernstein is content 
to call himself a `` bipolar bear . ''", "While he foresees returns rising 12 percent over the coming year -- and the S . & P . hitting 1,570 by the end of the year -- Mr. Bernstein expects increased volatility .", "That is in large part because the Fed will not ease , or lower rates , in January , he says .", "`` As the uncertainty about the Fed easing has gone up , you 've found that the market has become choppier , `` he said .", "Mr. Bernstein cautioned that he expected growth in gross domestic product to be a full percentage point below other estimates , citing in part a `` money illusion '' over the growth in the Dow and even in energy prices .", "Investors should watch earnings carefully , he said .", "`` There 's sort of an embedded assumption that earnings will be going great forever . ``", "Mr. Bernstein recommends a mix of 50 percent stocks , 30 percent bonds and 20 percent cash for investors ' portfolios .", "Among sectors to watch , Mr. Bernstein rated telecommunications as a growth sector , aided by increasing attractiveness in European and Asian companies .", "`` It 's one of our turnaround sectors , `` he said .", "He is bearish on energy and commodities , citing their cyclical nature .", "Abby Joseph Cohen Chief Investment Strategist Goldman Sachs The coming year looks to be reasonable , if a little slow , said Abby Joseph Cohen , long one of Wall Street 's optimists .", "`` We are assuming profit growth is in the process of deceleration , '' she said .", "`` Especially in the case of energy companies , we expect that they will show gains for 2007 , but they will not show as much of a boost as in previous years . ''", "Still , Ms. Cohen said , her team 's research indicates that the S . & P . is underpriced , pointing to a 12-month target of 1,550 .", "Buoyed by the Fed 's halt in raising interest rates , merger activity should keep rolling , but with more of a focus on international expansion .", "With the slowdown in housing , the economy will rotate toward discretionary consumer services , like travel , restaurants and entertainment , she said .", "Large-capitalization industrials and information technology may also see favorable growth .", "Over all , Ms. Cohen said , the economy should run well , with no real bumps in the way .", "David Bianco Chief U.S. Equity Strategist UBS Investment Research David Bianco remembers a scary moment from last summer , as he and others feared that the Fed might tighten interest rates .", "Since then , the stock market has recovered and that forward momentum should carry over into 2007 , he said .", "`` We 'll continue to see P / E expansion , `` Mr. Bianco said of price-to-earnings ratios , '' mostly driven by everybody 's fears of a weakening economy not taking place . ``", "Mr. Bianco said that he expects the Fed to cut interest rates in March , setting a fairly steady metronome for slowing but solid growth and a target S . & P . of 1,500 by year-end .", "Like Ms. Cohen , Mr. Bianco said that he sees the S . & P . as relatively undervalued .", "Despite the flurry of merger activity in 2006 , he believes that the S . & P . has been using less than half of its debt capacity , something he expects to be corrected .", "Housing will continue to slow and prices will fall further , taking consumer-related spending and industries like autos along with it , he said .", "Abhijit Chakrabortti Chief U.S. and Global Equity Strategist J . P . 
Morgan Chase Last year can be summed up in one phrase , according to Abhijit Chakrabortti : Value killed growth .", "`` If you had that the S . & P . would be up for the year , you would have said that tech , industrials and health care would kill value stocks , '' he said .", "`` But telecoms and utilities were the big winners . ''", "Coming off a surprisingly good 2006 , however , Mr. Chakrabortti is distinctly bearish .", "He sees slower growth in the United States compared with Asia and Europe .", "Volatility will pick up as earnings slow down , he added , and the dollar will weaken .", "The Fed will hold steady , Mr. Chakrabortti said , but other central banks will tighten .", "He predicts the S . & P . will rise to 1,440 -- the low end of targets among the analysts surveyed .", "`` I very much suspect that there will be a time when a 1,440 valuation looks good , '' he said .", "`` The market will be disappointed with the Fed , and we 'll see a significant correction sometime in the first half . ``", "Ethan S . Harris Chief U.S. Economist Lehman Brothers Ethan S . Harris said he was less surprised than most by the resilience of the stock market last year .", "He expects 2007 to be a growth year , albeit one without the boom that marked the last half of 2006 .", "`` There 's still room for the market to rally , `` he said .", "To that end , he staked an S . & P . price target of 1,570 , thanks in large part to the economy shrugging off the housing slump .", "But he also said he did not expect a completely rosy economic picture .", "Alone among economists surveyed , Mr. Harris talked about the possibility of `` a very mild version of stagflation . ''", "Moreover , he gave recession a 20 percent chance to appear in 2007 .", "THE YEAR AHEAD IN MARKETS ."], "summary": ["Wall Street economists Richard Bernstein , Abby Joseph Cohen , David Bianco , Abhijit Chakrabortti and Ethan S Harris are cautious about economy in 2007 .", "They expect at least some slowdown in economic growth .", "Many are concerned that housing 's slump will spread to other parts of economy .", "Analysts surveyed predict that Standard & Poor 's 500-stock index , which closed 2006 at 1,418.30 , will end 2007 at 1,440 to 1,570 .", "Photos ."], "publication": "nyt50", "label": [11, 9, 10, 8], "tag": ["Business"]} -{"id": "1815950", "text": ["Hundreds of people emerged from tents beside this city 's Canal St . - Martin to greet the chilly New Year with a hot lunch from a nearby soup kitchen .", "But not all of them were homeless .", "Dozens of otherwise well-housed , middle-class French have been spending nights in tents along the canal , in the 10th Arrondissement , in solidarity with the country 's growing number of `` sans domicile fixe , '' or `` without fixed address , '' the French euphemism for people living on the street .", "The bleak yet determinedly cheerful sleep-in is meant to embarrass the French government into doing something about the problem .", "`` Each person should have the minimum dignity in a country as rich as this , '' said Bleunwenn Manrot , a 28-year-old with a newsboy cap on her head and a toothbrush in her hand .", "Ms. 
Manrot drove more than six hours with friends from her home in Carhaix , Brittany , to spend New Year 's Eve along the canal .", "The demonstration has drawn enough media attention over the holidays for President Jacques Chirac to acknowledge it during his traditional New Year address to the nation on Sunday .", "He asked the government to work in the coming weeks to `` put in place a truly enforceable right to housing '' that would give the homeless the legal means to demand a place to live .", "Given France 's well-financed social services , the country 's homeless problem is relatively mild -- the national statistics bureau estimated the number of people living without a fixed address on any one night at 86,000 for all of France in 2004 , about equal to the number of homeless in Los Angeles alone .", "But even that number is disturbing for the socially active segment of France 's population .", "In December 2005 , the French affiliate of the international charity Doctors of the World began distributing nylon pup tents to people who sleep on Paris 's sidewalks and beneath its bridges .", "The movement took hold , and since then the tents have become a fixture in odd corners of the city .", "In an effort to increase pressure on politicians , another group , Don Quixote 's Children , marshaled some of the tent dwellers last year to set up their tents along the Canal St . - Martin , in the heart of `` bobo '' -LRB- short for bourgeois bohemian -RRB- Paris .", "The canal was dug by Napoleon to supply Paris with clean drinking water .", "Since mid-December , the encampment has become a happening in one of Paris 's most happening neighborhoods .", "`` There are 250 tents now , '' said Jean-Baptiste Legrand , the organization 's president .", "`` The people keep coming , and the tents are full . ''", "The protest has started to spread to other cities , including Orl\u00e9ans , Toulouse and Lyon , and has been picked up by politicians as the presidential campaign gets under way .", "Fran\u00e7ois Hollande , the leader of the Socialist Party , and Bertrand Delano\u00eb , the mayor of Paris , have signed the group 's petition calling for a solution to the housing problem .", "Both of the leading presidential candidates -- Nicolas Sarkozy , of the governing Union for a Popular Movement , and S\u00e9gol\u00e8ne Royal of the Socialists -- support the cause .", "Catherine Vautrin , the minister for social cohesion , met with Mr. Legrand and other members of his group and last week announced a tenfold increase in spending to help the homeless , to $ 92 million from $ 9 million .", "She said the money would allow homeless shelters to stay open around the clock on weekends and extend their weekday opening by three hours a day .", "But a legally enforceable right to housing is the biggest prize sought by housing activists , including Don Quixote 's Children , and they remain skeptical of Mr. Chirac 's New Year promise .", "France already has a hard time housing new immigrants and asylum seekers .", "Fires in overcrowded , substandard lodgings have caused scandals in recent years .", "Finding a place for the hardcore homeless is certain to complicate those problems .", "`` Chirac 's speech means nothing , `` said Ms. 
Manrot , the Brittany protester .", "Such comments suggest that the long camp-out will continue .", "Organizers have arranged portable toilets and a soup kitchen to keep the ad hoc village operating .", "Vans of blankets and other supplies arrive regularly , much of the material donated by Parisians .", "Volunteers sweep the canal-side cobblestones to keep the area clean .", "`` I like the protest because it 's nonviolent , `` said another protester , Renaud Huv\u00e9 , 39 , a photographer .", "`` It 's a citizens ' call . ``", "So far , the authorities have been tolerant , though they have quietly evicted tent dwellers before , when the news media were not watching .", "The police broke up one encampment under a bridge farther north along the canal in October .", "Magali Marx , 23 , a sales assistant in a clothes shop , expressed the laissez-faire attitude of the neighborhood 's residents as she passed by .", "`` It 's a bit of a pain for the people who want to walk along the side of the canal , `` she said .", "`` But then , these people do n't have a roof . ``", "Not all of the homeless are down-and-out French .", "A group of immigrants continues to live farther up the canal beneath what people in the area have dubbed `` the bridge of the Afghans . ''", "The government says that a third of the country 's homeless hold jobs .", "The homeless who make up the bulk of the canal-side campers are thankful for the attention .", "`` Let 's hope it makes a difference , `` said Jean , a middle-aged man who said he had been living on the streets of Paris for eight years .", "But staying on the street is anything but restful for those who have a warm bed waiting at home .", "Rain and high winds dampened the canal-side New Year 's Eve celebration .", "Ms. Manrot and her boyfriend , Franck Renardineau , ended up sleeping in their car .", "`` I sleep in one of the tents , '' Mr. Legrand said , rubbing his pale , exhausted face .", "`` But I 've stayed at home a couple of times .", "We 've got a lot going on . 
``", "PARIS JOURNAL ."], "summary": ["Dozens of middle-class French people have been spending nights in tents along canal in Paris in solidarity with country 's growing number of homeless people .", "Demonstration has drawn enough media attention over holidays for Pres Jacques Chirac to acknowledge it during his traditional New Year address to nation .", "Given France 's well-financed social services , country 's homeless problem is relatively mild .", "But even small number of homeless is disturbing for socially active segment of France 's population .", "Photo ."], "publication": "nyt50", "label": [6, 2, 9], "tag": ["World"]} -{"id": "1815974", "text": ["Public School 64 , a vacant building with a leaky roof , broken windows and a colony of pigeons , sits on East Ninth Street near Tompkins Square Park at the center of one of the last fights against gentrification in Manhattan .", "Since 1998 , it has touched off pitched battles in the neighborhood , embroiling two mayoral administrations and employing a legion of lawyers and fixers , all for naught .", "In November , vandals broke in and painted the walls with another layer of anti-landlord graffiti .", "Gregg Singer , the small-time developer who bought the building , which runs from Ninth Street to 10th Street , for $ 3.15 million at a city auction , says he has been stymied at every turn from renovating the building for elderly tenants , nonprofit organizations or college dormitories .", "He said he was the victim of a political deal between Mayor Michael R . Bloomberg and a former city councilwoman , Margarita L\u00f3pez .", "Mr. Singer 's opponents view him as an interloper with little respect for the needs of the community or the building 's history as a political and cultural center for the East Village and Lower East Side .", "They say his secret intention is to build luxury housing .", "The opponents include not only neighborhood activists but nearly every local elected official , the pro-development Bloomberg administration and the owner of the penthouse next door at the Christadora House , a 1980s symbol of encroaching gentrification where protesters once chanted , `` Kill yuppie scum . ''", "The 16-story , 79-year-old apartment building originally housed a charity by the same name that helped immigrants adjust to life in New York .", "More than eight years after Mr. Singer bought the building , there is no end in sight .", "P.S. 64 is a blight even as Tompkins Square Park , the site of a homeless encampment and riot in 1988 , has been transformed into a quiet oasis for the white-collar professionals who live nearby .", "`` It 's an amazing tale , `` said Steven Spinola , president of the Real Estate Board of New York .", "`` Whatever you believe , the fact that this has dragged on so long is amazing .", "The property continues to be an eyesore and a wasted opportunity in the neighborhood . ``", "Last year , the city declared the building a landmark , an example of the French Renaissance Revival style .", "It now plans to change the zoning to eliminate most development options .", "`` For the government to sell me the building and then landmark it , it 's like bait and switch , `` Mr. Singer said .", "`` Complaints from the community do n't bother me .", "It 's the government that 's the problem .", "They 've blocked me from doing any useful development here . ``", "City officials called his claims nonsense and have asked that the suit be dismissed .", "Virginia Waters , a city lawyer , said that Mr. 
Singer could use the building for a medical or community center , or a dormitory , so long as the city is assured it would be occupied by students .", "`` The city has in no way prohibited those uses from going forward , '' she said .", "`` However , we have read that he wants an exorbitant rent .", "That may be his problem . ``", "Ms. L\u00f3pez , who was appointed last April to the city 's Housing Authority by Mr. Bloomberg , did not return calls about the controversy .", "The atmosphere is poisonous .", "One flier likened Mr. Singer 's dormitory design to a Nazi concentration camp .", "Another invited people to toss dog droppings over the construction fence .", "More recently , Mr. Singer put up his own posters announcing that the `` Christotora Treatment Center , '' for the homeless , drug addicted and recently paroled , was `` coming soon . ''", "And he set off a furor when he used an old permit to alter the outside of the building to buttress his suit challenging landmark designation .", "`` Singer 's been combative from the beginning , `` said David McWater , the chairman of Community Board 3 , which includes Ninth Street .", "`` He thinks it 's his God-given right to make as much money as he can . ``", "There was little question that Mr. Singer had stepped onto a minefield when he outbid a dozen rival bidders in July 1998 at an city auction .", "Police officers offered to escort him past an angry group of protesters .", "It took a year to close on the property , which Mr. Singer says is now worth $ 51 million , though the effect of landmark status is unclear .", "The building came with a deed restriction : It must be used as a `` community facility , '' including a library , nursing home or clinic , or for social service or arts groups .", "The original opposition to the sale was led by Armando Perez and Chino Garcia of Charas , a community group that established the El Bohio Cultural and Community Center in the building after P.S. 64 closed in 1977 .", "Over time , it became a home for housing activists , artists , theater groups , a bicycle repair shop and the Latin Kings , a notorious street gang .", "The school is `` symbolic of the struggle in our community , '' said Councilwoman Rosie Mendez , Ms. L\u00f3pez 's successor .", "`` When everyone else fled the community during the 1970s , we took out the rubble and created community gardens , affordable housing .", "He can move forward on developing this as a community facility , or he can decide to sell it . ``", "In 1997 , Antonio Pag\u00e1n , who had clashed with Charas and who was then the local city councilman , urged the Giuliani administration to auction the property .", "The fight to `` save '' P.S. 64 became a cause c\u00e9l\u00e8bre among local groups , the Puerto Rican Legal Defense Fund , the actress Susan Sarandon and Ms. L\u00f3pez , a friend of Mr. Perez 's who later succeeded Mr. Pag\u00e1n on the Council .", "Mr. Singer 's first move was to evict Charas .", "That took more than two years and 110 police officers to haul away protesters who had chained themselves together .", "`` The anger was n't at Gregg , per se , `` said Lyn Pentecost , executive director of the Lower East Side Girls Club .", "But he had `` stepped into the thick of community politics . ''", "At one point , Ms. Pentecost said , she offered to buy a third of the building for the Girls Club for $ 3 million .", "She said Mr. 
Singer was interested only in leasing it .", "`` He 's an opportunist , `` said Valerio Orselli , a member of Charas .", "`` I do n't believe for a minute that Mr. Singer intended it to be the community center we envisioned . ``", "The developer tells a different story : He says that Ms. L\u00f3pez warned the Girls Club , and many other nonprofits , not to get involved with him .", "He said 108 schools and nonprofit and social service organizations responded to an appeal he sent to 1,000 groups , but they all backed out .", "No doubt some potential tenants who visited the building were scared off by the frequent protests outside , so perhaps New York University , the New School and the New York Society for the Deaf decided to sidestep a volatile situation .", "John Caizzo , a vice president at the DeMatteis Organization , who decided on a different site for his company 's project , acknowledged that the building `` seems like a hot potato . ''", "But no city officials told him to stay away , he said .", "But Cecilia Abrilla , a director of the Puerto Rican Alliance , on the Lower East Side , wrote to Mr. Singer in 2003 saying Ms. L\u00f3pez `` has cast a warning to the nonprofit community to stay away from Mr. Singer '' by threatening to withdraw financial support for the organizations .", "And a top city official who spoke on the condition of anonymity because of the litigation made it clear that the local community effectively has veto power .", "`` At the end of the day , '' the official said , Mr. Singer `` will have to find an accommodation with the community . ''", "After a series of meetings in 2004 with Robert B . Tierney , chairman of the Landmarks Commission , Mr. Singer devised a plan to preserve the Ninth Street side of the old school while building a 19-story dormitory tower at the rear .", "He said as much as $ 2 million a year in excess revenue would flow to local groups .", "In return , according to the lawsuit , Mr. Tierney promised to refrain from landmarking and support a building permit .", "But the proposal ignited broader opposition , and the Buildings Department denied him a permit after imposing what Mr. Singer called `` an unprecedented requirement '' that he prove that he had a contract with a specific school for the housing .", "City officials said they imposed the rule after unscrupulous developers had tried to convert `` dormitories '' into market-rate housing .", "Days later , Mr. Singer said , Ms. L\u00f3pez , a Democrat , announced her endorsement of Mr. Bloomberg , a Republican with whom she had quarreled , rather than the challenger , Fernando Ferrer .", "Michael Rosen , who lives in the penthouse at Christadora House , formed a new group that lobbied to designate the school a city landmark .", "`` The thought of a dormitory , tall or short , was out of place in this neighborhood , '' he said , adding , `` My hope is that the building comes back to serve people who are in need . ''", "Mr. Singer scoffed , saying Mr. Rosen did not want his views blocked .", "In June , the Landmarks Commission voted to designate P.S. 64 a landmark , over the objection of the Real Estate Board , a powerful lobbying organization .", "`` The building should never have been landmarked , '' Mr. Spinola , the board 's president , said .", "Further , he said , the city 's new requirement for dormitories was tantamount to `` telling a commercial developer he must have leases before they 'll give him a permit . ``", "City lawyers denied many of Mr. 
Singer 's assertions .", "`` Landmarks never had a deal with him , '' said Gabriel Taussig of the Law Department .", "`` He had nothing in writing . ''", "Mr. Singer sued over the permit denial and lost .", "He is appealing .", "The first , and so far only , attempt at a settlement came in July , when Mr. Singer met with Ms. Mendez , United States Representative Nydia Vel\u00e1zquez and a lawyer for the city .", "The two sides explored a proposal for Mr. Singer to provide space for community groups while he built apartments in the rest of the building .", "The talks quickly collapsed , with each side blaming the other .", "That is when Mr. Singer stripped terra cotta elements and copper cornices from the building 's exterior , a move the politicians saw as a breach of faith .", "`` I 'm disappointed that all the negotiations have broken down , `` said William Jones , pastor of the Gospel Fellowship Church nearby .", "`` I would 've been thrilled if there was a compromise that allowed residential development and community space . `` ."], "summary": ["Developer Gregg Singer and New York City 's East Village community continue eight-year running battle over development plans for former Public School 64 .", "Singer says city has stymied his plans to renovate building for elderly tenants , nonprofit organizations or college dormitories .", "Community says Singer secretly intends to build luxury housing .", "Singer has filed three suits against city .", "One suit contends Mayor Michael R Bloomberg made political deal with former councilwoman Margarita Lopez to block Singer development plans .", "Singer expresses anger over city 's declaring building landmark and plans to rezone area to eliminate most development options ."], "publication": "nyt50", "label": [3, 4, 15, 6], "tag": ["New York and Region"]} -{"id": "1816052", "text": ["Logan Fox ca n't quite pinpoint the moment when movies and television shows replaced books as the cultural topics people liked to talk about over dinner , at cocktail parties , at work .", "He does know that at Micawber Books , his 26-year-old independent bookstore here that is to close for good in March , his own employees prefer to come in every morning and gossip about `` Survivor '' or `` that fashion reality show '' whose title he ca n't quite place .", "`` It kills me , '' Mr. Fox , 53 , said over coffee on Friday afternoon , shaking his head .", "`` The amount of time spent discussing culturally iconic shows has superseded anything in the way of books that I can detect .", "Discussing books is very much one on one .", "It just hurts me . ``", "Mr. Fox is bracing himself for an emotionally wrenching few months .", "In December Micawber announced that it would close , after years of fighting not only the tyranny of other media but also the steady encroachment of big-box retail competitors and the Internet .", "Independent bookstores , of course , have been under siege for nearly two decades by the megachains and the Web retailers , and have been steadily dropping away , one by one .", "Now , though , the battle is reaching some of the last redoubts .", "Mr. Fox said that Micawber 's first chain competitor , Encore Books , arrived in the mid-1990s , and his sales plummeted 25 percent , nearly putting him out of business .", "Soon after , Barnes & Noble and Borders came to town .", "And then there was Amazon.com.", "But beyond those factors , Mr. 
Fox said , he blames a change in American culture , in the quickening pace of people 's lives , in the shrinking willingness to linger .", "During the 1980s , in the store 's early days , customers would come in and stay all afternoon , carefully inspecting the books that were packed tightly together , spine to spine .", "No longer .", "`` The driving force of all of this is the acceleration of our culture , '' Mr. Fox said .", "`` The old days of browsing , the old days of a person coming in for three or four hours on a Saturday and slowly meandering , making a small pile of books , being very selective , coming away with six or seven gems they wanted , are pretty much over .", "If you go to the Strand or to Micawber Books today , it 's a whole different gear , where society wants satisfaction and fulfillment now . ``", "The other crisis for independent booksellers , Mr. Fox said , is the current state of publishing .", "The job of building writers ' reputations and nurturing them has fallen to agents , he said .", "Publishers are concerned only with the bottom line , he added , looking for the home run instead of the single .", "And there is the question of quality .", "Though Micawber carries a few , Mr. Fox laments the rapid growth of the celebrity cookbook genre .", "Children 's books , in particular , are driven by marketability instead of creativity , said Bobbie Fishman , the children 's books buyer at Micawber .", "`` It 's either pirates , wizards , one of a series , or written by Katie Couric , `` she said .", "Independent bookstores across the country are suffocating , squeezed by Amazon.com and the chain bookstores that deliver deeper discounts and wider variety than independent shops .", "According to the American Booksellers Association , a trade group of independent bookstores , there are about 2,500 such stores in the United States , down from about 4,700 in 1993 .", "And that is not counting those that sell only used books .", "Micawber opened in 1981 , when Princeton was an old-money kind of place , with independently owned shops lining Nassau Street , adjacent to the picturesque Princeton University campus .", "`` When we first came , every store on this street was a 15 - , 20-year entrenched old-family business , '' Mr. Fox said .", "Today that same thoroughfare is dominated by chain stores like Ann Taylor and Foot Locker .", "Micawber Books , with its purple walls and its alien name , which comes from Dickens ' novel `` David Copperfield , '' is the one that sticks out now .", "Half the store is devoted to secondhand books and other vintage publications , like a stack of Archie comics on the register .", "The other half is new books , a well-edited mix that includes fiction , history , children 's books and the occasional concession to popular taste , like Rachael Ray 's `` Express Lane Meals . ''", "-LRB- There are also stalwarts , evidenced by the back wall of shelves lined with Penguin Classics . -RRB-", "Mr. Fox is proudly from the old school of bookselling .", "He says he has only been inside a Barnes & Noble store three times .", "-LRB- `` I ca n't do it , `` he said , grimacing . -RRB-", "In the mid-1990s , when the struggling Micawber took on Margaret Griffin as a partner , she insisted that Mr. Fox abandon his system of 3-by-5-inch index cards in boxes and become computerized .", "The new Sony Reader , a handheld device that stores dozens of titles electronically .", "Never tried it .", "`` I do n't want to sound corny about it , `` Mr. 
Fox said .", "`` But there is something transformative about the book . ''", "Mr. Fox began as a bookseller in the ` 70s at the Strand Book Store in Manhattan , and moved to Princeton to open Micawber , then a store devoted entirely to secondhand books from his own personal library .", "His father , Joe Fox , was an editor at Random House , nurturing the careers of John Irving and Truman Capote .", "His childhood was sprinkled with the romantic stuff of traditional New York publishing , with swanky book parties attended by famous authors and writers for The Paris Review .", "In her 1998 film , `` You 've Got Mail , `` Nora Ephron used the name of his father , who , as it happens , was also a former boyfriend of hers .", "Joe Fox was the name she gave the character played by Tom Hanks , a chain bookstore owner who swiftly puts Meg Ryan and her tiny shop around the corner out of business .", "Of course , in the Micawber story , Logan Fox is in the Meg Ryan role , the person who champions the mom-and-pop store and derides the corporate behemoth .", "In Micawber 's case , Mr. Fox said there was no imminent financial crisis , only the recognition that years from now , selling the business would be a far more difficult task .", "`` Ten years down the road , we could n't imagine who would buy us at all , `` he said .", "After Micawber quietly put out the word that it was for sale , Princeton University offered to buy the building that houses the store for an undisclosed sum , and promised to bring in Labyrinth Books , a scholarly chain with stores in New Haven and Manhattan .", "All the architectural details incorporated into Micawber , Mr. Fox pointed out sadly , will be bulldozed .", "And he has no idea what he will do next .", "Still , Mr. Fox said he was trying hard not to be cynical .", "`` I consider this a success story , '' he said .", "`` Our intention was to be a good townspeople store . '' ."], "summary": ["Logan Fox has sold his independent bookstore Micawber Books in Princeton , NJ . Laments cultural shift away from books .", "Holds that fast pace of life and shrinking attention spans have made customers less eager to linger and search for books in store that forgoes commercial push of best sellers at front of store .", "Photos ."], "publication": "nyt50", "label": [1, 13, 0], "tag": ["Arts", "Education", "Books"]} -{"id": "1816057", "text": ["Diane Sawyer , whose continued commitment to `` Good Morning America '' has been the subject of speculation since Charles Gibson departed the show in June , has in recent weeks left some executives at ABC with the impression that she intends to remain with the program until at least early summer , according to one executive briefed directly on Ms. Sawyer 's status .", "By sending signals that she intends to stay with `` Good Morning America '' -- at least through the end of the current prime-time television season , and perhaps longer still -- Ms. Sawyer has prompted some exhaling in the upper echelons of both the news division and the network as a whole .", "That is because she remains ABC 's biggest news star in the morning -- and perhaps the evening as well -- on a program that is by far the news division 's most valuable , with annual profits believed to be well in excess of $ 100 million .", "The executive , who spoke yesterday on condition of anonymity because of the delicate diplomacy involved , said Ms. 
Sawyer had met before Christmas , as she does periodically , with David Westin , the president of ABC News .", "Separately , the official said , she had also had a holiday lunch in New York with Robert A . Iger , the president and chief executive of the Walt Disney Company , ABC 's corporate parent , though that get-together was characterized as more social than business .", "By staying put , Ms. Sawyer , 61 , also promises to keep `` Good Morning America '' competitive with `` Today '' on NBC , the ratings leader for more than a decade , at a moment when that program and its audience are continuing to adjust to a new co-anchor , Meredith Vieira , who replaced Katie Couric in September .", "-LRB- Since Mr. Gibson 's departure for `` World News , '' `` Good Morning America '' has gone through some growing pains of its own , as it introduced Chris Cuomo as its news reader and Sam Champion as its weatherman , while pairing Robin Roberts with Ms. Sawyer as lead anchors . -RRB-", "In the most recent November sweeps period , `` Today '' had an average daily audience of 5.8 million , about 735,000 more than `` Good Morning America , '' which drew an audience of about 5.1 million , according to Nielsen Media Research .", "During the same period a year ago , the lead of `` Today '' over `` Good Morning America '' was about 5,000 viewers larger , although both shows have lost viewers over the last year .", "In June an ABC spokesman said Ms. Sawyer remained committed to `` Good Morning America '' through at least early 2007 .", "When asked in September how much longer she planned to stay with the program , she told a reporter that she was not ready to entertain that question .", "`` ` I 'm sorry if it sounds like a dodge , `` she said at the time .", "`` I truly am looking ahead and enjoying this part .", "And there is plenty of time to think about what remains after -- when and whether . ``", "When asked yesterday about Ms. Sawyer , Jeffrey Schneider , a senior vice president of ABC News , dismissed any notion of a timetable for her tenure on the program .", "He added : `` Diane is having a great time on ' G.M.A. '", "She loves the new team .", "And she has had an unrivaled run of groundbreaking reporting which has her passionately engaged . ``", "Ms. Sawyer 's current contract with the network extends for at least two more years , said the executive with knowledge of her situation .", "But Ms. 
Sawyer has always characterized her tenure on `` Good Morning America '' as temporary -- and the network has regarded it as such -- even if it has been eight years since she joined the show , at a moment of crisis , after the network had just jettisoned a new , low-rated anchor team ."], "summary": ["Diane Sawyer indicates that she will stay with ABC morning program Good Morning America until at least early summer .", "Decision prompts sigh of relief from upper echelons of news division and network as whole , who have relied on Sawyer for star power since departure of Charles Gibson .", "Photo ."], "publication": "nyt50", "label": [1], "tag": ["Arts"]} -{"id": "1816064", "text": ["SOMETHING made me uneasy when I dropped a box of gluten-free EnviroKidz organic Koala Crisp cereal in my shopping cart .", "But it 's hard to suspect a cartoon koala , so I moved on .", "The unsettling sensation came back when I bought a bag of my favorite organic frozen French fries .", "Why did the verdant fields in the Cascadian Farm logo make me feel so smug .", "Then I got really suspicious .", "A bag of natural Cheetos seemed so much more appealing than the classic cheese puff .", "Why .", "Was it the image of a subdued Chester Cheetah rising gently from a farm field bathed in golden sunlight .", "Like clues to a murder that suddenly point to a single culprit , the mystery in my shopping cart revealed itself .", "Wheat sheaf by wheat sheaf , sunrise by sunrise , the grocery store shelves had been greenwashed .", "And I was falling for it .", "The kind of greenwashing I 'm talking about is not just a fake environmental ethos .", "Greenwashing , it seems to me , can also describe a pervasive genre of food packaging designed to make sure that manufacturers grab their slice of the $ 25 billion that American shoppers spend each year on natural or organic food .", "As a design shorthand , it makes subtle use of specific colors , images , typefaces and the promise of what marketers call `` an authentic narrative '' to sell food .", "Especially in recent years , greenwashing has spilled out well past the organic section of the grocery store .", "Even the snack aisle at the gas station is n't immune .", "`` Somebody becomes successful with a specific point of view , and the consumer begins to identify with it and it spreads like a virus , '' said Paula Scher , a partner in Pentagram , an international design firm .", "From there it 's only a matter of time before Cap 'n Crunch shows up in a hemp jacket , raising money to save the manatees .", "Buy a greenwashed product and you 're buying a specific set of healthy environmental and socially correct values .", "If the package does its work , then the food inside does n't actually have to be organic , only organic-ish .", "The right cues on a package free mass-market consumers from doing any homework , said Elizabeth Talerman , a branding analyst .", "They can assume that a group she calls the green elite -- those early adopters who pushed for organic food laws and who helped make Whole Foods markets a success -- have done the work for them .", "`` The mass market wants an instant identifier , '' said Ms. 
Talerman , a longtime New York advertising consultant .", "So what are the identifiers .", "After shopping for dozens of products in places as varied as food co-ops and convenience stores , I 've uncovered the essential elements of a greenwashed product .", "Start with a gentle image of a field or a farm to suggest an ample harvest gathered by an honest , hard-working family .", "To that end , strangely oversize vegetables or fruits are good .", "If they are dew-kissed and nestled in a basket , all the better .", "A little red tractor is O.K. Pesticide tanks and rows of immigrant farm laborers bent over in the hot sun are not .", "Earth 's Best , a baby and toddler food company , offers a delicious example .", "Its whole grain rice cereal features two babies working the rice fields .", "One is white and one is black .", "-LRB- A greenwashed package would never show the black child working in the fields alone . -RRB-", "A sign that looks hand-hewn declares `` No GMO 's . ``", "There is a barn , a butterfly and a typeface that could have come from the back room of a general store .", "A good greenwashed product should show an animal displaying special skills or great emotional range .", "Some Organic Valley packages feature a sax-playing , environmentally friendly earthworm .", "Jaunty cows on Stonyfield Farm yogurt wear sunglasses and headbands .", "The cows on Horizon 's milk cartons dance a bovine jig , despite challenges by organic purists that some Horizon cows see precious little pasture .", "A little family history helps , too .", "My Family Farm of Fort Thomas , Ky . , sells packaged cookies and crackers and promises to give some of the money to charity .", "On the back of the box is a story that begins , `` With careers as licensed social workers , my sister and I are committed to improving the lives of children . ''", "A carton of Country Hen omega-3 eggs , which cost $ 3.69 for six , had a fuzzy black-and-white photograph inside showing the company 's owner , George Bass , and the entire Country Hen family , along with their favorite eggnog recipe .", "A cause is important .", "Nature 's Path , the maker of Koala Crisp , promises that 1 percent of sales will be spent saving endangered species .", "Barbara 's Bakery , maker of Puffins cereal , pays for the National Audubon Society 's live `` puffin cams '' in the Gulf of Maine .", "Buy a box of Peace Cereal 's raspberry ginger crisp , and a percentage of the profit helps pay for International Peace Prayer Day in New Mexico .", "The actual health benefits of a product do n't always matter .", "A package of organic Naturepops from College Farm shows a field of lollipops and a barn , suggesting a well-educated farmer tending her candy .", "The sugar might come from cane juice and tapioca syrup , but it 's sugar just the same .", "And although `` organic '' is losing its power as a code word for certain cultural values , it does n't hurt to flaunt it if you 've got it .", "The word appears 21 times on a box of Cascadian Farm Vanilla Almond Crunch .", "Having established a design paradigm that succeeds in selling food that is often more expensive than conventional groceries , the design world should perhaps rejoice .", "This is not the case .", "Some top brand and package designers find the cartoonish animals and bad hippie typefaces as grating as a self-righteous vegan at a barbecue .", "But then , they did n't like American food package design that much to begin with .", "`` It 's the bottom of the barrel , `` said Ms. 
Scher , who works in the New York office of Pentagram design .", "Riskier designs , like the clean lettering and curvy bottle of Pom Wonderful pomegranate juice , are rare .", "Food manufacturers usually agonize over changing the size of a box or shifting the background color from teal to aquamarine .", "But when a trend starts to show success , it 's a design pileup .", "That 's what happened with the natural and organic category , which makes up about 10 percent of the food at the grocery store and has been growing by more than 20 percent a year since 2000 .", "In the grocery business , a 4 percent jump is considered a victory .", "`` It 's aisle after aisle of design desperation , `` said Brian Collins , chairman and chief creative officer of the design group at Ogilvy , the international advertising and public relations company .", "He called the look `` phony na\u00efvet\u00e9 '' and predicted that its demise was close because consumers are wising up .", "There is value in telling a story , but it must be true , he said .", "Merely dressing up the package is not enough , he said .", "Nonetheless , manufacturers are eager to project a wholesome image .", "`` It 's the halo effect , `` said Caren Wilcox , executive director of the Organic Trade Association .", "`` That 's why we encourage consumers to look for the U.S.D.A. organic seal . ``", "But even the organic seal does n't necessarily offer assurances that the item is produced in a way that jibes with consumer expectations for something that comes in a greenwashed package .", "`` All the ingredients being used in items with the organic seal are produced using the organic system , '' Ms. Wilcox said .", "`` It does n't mean they do n't sometimes end up in products some people think other people should n't eat . ``", "Design and packaging experts fix the start of sincerity and authenticity in food package design in the 1970s .", "Mo Siegel began selling Celestial Seasonings tea in boxes with sleepy bears .", "Tom and Kate Chappell gave up the corporate life to create Tom 's of Maine toothpaste .", "Ben Cohen and Jerry Greenfield sold ice cream in Vermont , using goofy hand-rendered graphics to tell their story .", "The trend grew in the 1980s , when corporate America entered a noncorporate phase .", "`` Companies began to try to not look like big companies , '' Ms. Scher said .", "By the late 1990s , anything with a hint of natural organic goodness sold in big numbers .", "Today , many companies that started with a humble story line have been purchased by larger interests .", "Unilever owns Ben and Jerry 's , the Hain Celestial Group is traded on Nasdaq and Tom 's of Maine is controlled by Colgate-Palmolive .", "The kind of imagery that once marked a brand as an alternative to corporate food conglomerates has now been incorporated into Lay 's potato chips .", "Consumers can buy classic Lay 's in the shiny yellow bag , or Natural Lay 's , with a thicker cut , expeller-pressed oil and sea salt .", "The package has a brown harvest graphic design , old-timey typefaces and a matte bag .", "The natural chips cost about 10 cents an ounce more than the classics .", "A handful of either still offers 150 calories and 10 grams of fat .", "`` When it gets to Lay 's , `` Ms. Scher said , '' its time to change . ``", "Ms. 
Talerman , the New York advertising consultant , predicted that the fascination with what she called the green identifiers will last about five years longer .", "Then , she said , green-elite food consumers will push companies for even more information about environmental impact , labor practices and community involvement , and mass market consumers will start reading labels instead of just searching out easy identifiers .", "Food manufacturers might begin to copy the new nutrition-style labels that Timberland is putting on its shoe boxes .", "Each one lists the amount of energy it took to make the shoes , how much of that was renewable , whether child labor was used and how many hours per pair Timberland dedicated to community service .", "`` As soon as the mass market starts to understand these issues more , '' Ms. Talerman predicted , `` we 'll get away from the fields and the giant vegetables and get back to better design . ``", "It 's More or Less Natural , And It 's Getting Bigger by the Day EACH year grocery manufacturers roll out tens of thousands of products , ever hopeful that a new box of crackers or a frozen entree will be a hit with consumers .", "In 2006 , 17,779 food products were introduced , according to Mintel International , a market research company .", "That 's a jump of almost 2,000 items over the previous year .", "Of those products , 3,761 either were organic or had an all-natural claim on the label .", "`` It seems now that everybody is getting into organics , '' said Lynn Dornblaser , a new-products industry analyst at Mintel .", "She predicted that in 2007 , a shakedown will occur in the organic industry as its products become accessible to larger numbers of people .", "One key indicator of this shift is that Wal-Mart is selling organic products for less money than many competitors .", "Other fast-growing categories last year were baby food , with 116 new products , and ready-to-eat meals or meal-replacement products , with 1,125 items , up from 791 in 2005 .", "Desserts , ice cream and candy came out at a healthy clip , but fruits and vegetables were not so lucky .", "The number of new products in that category dropped by 13 percent .", "`` We 're also seeing more of a focus on authentically ethnic foods , `` Ms. Dornblaser said .", "`` It 's not just pasta sauce : it 's pasta sauce from Tuscany . ``", "Indian food , in particular , is beginning to catch on .", "Although the numbers are small -- only 143 new Indian food items appeared last year -- the figure is almost double the number brought out the year before .", "KIM SEVERSON ."], "summary": ["Food manufacturers are greenwashing their packaging , using specific colors , images and typefaces to convey message that product is healthy .", "In recent years , greenwashing has spilled out past organic section , even into snack section .", "Packaging often displays fields , farms , animals or allegiance to worthy cause , and includes words like ` natural ' and ` healthy ' .", "Photos .", "Drawing ."], "publication": "nyt50", "label": [14, 13, 43, 66], "tag": ["Arts", "Dining and Wine"]} -{"id": "1816065", "text": ["DREW NIEPORENT still remembers the afternoon in the 1970s when Warner LeRoy ordered him to turn off the Barbra Streisand eight-track .", "Mr. Nieporent , who today operates restaurants from Louisville to London , was tending the bar for Mr. 
LeRoy at Maxwell 's Plum on the Upper East Side .", "He cued up the Streisand tape every day at lunch .", "It 's no longer clear if it was the warble of the tape or of `` Evergreen '' that set Mr. LeRoy off , but he ordered the eight-track turned off and pronounced that from that moment on , music was forever banned during lunch at Maxwell 's Plum .", "Silent , strident or Streisand , there 's no consensus on what should play on the dining room hi-fi . Without an easy recipe for success , chefs and restaurateurs turn to consultants , D.J. ` s , enthusiastic staff members and their own record collections , seeking a mix that works .", "`` Music in restaurants is a sore issue in general , '' said Andrew Carmellini , who , when he 's not manning the stoves at A Voce , is working on his second recording with his band , the Crown .", "`` Pre-opening , I thought my list was brilliant , '' he said .", "`` But you name the complaint -- the jazz is too boring , the horns are too shrill , there 's too much bass -- and we 've gotten it . ``", "Many restaurateurs try to avoid such complaints by seeking professional help .", "Food service establishments make up `` a significant portion '' of the 400,000 locations into which Muzak pipes music , according to Karen Vigeland , a company spokeswoman .", "The bulk of those are quick-service places , but Muzak 's roster also includes more elite clients , like ` Wichcraft , the sandwich chain Tom Colicchio has an interest in .", "Dean & DeLuca 's cafes .", "And Emeril Lagasse 's restaurants .", "-LRB- Apologies to anyone who had illusions that Doc Gibbs , Mr. Lagasse 's musical sidekick on `` Emeril Live , '' is selecting the discs at Emeril 's . -RRB-", "Many of Muzak 's clients purchase one of the company 's 90 or so `` core programs , '' themed mixes with titles like `` Rock Show '' or `` Concrete Beats , '' each of which is about 1,200 songs long .", "For others , the company generates custom soundtracks , which may be a core program with advertising spliced in -LRB- touting breakfast sandwich deals in the morning , for example -RRB- or a mix that 's tailored to a restaurant 's particular vibe .", "Part of the appeal of services like Muzak or Digital Music Xperience , another large music consulting firm , is that they make life easier : No iPods to lose , no CDs to scratch .", "The bulk of Muzak 's customers stream the company 's musical feed over the Internet or through a satellite dish .", "Jeremy Abrams , managing partner of the New York-based music consulting company Audiostiles , said that a large percentage of his clients come to him from the larger companies `` looking for something a little more customized and up-to-date . ''", "Daniel Boulud 's restaurant group , Dinex , left Muzak three years ago to become Audiostiles 's first restaurant client .", "Since then , Thomas Keller 's restaurants in New York , California and Las Vegas , and the Four Seasons hotel group have signed on .", "Mr. Abrams said he tailors playlists to the time of the day they 'll be heard .", "To determine exactly what will work for each place , he polls clients on the tempos and genres they want , asking them whether they prefer instrumentals or vocals , new music or something familiar .", "Some restaurants , looking for an even more intimate approach , turn to D.J. 
` s .", "Nemo Librizzi started out choosing the music for the restaurants at the Maritime Hotel and has since added Barbuto , Employees Only , Thor and more to his roster .", "Describing himself as the kind of guy who shows up to meetings without business cards or much of a plan , he said : `` I like to tell clients that they can have me come in and do a mix for them for a few thousand dollars , but that 's like an off-the-rack suit , like buying music by the yard .", "I prefer to do multiple fittings , to tailor the music after hearing feedback from the staff . ``", "His basic strategy might include `` a little jazzy after work music '' for the cocktail hour and late-night montages in which he will slip samples of Jack Kerouac reading from `` On the Road '' between cuts of pygmy music .", "`` Sometimes when I look at a place I hear this kind of music being played , '' he said .", "`` Certain music likes to live in certain rooms .", "I do n't understand it .", "It 's like magic . ``", "Muzak or magic , some restaurateurs ca n't abide the thought of having an outsider select a soundtrack .", "`` Just like I would n't outsource one of my restaurants ' wine lists , I would never outsource the music , `` said Danny Meyer , whose Union Square Hospitality Group operates nine venues in New York .", "Both Mr. Nieporent and Mr. Meyer rely on managers to fine-tune the mix .", "`` Getting it right is a function of watching the guests , '' Mr. Meyer said .", "`` In a restaurant setting , music is a little like air-conditioning -- no one 's going to tell you when the air-conditioning is perfect , but when it is , the conversations in the room will be more energetic . ``", "-LRB- Digital Music Xperience developed a high-tech system to manage the Panera Bread chain 's `` aural strategy '' -- the music -- with microphones in the ceilings of the company 's stores that measure noise level in the room and adjust the volume of the music accordingly . -RRB-", "Sometimes the music program is a group effort .", "Gabriel Stulman , who owns the Little Owl with the chef Joey Campanaro , said the eclectic mix of music played there is `` mine , Joey 's , the servers and all . ``", "Sometimes it 's a solo show : regulars at Babbo are accustomed to having plates of black spaghetti with rock shrimp served with an audible side of whatever Mario Batali likes , as loud as he likes , whenever he likes .", "-LRB- These days his iPod is more likely to be dishing up the plaintive croon of Michael Stipe than the mix of Led Zeppelin and Jimi Hendrix that dominated the restaurant 's dining room in its early years . -RRB-", "At the Great Jones Cafe in NoHo , the manager , Bill Judkins , stocks the restaurant 's jukebox with seven-inch singles from his own personal and obsessively manicured collection .", "On Dec . 
26 , he replaced part of the jukebox 's holiday lineup with a selection of James Brown and James Brown side project records , to honor the Godfather of Soul .", "But not all restaurant folks have 45s of `` Hot Pants '' at their fingertips .", "Some even choose the most radical soundtrack of all .", "Grant Achatz , the chef of Alinea in Chicago , says he has `` great appreciation for but limited knowledge '' of music , and that that is one reason he has chosen not to play any in his dining rooms .", "Beyond that , he said , he has not yet found a way to match the acoustics to the food in a restaurant full of people , each of them at different points in his lengthy tasting menus .", "He did experiment with speakers above each table that would deploy `` audio spotlight '' technology , which can project a very precise beam of sound into a narrowly delineated area .", "But the noises bounced off the hardwood tabletops and reverberated throughout the space .", "Hearing a crunch during a creamy course , he said , just did n't seem right .", "The system , he said , is `` back on the drawing board '' for now .", "And until that changes , his customers will just have to eat Mr. Achatz 's `` white truffle explosion '' to the sounds of silence .", "Correction : January 10 , 2007 , Wednesday An article last week about music in restaurants misstated the relationship between Muzak and ` Wichcraft restaurants .", "Muzak provided stereo equipment for a ` Wichcraft restaurant in San Francisco .", "It does not provide the chain with recorded music .", "Correction : January 15 , 2007 , Monday An article in Thursday Styles on Dec . 7 about holiday music in retail stores and an article in Dining on Jan . 3 about the debate over playing music in restaurants misstated the name of a music consulting company that sells audio systems and programming to such businesses .", "It is DMX -- not Digital Music Express or Digital Music Xperience ."], "summary": ["Chefs and restaurant owners sometimes turn to consultants or even staff members for suggestions on type of music to play , but often hear complaints from customers .", "Muzak offers themed mixes or can generate custom soundtracks tailored to specific restaurant .", "Other companies that offer music to restaurants noted .", "Photos ."], "publication": "nyt50", "label": [4, 54, 45, 12], "tag": ["Arts", "Dining and Wine"]} -{"id": "1816069", "text": ["Capitalizing on Manhattan 's robust real estate prices , the Museum of Modern Art is selling its last vacant parcel of land in Midtown for $ 125 million to Hines , an international real estate developer based in Houston , the museum 's director said yesterday .", "As part of the deal Hines is to construct a mixed-use building on West 54th Street that will connect to the museum 's second - , fourth - and fifth-floor galleries , said the director , Glenn D . 
Lowry .", "He said the project would afford about 50,000 square feet of additional exhibition space for the Modern 's painting and sculpture collections .", "A Hines spokesman said it was too early to say what the building 's other uses would be .", "The property is one of several the Modern acquired during the last decade in mapping out an ambitious expansion .", "A glass-and-steel addition designed by the architect Yoshio Taniguchi was completed in November 2004 .", "Hines also plans to provide about 10,000 square feet in the new building 's basement for museum storage .", "After construction expenses for the new galleries are covered , the Modern estimates that some $ 65 million will go to its $ 650 million endowment .", "`` This is a Christmas present , '' Mr. Lowry said .", "`` It 's a tremendous boon to enhancing what is already an extraordinary collection . ``", "The 10 percent addition to the endowment will go toward caring for the collections and acquisitions .", "No firm timetable for construction has been set , he added , but he estimated that completion of the new building was at least five years away .", "In 1996 the museum bought the Dorset Hotel , a 19-story building from the 1920s next door on West 54th Street , along with two adjacent brownstones in a $ 50 million transaction .", "Much of that land was used for Mr. Taniguchi 's addition .", "That expansion , including an increase in MoMA 's endowment to cover operating expenses , cost $ 858 million in total .", "The museum also quietly purchased other parcels on West 54th Street , including what had been the City Athletic Club , a brownstone and a sliver building next door .", "Over the years , Mr. Lowry said , the museum has been inundated with offers from developers interested in buying the land , but did not seriously consider selling until recently .", "`` But as the market went into overdrive it seemed like the right move to make , '' he said .", "The Modern put out the word that it was open to offers and the response was overwhelming .", "Hines was the highest bidder , Mr. Lowry said .", "`` We ultimately settled on Hines because of its financial offer and because it has a good reputation for working with architects , '' Mr. Lowry said .", "He added that no architect had been selected to design the new building or the Modern 's additional galleries .", "When Mr. Tanaguchi conceived his design he took into consideration a possible future expansion to the west , Mr. Lowry said , making it structurally easy to break through to what will be the new building and extend each of the three gallery floors by about 17,000 square feet .", "Jerry I . Speyer , a Modern trustee and real estate developer who is president and chief executive of Tishman Speyer , helped negotiate the sale .", "-LRB- He was instrumental in the purchase of the Dorset Hotel , too . -RRB-", "`` The museum is not in the real estate business , but in the business of showing art , collecting art and educating people about art , '' Mr. Speyer said .", "`` Because of the figuration of the land , there was a limit to the amount of space we could use for galleries . ''", "He said that the entire board agreed that now was the time to act .", "`` Everyone felt great about the decision , '' he said of the sale .", "`` There were no issues in anyone 's mind . ``", "The parcel as a whole consists of about 200,000 square feet of buildable space , Mr. 
Lowry said .", "The addition also opens the way for the museum to address wide criticism of the exhibition spaces in the Taniguchi building .", "When the Modern reopened in 2004 many faulted its curators for showing fewer artworks in its expanded galleries than it had before .", "`` The goal has always been to display the collection better , '' Mr. Lowry said .", "Responding to the criticism , he said the display of art in the museum 's previous incarnation was `` overly dense , '' which people felt was `` too much like a textbook . ''", "Trying to anticipate the museum 's needs for contemporary art display is not easy .", "Mr. Lowry said the new galleries would be designed to be flexible .", "`` We envision them to include space that will deal with the unanticipated changes of the future , '' he said .", "And whereas MoMA had to close its doors on West 54th Street during the 2002-04 building project , operating a temporary museum in Queens , Mr. Lowry said that would not be necessary this time .", "`` The construction of these galleries will not entail closing the museum again , '' he said ."], "summary": ["Museum of Modern Art will sell its last vacant parcel of land in Midtown for $ 125 million to real estate developer Hines .", "As part of deal , Hines will construct mixed use building on West 54th Street that will connect to museum 's galleries and afford about 50,000 square feet of addition exhibition space .", "Photos ."], "publication": "nyt50", "label": [0, 1, 2], "tag": ["Arts"]} -{"id": "1816070", "text": ["`` I did n't go there to make a point , `` said Laura Poitras , a documentary filmmaker , about traveling in Iraq to make '' My Country , My Country , `` one of four documentaries about the war contending for Oscar nominations this year .", "`` I do n't think I would risk my life to make a point , `` she added , seated in her comfortable TriBeCa office early last month .", "`` But I did feel it was important to understand this war -- and to document it -- and I did n't think that the mass media was going to do it . ``", "Ms. Poitras , 42 , used her own camera and recorded sound herself as she followed an Iraqi physician for eight months .", "An outspoken Sunni critic of the American occupation , he was seeking a seat on the Baghdad Provincial Council during the national elections in January 2005 , but did not win .", "`` My Country , My Country '' may not capture the best-documentary Oscar , or even be selected as one of the five nominees , to be announced by the Academy of Motion Picture Arts and Sciences on Jan . 23 .", "-LRB- The awards ceremony is on Feb . 25 . -RRB-", "But its presence on the highly competitive feature-length documentary shortlist -- 14 other films are on that list -- highlights a shift toward gritty , guerrilla filmmaking , a willingness to tackle controversial subjects , no matter the obstacles .", "Issue-oriented documentaries dominate the shortlist , chosen by the 138 members of the documentary branch of the academy .", "Eighty-one films met the eligibility requirements .", "Of those , the members who voted selected 15 and will further narrow the field to the 5 nominees .", "`` This is the year of the angry documentary , of the ' Take back America ' documentary , `` Sheila Nevins , president of HBO Documentary Films , said in a telephone interview .", "`` The theatrical documentary , '' she added , `` has replaced the television documentary in terms of talking back to the administration .", "That 's one of the only places where one can do it . 
``", "But one pioneering filmmaker , Albert Maysles , did not seem enthusiastic about the trend .", "`` I am a strong advocate of distancing oneself from a point of view , '' he said recently .", "`` What is good for the documentary world in ' Fahrenheit 9/11 , ' '' -- Michael Moore 's 2004 film -- `` is that Michael 's heart was in the right place `` for viewers who agreed with him , he said .", "`` But he damages his cause because he is out to get people .", "He 's using people in a nonloving fashion to serve the purpose of his argument .", "If what you think is correct , what do you have to fear in telling the full story .", "`` Stanley Nelson , the director of another shortlisted film , '' Jonestown : The Life and Death of Peoples Temple , `` said that while Mr. Moore was '' over the top , `` his work occupied a significant position within the genre .", "Speaking at an Upper West Side coffee shop , Mr. Nelson said , `` What 's fascinating about documentary today is the different ways to approach it . ``", "Referring to his own film about Jim Jones , who led the mass suicide in which more than 900 people died in Guyana in 1978 , Mr. Nelson said : `` It was essential for us not to say that this guy was only evil .", "Just by being somewhat objective , we were being revolutionary . ``", "Mr. Nelson 's comment reflects a climate in which the pursuit of objectivity in documentaries is hardly the norm , as it had been during the 1950s and ` 60s .", "In that period , American filmmakers like Mr. Maysles advocated `` direct cinema , '' where the camera was thought of as a fly on the wall , capturing but not commenting on life .", "Still , some of the shortlisted documentaries adopt this approach more than others in treating subjects like these : Global warming : Davis Guggenheim 's box office hit , `` An Inconvenient Truth , '' with former Vice President Al Gore .", "Religion : Rachel Grady and Heidi Ewing 's `` Jesus Camp , '' about born-again Christian children at an evangelical summer camp in North Dakota .", "Amy Berg 's `` Deliver Us From Evil , '' about Oliver O'Grady , a former priest and convicted pedophile .", "And Mr. Nelson 's film about Jim Jones .", "Race : Ricki Stern and Annie Sundberg 's `` Trials of Darryl Hunt , '' about a wrongly convicted African-American man .", "Free speech : Barbara Kopple and Cecilia Peck 's `` Shut Up & Sing , '' on the fallout after Natalie Maines , of the Dixie Chicks , publicly criticized President Bush on the eve of the 2003 invasion of Iraq .", "The political campaign process : Frank Popper 's `` Can Mr. Smith Get to Washington Anymore . , '' which follows the 2004 grass-roots campaign of Jeff Smith , a Missouri Democrat , for Congress .", "The two-party political system : Henriette Mantel and Steve Skrovan 's `` Unreasonable Man , '' a profile of Ralph Nader .", "In addition to Ms. Poitras 's film , the three other shortlisted documentaries on the Iraq war are James Longley 's `` Iraq in Fragments , '' Deborah Scranton 's `` War Tapes '' and Patricia Foulkrod 's `` Ground Truth . ''", "Ms. Kopple , a two-time Oscar-winning documentary filmmaker who once worked for Mr. 
Maysles , said more people were seeing documentaries because they wanted to watch passionate stories about unforgettable characters .", "`` Audiences are smart enough to decide for themselves if they agree with the point of view onscreen , '' she said .", "`` I 'm not sure that ` distance ' is a positive thing in nonfiction filmmaking .", "I think there 's a time and place for distance .", "In television journalism , for example . ``", "She agreed with Mr. Maysles about letting a story unfold naturally .", "`` The most important factor , in my opinion , '' she said , `` is not do we grow too close to our subjects , it 's are we willing to go on a journey with them that may not end up as we first envisioned it .", "`` One director who took such a journey was Mr. Guggenheim with '' An Inconvenient Truth . ``", "Speaking from Los Angeles , he recalled the beginning of his own transformation after watching a presentation by Mr. Gore on climate change , which became the centerpiece of the film .", "`` All movies are personal , '' Mr. Guggenheim said .", "`` When I make a movie , I do n't have activism in mind .", "I have an experience in mind .", "Before I saw Al 's slide show , I was not an environmentalist .", "But when I saw it , it shook me to the core . ``", "In a telephone conversation in New York with Ms. Ewing and Ms. Grady , the directors of `` Jesus Camp , '' Ms. Grady said their film was as `` balanced as humanly possible for us . ''", "`` It 's unattainable to have no point of view at all , `` she said .", "`` We 're human , and we did the best we could . ``", "With its concentration on national politics , the academy passed over a clutch of well-made films that in other years might have fared better : for example , Christopher Quinn 's `` God Grew Tired of Us : The Story of the Lost Boys of Sudan '' .", "Doug Block 's `` 51 Birch Street , '' an exploration into the lives of his parents .", "And Ward Serrill 's `` Heart of the Game , '' about girls ' basketball .", "Similarly , the three remaining shortlisted movies , all set in foreign countries other than Iraq , may face an uphill battle .", "They are Lucy Walker 's `` Blindsight , '' about six blind Tibetan children .", "Yael Klopmann 's `` Storm of Emotions , '' about the Israeli pullout from the Gaza Strip .", "And Kim Longinotto and Florence Ayisi 's `` Sisters in Law , '' a profile of two Cameroon women -- a judge and a prosecutor -- fighting for women 's rights .", "However the academy members vote , Ms. Poitras said she already considered `` My Country , My Country '' successful .", "She cited a scene she had shot at the Abu Ghraib detention center : a 9-year-old Iraqi boy is being held for some unspecified reason by American Army officers who call him a dangerous juvenile .", "Moments such as these , she said , `` will bring a sense of questioning and shame about some of the things we are doing in Iraq . ''", "So even a filmmaker like Ms. Poitras , who by her own account employed a subtle and patient approach , may have made a point after all .", "In the current climate for documentaries , she certainly is not alone ."], "summary": ["Documentary films on shortlist for Oscar nominations are all issue based .", "Films My Country , My Country , An Inconvenient Truth , Jesus Camp , Deliver Us From Evil , Trials of Darryl Hunt , Shut Up & Sing , Can Mr Smith Get to Washington Anymore . 
, An Unreasonable Man , Iraq in Fragments , War Tapes and Ground Truth noted .", "Photos ."], "publication": "nyt50", "label": [34, 32, 28, 30], "tag": ["Movies", "Arts"]} -{"id": "1816074", "text": ["In a moment of presidential empathy , former President George Bush recalled a skill he had learned from Gerald R . Ford : how to handle being ridiculed on `` Saturday Night Live . ''", "`` I remember that lesson well , since being able to laugh at yourself is essential in public life , '' Mr. Bush said in his eulogy for Mr. Ford on Tuesday .", "`` I 'd tell you more about that , but as Dana Carvey would say : ` Not gon na do it .", "Would n't be prudent . '", "`` As the 82-year-old former president imitated his own impersonator , there were only three people in the audience who knew exactly how it felt to be ridiculed , as a sitting president , on late-night television .", "Former President Bill Clinton responded with a hearty laugh from his seat in the Washington National Cathedral , while former President Jimmy Carter , in a neighboring pew , looked on with a smile .", "Seated nearby was President Bush , who has been relentlessly skewered by TV comics as well .", "With Mr. Ford 's death last week , the group of living former presidents has shrunk to three , down from five in the early 1990s .", "Since 1994 , Richard M . Nixon , Ronald Reagan and , now , Mr. Ford have left the stage , while Mr. Clinton has joined the ranks of former chief executives .", "The dynamic among the former presidents is perpetually evolving , as people age , rivalries fade and former political opponents discover they share more similarities than differences .", "The current club has at its core two presidents in different parties -- Mr. Clinton , 60 , and the elder Mr. Bush -- who have become companions in traveling and fund-raising for causes around the world .", "The two at times appear more friendly than Mr. Bush is with his son , or than Mr. Clinton is with Mr. Carter , 82 , a fellow Democrat .", "And on Tuesday it was Mr. Carter , not either of the Bushes , who accompanied Mr. Ford 's coffin back to Grand Rapids , Mich . , for burial .", "Though Mr. Ford and Mr. Carter squared off in the 1976 election , they became friends after both were out of office .", "On a flight home from Cairo in 1981 , after the funeral of the Egyptian leader Anwar el-Sadat , they found common ground discussing their presidential libraries .", "Similar kinship has grown between Mr. Clinton and the first President Bush , in contrast to the rocky relationship between Mr. Clinton and Mr. Carter .", "Though the two Southern Democrats are from similarly humble backgrounds , they have been at odds repeatedly over the years .", "In 1993 , Mr. Carter said he was `` very disappointed '' in the Clintons for sending their daughter , Chelsea , to private school rather than public school , as the Carters had done with their daughter , Amy .", "After the Monica Lewinsky scandal , Mr. Carter said he had been `` deeply embarrassed by what occurred . ''", "In more recent years , the Clinton-Carter relationship has had a tinge of rivalry , as both men have sought to become global statesmen .", "The funeral also provided a stage for those thinking about the politics of tomorrow , as well as those of yesterday .", "Along with the three former presidents sat several of those who may aspire to the job , including Senator Hillary Rodham Clinton of New York , former Mayor Rudolph W . Giuliani of New York and Representative Dennis J . 
Kucinich of Ohio .", "And it was a reminder that while the nation elects only the president , the White House becomes home to a whole family whose members have to cope with the spotlight .", "At the cathedral on Tuesday , Rosalynn Carter was seen talking intently with Nancy Reagan .", "One row behind them , Chelsea Clinton spoke animatedly with a fellow Stanford University alumna , Secretary of State Condoleezza Rice .", "Tom Brokaw , the former NBC News anchor , said in his eulogy that when Mr. Ford brought his family to 1600 Pennsylvania Avenue , `` he brought the humanity that comes with a family that seemed to be living right next door . ''", "For neighbors of the Fords in Alexandria , Va . , from the 1950s until they moved into the White House in 1974 , they were in fact the family next door , with four independent-minded children whose public indiscretions Mr. Ford sometimes had to confront .", "His son Jack admitted in a newspaper interview in 1975 that he smoked marijuana and lived a rowdy bachelor 's life .", "His daughter , Susan Ford Bales , was a headstrong teenager when her father became president and was married for a time to a Secret Service agent who had been assigned to guard the family .", "Mr. Ford and Ms. Bales , now middle-aged , walked hand in hand down the center aisle of the cathedral before the service , and each read a passage from the Bible .", "Ms. Bales 's reading came from James 1:19 - 25 , chosen by the family to help illuminate the former president 's humble approach to the bitter times in which he served .", "`` Therefore rid yourselves of all sordidness and rank growth of wickedness , and welcome with meekness the implanted word that has the power to save your souls , '' Ms. Bales read .", "`` But be doers of the word , and not merely hearers who deceive themselves . ''", "THE 38TH PRESIDENT Correction : January 5 , 2007 , Friday An article on Wednesday about the funeral of former President Gerald R . Ford drew an erroneous comparison between Chelsea Clinton and Secretary of State Condoleezza Rice , who sat next to each other .", "Ms. Rice received degrees from the University of Denver and Notre Dame .", "She is not an alumna of Stanford University , where Ms. Clinton received a bachelor 's degree .", "-LRB- Ms.", "Rice is a former Stanford provost . -RRB- ."], "summary": ["Former Pres Gerald R Ford 's death brings group of former living presidents to three : George Bush , Jimmy Carter and Bill Clinton .", "Dynamic among former presidents continually evolves , as people age , rivalries fade and former political opponents discover they share more similarities than differences .", "Photo ."], "publication": "nyt50", "label": [9, 7], "tag": ["U.S.", "Washington"]} -{"id": "1816075", "text": ["In a soaring tribute to a modest man , Gerald R . Ford was remembered on Tuesday as bringing the ordinary virtues of decency , integrity and humility to mend a broken government after the pain of war and scandal .", "`` Amid all the turmoil , Gerald Ford was a rock of stability , '' President Bush told the gathering of generations of Washington 's powerful at Washington National Cathedral .", "`` And when he put his hand on his family Bible to take the presidential oath of office , he brought grace to a moment of great doubt . ''", "The cathedral 's grand setting and the pomp of a state funeral provided a counterpoint for the unassuming character praised by the eulogists .", "President Bush 's father called Mr. 
Ford `` a Norman Rockwell painting come to life '' .", "Tom Brokaw , the former television anchor , described `` Citizen Ford '' as a `` champion of Main Street values '' .", "And Henry A . Kissinger said the man he served as secretary of state `` had the virtues of small-town America . ''", "When the cathedral 's limestone arches echoed , it was with the drums and brass of Aaron Copland 's `` Fanfare for the Common Man , '' and the ushers directing the capacity crowd of 3,700 to their seats were uniformed Boy Scouts , a tribute to Mr. Ford 's youthful achievement of the rank of Eagle Scout .", "Among the hymns was `` Eternal Father , Strong to Save , '' known as the Navy Hymn , a particular favorite of Mr. Ford , who served in the Pacific during World War II .", "President Bush , overseeing a deeply unpopular war in Iraq and perhaps pondering his own legacy , lauded Mr. Ford 's `` firm resolve '' in sending the Marines to rescue the crew of the American merchant ship Mayag\u00fcez when it was seized by Cambodia .", "He suggested that some acts widely condemned during Mr. Ford 's administration in the 1970s had come to look wiser in historical perspective , including his pardon for his immediate predecessor , Richard M . Nixon .", "In addition , Mr. Bush noted that Mr. Ford was criticized for signing the Helsinki Accords , the 1975 agreement that ratified borders in Soviet-dominated Eastern Europe while also setting new standards for human rights .", "`` History has shown that document helped bring down the Soviet Union as courageous men and women behind the Iron Curtain used it to demand their God-given liberties , '' Mr. Bush said .", "Mr. Ford 's coffin arrived at the cathedral by motorcade from the Capitol , a final journey through the city where he served as 13-term congressman , vice president and finally president , the only person to hold the nation 's top two offices without being elected to either .", "After the 90-minute Episcopal funeral service , Mr. Ford 's body was flown from Andrews Air Force Base outside Washington to his hometown , Grand Rapids , Mich . , for a burial service on Wednesday in a plot beside the museum that bears his name .", "In Washington , the Gothic cathedral where Mr. Ford helped dedicate the nave in 1976 , became for the morning a crossroads of the capital 's past and present , with Supreme Court justices and members of Congress in the south transept facing scores of foreign ambassadors and former foreign leaders in the north transept .", "Across an aisle from the diplomats sat Mr. Ford 's honorary pallbearers , including in the front row Mr. Kissinger .", "Donald H . Rumsfeld , who served as defense secretary to both Mr. Ford and the current President Bush .", "Alan Greenspan , the former Federal Reserve chief who was Mr. Ford 's top economic adviser .", "James A . Baker III , who ran Mr. Ford 's unsuccessful 1976 campaign for president .", "And Brent Scowcroft , Mr. Ford 's national security adviser .", "Facing the altar , where Mr. Ford 's coffin , draped by a flag , sat , were Mr. Ford 's widow , Betty , who was escorted in and out of the cathedral by President Bush , and the Ford children , Steve , Jack , Mike and Susan .", "Across the nave from the Ford family sat President Bush and Laura Bush , and Vice President Dick Cheney , who served Mr. Ford as chief of staff , with his wife , Lynne .", "Several current cabinet members and three former presidents -- the elder Mr. 
Bush with his wife , Barbara .", "Jimmy Carter and his wife , Rosalynn .", "And Bill Clinton and his wife , Senator Hillary Rodham Clinton , and their daughter , Chelsea .", "With them was Nancy Reagan , the former first lady .", "Like much of the outpouring of affection for Mr. Ford since he died at his home in Rancho Mirage , Calif . , on Dec . 26 at the age of 93 , the service focused on what President Bush called the `` calm and healing '' the former president brought to `` one of the most divisive moments in our nation 's history . ``", "Mr. Ford , the House minority leader , succeeded first Vice President Spiro T . Agnew and then President Nixon after both men were forced from office by scandal .", "`` Gerald Ford brought to the political arena no demons , no hidden agenda , no hit list or acts of vengeance , '' said Mr. Brokaw , who explained that Mr. Ford had asked him to address the funeral as a representative of the press corps .", "`` He knew who he was , and he did n't require consultants or gurus to change him . ``", "Mr. Kissinger in particular emphasized the substantive achievements of Mr. Ford in foreign policy , saying the `` deserved commentary '' on Mr. Ford 's character `` has sometimes obscured how sweeping and lasting were his achievements . ''", "In remarks perhaps intended to reflect on his own record as well as Mr. Ford 's , he credited the former president with keeping ethnic conflicts in Cyprus and Lebanon from spiraling out of control , producing the first peace agreement between Israel and Egypt and presiding over `` the final agony of Indochina with dignity and wisdom . ''", "Historians , Mr. Kissinger added , will find `` that the cold war could not have been won had not Gerald Ford emerged at a tragic period to restore equilibrium to America and confidence in its international role . ''", "A few hours after the service , the plane carrying Mr. Ford 's body circled over the University of Michigan football stadium , where he had been a standout center and linebacker , then landed at the airport named for him in Grand Rapids .", "The university 's marching band , which arrived on a red-eye flight from California after the Rose Bowl game on Monday , solemnly played its fight song , `` The Victors . ''", "About 200 friends and local dignitaries invited by Mr. Ford 's family attended the brief ceremony before the 13-mile motorcade to his presidential museum in downtown Grand Rapids , passing thousands of residents who lined the streets , some holding signs that said `` Welcome home . ''", "Billboards around the city declared `` Gerald ' Our ' Ford : 1913-2006 . ``", "Despite a fierce , bitter wind blowing off the Grand River , Tim Micho waited with his video camera and 7-year-old daughter , Tessa , for two and a half hours to watch the motorcade pass by .", "`` She 'll probably never get to see something like this again , `` Mr. Micho , 43 , said .", "`` It 's so moving to see this many people out here to support him . ``", "A single bagpiper played `` Amazing Grace '' as Betty Ford and the rest of the family made their way slowly behind the coffin into the museum , a geometric , glassy structure along the water .", "Inside , they held a brief service for family and honored guests , including former President Carter .", "Like many along the streets in Grand Rapids , Gov . Jennifer M . Granholm spoke of Mr. Ford 's deep roots in the region .", "Here , Ms. Granholm said , Mr. 
Ford had learned from his family `` some good Midwestern values likehard work and sportsmanship and integrity and honesty . ''", "Here , he had played high school football -LRB- with a few men , now frail , in attendance on Tuesday -RRB- , had married and had been elected to Congress .", "`` Welcome home , '' Ms. Granholm said , `` to the people that you reflected so well when you were in Washington . ''", "THE 38TH PRESIDENT ."], "summary": ["Former Pres Gerald R Ford is eulogized as modest man bringing virtues of decency , integrity and humility to mend broken government after pain of war and scandal .", "Washington National Cathedral 's grand setting and pomp of state funeral provide counterpoint for unassuming former president .", "Capacity crowd of 3,700 attends .", "Photo ."], "publication": "nyt50", "label": [0, 3], "tag": ["U.S.", "Washington"]} -{"id": "1816077", "text": ["A parolee who has served more than 30 years in prison for two separate homicides and other violent crimes in New York City was arrested yesterday by the Suffolk County police and charged in the shooting deaths of a 52-year-old woman and her 30-year-old son over the weekend .", "The man , Fernando DeCampoamor , 59 , of Mount Sinai , N.Y. , was scheduled to be arraigned today on two counts of second-degree murder in the deaths of the woman , Amelia Garcia , and her son , Ferneliz Cruz , the authorities said .", "They were found shot once in the head in the basement apartment in Coram where they lived with Mr. Cruz 's wife and their two small children .", "According to the police , the suspect told investigators he had lent money to Ms. Garcia , whom he described as his companion of six months , and became enraged Saturday evening over what he said was a delay in repayment .", "Mr. Cruz and his family had spent that early evening at a nearby self-service laundry .", "He had returned home first with a few loads of clean clothes .", "His wife , Yessenia Hernandez , and their children , an 8-year-old and a 2-month-old , returned to the apartment 20 minutes later to find Ms. Garcia dead and Mr. Cruz severely wounded .", "He died on Sunday night , about 24 hours later , at Stony Brook University Medical Center .", "It was unclear whether Mr. Cruz had returned to the apartment , on Marie Lane , before his mother was shot or after , the police said .", "Detective Lt . Jack Fitzpatrick of the Suffolk police said that according to state records , Mr. DeCampoamor was convicted of a murder in Brooklyn in 1961 and paroled in 1967 .", "In 1968 , he was convicted in connection with a nonfatal shooting and sentenced to 15 years in prison .", "He was paroled in 1975 .", "In 1985 , he was sentenced to 81/3 to 25 years in prison for manslaughter in connection with a killing in Queens .", "He was paroled in 2003 and had been working recently as a tow truck driver .", "Lieutenant Fitzpatrick said he did not know whether Ms. Garcia had any knowledge of Mr. DeCampoamor 's criminal background .", "Neighbors and friends said Ms. Garcia and Mr. Cruz immigrated from the Dominican Republic about 12 years ago .", "Ms. 
Garcia helped care for her son 's children while he and his wife worked .", "Neighbors said he worked full time for a bakery and bread distributing company , and might have held other part-time jobs , including one as a waiter .", "His wife worked part time at a local fast-food restaurant .", "A neighbor , Darlene Kennedy , 32 , described the family as close-knit and unfailingly polite .", "She said the 8-year-old , Yerneli , who played with her children , was cheerful and very bright and seemed to love learning new games .", "From the open door of the family 's basement apartment , which was covered in black dust left by police fingerprint technicians on Monday , a chess board and chess pieces could be seen on the kitchen table .", "Blood was pooled on the linoleum ."], "summary": ["Fernando DeCampoamor is charged with murdering Amelia Garcia and her 30-year-old son Ferneliz Cruz in Coram , NY . Police say he lent money to companion Garcia and became enraged over delay in repayment .", "Was on parole for 1985 murder in New York City .", "Previous record noted ."], "publication": "nyt50", "label": [3, 1], "tag": ["New York and Region"]} -{"id": "1816079", "text": ["Indonesian officials reversed themselves on Tuesday and admitted that they did not know what had happened to a passenger jet that disappeared from radar screens in bad weather on Monday shortly after issuing a distress signal .", "On Monday , Indonesian Air Force and Transport Ministry officials told local and foreign news services that wreckage from a 17-year-old Boeing 737-400 operated by Adam Air had been found strewn on jungle-covered mountainside on Sulawesi island .", "They also said 12 of the 102 people on board had survived .", "But later they said reports that the wreckage and survivors had been found were wrong .", "Officials said they had been misled by incorrect information from villagers and local officials in the remote area where the reports had originated .", "`` We apologize for the news that we released earlier , '' said Eddy Suyanto , an air force official , The Associated Press reported .", "`` It was not true . ''", "Indonesian officials said they planned to resume the search for the plane on Wednesday .", "The presumed crash was another loss for the Indonesian aviation industry , which boomed after deregulation in 1999 led to the founding of several low-cost airlines but which has had a long history of accidents .", "It was a particularly bitter blow to Adam Adhitya Suherman , 26 , the president of Adam SkyConnection Airlines .", "In an Indonesian industry known for bankruptcies and safety problems , Mr. Suherman and his family of Chinese traders had aimed to create a reliable and relatively inexpensive airline for a rapidly growing pool of Indonesian travelers .", "And they seemed to be succeeding .", "From a shoestring domestic operation , started in 2003 with three leased Boeing 737s , the airline grew by 2006 into a $ 300 million-a-year business , flying 19 planes to 25 domestic and international destinations and harboring big ambitions for growth .", "The family 's goal for 2006 was to carry six million to eight million passengers .", "It was courted by big investors , including Qantas and Tiger Airways .", "While emphasizing safety , the airline has had its share of troubles .", "On Feb . 
11 last year , an Adam Air flight to Makassar from Jakarta lost its way , making an unscheduled landing hundreds of miles off course in eastern Indonesia .", "The pilots blamed a malfunction in the navigation equipment but Mr. Suherman said it had been working properly .", "On May 31 , 2005 , part of the landing gear of a Boeing 737 operated by Adam Air collapsed on landing at the Jakarta airport .", "More Rescued After Ferry Sinking REMBANG , Indonesia , Jan . 2 -LRB- AP -RRB- -- Fishing boats rescued dozens of people from the sea on Tuesday , four days after a ferry sank in a storm off Indonesia , but 400 people remained missing .", "The ferry Senopati Nusantara foundered after being pounded by heavy waves for more than 10 hours as it neared the end of a two-day journey to Java , Indonesia 's main island , from the Indonesian section of Borneo island .", "Officials said the bad weather caused the sinking .", "About 200 survivors have been found , and officials say the search will continue until Sunday .", "Thirteen bodies have been recovered and scores more have been seen floating at sea .", "Susilo , 35 , was picked up by fishermen and taken to a hospital with chest pains and respiratory problems after drifting four days in a life raft .", "`` Six among us died , one by one , '' said Mr. Susilo , who like many Indonesians uses one name .", "Some who died drank seawater , he said ."], "summary": ["Indonesian officials reverse themselves and say they do not know what happened to passenger jet that disappeared from radar screen on Jan 1 after issuing distress signal .", "They say initial reports that wreckage was found on Sulawesi island along with 12 survivors was wrong .", "They say search will resume on Jan 3 ."], "publication": "nyt50", "label": [0, 3], "tag": ["World"]} -{"id": "1816080", "text": ["The Israeli chief of staff , Lt . Gen . Dan Halutz , conceded Tuesday that the military had made serious errors during last summer 's war against Hezbollah in Lebanon but said he would not resign his post .", "General Halutz said Israel had badly damaged Hezbollah in southern Lebanon and killed `` hundreds of terrorists . ''", "But he said Israel was `` not successful in reducing the short-range rocket fire on Israel 's north until the cease-fire , `` which came after 34 days of fighting .", "Critics of General Halutz and of the Israeli government of Prime Minister Ehud Olmert have said the military relied too heavily on air power and delayed too long sending in ground troops in the numbers needed to push back the Hezbollah fighters and supporters who were firing Katyusha rockets into Israel .", "Critics have also said that the military should be led by a ground forces commander -- General Halutz spent his career in the air force -- and that reserves were not called up in time , were badly trained and equipped , and often faced contradictory orders .", "`` We attacked the Katyushas , but unsuccessfully , '' General Halutz said .", "He spoke Tuesday at a Tel Aviv news conference summing up the army 's own investigation of its behavior during the war .", "He said he would stay on `` to correct what can be corrected , '' and said to resign now would be `` running away . ''", "He said Mr. Olmert and Defense Minister Amir Peretz had not asked him to go .", "`` I have not heard my superiors calling on me to resign , '' he said .", "`` If they do , I will respond . 
''", "He suggested that discipline had broken down to some degree .", "`` There were cases in which officers did not carry out their assignments , and cases in which officers objected on moral grounds to their orders , '' he said , an apparent reference to resistance against attacking southern Lebanese towns and villages .", "He said that those instances of refusal `` ran counter to the army 's basic values `` and that a senior officer was suspended as a result .", "During the war , as criticism mounted , General Halutz effectively demoted the commander of the northern front , Maj . Gen . Udi Adam , putting the deputy chief of staff , Maj . Gen . Moshe Kaplinski , alongside him .", "General Adam later quit the army .", "Previously , General Halutz has said the army fired some cluster munitions , with the `` bomblets '' placed in artillery shells , into southern Lebanon in contradiction of his orders that they be aimed only at specific targets .", "The United States is investigating whether the Israelis used cluster munitions made and paid for in America in ways that contravene American regulations for the weapons ' use .", "General Halutz implicitly criticized Mr. Olmert for setting as a goal of the war the release of two Israeli soldiers captured by Hezbollah in a cross-border raid on July 12 , an act that set off the fighting .", "The Winograd committee , which is led by a retired judge , Eliyahu Winograd , and was appointed by the government , is still investigating the conflict and its outcome .", "General Halutz said that if the committee called for his resignation , `` of course '' he would comply .", "Defense Minister Amir Peretz has made the same pledge .", "The war ended with a cease-fire on Aug . 14 , after a United Nations Security Council resolution mandated an enlarged and strengthened international peacekeeping force in southern Lebanon and supervision of Lebanon 's seacoast and border with Syria to prevent the rearming of Hezbollah .", "The fighting left more than 1,000 people dead on both sides .", "Israel says more than 500 of the dead were Hezbollah fighters , but Hezbollah disputes that .", "Israel counted 159 fatalities , including 39 civilians who were killed by the more than 4,000 rockets Hezbollah fired into Israel .", "In Gaza on Tuesday , Palestinian security forces were searching for a Peruvian photographer for Agence France-Presse , Jaime Razuri , 50 , a day after his abduction .", "The Palestinian Authority president , Mahmoud Abbas , said he was hopeful .", "`` We 're sure he will soon be released , `` Mr. Abbas told a delegation from the news agency and French and Peruvian diplomats .", "`` In past incidents of this kind , hostages have been freed after one or two days . ''", "In the latest in a string of foreigner abductions , several unmasked gunmen abducted Mr. 
Razuri in the center of Gaza City on Monday as he was returning from an assignment with an interpreter and a driver .", "Separately , warring Hamas and Fatah factions returned 14 fighters kidnapped on Monday after mediation from representatives of Islamic Jihad , Palestinian officials said .", "The violence resumed Monday when Hamas gunmen shot at a brother of a senior Fatah militant in the northern Gaza Strip , violating an earlier deal for a general truce ."], "summary": ["Israeli chief of staff , Lt Gen Dan Halutz , says that military made serious errors during 2006 war against Hezbollah in Lebanon but says he will not resign his post .", "Halutz suggests one problem was discipline and cites cases in which officers refused assignments and objected on moral grounds .", "Critics of Halutz say military relied too heavily on air power and delayed too long sending in sufficient ground troops .", "Photo ."], "publication": "nyt50", "label": [0, 3], "tag": ["World"]} -{"id": "1816081", "text": ["Prime Minister Meles Zenawi of Ethiopia said Tuesday that his country , one of the poorest in the world , could not afford to keep troops in Somalia much longer and that it was ill equipped to play the role of peacekeeper there .", "Two hours after he spoke , two Ethiopian soldiers were gunned down in an ambush in southern Somalia in one of the first strikes of an anticipated anti-Ethiopian guerrilla campaign .", "According to residents in Jilib , about 250 miles southwest of Mogadishu , Somalia 's capital , a fighter for the Islamist forces , who were routed last week by Ethiopian-led troops , had shot two Ethiopian soldiers while they were crossing a bridge .", "Witnesses said the fighter then dashed into town and was quickly surrounded by Ethiopian troops , who killed him .", "`` It was a suicide mission , '' said Mohammed Subiye , a farmer in Jilib .", "The Islamist forces , which in the span of one week went from ruling much of Somalia to fleeing into the bush , have vowed to fight a guerrilla insurgency against the Ethiopians , whom they consider infidel invaders .", "But Mr. Meles said he did not plan to have his troops to remain in Somalia for much longer , possibly only a few more weeks .", "The troops were dispatched to neutralize the rising regional threat posed by the Islamists , he said , and now international peacekeepers are needed to bring order to a country that has been synonymous with anarchy for 15 years .", "`` We do n't have the money to take this burden individually , `` Mr. 
Meles said during a speech to Ethiopia 's Parliament .", "Diplomats in the region are hurrying to cobble together an African peacekeeping solution , but despite murmurs of commitment from several countries , including Uganda , South Africa and Nigeria , a force has yet to materialize .", "Somalia is far from stable , with many heavy weapons still in the hands of warlords , and the country 's turmoil is likely to dissuade many nations from volunteering troops .", "On Tuesday , Ali Mohammed Gedi , Somalia 's transitional prime minister , reiterated his plea for the nation 's many gunmen to turn in their weapons .", "But few seemed to be listening .", "The collection points across Mogadishu remained empty , and many young men defiantly vowed to keep their guns .", "Meanwhile , the remnants of the once-fierce Islamist army continued to flee south from Kismayo , the port city 100 miles from the Kenyan border that had been a final stronghold until the Islamist military definitively collapsed there on Monday .", "Kenyan authorities said 10 fighters were apprehended on Monday as they tried to slip through the border disguised as refugees .", "Eight had Eritrean passports while two had Canadian passports , said Alfred Mutua , a spokesman for the Kenyan government .", "All of them were carrying briefcases packed with cash .", "`` They definitely did n't look like refugees , `` Mr. Mutua said .", "Mr. Mutua said that the suspects remained in Kenyan custody and that they would probably be returned to Somalia to face charges under the transitional government , though it has not yet set up a justice system .", "The Islamists tried to improve their military prospects by calling for a global jihad against Ethiopia , a country with a long Christian history .", "But in the end , American officials said , only a few hundred foreign fighters answered the call , the bulk of them from Eritrea , Ethiopia 's archenemy .", "Still , the Islamists were widely believed to have been sheltering several wanted terrorists , and American officials said they were hoping to use the swift collapse of the Islamist forces as an opportunity to capture men they have been chasing for years .", "Ships from the Fifth Fleet of the United States Navy , based in Bahrain , have increased patrols off Somalia 's coast to prevent any suspects from escaping .", "`` Yes , we have a presence out there , '' said Lt . Denise Garcia , a spokeswoman for the Fifth Fleet .", "So far , though , no suspects have been apprehended .", "Somalia continues to be a work in progress .", "The country 's transitional president , Abdullahi Yusuf Ahmed , has yet to set foot in the capital , and only a select few officials of the transitional government have returned to it .", "Many of them seem to be pulling in wildly different directions .", "On Tuesday , Hussein Mohammed Aideed , the interior minister and son of a notorious warlord , announced that he would like to erase the 1,000-mile long border between Somalia and Ethiopia .", "`` We should unite , just like the Europeans , '' Mr. Aideed said at a news conference .", "`` One money .", "One passport .", "One security . ``", "Many Somalis consider Ethiopia a historic enemy and were appalled by the suggestion .", "`` All I can say is that the interior minister is entitled to his opinion , '' said Abdirahman Dinari , the spokesman for the transitional government .", "`` But he does not speak on behalf of the government . 
'' ."], "summary": ["Prime Min Meles Zenawi of Ethiopia says that his country can not afford to keep troops in Somalia much longer and that it is ill equipped to keep peace there .", "Several countries in region have spoken of committing troops , but force has yet to materialize .", "Somalia 's instability may prevent many nations from volunteering troops .", "Islamist forces have vowed to fight guerrilla insurgency against Ethiopians .", "Photo ."], "publication": "nyt50", "label": [0, 5], "tag": ["World"]} -{"id": "1816083", "text": ["Ali Saleem may have devised the perfect , if improbable , cover for breaking taboos in conservative , Muslim Pakistan .", "In a country where publicly talking about sex is strictly off limits , Mr. Saleem has managed not only to bring up the subject on his prime-time television talk show -- but to do so without stirring a backlash from fundamentalist Islamic clerics .", "And he has done so as a woman .", "When Mr. Saleem takes to the airwaves , he is Begum Nawazish Ali , a coquettish widow who interviews Pakistan 's glitterati and some of its top politicians .", "A real woman could not possibly do what Mr. Saleem does .", "In the unlikely event a station would broadcast such a show , the hostess would be shunned .", "And taking on the guise of a married woman -- whose virtue is crucial to her whole family -- would be equally impossible .", "But apparently a cross-dressing man pretending to be a widow is another matter entirely .", "It is something of a mystery why a man who openly acknowledges he is bisexual is a sensation here .", "Traditional Islamic teaching rejects bisexuals and gays , and gay Pakistanis have few outlets for a social life .", "The gay party scenes in Lahore and Karachi are deep underground .", "Mr. Saleem has his own theory for his popularity : he thinks Pakistan has always been more open than outsiders believed .", "It is true that Pakistan is , in a sense , two countries .", "There is urban , and urbane , Pakistan , where Western mores are more accepted , although nudity would never be seen on television or scantily clad women on billboards .", "And then there is rural Pakistan , where Islam is generally practiced with more fervor .", "It is also true that the Pakistani president , Gen . Pervez Musharraf , is relatively tolerant about what the media can show and cover , including politics .", "Although General Musharraf came to power in a bloodless coup by the military in 1999 , he has been more open to political criticism in the press than some of his democratic predecessors .", "Mr. Saleem , 28 , is thrilled with his success for reasons that are both political -LRB- he is proud to be breaking ground in bringing up tough subjects -RRB- and profoundly personal .", "`` My biggest high is to see myself gorgeous in the mirror '' he said recently while reclining in a makeup-room chair .", "As a beautician outlined his eyes , adding glitter and eye shadow , he said , `` Maybe , yes , I am a diva . ''", "It is hard to judge how successful Mr. Saleem 's show is -- there is no form of Nielsen ratings here .", "And there are clearly people who find the show revolting .", "But by many measures , it is a success .", "Television critics have been generally supportive , and the show , which has been on a year and a half , has a prime-time slot despite its name , `` Late Night Show With Begum Nawazish Ali . ''", "Mr. 
Saleem said it was named for its racy content , usually shown late , but he said the network scheduled it earlier hoping for a hit that would bring in more advertising revenue .", "Urbanites , meanwhile , seem not to be able to get enough of the once-a-week show , which is rerun twice each week .", "They have showered praise on Mr. Saleem 's portrayal of a middle-aged widow who , in glamorous saris and glittery diamonds , invites to her drawing room politicians , movie stars and rights advocates from Pakistan and India .", "With fluttering eyelids and glossy lips , Begum Nawazish Ali -LRB- Begum means Lady or Mrs. in Urdu -RRB- flirts with male guests using suggestive banter and sexual innuendo .", "With female guests , she is something of a tease , challenging them about who looks better .", "Questions are pointed and piercing .", "Politics , democracy and saucy gossip are enmeshed in her conversation .", "Mr. Saleem sees the show 's acceptance and commercial success as a testimony to the tolerance and moderation of Pakistan , a country often seen by the outside world as teetering on the edges of militancy and extremism .", "Colorful and witty , Mr. Saleem is open about his own sexuality and sprinkles his conversation with gender-bending phrases .", "`` My life fluctuates between two extremes , '' he says .", "`` I always say this : I am a man and I am a woman .", "It is two gender extremes , and I am constantly trying to balance it . ``", "He is unabashed at the criticism that his show often borders on raunchiness .", "`` Sitting senators have sent requests to be on the show , '' he says .", "Mr. Saleem has also been willing to take on tough political subjects .", "He is openly critical of the army 's role in ruling Pakistan , for instance .", "His show is not the only one pushing the envelope on that and other touchy subjects .", "In another network television program , `` Aalim Online , '' religious scholars from Shiite and Sunni sects sat side by side and responded to viewers ' queries on different issues from their respective viewpoints .", "Television talk shows and news programs have also openly criticized the policies of previous governments on their support for the Taliban and on their policies in Kashmir , which both India and Pakistan claim .", "President Musharraf 's policies and the role of the powerful Inter-Services Intelligence , or ISI , have come under fire on talk shows and analysis programs , something unimaginable some years ago .", "That is not to say that anything goes .", "The restrictions on print media are generally tougher than for broadcast journalists , and some subjects are considered clearly off limits .", "Owais Aslam Ali , secretary general of Pakistan Press Foundation , an independent media research center in Karachi , said that `` on things of consequence , restrictions remain . ''", "He said that included reporting on the tribal areas bordering Afghanistan , where the Taliban and Al Qaeda are taking refuge .", "Mr. Ali said there also were unstated restrictions on reporting about Baluchistan , the southwestern province where a low-level civil insurgency has long simmered .", "`` This is a big black hole as far as media is concerned , '' he said .", "`` Parameters have been set .", "You cross those parameters at your own peril . ``", "Mr. 
Saleem , who in the guise of Begum Nawazish Ali often gets away with questions to politicians that print journalists might be wary of , said his show would not have been a possibility earlier .", "`` I owe Begum Nawazish Ali 's existence , in a certain way , to General Musharraf , `` he said .", "But he appears to know his own limits .", "He shrugged when asked if he should not invite the general himself on the show , appearing to indicate that he knew that was one taboo he could not break .", "But it did not stop him from flirting with the idea , especially after General Musharraf made himself so open to the media during his book tour of the United States last year .", "`` I would love it if Musharraf would come on the show , '' he said .", "`` If he can go on Jon Stewart 's show , then why not .", "`` KARACHI JOURNAL ."], "summary": ["Ali Saleem is cross-dressing man who portrays flirty widow on Pakastani television .", "Television critics have been generally supportive and show , which features interviews with country 's celebrities and politicians , has prime-time slot .", "Saleem says show 's acceptance is symbol of Pakistan 's tolerance and moderation .", "Some media researchers say that restrictions on print media are tougher and that some subjects are still off limits .", "Photos ."], "publication": "nyt50", "label": [23, 45, 3, 44], "tag": ["World"]} -{"id": "1816085", "text": ["It would be an ambitious project even in a Middle Eastern country not embroiled in war : build an American-style university where classes are taught in English , teachers come from around the world and graduates compete for lucrative jobs in fields like business and computer science .", "Yet some of the leading lights of Iraq 's political and intellectual classes are doing exactly that , even as the bloodshed widens .", "Their planned American University of Iraq is modeled after the famous private universities in Cairo and Beirut .", "The project 's managers have a board of trustees .", "A business plan recently completed by McKinsey & Company , an international consulting firm .", "Three candidates for university president .", "And $ 25 million , much of it in pledges from the American government and Kurdish sources .", "To fulfill their dream , they need much more : $ 200 million to $ 250 million over 15 years , said Azzam Alwash , the board 's executive secretary .", "But if it does become a reality , the university will not be built in Baghdad , which for centuries was a beacon of learning in the Arab world .", "Instead , it is slated for what is the most non-Iraqi part of Iraq .", "The site is on a windswept hilltop along the outskirts of Sulaimaniya , the eastern capital of Iraqi Kurdistan , 150 miles north of Baghdad and far from the car bombs and death squads that are tearing apart the Arab regions of Iraq .", "Because of its relative safety so far , Kurdistan can more easily attract aid and reconstruction money .", "With doctors , engineers , businesspeople , academics and students among the hundreds of thousands fleeing to neighboring countries or the West , the university raises hopes of stanching the country 's enormous brain drain and pushing Iraq forward .", "`` You really need to develop the political elite of the future , the educated elite of the future , '' said Barham Salih , the project 's Kurdish founder , a deputy prime minister who received a doctorate in statistics and computer modeling from Liverpool University in Britain , and whose daughter attends Princeton .", "`` The focus is 
also to stimulate reform in the Iraqi education system . ''", "However , some Arab education officials in Baghdad , the capital , have argued that the university should be built there , not in a part of Iraq where secessionist ambitions are well known .", "Baghdad first achieved fame for its schools and scholars during the Abbasid caliphate , which reached its height in the eighth century .", "Even in the 20th century , before the Iran-Iraq war of the 1980s and international economic sanctions of the 1990s , students from the region flocked to Baghdad .", "But because of security threats , many universities in Baghdad have been closed since October .", "Up to 150 employees from the Ministry of Higher Education were abducted by men in commando uniforms in mid-November .", "Jihadist groups have threatened to kill students on campuses .", "So intellectuals like Kanan Makiya , the prominent former exile and writer who strongly advocated for the American invasion , say they plan to move their research projects to the American University .", "Mr. Makiya founded the Iraq Memory Foundation , an organization based in the fortified Green Zone in Baghdad that is documenting Saddam Hussein 's atrocities .", "`` The problem is nobody can thrive in Baghdad anymore , '' said Mr. Makiya , who teaches Middle Eastern studies at Brandeis University and sits on the new university 's board of trustees .", "`` The north is much more stable , growing , prosperous . ''", "`` There is a sadness that we 're being driven out of Baghdad , `` he added .", "The university 's planners plan to make Mr. Makiya 's documentary project the core of the humanities department .", "Mr. Alwash , an environmental scientist , has said he will use the university as a base for his research project , which is about rejuvenating the southern marshlands .", "Other prominent intellectual and political figures , many of whom supported the American invasion , are on the board .", "They include Fouad Ajami , a professor of Middle Eastern studies at Johns Hopkins , and John Agresto , an education adviser in the Coalition Provisional Authority who , as he ended his tenure there in 2004 , told a reporter he was `` a neoconservative who 's been mugged by reality . ``", "The planners have sketched a rough schedule .", "Construction would start in the spring , and the first 15 to 30 students could begin a six-month intensive English course , to be taught in rented space here in Sulaimaniya , before they start a two-year master 's program in business administration .", "The first class to earn bachelor 's degrees would start in fall 2008 .", "The program would take five years , with the first devoted to the study of English , Mr. Alwash said .", "Although the university has regional aspirations like its counterparts in Cairo and Beirut , the first undergraduate class would be mostly Iraqis , Mr. Alwash said , and a majority probably Kurds .", "In the university 's first five years , degree programs would focus on subjects that the board judges to be crucial to Iraq 's development : business , petroleum engineering and computer science , for example .", "`` This has to have immediate practical consequences for the economy of Iraq and the politics of Iraq , '' Mr. Salih , the founder , said .", "After five years , the university may add humanities degree programs .", "`` We want them to study the ideas of Locke , the ideas and writings of Paine and Madison , '' Mr. 
Alwash , the executive secretary , said .", "`` We want them to understand what democracy is -- not only majority rule , but also the rights of minorities .", "They should be well rounded . ``", "Projected undergraduate enrollment is 1,000 students by 2011 and 5,000 by 2021 .", "The numbers are small compared with enrollment at Baghdad University , the country 's flagship public university , which has 70,000 students .", "Sulaimaniya University here has about 12,000 students .", "In total , about 475,000 Iraqis are pursuing college-level degrees across the country , in 21 public universities or colleges , 18 private ones and about 40 technical institutes , according to the American Embassy .", "Tuition at American University would be $ 8,500 to $ 10,000 a year , Mr. Alwash said .", "That places the university beyond the reach of the average middle-class Iraqi family .", "But Mr. Salih said the school planned to give loans and scholarships .", "Zalmay Khalilzad , the American ambassador and an alumnus of the university in Beirut , has promised that American agencies will give the school $ 10.5 million , possibly the largest donation by the United States to any single education project in Iraq , if American officials approve the business plan .", "Mr. Khalilzad , a native Afghan , helped found the American University of Kabul after the American military ousted the Taliban from Afghanistan in 2001 .", "Some Kurds fear that the Patriotic Union of Kurdistan , the governing party of eastern Kurdistan led by Mr. Talabani and Mr. Salih , could end up diverting money from the university for its own purposes .", "Among many Kurds , the main Kurdish parties have a reputation for corruption and authoritarian rule .", "`` I hope this will not just be party propaganda , because we need a real academic center for this society , '' said Asos Hardi , the editor in chief of a weekly newspaper here .", "`` Having a Western-style university in Iraq would help strengthen education here and across the country . ''", "THE STRUGGLE FOR IRAQ ."], "summary": ["Group of Iraqis are planning American University of Iraq , American-style university with international teachers and classes taught in English .", "Prominent intellectual and political figures support university and its chosen site in Sulaimaniya , area that is relatively safe so far .", "Some education officials in Baghdad argue that university should be built there instead .", "Photo ."], "publication": "nyt50", "label": [15, 28, 2], "tag": ["World", "Education"]} -{"id": "1816095", "text": ["In 1997 , Jonathan T . Taplin , a veteran film and television producer , stood up at a cable industry convention and asserted that in the future all movies would be distributed over the Internet .", "He recalls being laughed out of the room .", "Mr. Taplin may laugh last .", "Online distribution of movies has arrived , at places like Apple Computer 's iTunes Store .", "And even though Mr. Taplin 's own video-on-demand company , Intertainer , shut down operations five years ago , it says it deserves some credit -- and cash .", "Last week , Intertainer filed a broad lawsuit asserting that Apple , Google and Napster are infringing on a 2005 patent that covers the commercial distribution of audio and video over the Internet .", "Founded by Mr. 
Taplin and two other Hollywood entertainment executives in 1996 , Intertainer developed technology to distribute movies on demand through cable and phone lines for viewing on televisions and personal computers .", "It gained investors including Intel , Microsoft , Sony , NBC and Comcast .", "`` Intertainer was the leader of the idea of entertainment on demand over Internet platforms before Google was even thought up , '' said Mr. Taplin , now an adjunct professor at the Annenberg School for Communication at the University of Southern California .", "He and a secretary constitute the entire remaining staff of Intertainer .", "Theodore Stevenson , a partner at McKool Smith , the Dallas firm representing Intertainer , said the company filed suit against Apple , Google and Napster because they were perceived as leaders in the market for digital downloads .", "He declined to specify the damages that Intertainer was seeking .", "Apple , Google and Napster all declined to comment on the lawsuit .", "Intertainer 's tale is somewhat different than other intellectual property suits brought by technology licensing firms .", "By 2002 the company seemed to have a growing business , with 125,000 Internet subscribers for its servers and 35,000 TV subscribers through the Comcast cable system .", "But in the fall of 2002 , the company shut down its service and filed a lawsuit against some of the backers of Movielink , a competitor backed by five Hollywood studios , including Sony , Universal and Warner Brothers .", "At the time Mr. Taplin said the studios were using Movielink as a price-fixing vehicle to kill Intertainer .", "An antitrust investigation by the Justice Department into Movielink was dropped in 2004 .", "The studios settled the lawsuit last March for an undisclosed sum , and Mr. Taplin said in a phone interview Tuesday that Intertainer would henceforth pursue a patent licensing business .", "The company holds nine patents , including United States Patent No . 6,925,469 , which was issued in 2005 and is intended to cover the management and distribution of digital media from various suppliers .", "Despite initial backing from Microsoft and Intel , Mr. Taplin said the two companies were not involved in the decision to bring the Apple , Google and Napster lawsuit .", "He said that decision was made by Intertainer 's board and that none of his original corporate backers have board seats .", "Several of the company 's original investors have taken patent licenses , he said , but he would not name the companies .", "Despite the company 's decision to file the case in a federal district court in Texas that has traditionally looked favorably on plaintiffs in patent lawsuits , several digital media experts said that Intertainer might have a difficult time enforcing its patent because of its relatively recent filing date of 2001 .", "By that time , for example , Real Networks , the Seattle-based pioneer in streaming digital media , had begun an Internet subscription service for digital content .", "Legal experts said it was difficult to handicap Intertainer 's claims .", "`` There are so many of these lawsuits nowadays , '' said Eric Goldman , director the High-Tech Law Institute at Santa Clara University School of Law .", "`` It is hard to figure out which ones are a serious threat and which ones are not . ''", "Mr. 
Goldman also said it was unclear what specific technology or service was covered by the Intertainer patent .", "`` I have the same problem with this patent as so many of the patents of the dot-com boom days : I do n't know what it means , `` Mr. Goldman said .", "Mr. Stevenson , the Intertainer lawyer , said the patent covers a system that can be used by content owners to upload their content and used by consumers to download it .", "`` It is pretty basic to the architecture of digital content delivery nowadays , '' he said .", "Mr. Taplin , who once worked as a road manager for Bob Dylan and produced several movies , including `` Mean Streets , '' `` The Last Waltz '' and `` To Die For , '' has a history of activism on technology issues .", "In 2002 , he encouraged those attending a technology conference to urge the Federal Communications Commission to ensure that broadband providers would not be able to block specific Web sites -- an early version of a hot-button issue that has become known as network neutrality .", "Earlier that year , he testified before the Senate against legislation that would have forced high-tech manufacturers to incorporate technology to prevent piracy in their software and hardware .", "Correction : January 4 , 2007 , Thursday A headline in Business Day yesterday about a lawsuit brought by Intertainer , a digital media company , against Apple Computer , Google and Napster misstated the nature of the litigation .", "It involves a patent , not a copyright ."], "summary": ["Intertainer files broad lawsuit against Apple Computer , Google and Napster for infringing on 2005 patent covering commercial distribution of audio and video over Internet .", "Claims to be leader of concept of entertainment on demand over Web platforms .", "Founder Jonathan T Taplin has history of activism on variety of technology issues .", "Photo ."], "publication": "nyt50", "label": [5], "tag": ["Technology", "Business"]} -{"id": "1816100", "text": ["A government effort to cool Ireland 's roaring economy by encouraging people to save is threatening to backfire .", "In 2001 , Ireland encouraged citizens to set money aside each month in special accounts by offering to match a portion of those savings .", "The catch was that savers had to lock up their money for five years .", "About 1.1 million Irish savers -- or 40 percent of the adult population -- amassed 16 billion euros -LRB- $ 21 billion -RRB- in savings , a sum that is about 10 percent of the country 's annual gross domestic product .", "But now that the five years are over , the wealth is being unleashed , and the plan may do exactly what it was meant to prevent : add fuel to the economy of one of the most competitive and fastest-growing countries in Europe .", "The expected outcome underscores the trickiness in taming surging economies -- a task that the authorities in China are grappling with .", "It could also serve as a case study for countries , like the United States , which are looking for ways to motivate spendthrift consumers to save more to rebalance their economies .", "Edel Byrne , 30 , an operations director for a large chain of restaurants in Dublin , is looking forward to spending the 20,000 euros she will have accumulated when the five-year lockup ends in the spring .", "She has her eyes on a Marni handbag -- the line sells on the Internet for 280 to 1,060 euros -- and airline tickets to Rio de Janeiro , Milan and Paris .", "She plans to spend some of what is left on a house that she recently bought in the Dublin suburbs 
.", "In 2002 , Ms. Byrne would never have dreamed of setting aside as much as 254 euros each month for five years if the government had not made it worth her while : It gave her one euro for every four she paid into her so-called Special Savings Incentive Account , or S.S.I.A.", "For Ms. Byrne , the government incentive was worth 63.50 euros each month .", "Her own monthly payments , the government bonus and the interest she earned were not accessible as the savings accumulated .", "Some of the accounts have already matured since the program opened in May 2001 .", "But most savers , who on average have accumulated 15,000 euros , according to industry estimates , delayed opening the accounts until the months before enrollment ended in April 2002 .", "The freed savings will come on top of an estimated 5 billion euros in increased public spending and tax cuts that the Irish government plans to pump into the economy in 2007 , an election year .", "If the Irish choose to spend their savings over a few months rather than tap the personal wealth pool over two or three years , inflation , especially for services like hotels and restaurants , could accelerate at a time when prices are already rising rapidly .", "`` The 16 billion euros is a phenomenal amount of money , '' said Jim Power , chief economist at Friends First , a pension and investment firm in Dublin .", "`` There is of course an inflationary risk .", "There is already an inflationary risk notwithstanding the S.S.I.A. effect . ``", "The savings plan was devised by Charlie McCreevy , the Irish finance minister at the time and now the European Union 's commissioner for the internal market , who wanted to tame a potential threat of inflation to the Irish economy .", "In 2000 , consumer prices in Ireland rose 5.6 percent .", "That figure had moderated to 2.5 percent for 2005 , but it started creeping up again in 2006 , to a 4.4 percent pace in November , and is expected to stay relatively high at about 3.5 percent in 2007 , according to some economists who fear the bounty from the savings accounts could inject too much money into the economy over a short period .", "But nobody really knows what will happen , which is why there have been widespread surveys of the intentions of savers like Ms. Byrne .", "Banks eager to hold on to the large sums are offering new but less generous saving products .", "Savers whose special accounts have already matured appear to be taking their time in deciding what to do with the money .", "Similar savings plans by other European governments have not been as popular , probably because the cash incentives were much less generous than those offered by the Irish plan .", "In Britain , for example , tax-efficient savings plans like Individual Savings Accounts , or I.S.A. 
` s , operate through the tax system rather than with cash incentives .", "Consumer spending in Ireland is less than 50 percent of gross domestic product , low by European standards , according to Dan McLaughlin , chief economist at Bank of Ireland .", "According to the Irish Central Statistics Office , the country had a household savings rate of 11.9 percent of income in 2003 , compared with 2.1 percent in the United States and 11.1 percent in France .", "On Grafton Street , one of central Dublin 's main shopping streets , evidence of the usual seasonal surge in spending has not been hard to detect .", "But whether spending will be further fueled as Irish consumers anticipate tapping their savings is difficult to judge , said Stephen Sealey , a buying director for the department store Brown Thomas .", "It sells Herm\u00e8s handbags for as much as 10,000 euros -LRB- $ 13,200 -RRB- and Chanel watches for 7,500 euros -LRB- $ 9,940 -RRB- .", "`` Over the last four years , we have seen a steady growth in luxury goods , '' he said , adding that buoyant consumer spending `` is driving double-digit year-on-year growth across the business . ''", "Mr. McLaughlin , at Bank of Ireland , said that surveys by his bank indicated that a slice of the special accounts would be spent on foreign holidays and home renovations .", "He forecast that spending from the accounts would help support the economy over the next three or four years at a time when rising interest rates are adding to the costs of housing .", "But if Ms. Byrne is any indication , not all the money will be spent .", "`` The Irish are good at treating themselves these days , but I would feel guilty to spend over half the amount on luxuries , '' she said .", "`` I do not think you should just throw away five years of hard savings . 
'' ."], "summary": ["Five years after Ireland encouraged citizens to save money in special accounts designed to slow roaring economy , government officials fear spending spree will do exactly what plan was meant to prevent .", "Underscores problems in taming surging economies .", "Inflation could accelerate at rapid pace if Irish choose to spend 16 billion euros -LRB- $ 21 billion -RRB- in savings that are just now becoming available .", "Analysts are uncertain how savings will be spent , if at all ."], "publication": "nyt50", "label": [3, 4, 1, 36], "tag": ["Business"]} -{"id": "1816123", "text": ["The steeply rising cost of preventing and suppressing wildfires , which burned more of the American landscape in 2006 than in any other year since at least 1960 , is creating a rift between Washington and state and local governments over how the burden ought to be shouldered .", "A study issued in November by the inspector general 's office of the United States Department of Agriculture , the parent agency of the Forest Service , said the nature of the wildfire threat was changing as private homes and communities pushed ever closer to the boundaries of once-remote public lands .", "Those communities and landowners , rather than federal taxpayers , should have to pay for more of their own fire protection , the report concluded .", "States and local governments are gearing up to fight back in Congress , arguing that decades of federal mismanagement of national forests and open spaces , not development , created the threat and that little communities with few resources are neither responsible for it nor equipped to make a difference .", "The pattern of wildfire distribution during the recently ended fire season , which charred more than 9.8 million acres , supports either side .", "According to federal statistics , more state , county and private lands burned than in any other year since 1997 -- about half the total 2006 losses -- primarily because of monstrous blazes in Oklahoma , in Texas and across the Upper Plains , regions where most property is privately owned .", "That finding , though also driven by broader factors like drought and heat that have little to do with residential development in fire-prone areas , supports the federal contention that the government has had to shift an increasingly large share of its resources from the task of protecting its own forests to firefighting elsewhere .", "In some places , though , the issue is more complex .", "In Stillwater County , Mont . , north of Yellowstone National Park , for example , the small , long-established towns of Nye and Fishtail are bordered on two sides by national forest .", "In early July , the first of two huge fires erupted in the forest and roared into those communities , where 100,000 acres of mostly private land and 32 homes were burned .", "The blaze was the worst in the county 's history , local officials say .", "`` The forest is very dry and primed for fires started by lightning , and when that occurs in a forest not managed as well as it could have been , it soon gets out of control and meets the community , '' said Ken Mesch , the Stillwater County disaster and emergency services coordinator .", "`` If the federal government started pulling back money for fire suppression , they would be hanging us out to dry . 
''", "Federal land managers say protection of private land at the boundaries of public space -- called the wildland-urban interface -- is the fastest-growing component of the nation 's firefighting budget .", "In 2003 and 2004 , the inspector general 's report estimated , the Forest Service spent at least half a billion dollars , and perhaps as much as a billion , protecting private property in such areas .", "The trend is similar at the Interior Department , which oversees hundreds of millions of acres of public lands in the West through the Bureau of Land Management , the Fish and Wildlife Service , and the National Park Service .", "Fire prevention activities -- controlled fires or thinning of burnable vegetation -- have shifted there toward the interface lands , said Lynn Scarlett , deputy interior secretary .", "Ms. Scarlett said that almost half the 1.1 million acres treated by the Interior Department for fire-risk reduction in 2006 were in interface zones , about double the proportion as recently as 2002 .", "She said her department , too , was considering that it demand increased cost-sharing by state and local governments , though she emphasized that any outcome would have to be collaborative .", "`` One of the last things you want in an emergency is people squabbling over who 's going to pay , `` she said .", "The report from the Agriculture Department 's inspector general said a major problem was simply the weight of accumulated assumptions : fire response in the West has long meant federal authorities ' riding to the rescue , with no questions asked and no cost too great to bear .", "`` Public expectations and uncertainties about protection responsibilities , '' the report said , `` compel the Forest Service to suppress fires aggressively and at great expense when private property is at risk , even when fires pose little threat to National Forest system land . ''", "About 8.5 million homes were built at the wildland-urban interface within the interior West in the 1990s alone , according to the Forest Service .", "But state and local officials say they already pay their share to protect those communities and homeowners , partly because the residential growth has coincided with years of federal budget cuts .", "Arizona , for instance , now has 12 to 14 air tanker firefighting aircraft under contract , up from 2 to 4 in 2005 , as a result of reduced federal spending on tankers , said Lori Faeth , a policy adviser to Gov . Janet Napolitano .", "`` Our forests are in the condition they are because of poor federal management , '' Ms. Faeth said .", "`` They 've put us in this position , and they have the responsibility to pay for it . ``", "The Forest Service 's director of fire and aviation management , Tom Harbour , said the agency would follow up on the inspector general 's recommendations .", "`` We 're not going to walk away , `` Mr. Harbour said , '' but we will engage in a vigorous debate with our partners about the right way to split the pie . 
``", "Still , money is only part of the issue , he said .", "Communities and developers in the West should be thinking in new ways as well , he said , including the use of fire-wise construction techniques and preparedness plans that involve residents in their own defense even before fires start .", "Many land experts say hardly anyone is addressing the most tangled and emotional question raised by the debate : how much or how little voice federal land managers should have in land-use decisions .", "`` Thinking through in advance the fire implications of a new subdivision next to a national forest boundary -- that does n't happen , `` said James L . Caswell , administrator of the Idaho Office of Species Conservation .", "Given the property rights issue and the tension between local governments and Washington that has shaped the West 's culture for the last century , a system of planning that allows federal officials veto power would seem unlikely .", "Mr. Caswell said better planning must be part of the solution .", "`` A thousand houses next to a boundary could overwhelm all the other cost-control issues , '' he said .", "`` But , '' he added , `` that 's a very emotional topic , so it 's really hard to deal with . `` ."], "summary": ["Steeply rising cost of preventing and suppressing wildfires is creating rift between Washington and state and local governments over who should pay .", "Agriculture Dept report says private homeowners and communities , rather than federal taxpayers , should pay more of their own fire protection .", "State and local governments are fighting back .", "Recent fire season burned 9.8 million acres .", "Photos ."], "publication": "nyt50", "label": [0, 2], "tag": ["U.S."]} -{"id": "1816124", "text": ["Massachusetts , the only state where same-sex marriage is legal , took a first step toward possibly banning it Tuesday when legislators voted to advance a constitutional amendment defining marriage as the union between a man and a woman .", "The amendment now requires the approval of at least 50 legislators in another vote in the 2007-8 session .", "Then it would be placed on the November 2008 ballot as a referendum question .", "If it passed , the amendment would not invalidate the more than 8,000 same-sex marriages that have taken place since they became legal in May 2004 .", "But it would prevent future marriages of gay men and lesbians .", "`` This is democracy in action , '' said Kris Mineau , president of the Massachusetts Family Institute , which sponsored the amendment .", "`` It 's giving people the opportunity to vote on the most essential institution in human existence -- marriage . 
``", "Arline Isaacson , co-chairwoman of the Massachusetts Gay and Lesbian Political Caucus , choked back tears .", "`` The price that our children and families will pay is so severe that we simply have to recommit ourselves to fight this some more , '' she said .", "The swiftness of the vote on Tuesday surprised people on both sides of the issue , taking place without any debate , just minutes after the constitutional convention had been gaveled into session .", "Proponents of the amendment needed just 50 of the legislature 's 200 lawmakers to support it .", "The final vote was 61 in favor of the amendment and 132 opposed .", "Later in the day , supporters of same-sex marriage persuaded lawmakers to reconsider the amendment , but the second vote , 62 to 134 , only affirmed the results of the first .", "National groups on both sides of the issue said they would commit resources to help advocates wage battle here .", "This past Election Day , the tide had seemed to be turning slightly in favor of supporters of same-sex marriage , with the defeat of an opposition amendment in Arizona and passage of seven others by slimmer margins than similar amendments in 2004 .", "Just two months ago , at an earlier constitutional convention , the legislature appeared to have essentially killed the proposal to allow a vote .", "During that session , legislators recessed without voting on the amendment , tabling it until Jan . 2 , the last day of the legislative session .", "Both sides said they expected that lawmakers would then vote to end the session without taking up the measure .", "But last week , the state 's Supreme Judicial Court , which three years ago ruled that same-sex marriage should be legal , threw a wrench into things .", "The court chided lawmakers for their maneuvers to avoid a vote on the amendment , saying the legislature had demonstrated `` indifference to , or defiance of , its constitutional duties . ''", "The court said it was not empowered to order the legislature to vote on the amendment , which petitioners , including Gov . Mitt Romney , had asked it to do .", "But the court 's criticism appeared to be enough to make some lawmakers , including some supporters of same-sex marriage , decide to allow a vote .", "`` Certainly , the court ruling changed the atmosphere this week , '' said Mr. Mineau , whose organization had gathered a record 170,000 petition signatures to get the amendment before the legislature .", "Ms. Isaacson said , `` The S.J.C. decision really tipped the scales against us . ''", "Tuesday 's vote was considered a victory for Governor Romney , a Republican who has used his opposition to same-sex marriage as a conservative rallying point as he has laid the groundwork for an expected run for the presidency in 2008 .", "In a statement Tuesday , Mr. Romney called the marriage vote `` a huge victory for the people of Massachusetts . ''", "By contrast , the vote was something of a rebuke to the incoming governor , Deval L . Patrick , a supporter of same-sex marriage who on Thursday will be sworn in as the first Democrat to occupy the governor 's office in 16 years .", "On Tuesday , before the constitutional convention , Mr. Patrick met with the House speaker and the Senate president , both Democrats , to urge them to find a way to defeat the amendment , even if it meant adjourning without voting on it .", "`` I believe that adults should be free to choose whom they wish to love and to marry , '' Mr. 
Patrick said , adding that he objected to using the constitutional amendment process `` to give a minority fewer freedoms than the majority . ''", "After the vote , Mr. Patrick said in a statement , `` We have work to do over the next year to turn this around . ''", "The new legislature taking office this month includes more supporters of same-sex marriage .", "But people on both sides of the issue said it was not clear if the balance had tipped enough to sideline the amendment .", "Ms. Isaacson and other gay rights activists have said that , should the initiative get on the 2008 ballot , they fear losing to an expensive campaign that would draw opponents from around the country .", "Polls in Massachusetts have generally found that just over half of the citizens surveyed supported same-sex marriage , but about the same number wanted the constitutional amendment to come before voters .", "On Tuesday , scores of demonstrators lined the street outside the Statehouse and spilled into the building .", "`` I think it is going to get defeated next time around , '' said Lea Roy , 38 , a supporter of same-sex marriage from Fitchburg who hopes to marry her girlfriend .", "`` It 's something you always dream about growing up -- getting married .", "Then it 's like , I 'm gay and we 're not allowed to get married . ``", "But David Wilson , who , along with his partner , Rob Compton , was a plaintiff in the original lawsuit that legalized same-sex marriage , was less optimistic .", "`` It feels like the rug has been pulled out from under us , '' said Mr. Wilson , who has married Mr. Compton .", "`` Maybe I 'll feel better tomorrow , but today I feel like I 've been shot . ``", "Bea Martins , 63 , an opponent of same-sex marriage from Fall River , said she was `` very pleased '' by the vote .", "As the initiative winds its way through the rest of the process , Ms. Martins said , `` my counsel is we continue praying to the dear Lord for justice to be done . '' ."], "summary": ["Massachusetts Legislature votes to advance constitutional amendment defining marriage as union between man and woman .", "Amendment now requires approval of at least 50 legislators in another vote in 2007-8 session for it to be placed on ballot as referendum .", "If passed , amendment would not invalidate same-sex marriages that have taken place since becoming legal in 2004 , but would prevent future marriages .", "Photo ."], "publication": "nyt50", "label": [3, 1, 0, 2], "tag": ["U.S."]} -{"id": "1816125", "text": ["The Delaware River , which separates New Jersey and Pennsylvania , is putting distance between the Democratic governors of the two states , who are at a standoff over the financing and environmental implications of a plan to deepen the river for larger cargo ships and oil tankers .", "After a yearlong impasse over the $ 300 million project , which requires approval and money from both states , Gov . Edward G . Rendell of Pennsylvania has halted the work of the Delaware River Port Authority , a bistate agency he heads that regulates activity along the river , blocking construction projects and increasing debt .", "He warned at a recent news conference that `` our patience is running out . ''", "But Gov . Jon S . Corzine of New Jersey said that officials in his state are not yet convinced that Pennsylvania 's recent promises to accept the bulk of the waste from the riverbed and cover any cost overruns would allay their concerns .", "Moreover , Mr. 
Corzine , who has yet to offer a definitive position on the project , has expressed doubts about it from an environmental standpoint .", "`` I understand his frustration , '' Mr. Corzine said in an interview .", "`` We have people who are frustrated also about the environment , and I 'm certain with all the details , some of the conceptual ideas that have been framed , we 'll work through it . ``", "The debate over the Delaware predates both governors , and for many of those who have followed it -- environmentalists and developers alike -- the stakes are high .", "Backers say the proposal to deepen more than 100 miles of the river , from the mouth of Delaware Bay north to Philadelphia , to 45 feet from 40 feet , is critical to the revitalization of the Port of Philadelphia .", "Critics contend the dredging could shake free contaminated sediments that could be absorbed by fish or make their way into the water supply .", "`` It 's a giant waste of money that 's going to hurt the environment and waste taxpayers ' resources , `` said Jeff Tittel , executive director of the New Jersey chapter of the Sierra Club .", "Mr. Tittel challenged Mr. Rendell 's contention that the dredging would make the Philadelphia port more competitive with other major East Coast ports .", "`` The port will still be 100 miles from the ocean , '' Mr. Tittel pointed out , `` and will still not be competitive with Port of Newark , Norfolk , or even Halifax . ''", "Governor Rendell continues to push for the project as a potential economic development engine .", "He pointed to a similar , if more costly , dredging project effectively undertaken at the Port of New York in the last six years , asking in a recent interview , `` Why is it good enough in the north and not good enough in the south .", "`` Meanwhile , his refusal to take action to reduce the interest rates on bonds issued by the Delaware River Port Authority has increased the agency 's debt by $ 7 million .", "In addition to agreeing to handle waste and extra costs , Governor Rendell has also promised to appoint a committee to examine the environmental impact of the 10-year project .", "But Governor Corzine dismissed these concessions , saying , `` It would be an overreading of that to say we 've come to an agreement . ``", "Those who know both men and understand the dispute said that they are dismayed that the governors of neighboring states , both from the same political party , have been unable to have a meeting of the minds .", "`` Hopefully , the two governors will sit down and resolve this , '' said State Senator Stephen M . Sweeney , a Democrat from Gloucester County in southern New Jersey and an opponent of the plan .", "`` Honestly , I have n't heard anything about this in a year . ``", "Mr. Rendell and Mr. Corzine , who have both played roles in the national Democratic Party , have enjoyed a cordial if not particularly deep friendship .", "But their styles are as different as the environs of the well-heeled New Jersey suburb of Summit , where Mr. Corzine lived while he was chief executive at Goldman Sachs , and the rough-and-tumble , in-your-face attitude of South Philadelphia , in Mr. Rendell 's adopted hometown .", "The garrulous Mr. Rendell , who will be 63 this week , is a former prosecutor and Philadelphia mayor who once instigated spectators to throw snowballs on the field at a pro football game .", "The earnest Mr. 
Corzine , who just turned 60 , is a multimillionaire and former Wall Street trader who successfully tackled fiscal problems in his first year but sometimes struggled to get along with state legislative leaders from his own party .", "Despite his frustration , Governor Rendell said the issue has not affected his friendship with Mr. Corzine , a relationship that he said dates to his successful 1991 campaign for mayor of Philadelphia , when Mr. Corzine , then a trader at Goldman Sachs , was a significant campaign contributor .", "`` I think he 's a great , great , great public servant , `` said Mr. Rendell , a native New Yorker , who before becoming governor in 2003 served as chairman of the Democratic National Committee .", "`` I believe that Governor Corzine has acted in good faith and does intend to resolve this . ''", "Mr. Corzine , the former head of the Democratic Senatorial Campaign Committee , declined to discuss how the dispute had affected his rapport with Mr. Rendell .", "Indeed , he was reluctant to talk about the matter .", "Days after Mr. Rendell 's acerbic comment that New Jersey officials are `` running out of excuses , '' Mr. Corzine would say only , `` I actually think it 's better for us to have private conversations about it . ``", "Mr. Rendell said that he has tried to be accommodating with New Jersey officials , who over the past 14 months had asked to delay talks on the dredging project while political campaigns were going on in both states .", "Also , Mr. Corzine was trying to work out the details of his first budget , which led to a seven-day shutdown in July of the state government .", "He said that he and Mr. Corzine were scheduled to meet this month on the issue .", "`` I do n't know what reason they 'll find now , `` Mr. Rendell said of the prospect of any future delays .", "David P . Rebovich , director of the Institute of New Jersey Politics at Rider University , said the showdown has highlighted Mr. Rendell 's `` feisty , quick-witted , can-do style '' and the fact that Mr. Corzine is `` much more thoughtful and deliberative . ''", "`` The impasse is n't doing either state much good , `` Professor Rebovich added ."], "summary": ["New Jersey Gov Jon S Corzine and Pennsylvania Gov Edward G Rendell head for showdown over financing and environmental issues of plan to deepen Delaware River .", "Corzine expresses concern that dredging could shake free contaminated sediment .", "Says he is not convinced by Pennsylvania 's promises to accept bulk of waste from river and cover cost overruns .", "Rendell sees project as critical to revitalization of Port of Philadelphia .", "Photo ."], "publication": "nyt50", "label": [3, 8, 9], "tag": ["New York and Region"]} -{"id": "1816127", "text": ["Sale prices for Manhattan apartments fell in the last quarter of 2006 , while the pace of sales was reported to be strong and the backlog of unsold apartments fell , according to several market studies released yesterday by large real estate brokerage firms .", "The new quarterly numbers led brokers and market analysts to conclude that the Manhattan real estate market -- with its legendary high prices -- appeared to have so far escaped the worst of the real estate market excesses , like large price cuts by developers , reported across the country .", "`` It shows us that we hit the soft landing that a lot of people were hoping for , '' said Gregory J . 
Heym , an economist who prepared market studies for two brokerage firms , Brown Harris Stevens and Halstead Property .", "The new figures were also reported in a series of competitive and sometimes contradictory studies .", "One report , released by Prudential Douglas Elliman , reported a 5 percent decline in the average apartment sale price , compared with the previous quarter , while one by the Corcoran Group put the decline at 1.5 percent .", "Both put the average sale price for all apartments at more than $ 1.2 million .", "A third study by Mr. Heym showed that average apartment prices actually increased by 5 percent from the prior quarter .", "But even this study showed weakness in the market .", "It found that prices for co-ops , the older apartments that make up most of Manhattan 's residential real estate , declined by 5 percent , with these declines offset by increased sale prices for condominiums .", "He said more than half the sales were in new luxury buildings that command premium prices .", "Yet what heartened many market watchers was the market 's overall stability , and the rising sales , according to the Prudential report , in what is traditionally a weak quarter .", "Average prices were higher last quarter than they were in the same quarter a year ago .", "For all of 2006 , average prices were also higher than the previous year , which included the peak quarters of the recent real estate boom .", "At the end of December , the Prudential report found that the backlog of unsold apartments had dropped to 5,934 from 7,623 , a decline of 22 percent and slightly below the inventory reported a year before , as apartments were sold and overpriced apartments were taken off the market .", "At the same time , the Prudential report tracked 2,441 sales in the last quarter of 2006 , an increase of 15.5 percent from the prior quarter and a 55 percent increase from the fourth quarter of 2005 .", "Jonathan J . Miller , an appraiser and president of Miller Samuel Inc . , prepared the Prudential report .", "He said some of the increase in sales might be exaggerated because of improved reporting by New York City , which last summer began providing sales information for previously undisclosed co-op sales .", "But he said that the increase in reported sales was the largest he had seen in several years , especially in the fourth quarter when sales typically decline an average of 8 percent .", "But he said that he believed the market was strengthening and that it was now `` just barely '' a buyer 's market , but one in which buyers were frustrated because sellers were not making significant concessions .", "He found that the average discount from the final asking price for an apartment fell to 2.8 percent from 4 percent in the previous quarter .", "Pamela Liebman , president of the Corcoran Group , said that sellers are now generally pricing their apartments realistically , and that with the economy strong and Wall Street bonuses high , `` the psychology of the market is very positive . ''", "`` The big story here is the story that never happened , '' she said .", "`` The story was supposed to be that prices would crash in 2006 , and a strong buyer 's market will emerge .", "Buyers can certainly pick and choose , but those who think they are going to have a field day out there right now are mistaken . ``", "Ms. 
Liebman said the fear of a price collapse caused by a construction boom has not been borne out so far , and the outlook remained good for this year .", "She said several thousand planned condominiums have instead been turned into hotels and office buildings , as hotel profits and office rentals have risen .", "The trends in the Manhattan real estate market have always been something of a puzzle , since the average and median prices tend to move up and down with the changing mix of large and small apartments and of apartments in old and new buildings .", "In addition , the various market studies augment public records of sales with private databases compiled by brokers and appraisers of recent closings that have not yet been reported by the city .", "New building sales also distort the data , since sales often close up to a year or more after they go to contract .", "Ms. Liebman predicted that average sale prices would rise next year as contracts already signed at new expensive luxury buildings with multiple sales of more than $ 10 million , like 15 Central Park West and the Plaza on Fifth Avenue , are completed .", "Several reports showed significant price increases in very large apartments with four or more bedrooms and in studios .", "The reports generally showed the largest increases in very large apartments with four or more bedrooms , with declines in average prices for two-bedroom apartments , which still averaged above $ 1.5 million per apartment .", "Overall , 2006 ended with an average price increase of 6 percent over the previous year 's sales , Mr. Miller said , while the median went up by 11 percent .", "Mr. Heym , the economist at Halstead and Brown Harris Stevens , attributed the continued strength of the market to the local economy , which he said has produced more jobs last year than in any year since the 2000 boom , and that interest rates that have remained low .", "Mr. Heym said , `` 2006 turned out to be a really good year , much better than people thought . 
'' ."], "summary": ["Prudential Douglas Elliman reports 5 percent decline in average apartment sale price in last quarter .", "Corcoran Group puts decline at 1.5 percent .", "Both reports put average sale price for all apartments at more than $ 1.2 million .", "Results of Brown Harris Stevens and Halstead Property study noted ."], "publication": "nyt50", "label": [5, 4], "tag": ["New York and Region"]} -{"id": "1816129", "text": ["The dollar slumped yesterday and the euro climbed to a three-week high against the currency .", "A steady slide in the value of the dollar since late 2005 , primarily against the euro and the British pound , has steepened over the last month amid indications that interest rates will rise in Europe , while the Federal Reserve is expected to cut rates this year .", "At the same time , countries with large dollar holdings are showing a new willingness to dump the dollar in favor of the rising euro , though the current activity is seen as posing little long-term risk to the dollar .", "Late last month , the United Arab Emirates became the latest country to shift more of its currency reserves away from the dollar , joining Russia , Switzerland , Venezuela and others .", "Those moves coincide with ambiguous signals from China about possibly pulling back from the dollar , and recent word from Iran , the world 's fourth-largest oil producer , that it would prefer euros as payment for oil , which is typically priced in dollars .", "But currency experts say that this turn away from the dollar is not likely to do any long-term damage to the currency 's value for a number of reasons .", "First , the motives of central banks that are adding other currencies to their reserves do not appear to be driven by the belief that the euro will eventually supplant the dollar as the world 's key currency .", "Rather , these central banks are doing what investors do to cut risk : diversifying their portfolios .", "Moreover , the amount of currency moved so far has been relatively small in a global market that trades trillions of dollars a day -- only about $ 2 billion in the case of the United Arab Emirates , for example .", "`` There is some indication that central banks are moving to diversify reserves , but it 's at a very slow pace , `` said David Powell , a currency analyst with IDEAglobal .", "`` Is it the start of a massive shift out of the dollar .", "I would say no . 
``", "Yesterday , the euro traded at $ 1.3272 , up from $ 1.3198 late Friday in New York .", "The British pound was at $ 1.9721 , up from $ 1.9586.", "The United States dollar index , a measure of the dollar 's strength against a basket of currencies , fell to 83.23 from 83.65 on Friday .", "In February 2002 , the index was at 120 .", "But trading was thinner than usual yesterday as financial markets were closed in the United States , as were markets in Tokyo and Singapore .", "In 2006 , the euro appreciated more than 11 percent against the dollar , while the British pound rose nearly 14 percent against the dollar .", "But the dollar is not likely to start flowing with great speed out of central banks because foreign countries risk devaluing their investments if they do so .", "Even the slightest suggestion that a country is thinking about swapping dollars for euros risks sending the value of the dollar falling , and in turn hurts all foreign investors in American securities .", "The case of China , which holds more Treasury securities than any other foreign nation except Japan , offers an example of why countries would be reluctant to dump their dollar reserves .", "In October , the most recent month for which figures are available from the Treasury Department , China held $ 345 billion in Treasury securities .", "That was up from $ 301 billion a year earlier .", "Its currency holdings total $ 1 trillion .", "About $ 700 billion of that , economists estimate , is in dollars .", "So in many ways , it is in China 's best interest not to let the dollar 's value slip .", "Heavy sales of the dollar could make it harder for the People 's Bank of China to manage its gradual appreciation of the yuan against the dollar .", "Anything more abrupt , Beijing fears , would make Chinese goods less competitive in the United States and pose problems domestically for some of the loans from its state banks .", "And if the dollar drops too much , the value of China 's holdings would decrease , limiting the lending ability of its banks .", "Nonetheless , the rising euro is not something the United States or foreign investors can afford to ignore .", "`` You have to start to thinking that the euro can be of some risk to the dollar , '' said Shaun Osbourne , chief currency strategist at TD Securities in Toronto .", "`` Over the course of the next 5 or 10 years , I do n't think there 's any danger that the dollar 's pre-eminence is threatened .", "But in the long run , there is certainly the risk that does happen . 
``", "One issue driving investors from the dollar is the possibility that interest rates in the United States and Europe may move farther apart .", "Financial markets are currently expecting at least one interest rate cut by the Federal Reserve sometime next year .", "That contrasts with predictions of further rate increases by the European Central Bank .", "`` A lot of foreign investors think the Fed is going to cut rates in 2007 , and that 's a rather dollar-bearish thing , `` said Julia Coronado , senior economist with Barclays Capital .", "Some economists predict the dollar will fall further in 2007 .", "The euro finished 2006 at $ 1.31 , and some economists see it climbing near $ 1.40 -- a high in its seven-year history .", "`` We believe that the dollar 's decline versus the euro has further to run , with $ 1.38 a possible destination for the pair over the next six months , `` said Tom Levinson , a foreign exchange strategist with ING Wholesale Banking in London .", "Still , many economists are unwilling to predict that the dollar faces an inevitable demise .", "`` The dollar is still the world 's No . 1 currency , and it 's going to stay that way , `` said Nigel Gault , chief United States economist for Global Insight .", "`` The euro is gradually going to become more important , but I do n't see it becoming more important than the dollar . `` ."], "summary": ["Steady slide in value of United States dollar since 2005 continues as Federal Reserve is expected to cut rates this year and currencies such as British pound and euro are rising .", "United Arab Emirates joins Russia , Switzerland , Venezuela and other countries that have shifted reserves away from dollar or have hinted at similar plans .", "Experts do not expect long-term damage to US currency 's value .", "Graph ."], "publication": "nyt50", "label": [1, 3, 5], "tag": ["Business"]} -{"id": "1816130", "text": ["College stinks .", "Just ask Emily Watson , a sophomore at Dartmouth , who sprays her dorm room once a week and her clothes two to three times a week with fresheners , usually in citrus or other fruity flavors .", "Ms. Watson did not grow up using air fresheners , but a Febreze commercial two years ago changed all that .", "`` If you 're in a frat basement or something , you kind of stink afterwards , and you want to wear your jeans the next day , `` Ms. Watson said .", "Younger customers like Ms. Watson are at the forefront of the boom in air fresheners , which have grown up since the first Air Wicks and Glade sprays hit the shelves two generations ago .", "Glade 's first sprays in evergreen and blossom scents appeared in 1956 and were marketed to suburban families as a way to banish cooking and tobacco smells .", "Since then , thousands of new products have made their debuts -- plug-ins , fragrance fans , diffusers , flashing light shows -- becoming pricier and fancier every year .", "These days , air fresheners are selling to a wider range of customers , as companies pour more money into advertisements that show people of all ages , male and female , euphorically smelling their `` clean '' air .", "Procter & Gamble introduced Febreze air products in 2004 , after sensing a growing market , and put its marketing heft behind them .", "Sales across the industry have soared as a result , up 50 percent , or nearly $ 600 million since 2003 , according to Kline & Company , a market research firm in New Jersey .", "Total sales are expected to reach $ 1.72 billion in the United States this year , the firm said .", "`` P . 
& G . came from nowhere , '' said William Schmitz , a senior analyst with Deutsche Bank .", "`` And they expanded the category . ''", "Febreze has become popular with young people who go barhopping and come home with smelly sweaters , Mr. Schmitz said .", "Indeed , Procter & Gamble sought such customers by creating offbeat Febreze ads featuring young 20-somethings -- men and women -- making air fresheners seem cool .", "The company used much the same approach to marketing for its Swiffer cleaning products , with one of them appearing in the hands of Jessica Simpson on a Rolling Stone cover in 2003 .", "One of the first Febreze air freshener products was designed to look like a CD player .", "Febreze Scentstories , released in 2004 , features `` stop '' and `` play '' buttons and `` discs '' that radiate scents rather than music .", "The disc titled `` Wandering Barefoot on the Shore '' features scents like `` splashing in the waves '' and `` sailing in the bay . ''", "`` Teenage girls are very scent-involved , generally speaking , '' said John Paquin , executive vice president and global account director for Febreze at Grey Worldwide , a WPP Group agency .", "`` You ca n't have enough scent with teenage girls . ``", "Shoppers looking to brighten their air will find plenty of options .", "More than 1,000 new fresheners hit the market last year , exceeding the number for 2005 and 2004 combined , according to the Productscan Online service of Datamonitor .", "Companies that make air fresheners , like SC Johnson , maker of Glade .", "Reckitt Benckiser , maker of Air Wick .", "And Procter & Gamble say they are responding to consumer demand .", "Scent , they say , has become more important to consumers .", "`` Fragrance for some people is a natural expression of themselves , '' said Tammy Maier-Jones , category manager for Glade .", "`` They want their friends to come over and say , ' That 's Linda 's scent , ` or ' Mary 's scent . '", "`` SC Johnson spent only $ 31 million on Glade ads in 2003 , before Febreze .", "It has since increased its spending by more than $ 30 million a year , bringing Glade to Febreze 's level , according to Nielsen Monitor-Plus , a unit of VNU .", "`` There 's an explosion of activity in this segment , `` said Michelle Dauchy , category manager for Glade .", "Febreze commercials now account for about 30 percent of air freshener commercials , and about 40 percent of the money spent buying TV air time in the category .", "Procter & Gamble spent $ 58 million for Febreze commercials in the first nine months of 2006 , overshadowing Reckitt Benckiser 's $ 31 million and just above SC Johnson 's $ 55 million in that period , Monitor-Plus numbers show .", "SC Johnson released a Glade product in August directly for tweens , or girls 8 to 12 years old , and teenage girls .", "Its Scented Oil Light Show plugs into walls and beams bright colors and scents like Berry Burst , Watermelon Rush and Vanilla & Cream .", "Glade commercials for the Light Show feature a teenage girl in a bathrobe complaining that her mother bought the freshener for her younger brother rather than for her .", "The company developed a tween-focused Web site for the Light Show and ran a contest online for teenagers to win `` the ultimate slumber party '' with Vanessa Anne Hudgens , star of the Disney show `` High School Musical . ''", "Ten pairs of best friends won the contest and hung out with Ms. 
Hudgens in Hollywood in November .", "The Glade Create-a-Scent PlugIn , released in 2004 , also appeals to young shoppers .", "Tweens have long made their own perfume potions , and the Glade product provides suggested combinations of its scents .", "Air Wick introduced a new look for some of its fresheners last July with animated commercials , featuring elephant and octopus characters .", "These new products tend to be pricey .", "The Febreze `` players '' cost $ 27.49 and discs are $ 5.99 apiece .", "The Glade Light Show costs $ 11.99.", "But then teenagers today have more discretionary money than those of past generations , analysts said .", "`` The teenager today is purchasing what prior generations have looked to buy when they became young career professionals , '' said Marshal Cohen , chief industry analyst at the NPD Group , a retail research company .", "`` Younger consumers are much older today , in terms of what they 're buying . ``", "Mothers and older consumers remain important to air freshener companies , and many advertisements continue to be directed at people with families .", "About three-quarters of households bought air fresheners last year , according to an ACNielsen Homescan Consumer Facts report .", "Ad executives said there had also been significant growth in the last few years among those traditional customers .", "`` It used to be that home environment stopped at d\u00e9cor , and now scent is brought in as another dimension , '' said Tammy Anthony , executive director for client services in the Cincinnati office of Landor Associates , a WPP Group agency that works with Procter & Gamble on Febreze .", "`` Today 's consumers crave experiences that touch all of their senses . ``", "About 40 percent of people who buy air products started buying them in the last six years , according to a recent survey by MarketTools , a firm based in San Francisco that does consumer research for Procter & Gamble and other packaged goods companies .", "About half of the people buying air scents said they were now putting them in more rooms than they used to .", "The living room , kitchen , bathrooms and master bedroom are the most popular , but fresheners are also showing up in the oldest child 's room , the survey found .", "Teenagers are sometimes encouraged to make their rooms smell better by their parents , but they are also seeking out the scents themselves .", "About a third of teenagers with scented air in their rooms buy the fresheners themselves , a third ask their parents to buy them , and the other third said their parents put the freshener in , according to a survey by Weekly Reader Research , a research firm in Stamford , Conn . , that specializes in teenagers .", "The survey , while not scientific , interviewed 1,500 teenagers online and found that two-thirds used air fresheners .", "Of those , about a third said advertisements had convinced them their rooms could smell better .", "Jessica Ray , a freshman at Williams College , said she liked having a dorm room scent that could be smelled all the way down her hallway .", "She has been using an apple-cinnamon-scented Glade PlugIn , but in the spring , she says , she 'll switch to something flowery .", "Her family in Virginia did not use fresheners when she was growing up , but Ms. Ray said she was likely to continue to buy them throughout her life .", "Even specialty retailers are seeking young buyers .", "Anne Taintor Inc . 
, a specialty retailer for women that uses vintage images and a playful sense of attitude in its products , has found that a lemon-gelatin-scented freshener sells well among college students .", "`` Young people are more oriented towards the inside than the outside , '' said Christina White , the company 's senior vice president for marketing .", "`` They 're in front of their computers .", "They 're not taking long walks in the woods .", "The idea of scent tended to be a personal thing , but younger people are seeing it in a much broader way .", "They want to control everything around them in their own spaces . ``", "It is not just teenage girls who are enjoying scented rooms .", "Boys , too , are customers .", "Unilever has had success selling its Axe line of body sprays aimed at young men in the last few years , and Weekly Reader Research found that about 60 percent of its male respondents used fresheners , just 10 percentage points fewer than the teenage girls surveyed .", "Mohammad Usman , a freshman at Dartmouth , said that most of his friends of both sexes were scent savvy .", "He showed up for college this fall with about a dozen fresheners he brought from home , he said .", "He sprays them about once a week .", "`` I go to the gym every day , and I come back and it always smells because my laundry bag is there , '' Mr. Usman said .", "`` Tonight , I 'm throwing a movie night , so before I clean my room , I 'm going to spray some Febreze . ``", "ADVERTISING ."], "summary": ["Makers of air fresheners are responding to increased demand as scent becomes more important to consumers .", "College students are fueling growth in new generation of air fresheners .", "Companies such as Procter & Gamble , S C Johnson and Reckitt Benckiser have increased spending to capitalize on trend .", "Older consumers remain important to air freshener makers as indicated by focused advertising in segment .", "Photos .", "Graphs ."], "publication": "nyt50", "label": [48, 25, 24, 23, 26, 0], "tag": ["Education", "Business"]} -{"id": "1816131", "text": ["Eugen David , a small-time farmer with a chipped tooth and muddy boots in this obscure wrinkle of Transylvania , is an unlikely man to attract the attention of movie stars and moguls .", "But he counts Vanessa Redgrave , George Soros and Teddy Goldsmith among his backers in a land battle with a Canadian gold mining company .", "The company , Gabriel Resources , owns the rights to mine the hills here and wants Mr. David , 41 , to leave his 50 acres of land so that the company can carve out what would be Europe 's largest open-pit gold mine .", "Mr. David says he is n't budging .", "`` We do n't want to move , `` he says , staring across at the brown-gray stain of Rosia Montana 's defunct gold mine , which would be swallowed by Gabriel Resource 's huge project .", "In the old days , a pipsqueak like Mr. David would n't stand a chance fighting powerful and sophisticated adversaries like Gabriel Resources and its minority partner , the Romanian government .", "But this is the Internet age , when local activists like Mr. David can tap into an increasingly well-oiled global network of non-governmental organizations for financial and political support on a long list of causes and emerge with almost as much clout as any corporation .", "Mr. 
David 's stubbornness has struck a chord with the anti-globalization movement .", "Gabriel Resources ' proposed open-pit , cyanide-leaching mining process has also drawn the ire of international environmentalists who are now trying to stop it .", "They just might win .", "Mining is one of the world 's most unpopular pursuits these days , particularly the gigantic gouging that leaves the earth pocked with moonscape-like craters a mile or more wide .", "Gold mining is disdained even more because of the perceived frivolity of its end : to provide lucre for the rich , status for the everyman and hidden stores of wealth for nations .", "But it also has a strong allure , particularly for resource-rich countries like Romania that are struggling to develop impoverished communities that need jobs .", "The $ 3.7 billion project would plow more than $ 2 billion into the Romanian economy and could earn Gabriel Resources and its shareholders profits of $ 1 billion or more .", "And the company involved here , a Toronto-based corporation with market capitalization of $ 1 billion , is run by savvy mining executives , many of them highly experienced from cutting their teeth building the Barrick Gold Corporation , the largest gold mining company in the world .", "The allure is perhaps stronger in Romania because the country was created , in a way , by gold mining .", "Early in the second century A.D. , Emperor Trajan extended Roman territory to include what is now Transylvania , in the western half of Romania , to mine Europe 's most important gold deposits .", "The mines helped finance the expansion of the empire to its peak .", "When the Romans abandoned the territory almost 200 years later , they left behind colonists who are the ancestors of Romanians today .", "When the Romans left , the mining did not stop .", "The eventual ruling dynasty , the Hapsburgs , and the Communists , who turned to open-pit mining , continued the process , though with dwindling efficiency .", "The mine was finally shut in early 2006 .", "Gabriel Resources was born in the breakup of the state-owned economy after Communism 's collapse when Romanian businessmen with little mining experience and suspected ties to the former secret police won a vast concession to exploit mineral deposits .", "Mr. David and his neighbors realized six years ago that the company planned to expand the old mine and formed an association called Alburnus Maior -- Rosia Montana 's Roman name -- to try to stop the project .", "They were engaged in an ineffective letter-writing campaign when the founders of Gabriel Resources moved the company 's listing from Vancouver , British Columbia , to the more respectable Toronto Stock Exchange .", "Mr. David 's opposition might have withered had it not been for an ill-advised plan to build a Dracula theme park near the picturesque Romanian town of Sighisoara , once home to Vlad Dracula , the notorious Romanian ruler and inspiration for `` Dracula , '' the Bram Stoker novel .", "Prince Charles of Britain , fond of Romania 's old Saxon villages , was outraged .", "So was Teddy Goldsmith , the aging anti-globalist environmentalist and scion of a wealthy business family .", "A Swiss-born environmental journalist named Stephanie Roth , who wrote for Mr. Goldsmith 's magazine , The Ecologist , moved to Romania to help defeat the project .", "With such powerful forces aligned against it , the theme park for Sighisoara died .", "While in Romania , Ms. 
Roth heard about the Gabriel Resources ' plan for Rosia Montana and went to meet Mr. David in April 2002 .", "Within months , she had introduced him to some of the most powerful environmental organizations in the world .", "`` When I came there was no computer , no Web site , '' Ms. Roth said .", "`` I tried to empower the local organization . ''", "Ms. Roth started by helping Mr. David 's group obtain a grant for a few hundred dollars from an American environmental organization , Global Greengrants Fund .", "They organized a public hearing in Rosia Montana that drew 40 non-governmental organizations with Romanian operations , including Greenpeace , and catapulted Mr. David 's dispute onto the national stage .", "Then Ms. Roth took to the road .", "By the time Gabriel Resources ' founders turned the company over to more professional management in 2005 , the company had an international coalition of nongovernmental organizations arrayed against it .", "But the mining industry does n't easily back down .", "Hoping to extract an estimated 300 tons of gold and 1,200 tons of silver from the mine , Gabriel Resources introduced a public relations campaign with Madison Avenue-style television commercials and community sponsorships to win over 960 Rosia Montana families that it needed to relocate .", "It cast itself as an economic savior .", "It even countered a critical documentary with its own film , `` Mine Your Own Business . ''", "Some efforts backfired .", "Gabriel Resources helped sponsor the Transylvanian International Film Festival in nearby Cluj-Napoca .", "But when its organizers invited Ms. Redgrave to receive a lifetime achievement award , Ms. Roth quickly put the actress and Mr. David together .", "Ms. Redgrave 's acceptance speech became a rallying cry against Gabriel Resources ' project .", "The anti-Gabriel Resources ' movement had its mascot and the European press began covering the story .", "Word of the movement had by then reached the Open Society Institute of George Soros , which has been working for years for more accountability from Romanian public officials .", "`` When guys in S.U.V. ' s with bags full of cash show up in a poor locality in Romania , they can really make the law there , '' said Radu Motoc , project director of the Open Society Foundation-Romania .", "Nearly all members of Rosia Montana 's former and current council are either employed by Rosia Montana Gold , Gabriel 's local subsidiary , or have family members who are , according to the foundation .", "The foundation , which has already given $ 35,000 to the cause , says it plans to spend as much as $ 240,000 next year fighting the project and helping Mr. 
David .", "Because of the polarizing debate surrounding open-pit gold mining , it is hard to find an unbiased commentator to assess the risks and benefits of Gabriel Resources ' proposed mine .", "A major focus of contention is the use of large quantities of highly toxic cyanide to separate gold and silver from the ore .", "In 1999 , Aurul , a joint venture of the Australian mining company , Esmeralda Exploration , and a Romanian national company , Remin , began a leaching operation to recover gold from old tailings in Baia Mare , or Great Mine , roughly 80 miles north of Rosia Montana .", "Like Gabriel Resources , the company promised a state-of-the-art , self-contained project that would not pose risks to the environment .", "But less than a year later , the dam holding back a lake of cyanide-laced water burst , sending 100,000 cubic meters of contaminated water downstream to the Danube , killing more than 1,200 tons of fish in Hungary .", "Gabriel Resources says it would build in safeguards that were missing at Baia Mare .", "It has promised to convert most of the cyanide into a nontoxic compound before discharging it into the mine 's tailing pond .", "It also promises to clean up pollution left by past mining operations and spend $ 70 million to do as much as possible to repair the altered landscape after its project is done .", "`` Arsenic , cadmium , nickel , lead , '' said Catalin Hosu , a public relations official for Gabriel Resources , ticking off just a few of the heavy metals that leach from ancient mines to give this valley its name .", "Rosia Montana means red mountain .", "`` We help the biodiversity .", "We help the environment , `` said Yani Roditis , Gabriel Resources ' chief operating officer .", "That 's difficult for many people here to believe .", "The new project will grind down several hills , leaving four deep pits in their place , and slowly fill an entire valley with wastewater and tailings that will take years to solidify .", "Robert E . Moran , a mining expert hired by the opposition to evaluate the impact of Gabriel Resources ' plans , said that the mine , despite detoxification , would inevitably produce other toxic byproducts damaging to the environment , including heavy metals .", "The controversy , meanwhile , has splintered the town , its buildings divided between those with signs that read , `` Property of Rosia Montana Gold Corp . '' and others that say , `` This Property Is Not For Sale . ''", "`` I was born here , so why should I leave .", "`` said Gabriela Jorka , 38 , who runs a small general store in Rosia Montana .", "`` I 'd rather kill myself . ``", "Eugen Bobar , 60 , the school principal , says that the dispute is pitting parents against children , husbands against wives .", "But only about 40 percent of the families to be relocated remain , and Mr. Bobar predicts that most of them will leave .", "`` Most of the people who talk about the environment are just making an excuse , '' Mr. Bobar said , sitting in the school 's office late one night .", "`` They will leave for a good price . ''", "Mr. 
David , however , insists there is a committed core of opponents who will not sell , whatever the offer .", "In that case , Gabriel Resources warns , it may ask the state to step in and move people out by force .", "But that could lead to years of legal wrangling .", "The company has told its shareholders that it expects to receive final approval for the project from the Romanian government this year and will start producing gold by mid-2009 .", "Gabriel Resources , which is based in Toronto , is , meanwhile , trying to win over the remaining holdouts .", "It is sponsoring education for underprivileged children in Rosia Montana through a nongovernmental organization run by Leslie Hawke , the mother of the actor Ethan Hawke and a celebrity herself in Romania .", "She supports the project .", "`` It 's probably better that nothing happened , but the gold is there and if they do n't do it , somebody else will , `` Ms. Hawke said .", "`` And I 'd rather that they do it than somebody else . `` ."], "summary": ["Eugene David refuses to leave home in Romania where Gabriel Resources wants to build largest open-pit gold mine .", "Gabriel owns rights to mine hills in area but not 50 acres of land David lives on .", "David has taken fight to Internet , where he has gained support from celebrities , environmentalists and members of anti-globalization movement .", "Company launches campaign to convince town of economic benefits and safety of proposed mine .", "Photos ."], "publication": "nyt50", "label": [2, 7], "tag": ["Technology", "Business"]} -{"id": "1816141", "text": ["Advisers to former Mayor Rudolph W . Giuliani said yesterday that someone infiltrated the Giuliani camp last fall and stole a document about his presidential prospects and political liabilities .", "It was then leaked , they said , as a `` dirty trick '' to embarrass Mr. Giuliani and highlight such headaches as his controversial former aide , Bernard B . Kerik , and one of his ex-wives , Donna Hanover .", "The Daily News was given the 140-page document recently by someone `` sympathetic to one of Giuliani 's rivals for the White House , `` The News said in an article published yesterday .", "According to the article , the document proposes a $ 100 million fund-raising effort for 2007 , names an array of potential donors , and warns that Mr. Giuliani might face `` insurmountable '' problems , including questions about Mr. Kerik and Ms. Hanover .", "Mr. Giuliani is expected to decide over the next few months whether to run for president , his advisers say , and he has already formed an exploratory committee to raise money .", "A Giuliani spokeswoman , Sunny Mindel , said yesterday that the document belonged to a staff member and did not reflect official strategy , but `` simply someone 's ideas which were committed to paper over three months ago . ``", "Ms. Mindel said the document was apparently stolen from a piece of luggage during a Giuliani political trip last fall , then photocopied and replaced in the luggage .", "Ms. Mindel said she did not know if her office would seek a criminal investigation of the alleged theft .", "The public disclosure of the document is potentially damaging for Mr. Giuliani , not least because since 9/11 , he has built a business as a private consultant on security issues while creating an image as a political leader capable of combating terrorism .", "Indeed , an adviser to one of his possible rivals in 2008 , Senator John McCain of Arizona , half-joked yesterday that it was interesting that Mr. 
Giuliani 's businesses included security consulting .", "`` I 'm surprised that something like that would ever leave the custody of a campaign , and that such raw and frank information would be around the countryside , `` said the McCain adviser , John Weaver .", "`` That said , a lot of the information was predictable . ''", "The document outlines a fund-raising effort to bring in at least $ 25 million by the end of March , and to spend more than $ 21 million this year .", "The News said it included notations that named Mr. Giuliani 's chief fund-raiser , Anne Dickerson , and a senior political aide , Anthony Carbonetti .", "Ms. Mindel would not say to whom the document belonged .", "Among the sections that appear outdated are the prospective donor lists .", "The document proposed New Jersey fund-raisers Lewis M . Eisenberg and Larry Bathgate , financier Henry R . Kravis , and FedEx executive Fred Smith .", "The first three men were named last month as leaders of Mr. McCain 's fund-raising operation , and Mr. Smith is expected to join the roster shortly .", "Assessing the document , which includes printed text , handwriting and spreadsheets , The News drew the conclusion that Mr. Giuliani appeared torn between seeking the White House and continuing his business endeavors , which include consulting on leadership and security issues , a law practice and an investment concern .", "One page in the document , according to The News , says Mr. Giuliani might `` drop out '' of the race as a result of `` insurmountable '' personal and political concerns .", "On that page was a list of bullet points that seem to highlight those concerns : Mr. Giuliani 's consulting practice .", "His former police commissioner , Mr. Kerik , who has struggled with personal and professional controversies .", "Ms. Hanover , with whom he had a stormy breakup .", "His third and current wife , Judith Nathan Giuliani .", "And `` social issues '' -- apparently a reference to his support for abortion rights , gay civil unions , and gun control , some or all of which are opposed by many Republican voters .", "`` All will come out -- in worst light , '' the document stated .", "`` $ 100 million against us on this stuff , `` it continued , apparently a reference to likely efforts by Giuliani opponents to draw public attention to his liabilities .", "It also suggests that Mr. Giuliani would categorize and honor his donors with terms from baseball .", "While President Bush referred to his best financial supporters as `` Rangers '' and `` Pioneers , '' Mr. Giuliani would call them `` Team Captains , '' `` MVPs , '' `` All-Stars , '' and `` Sluggers . ''", "He would focus his fund-raising operations in New York , Washington , and California , the document indicates .", "Ms. 
Mindel called the document `` very outdated , '' but at least some of the ideas reflect current strategy discussions in the Giuliani camp , according to Republican Party figures who are familiar with the discussions and who spoke to The New York Times recently on condition of anonymity .", "Specifically , these Republicans say , Giuliani advisers believe that he is broadly popular enough to be able to raise money quickly for a presidential bid , and that he would need more than $ 100 million by the end of 2007 .", "According to Giuliani advisers , the document was a notebook compiled by one staff member whose luggage was not immediately located after a private plane flight last fall .", "They said it was not an official dossier that all Giuliani political aides shared .", "`` Voters are sick and tired of dirty tricks , '' Ms. Mindel said .", "Referring to the document , she added : `` It 's about as relevant today as a grocery list written in early October -- in pencil . `` ."], "summary": ["Advisers to former New York City Mayor Rudolph W Giuliani say someone infiltrated Giuliani camp and stole document about his presidential prospects and political liabilities .", "Daily News was given document recently , and reports it proposes $ 100 million 2007 fundraising effort , names potential donors , and cites other issues .", "Giuliani is expected to decide whether to run for presidency in next few months .", "Photo ."], "publication": "nyt50", "label": [0, 4], "tag": ["U.S."]} -{"id": "1816142", "text": ["Iraq 's Shiite-led government said Tuesday that it had ordered an investigation into the abusive behavior at the execution of Saddam Hussein , who was subjected to a battery of taunts by official Shiite witnesses and guards as he awaited his hanging .", "Officials said a three-man Interior Ministry committee would look into the scenes that have caused outrage and public demonstrations among Mr. Hussein 's Sunni Arab loyalists in Iraq , and widespread dismay elsewhere , especially in the Middle East .", "In an unofficial cellphone video recording that was broadcast around the world and posted on countless Web sites , Mr. Hussein is shown standing on the gallows platform with the noose around his neck at dawn on Saturday , facing a barrage of mockery and derision from unseen tormentors below the gallows .", "As the shock of those scenes reached a new crescendo in Iraq , American officials said that they had worked until the last hours of Mr. Hussein 's life to persuade Prime Minister Nuri Kamal al-Maliki to delay the execution .", "The officials , who spoke on condition that they not be identified , said they appealed to Mr. Maliki not to execute Mr. Hussein at dawn on Saturday because of the onset of a major Islamic festival , and because of constitutional and legal questions that the Americans believed threw the legitimacy of the execution into doubt .", "But when Mr. Maliki decided to go ahead with the hanging , the Americans said they made no further attempts to stop it , having concluded that they could advise the Iraqis against the execution , but not prevent it if the Iraqis persisted , out of respect for Iraqi sovereignty .", "When asked if that decision had been made in the White House , the Americans refused to say , noting only that it came some time before the final exchanges on Friday night .", "Mr. Hussein was hanged at 6:10 a.m. 
on Saturday , about seven hours after what the officials said was their final attempt to postpone the hanging .", "`` We told the prime minister that going forward on the first day of Id would have a negative reaction in the Islamic world , and among the Iraqi people , '' a senior American official said , recounting a telephone conversation with Mr. Maliki that began at 10:30 p.m. Baghdad time on Friday .", "The reference was to the Id al-Adha holiday , which began for Sunnis on Saturday , marking the end of the annual pilgrimage to Mecca .", "`` Therefore , '' the official said , `` we said we thought it would be better if they delayed until after Id , and use the delay to resolve the legal issues . ''", "The American official said that Mr. Maliki had never fully explained his urgency in carrying out the death sentence , which was upheld last Tuesday in an appeals court ruling that set off a 30-day countdown for executions to be carried out after a final appeal has been turned down .", "But the prime minister gave one explanation that appeared to weigh heavily on his mind , the American said , and that was his fear that Mr. Hussein might be the subject of an insurgent attempt to free him if the procedural wrangling over the execution were protracted .", "`` His concern was security , and that there was a danger that if it continued , maybe there would be a mass kidnapping to bargain for Saddam Hussein 's release , `` the official said .", "`` He was concerned that he might somehow get free . ''", "The American decision to confirm that they had opposed the quick execution came after days of silence from the American Embassy and the United States military command in Baghdad , which appeared to have been shocked , like so many others , by the unofficial video recording that showed the bedlam at the gallows .", "With some Iraqi politicians raising fresh demands for Mr. Maliki 's dismissal , the Americans , in offering to have a senior official discuss the matter in a telephone interview with The New York Times , appeared eager to protect the Bush administration from a fresh surge of criticism for its handling of events in Iraq .", "The official said that among American officials in Iraq who had tried to stop Mr. Maliki from rushing Mr. Hussein to the gallows , the reaction to the scenes of abuse had been one of dismay .", "`` Well , yes , when I think of the behavior of the people who were there , I 'm disappointed and distressed , that 's true , `` the official who spoke in the telephone interview said .", "He said he had been one of the Americans who intervened with Mr. Maliki on Friday night and earlier last week to try to delay the hanging .", "Mr. Maliki seemed equally eager to ward off the opprobrium stirred by the execution .", "Attempts to reach Mr. Rubaie were unsuccessful .", "The prosecutor , Munkith al-Faroun , said the other man holding a cellphone above his head was also an official , but he could not recall his name .", "The government inquiry was ordered as a groundswell of protest grew at Sunni population centers across Iraq .", "The protests , sporadic in the first 72 hours after the hanging , appeared to be building in intensity as Iraqi and American troops relaxed security cordons that had been thrown around centers of diehard support for Mr. Hussein , including his hometown , Tikrit , 100 miles north of Baghdad , and Awja , the village where he was born , a few miles away .", "The protesters carried portraits of Mr. 
Hussein , chanted his name , and fired weapons in the air .", "Thousands of mourners flocked to Awja , where Mr. Hussein 's body has lain in a reception hall .", "The body , in a plain wood coffin draped in an Iraqi flag , has become a point of pilgrimage for loyalists .", "Many of those reaching Awja have wept as they filed past the coffin , shouting slogans of fealty of the kind that were universal in Iraq when Mr. Hussein was the country 's dictator .", "`` Maliki , you coward , you are an American agent , '' cried one demonstrator in Tikrit , referring to the prime minister .", "`` Iran , out , out ! '' another man shouted , echoing anger among Sunnis at the rise to power in Baghdad of Shiite religious groups backed by Iran , including Mr. Maliki 's Dawa Party .", "After Mr. Maliki made it clear to the Americans in Baghdad that his decision was final , the official who discussed the events on Friday night said , American commanders were told to deliver Mr. Hussein to an execution bloc in the Kadhimiya district of northern Baghdad that Mr. Hussein 's military intelligence agency used to execute countless opponents of his government .", "At 4 a.m. , Mr. Hussein was flown by an American military helicopter from an American detention center and handed over to the Iraqis .", "He was hanged with only Iraqis present , in a group of about 25 , including executioners and guards , according to accounts by American and Iraqi officials .", "A postponement of the execution until after the holiday would have delayed it at least until Thursday of this week .", "But the American officials said they had made no stipulation as to how long the delay should be , since their concern , beyond respecting the sanctity of the Id al-Adha holiday , had been that Mr. Maliki should await a formal judicial ruling resolving the legal issues before going ahead with the hanging .", "The Americans said Mr. Maliki had agreed , as the Americans had urged , to ask the chief judge of Iraq 's Supreme Judicial Council , Midhat al-Mahmoud , to issue a formal written judgment saying that the uncompleted legal procedures that concerned the Americans were not necessary to the lawfulness of the hanging .", "But Judge Mahmoud refused , the Americans said , and around midnight on Friday the Iraqi leader decided to go ahead with the execution , signing a decree ordering that Mr. Hussein be `` hanged by the neck until dead . ''", "The legal issues the Americans said they urged Mr. Maliki to resolve before the hanging centered on a constitutional provision requiring Iraq 's three-man presidency council to affirm all executions before they are carried out .", "That posed a potential obstacle to the hanging because Iraq 's president , Jalal Talabani , is opposed to the death penalty .", "One of the other members of the council , Tariq al-Hashemi , is a Sunni from a moderate party that has disavowed Mr. Hussein , but has been careful not to endorse his trial and execution .", "Mr. Maliki , in pushing ahead with the hanging , relied on a provision in the statute that established the Iraqi High Tribunal , which convicted Mr. Hussein , which said that the tribunal 's verdicts , once upheld by its own appeal bench , were final and not subject to presidential review .", "It was that conflict the Americans said they wanted resolved by a written ruling from Judge Mahmoud .", "`` Mr. 
Maliki said that Judge Mahmoud had given that opinion orally , but we said it would be better for everybody if he said it in writing , '' the American official who discussed the standoff said .", "Sami al-Askari , a political adviser to Mr. Maliki who attended the hanging , said in a telephone interview that the committee would question everyone present at the execution .", "He said those who used their cellphones to record the event would be one focus of the inquiry .", "He said his own observation was that the worst sectarian taunts had come from a guard he described as a poorly educated Shiite man with a thick Arabic accent .", "`` It was horrible , it was terrible , it was a mistake , '' he said .", "`` We were supposed to sit there quietly , just looking at what 's going on . ``", "The first images of the execution that were released were in the form of an official video recording without sound .", "The unofficial cellphone images showed Mr. Hussein , with the noose around his neck , facing shouts of `` Go to hell ! '' and taunts of `` Moktada ! Moktada ! Moktada ! '' in reference to an unruly Shiite cleric , Moktada al-Sadr , who has become a populist hero among Shiites .", "Speaking of those protesting the abuse of Mr. Hussein , Mr. Faroun , the prosecutor , asked , `` Where were these critics when Saddam 's people were executing whole prisons full of innocent people .", "`` He said he had been deeply offended by the taunting of Mr. Hussein , and had tried to stop it .", "`` You heard my voice on the cellphone recording , '' he said .", "`` I was the one shouting , ' Please , no .", "The man is about to be executed . '", "`` THE STRUGGLE FOR IRAQ Correction : January 4 , 2007 , Thursday A front-page article yesterday about an Iraqi government investigation of the abusive behavior at the execution of Saddam Hussein , including the unauthorized cellphone camera recording of it , misstated the account of a witness who said he saw two others there holding cellphone cameras aloft to record Mr. Hussein 's final moments .", "While the witness , Munkith al-Faroun , a prosecutor at Mr. Hussein 's trial , said both of the others were officials , he did not identify one of them as Mowaffak al-Rubaie , the national security adviser .", "Mr. 
Rubaie , who could not be reached for comment in the article , said yesterday that he had handed his cellphone to security officials an hour before the hanging and had not recorded it ."], "summary": ["Iraq 's Shiite-led government orders investigation into abusive behavior at execution of Saddam Hussein , who was taunted by witnesses to his hanging .", "Unofficial cellphone video recording of execution that was broadcast around world has caused public demonstrations in Iraq and widespread dismay elsewhere .", "United States officials say they worked to persuade Prime Min Nuri Kamal al-Maliki to delay execution until Islamic holiday passed and legal questions could be answered .", "US official says Maliki was worried that insurgents might try to free Hussein if execution was delayed .", "Some Iraqi politicians are raising new demands for Maliki 's dismissal .", "Government inquiry is ordered as protests build in intensity .", "Photo ."], "publication": "nyt50", "label": [0, 3, 1], "tag": ["World", "Front Page", "Washington"]} -{"id": "1816168", "text": ["Have you ever made a profit from a catering business or dog walking .", "Do you prefer to work alone or in groups .", "Have you ever set a world record in anything .", "The right answers could help get you a job at Google .", "Google has always wanted to hire people with straight-A report cards and double 800s on their SATs .", "Now , like an Ivy League school , it is starting to look for more well-rounded candidates , like those who have published books or started their own clubs .", "Desperate to hire more engineers and sales representatives to staff its rapidly growing search and advertising business , Google -- in typical eccentric fashion -- has created an automated way to search for talent among the more than 100,000 job applications it receives each month .", "It is starting to ask job applicants to fill out an elaborate online survey that explores their attitudes , behavior , personality and biographical details going back to high school .", "The questions range from the age when applicants first got excited about computers to whether they have ever tutored or ever established a nonprofit organization .", "The answers are fed into a series of formulas created by Google 's mathematicians that calculate a score -- from zero to 100 -- meant to predict how well a person will fit into its chaotic and competitive culture .", "`` As we get bigger , we find it harder and harder to find enough people , '' said Laszlo Bock , Google 's vice president for people operations .", "`` With traditional hiring methods , we were worried we will overlook some of the best candidates . ''", "Google is certainly not alone in the search for quantitative ways to find good employees .", "Employers use a wide range of tests meant to assess skills , intelligence , personality and honesty .", "And the use of biographical surveys similar to Google 's new system is on the rise .", "Such tools , however , have mainly been the trademark of large corporations recruiting armies of similar workers , like telephone service representatives or insurance sales agents .", "They are rarely used in Silicon Valley , which is built on a belief in idiosyncratic talent .", "`` Yahoo does not use tests , puzzles or tricks , etc. 
, when interviewing candidates , '' Jessie Wixon , a spokeswoman for Yahoo , said .", "-LRB- Google is known for hazing prospects in interviews with intractable brain teasers .", "And it once tried to attract candidates by placing some particularly difficult problems on billboards . -RRB-", "Google 's growth is staggering even by Silicon Valley standards .", "It is constantly leasing new buildings for its overflowing campus here and opening offices around the world .", "Google has doubled the number of employees in each of the last three years .", "Even though the company now has about 10,000 employees , Mr. Bock says he sees no reason the company will not double again in size this year .", "That would increase the number of hires to about 200 a week .", "As a result , Mr. Bock , who joined Google from General Electric last spring , has been trying to make the company 's rigorous screening process more efficient .", "Until now , head hunters said , Google largely turned up its nose at engineers who had less than a 3.7 grade-point average .", "-LRB- Those who wanted to sell ads could get by with a 3.0 average , head hunters said . -RRB-", "And it often would take two months to consider candidates , submitting them to more than half a dozen interviews .", "Unfortunately , most of the academic research suggests that the factors Google has put the most weight on -- grades and interviews -- are not an especially reliable way of hiring good people .", "`` Interviews are a terrible predictor of performance , '' Mr. Bock said .", "Mr. Bock said that he wanted the company 's human resources department to bring the iconoclastic style as its Web site developers to the normally routine function of interviewing job candidates .", "`` The level of questioning assumptions is uniquely Googly , '' Mr. Bock said .", "So Google set out to find out if there were any bits of life experience or personality it could use to spot future stars .", "Last summer , Google asked every employee who had been working at the company for at least five months to fill out a 300-question survey .", "Some questions were factual : What programming languages are you familiar with .", "What Internet mailing lists do you subscribe to .", "Some looked for behavior : Is your work space messy or neat .", "And some looked at personality : Are you an extrovert or an introvert .", "And some fell into no traditional category in the human resources world : What magazines do you subscribe to .", "What pets do you have .", "`` We wanted to cast a very wide net , '' Mr. Bock said .", "`` It is not unusual to walk the halls here and bump into dogs .", "Maybe people who own dogs have some personality trait that is useful . ``", "The data from this initial survey was then compared with 25 separate measures of each employee 's performance .", "Again there were traditional yardsticks -- the employee 's reviews , both by supervisors and peers , and their compensation -- and some oddball ones .", "One score was what the company called `` organizational citizenship , '' said Todd Carlisle , an analyst with a doctorate in organizational psychology , who designed the survey .", "That is , `` things you do that are n't technically part of your job but make Google a better place to work , `` Dr. Carlisle said , such as helping interview job candidates .", "When all this was completed , Dr. 
Carlisle set about analyzing the two million data points the survey collected .", "Among the first results was confirmation that Google 's obsession with academic performance was not always correlated with success at the company .", "`` Sometimes too much schooling will be a detriment to you in your job , '' Dr. Carlisle said , adding that not all of the more than 600 people with doctorates at Google are equally well suited to their current assignments .", "Indeed , there was no single factor that seemed to find the top workers for every single job title .", "-LRB- And pet ownership did not seem to be a useful predictor of anything . -RRB-", "But Dr. Carlisle was able to create several surveys that he believed would help find candidates in several areas -- engineering , sales , finance , and human resources .", "Currently about 15 percent of applicants take the survey .", "It will be used for all applicants starting this month .", "Even as Google tries to hire more people faster , it wants to make sure that its employees will fit into its freewheeling culture .", "The company boasts that only 4 percent of its work force leaves each year , less than other Silicon Valley companies .", "And it works hard to retain people , with copious free food , time to work on personal projects and other goodies .", "Stock options and grants certainly encourage employees to stay long enough to take advantage of the company 's surging share price .", "Google 's hiring approach is backed by academic research showing that quantitative information on a person 's background -- called `` biodata '' among testing experts -- is indeed a valid way to look for good workers .", "Michael Mumford , a psychology professor at the University of Oklahoma who specializes in talent assessment , said that this sort of test was effective , but he cautioned that companies should not rely on oddball factors , even if they seemed to correlate to good performance .", "`` You have to know or at least have a hypothesis why having a dog makes a good computer programmer , '' Professor Mumford said .", "`` If you ask whether someone started a club in high school , it is a clear indicator of leadership . ''", "At Google , it is too early to tell if the system is working .", "The surveys have been in use in about a dozen areas for several months .", "Indeed , there is some resistance even at Google to the idea that a machine can pick talent better than a human .", "`` It 's like telling someone that you have the perfect data about who they should marry , `` Dr. Carlisle said .", "But even before the results are in on the new survey , Mr. Bock says he is already seeing success in easing the company past its obsession with grades .", "`` More and more in the time I 've been here , we hire people based on experience as a proxy for what they can accomplish , `` he said .", "`` Last week we hired six people who had below a 3.0 G.P.A. 
''", "Correction : January 5 , 2007 , Friday A picture caption on Wednesday with the continuation of a front-page article about the hiring practices of Google reversed the names of the two employees looking over a job application .", "Laszlo Bock , a Google vice president , was at right .", "Todd Carlisle , an analyst at the company , was at left ."], "summary": ["Google has created extensive survey to screen for well-rounded job candidates .", "300-question survey asks job applicants about their attitudes , behavior , personality and is meant to predict how well person will fit into company 's hectic and competitive culture .", "Such hiring tools are more often used by large corporations and not Internet companies , which pride themselves on finding unique talent .", "Google has traditionally hired people with excellent academic records but it says academic success does not always correlate with success at company .", "Google has 10,000 employees and is growing so rapidly that company wants to make screening process more efficient .", "Some hiring experts say Google 's new approach is valid way to look for good workers and better indicator of job performance than interviews .", "Photo .", "Graphic of application survey ."], "publication": "nyt50", "label": [9, 60, 7, 49, 25], "tag": ["Technology", "Front Page", "Business"]} -{"id": "1816226", "text": ["IN a high school locker room in Worthington , Ohio , Sam Maniar handed a piece of string tied to a washer to each of the field hockey players sitting around him .", "He told the athletes to use their minds to move the washer from side to side , and he watched as each girl 's washer started to swing .", "`` By thinking about moving the washer , we send a message from the brain to the nerve receptors in our fingers to move the string attached to the washer , '' said Dr. Maniar , a sports psychologist .", "`` Most people are in total amazement .", "I use it show the connection between our thoughts and our bodies . ``", "Every athlete and coach knows that harnessing one 's mind can lead to feats of coordination and finesse on the field .", "But what many are just learning is that taking care of an athlete 's emotional health -- and managing stress in particular -- can help prevent injury .", "`` The research is there showing that high stress levels do increase the risk of getting injured , but few lay people realize the connection , '' Dr. Maniar said .", "Iona MacKenzie , 35 , a triathlete and adventure racer in Boulder , Colo . , believes that a period of undue tension in 2001 led to her back-to-back injuries .", "Ms. MacKenzie had just started a highly stressful job in Vancouver , was rowing competitively and was also dealing with a long-distance relationship that was financially and emotionally draining .", "`` I 'd been rowing for about seven or eight years completely injury free , `` she said .", "`` But during that year I fractured a rib and soon after that healed , I injured my lower back .", "Both are fairly common injuries for female rowers , but I do n't feel like either would have happened if my body had n't been so weakened by how much stress I was under . 
``", "The American College of Sports Medicine recently issued a consensus statement -- a joint effort by about a dozen sports medicine practitioners , team doctors and sports psychologists -- to inform team physicians , coaches and athletic trainers about the important link between stress and injury .", "`` This concept is not always welcomed with open arms in the sports community where athletes are taught to be tough , '' said Dr. Stanley A . Herring , chairman of the consensus statement committee , a team physician for the Seattle Seahawks and a consultant to the University of Washington 's sports medicine department .", "`` But you ca n't tough your way through mental issues . ``", "Stress is an omnipresent problem , and the word is used to describe situations ranging from a long wait at Starbucks to fear of a terrorist attack .", "So why , one may argue , is n't every athlete collapsing to the ground .", "While the research has shown a consistent connection between significant negative life events -- the end of a relationship , a death in family , the loss of job , failing in school -- and the increased risk of injury , the key is n't so much the stressful event itself , but how a person handles it .", "`` One man 's stress is another man 's vacation , `` Dr. Herring said .", "`` Those at risk are the ones whose stress exceeds the resources they have to cope with it . ''", "There are several explanations for how stress leads to injuries .", "According to Frank M . Perna , a psychologist and associate professor at Boston University , there may be multiple causes .", "`` Studies have shown that when you 're stressed you do n't pay enough attention to visual cues , `` he said .", "That , for example , could cause a football player to miss something in his peripheral vision and be blindsided and injured .", "Physiologically , more stress means more stress hormones flowing through your body .", "Cortisol , a stress-related hormone , decreases the immune response , Dr. Perna said .", "`` Athletes who are training hard are breaking down muscle , '' he said , `` and cortisol will impede the body 's ability to repair muscles , making them more likely to get injured or exacerbate a chronic injury . ``", "STRESS also increases muscle tension , and tense muscles are more susceptible to tearing or can throw a person off balance and affect coordination .", "Studies have shown that during a single school year , one in six athletes is likely to suffer an injury serious enough to miss games .", "Other studies have reported that high stress levels make athletes at least twice as susceptible to injury .", "The key to recognizing athletes at risk , the researchers agreed , is simply bothering to probe deeper .", "`` When the athlete gets a preseason physical , it should n't be just about the ` physical , ' '' Dr. Herring said .", "`` Ask about what 's going on at home , how are things with their girlfriend , do they feel overwhelmed .", "And if the answers are concerning , it 's an opportunity to help them out . ``", "Dr. Margot Putukian , director of athletic medicine at Princeton University , said , `` Most coaches do n't perceive mental health as something they have to worry about . ``", "For coaches who do n't have a sports psychologist on call , it is still easy enough to implement some stress-management techniques .", "Dr. 
Maniar said there was no reason a coach could not learn a few relaxation tips .", "The American College of Sports Medicine recommends methods such as progressive muscle relaxation , a sequence of movements to tense and release muscle groups to bring tension levels down .", "Visual imagery , which involves picturing certain images to create a different physical state , can also be employed .", "`` For example , imagining being on a beach in the warm sand is likely to produce a physical shift that makes you feel more relaxed , '' said David B . Coppel , a sports psychologist in Kirkland , Wash .", "And doing deep , abdominal breathing helps change shallow breathing -LRB- a symptom of stress and tension -RRB- into deeper , rhythmic breathing .", "MOST high school and college teams may not be sitting down for group yoga and meditation sessions in place of practice , but a few forward-thinking coaches are already heeding the College of Sports Medicine 's recommendations .", "Terri Simonetti Frost , who coaches the field hockey team at Thomas Worthington High School in Worthington , Ohio , brought Dr. Maniar in teach her players imagery techniques .", "He also encouraged the girls to better communicate about what was causing tension in their lives .", "`` Communication is our biggest technique for coping with stress , '' Ms. Frost said .", "`` If a kid seems off during practice , I 'll call her aside and we 'll talk about what 's going on in her life . ``", "Ms. Frost gives her players an extra day off each week -- they practice only four days -- to help reduce stress .", "She also talks to them about their study habits and what is going on in their academic schedules so that they wo n't feel overwhelmed by school and sports simultaneously .", "`` I think on a sort of subconscious level it may play a role in keeping the players healthy , '' she said .", "Although it is impossible to prove cause and effect , she noted that none of her players were injured this last season , and the team was undefeated in the regular season .", "The doctors who put together the College of Sports Medicine consensus statement also said that common emotional reactions to injury -- like loss of identity , fear , anxiety and depression -- could affect a player 's recovery .", "`` But people who have learned coping strategies for dealing with setbacks in life can better handle the stress of being on the sideline , '' Dr. Perna said .", "Stephanie Heinsons , 21 , a senior at Ohio State University who competes in equestrian events , regularly practices controlled breathing and imagery to help cope not only with the stress of competition but also with everyday life .", "`` These techniques help to calm me down and force my focus back into the present , '' she said .", "If she skips her relaxation techniques , she is more likely to feel anxious and tense up .", "`` When I take a deep breath and tell myself to focus on the present , I become calm , '' Ms. Heinsons said .", "`` That helps me like magic . '' ."], "summary": ["Research shows that high stress levels in athletes increase risk of getting injured .", "American College of Sports Medicine releases consensus statement advising team physicians , coaches and athletic trainers on link between stress and injury .", "Researchers say key to finding athlete at risk is to probe deeper and discover how person is handling stressful event .", "Drawing ."], "publication": "nyt50", "label": [13, 31], "tag": ["Health", "Style"]} -{"id": "1816229", "text": ["DR . FRAN E . 
COOK-BOLDEN , a dermatologist in Manhattan , is an advocate of skin-care minimalism .", "When a patient recently arrived for an appointment toting 20 different products she was using regularly -- including an eye cream , a vitamin C cream , a wrinkle serum , a pigmentation cream , a mask , a peel , a scrub and `` some sort of special oxygen detoxifying cream '' -- Dr. Cook-Bolden said she confiscated all but three .", "`` It gave me a headache just to look at all of those products , '' Dr. Cook-Bolden said .", "`` Just two products , a gentle cleanser and a good sunscreen , are enough daily skin care for most people , and you can buy those at a drugstore or a grocery store . ''", "Dr. Cook-Bolden is part of a back-to-basics movement among dermatologists .", "At a time when beauty companies are introducing an increasing number of products marketed for specific body parts -- including necks , creases around the mouth and eyelids -- or for apocryphal maladies like visible pores or cellulite , these doctors are putting their patients on cosmetics restriction diets .", "They are prescribing simplified skin-care routines requiring at most three steps : soap .", "Sunscreen every day , no matter the weather or the season .", "And , if necessary , a product tailored to specific skin needs , whether a cream for pimples or pigmented spots , or a vitamin-enriched moisturizer for aging skin .", "Each product , they say , can be bought at drugstores for $ 30 or less .", "Among those doctors who have become experts at uncluttering their patients ' vanity tables and medicine cabinets is Dr. Sarah Boyce Sawyer , an assistant professor of dermatology at the School of Medicine at the University of Alabama at Birmingham .", "`` My New Year 's beauty resolution for patients is : cut down on skin-care products and cut your skin-care budget , `` Dr. Sawyer said .", "`` Cut down on those $ 100 potions . ''", "For some doctors , simplifying skin-care routines is a way to make patients follow a regimen or a means to soothe irritated skin .", "But some dermatologists are also suggesting patients use fewer , less expensive products because they believe there is little scientific research to justify buying an armload of pricey cosmetics , Dr. Sawyer said .", "`` We have good medical evidence on prescription products , '' she said .", "`` But the science is fuzzy with a lot of cosmetics . ''", "Unlike drugs , cosmetics are not required to prove their efficacy .", "Prescription medications like Accutane for acne and over-the-counter drugs such as sunscreen ingredients must undergo rigorous clinical testing before they gain approval from the Food and Drug Administration .", "But cosmetics are not subject to the agency 's scrutiny before they go on sale .", "The F.D.A. defines cosmetics as topical products that do not alter the structure or function of the skin .", "Dr. William P . Coleman III , the vice president of the American Academy of Dermatology , said consumers should view moisturizers and wrinkle creams as no more than superficial treatments .", "`` You have to think of cosmetics as decorative and hygienic , not as things that are going to change your skin , '' said Dr. Coleman , who is a clinical professor of dermatology at Tulane University Health Sciences Center in New Orleans .", "`` A $ 200 cream may have better perfume or packaging , but as far as it moisturizing your skin better than a $ 10 cream , it probably wo n't . ``", "According to F.D.A. 
regulations , beauty manufacturers are responsible for the safety of their cosmetics and for their own marketing claims .", "Although many beauty companies perform studies on their products , they are not required to conduct clinical trials on the level of medical research or to make their proprietary research available to the public .", "Dr. Mary Ellen Brademas , a clinical assistant professor of dermatology at New York University Medical Center , said the paucity of rigorous published science on cosmetics makes it difficult to determine how well creams work , whether they cost $ 10 , $ 100 or $ 1,000 .", "`` People are spending $ 450 on a jar of cream just because it is made out of something exotic like salmon eggs or cocoons , '' Dr. Brademas said .", "`` But the cheapest products work just as well as the more expensive ones . ''", "A study of wrinkle creams published last month by Consumer Reports concluded that there was no correlation between price and effectiveness .", "The study , which tested nine brands of wrinkle creams over 12 weeks , also concluded that none of the products reduced the depth of wrinkles by more than 10 percent , an amount `` barely visible to the naked eye . ''", "The Consumer Reports study found , for example , that a three-step regimen of Olay Regenerist products costing $ 57 was slightly more effective at reducing the appearance of wrinkles than a $ 135 tube of StriVectin-SD or a $ 335 combination of two La Prairie Cellular lotions .", "`` I am seduced by fancy packaging as much as the next person , '' Dr. Brademas said .", "`` But I have a theory that all these skin-care things come out of the same vat in New Jersey . ''", "John Bailey , the executive vice president for science of the Cosmetic , Toiletry and Fragrance Association , an industry trade group in Washington , said that skin care varies widely in price because of amounts spent on research and development of ingredients and product formulas , and the cost of manufacturing and packaging .", "But , he said , it is difficult to measure performance differences among products .", "`` Cosmetics do n't have the same quantitative analysis as drugs , so you do n't have a set gauge you can use to determine perceived and actual benefits , `` said Dr. Bailey , who has a Ph.D. in chemistry .", "`` Ultimately , consumers will have to try products out and find what works best for them . ''", "THE back-to-basics skin-care regimen is based on practicality rather than marketing claims .", "It does not rely on exotic ingredients grown on far-flung islands hand-picked by natives only under a full moon .", "Dr. Diane C . Madfes , a clinical instructor at Mount Sinai School of Medicine , said that basic skin care requires washing one 's face to remove dirt , sweat and bacteria , and using sunscreen to impede sun damage .", "People who worry about wrinkles , pimples , dry spots or pores may want to add one or two treatment products , she said .", "Dr. Cook-Bolden , who has been a paid consultant for several mass-market cosmetics brands , suggested a mild liquid cleanser for the face .", "Instead of using toners , which may strip skin , or gritty exfoliation beads and microdermabrasion systems , which may irritate skin , she recommended using a washcloth to slough off dead skin cells .", "`` If you have dry , sensitive skin , you just pat the washcloth on your face gently in a circular motion , '' she said .", "`` If you do n't have irritated skin , you can put more speed and pressure on the washcloth . 
``", "Dermatologists disagree whether a moisturizer is then needed .", "Dr. Brademas said it is superfluous .", "`` Moisturizer is optional unless you are in the Arctic , '' said Dr. Brademas , who favors Vaseline petroleum jelly for dry hands , feet , knees and elbows .", "`` I 'm not sure moisturizers do very much except for creating a smooth surface so that makeup can go on without drag . ``", "Dr. Cook-Bolden took a more agnostic position .", "`` If you need a moisturizer , moisturize , '' she said .", "`` If you want less moisture , use a lotion .", "If you want more , use a cream .", "And if you have acne-prone skin , use a gel or a spray . ``", "Although the dermatologists interviewed for this article disagreed about moisturizer , they agreed on one point : the importance of sun protection , including hats , avoidance of midday sun and the use of an effective sunscreen .", "They recommended that consumers look for formulas that include ingredients -- like zinc oxide , titanium dioxide or Mexoryl SX -- that impede damage from the sun 's longer wavelength UVA rays , a protective effect that is not indicated by a product 's SPF rating .", "Beyond soap and sunscreen , Dr. Madfes said that one or two additional products might be added to personalize a skin-care routine .", "`` People who see wrinkles around their eyes are going to reach for an eye cream , '' Dr. Madfes said .", "`` Someone who looks in the mirror and sees large pores may want to use a cleanser with salicylic acid , which can reduce clogged pores . ''", "She is also a proponent of night creams that combine retinol , a form of vitamin A that may help speed up the turnover of skin cells , and antioxidants such as vitamin C , vitamin E or lycopene that may help thwart environmental damage to the skin .", "People with skin conditions like severe acne or people interested in topical anti-wrinkle drugs should consult their doctors about prescription medications , she said .", "On an expedition last week to a CVS Pharmacy at Columbus Circle with a reporter , Dr. Madfes examined the product labels on skin-care items from a variety of mass-market brands and recommended a few basic products , including Cetaphil cleanser and La Roche-Posay Anthelios SX sunscreen .", "`` Higher end , more expensive products may look better in the box and feel better on your face , but they do n't necessarily work better than less expensive products as long as you look for ingredients that are known for efficacy , `` Dr. Madfes said .", "But she did see one benefit to splurging .", "`` The thing is , when someone buys a $ 200 cream , they are going to use that cream , '' Dr. Madfes said .", "`` So , in the end , their skin may benefit . ''", "Drugstores Bank on Snob Appeal -LRB- But Can You Pronounce It .", "-RRB- By NATASHA SINGER LUMENE .", "Lierac .", "Ol\u00ed . 
Arnaud .", "Proraso .", "No , these are not race-car drivers competing in the Dakar Rally .", "They are imported skin-care brands now competing for consumers ' attention on the shelves at Target , Walgreens and CVS Pharmacy .", "To try to attract brand-conscious beauty consumers who are wont to buy their cosmetics at department and specialty stores , drugstore and big-box chains are starting to introduce niche European skin-care lines .", "These imported products , some of which include retinol , peptides or antioxidants derived from exotic plants , aim to rival prestige brands in formula , texture , packaging and fragrance .", "`` You are no longer necessarily getting a better product assortment , better products or better service at a department store , '' said Karen R . Young , the chief executive of the Young Group , a consulting firm for the beauty industry .", "`` It is a massive leveling of the playing field . ''", "In the last few years , for example , CVS , with 6,200 stores nationwide , has upgraded its cosmetics aisles in 300 stores by introducing beauty departments called Healthy Skincare Centers .", "The displays are stocked with French skin-care brands like Av\u00e8ne and La Roche-Posay , a line that was previously sold in the United States through dermatologists ' offices .", "The centers are staffed by beauty advisers .", "These products cost more than mass-market skin care .", "Janice Jacobs , director of proprietary and limited-distribution brands for CVS , said the chain 's most expensive skin-care product now costs about $ 60 , up from about $ 20 in 1999 .", "`` Consumers are looking for convenience and accessibility , '' Ms. Jacobs said .", "`` If we can give her the tools and the testers of a department store , and the skin-care advice on her terms when she needs it , then she is willing to buy in our stores . ''", "Last year , Target created a department called Bath & Body , a range of 20 skin-care brands including Ma Provence , a fragrance line , and Proraso , an Italian shaving line , that are now sold in almost 1,500 stores .", "Walgreens recently introduced its European Beauty Collection , a group of seven skin-care brands including Institut Arnaud Paris and Skincode , a line from Switzerland .", "And drugstore.com just added Cl\u00e9o , a brand made in Italy whose key ingredient is yogurt .", "`` The drugstores are saying , ' We are not Saks , but if we have our own Spanish or Finnish brand , we 'll attract consumers with our own exclusive products , ' `` Ms. 
Young said ."], "summary": ["Many dermotologists are advising patients to restrict use of cosmetics even as beauty companies are introducing an increasing number of products marketed for specific body parts -- including necks , creases around mouth and eyelids -- or for apocryphal maladies like visible pores or cellulite .", "Are prescribing simplified skin-care routines -- soap , sunscreen and , if necessary , products tailored to specific skin needs .", "Chain drugstores are attempting to attract brand-conscious beauty consumers by introducing imported skin-care lines .", "Photos -LRB- M ."], "publication": "nyt50", "label": [5, 6, 8], "tag": ["Health", "Style"]} -{"id": "1816230", "text": ["FOR those who have failed in a decade or three 's worth of New Year 's resolutions to become better workers , spouses , parents , athletes or lovers , there is a new frontier in personal growth -- or at least a proliferation of products , mostly hawked over the Internet , that promise to help turn the last bit of untrammeled downtime -LRB- sleep -RRB- into an opportunity for improvement .", "New health products have emerged , often from the margins of commerce .", "Old self-help approaches like subliminal `` sleep learning '' have evolved and found new life on the Web .", "`` While you sleep ! '' has become an Internet marketing catchphrase .", "The idea plays on two classic , if contradictory , American impulses : the desire to get ahead , and the compulsion to avoid the slightest expenditure of effort .", "There are diet pills sold under names like Lose and Snooze and Sleep ` n Slim , which contain collagen and which the makers say can help maximize the body 's metabolism .", "There are foot pads from Japan that look like tea bags and promise to drain toxins and restore energy while you sleep .", "On one Web site , hypnotictapes.com, besides recordings designed to improve public speaking or break addiction to alcohol or heroin , there are programs promising to help you , at least partly while sleeping , `` Overcome Fear of Clowns '' and `` Master the Bagpipes . ''", "`` The grow-yourself revolution started in the ' 50s , '' said David Allen , a productivity consultant who wrote `` Getting Things Done : The Art of Stress-Free Productivity '' -LRB- Viking , 2001 -RRB- , and it `` is the one industry that has never faltered in the last 40 years .", "All they have done is give you more clever ways of getting it without having to give anything . ``", "Products marketed for nocturnal self-transformation often target outward appearance .", "Ortho-K contact lenses , which reshape the cornea to correct vision and are typically worn only at night , have been approved by the Food and Drug Administration .", "But the claims of many other products are more nebulous .", "Nancy Clark , a registered dietician and the author of the best-selling `` Nancy Clark 's Sports Nutrition Guidebook `` -LRB- Human Kinetics , 2003 -RRB- , said that she had never heard of collagen -- the supposed magic bullet in some nocturnal diet pills -- used as a weight-loss aid .", "`` If it were true , '' she said , `` it would be front-page news . 
''", "The products with perhaps the broadest potential market , and often the most extravagant promises , are designed for so-called sleep learning .", "Professionals who think the boss has been a little slow with that promotion -- and who have left their skepticism in their other briefcase -- can try to kick-start their careers with the Wealth & Success Power Affirmations subliminal program -LRB- `` start advertising to your own mind '' -RRB- .", "Sold through sleeplearning.com, it can be played through stereo pillow speakers available on the same Web site for $ 29.95.", "Not everyone selling self-improvement while sleeping promises overnight miracles .", "James Schmelter , a certified hypnotherapist who runs hypnotictapes.com out of San Francisco , said sleep learning on its own is often of only marginal benefit , which is why it is only one of four elements in his hypnosis-at-home programs , many of which he records individually for customers and sells for $ 98 .", "The other three parts of the program are to be listened to awake .", "Hypnosis is generally accepted as a legitimate technique to facilitate other forms of treatment , like cognitive behavior therapy , said Guy H . Montgomery , a clinical psychologist at Mount Sinai School of Medicine in Manhattan .", "But unlike the hypnotized brain , which is receptive to spoken suggestions , the sleeping brain is not so suggestible , said Dr. Michael J . Sateia , the head of the sleep disorders program at Dartmouth-Hitchcock Medical Center in Lebanon , N.H.", "`` Generally , '' he explained , `` sleep is considered to be a state of being relatively ' offline , ' as it were , with respect to extrasensory input . ''", "Sleep learning is a self-empowerment concept that dates back at least to Aldous Huxley 's `` Brave New World , '' published in 1932 , but for most of the intervening years it could be found mainly on cassette programs tucked away on self-help shelves in bookstores alongside volumes on astral projection .", "That all changed with the Internet .", "Marc VanDeKeere , for example , is one seller of sleep-learning self-improvement products who said he has noted a spike in business in recent years .", "Mr. VanDeKeere , a hypnotherapist who operates the Web site brainwave-entrainment . com , said he had seen sales of subliminal problem-solving and meditation products , which he recommends that customers use while sleeping , increase by a third in the last few years .", "And he proudly points to 17,000 positive feedback responses for his products on eBay .", "`` Fifteen years ago , '' he said , `` nobody would be able to find these products . ''", "Eldon Taylor , the founder of Progressive Awareness Research , a company in Medical Lake , Wash . , that has marketed a line of hypnosis CDs to be used awake and asleep under the InnerTalk brand since 1984 , said demand has grown noticeably in the last five years .", "He attributes that growth in part to economic anxieties that have increased people 's desire to wring productivity out of the wee hours .", "`` How many times do people change jobs .", "`` asked Mr. Taylor , whose Web site , innertalk.com, offers CDs titled '' Excel in Exams `` and '' Natural Breast Enlargement . 
``", "`` We live in a society full of more information than we can absorb , '' he said , `` but we 're required to absorb it , because if we do n't absorb it , we fall out `` of the race .", "This apparent renewed interest in learning while snoozing comes at a time of increased academic research into the topic .", "Last year , for example , researchers at the University of L\u00fcbeck in Germany found that gently stimulating the brain during sleep with an electrical current at a certain frequency improved the ability of test subjects to remember a string of words they had learned before a nap .", "Still , Dr. Jerome M . Siegel , a professor of psychiatry at the Center for Sleep Research at the University of California , Los Angeles , said that most research he has seen indicates that subliminal sleep learning is of little or no value because `` the sound had to actually wake you before you would benefit . ''", "Dr. Siegel recalled rigging a speaker system under his own pillow in junior high school in a failed effort to learn French .", "`` Even when you 're sleepy , but not asleep , `` he added , '' you do n't learn very well . ``", "A study in 1992 by the British Psychological Society dismissed the concept of sleep learning , finding that it can `` only occur if the sleeping person is partly awakened by the message . ''", "But such findings did not kill off interest in the concept and its seductive , something-for-nothing allure .", "For Candace Kinsella , a retired nurse in Largo , Fla . , who wanted to reduce by a few sizes , diet pills were not enough , so she turned to a weight-loss sleep program that is not subliminal , but consists of spoken messages like `` eat more vegetables , '' she said .", "Despite the potential distraction of someone talking while you 're trying to sleep , she said the program , which is sold on the Web site robert-egby . 
com , is relaxing .", "`` It 's like going to sleep with the TV on , `` she said .", "She credits it with helping her lose 50 pounds over one year , starting from a weight of 345 , simply by providing constant reminders to go to the gym , drink water instead of soda and eat better .", "`` All of this augments '' a commitment during waking hours to healthy living , she explained .", "Then again , the typical middle-aged American gets as little as 6.1 hours of sleep a night , according to a recent University of Chicago study , instead of the commonly accepted optimum of eight .", "So people who fail to transform themselves suitably during the wee hours can always fall back on using their sleep to obtain one thing that doctors believe is crucial to a life of happiness and productivity : rest ."], "summary": ["Web gives new life to products that promise to transform you ` while you sleep , ' and those with broadest market and most extravagant promises are designed for so-called sleep learning .", "Most research shows that people learn only when awake and suggest that people use sleep to rest .", "Drawing ."], "publication": "nyt50", "label": [15, 6], "tag": ["Technology", "Education", "Style"]} -{"id": "1816249", "text": ["WILLIAM LEININGER is not your typical environmental zealot .", "A Navy commander who works as a doctor at the Naval Medical Center San Diego , he is a Republican and lives in one of California 's most conservative counties , in a development of neat lawns and Spanish-style houses .", "His 2,400-square - foot , single-level house -- `` the usual Southern California design , '' he said recently -- is barely distinguishable from its neighbors , apart from one detail : the red-tile roof is crammed with solar panels .", "Dr. Leininger , 42 , is one of thousands of Californians , many of them unlikely converts to the cause of alternative energy , who have installed solar power systems in their homes in just the last year .", "Spurred by recent legislation that provides financial incentives -- and by rising energy costs and , perhaps , by a lingering distrust of power companies in the aftermath of the California electricity crisis at the start of the decade -- homeowners across the state have come to see solar power as a way to conserve money as well as natural resources .", "Architects in California are routinely designing solar systems into custom homes , and developers are offering solar systems and solar-ready wiring in new spec houses and subdivisions .", "Solar power is also emerging as a kind of status symbol , a glamorous mark of personal responsibility .", "Celebrities , including Leonardo DiCaprio , Alicia Silverstone , Carlos Santana and Tom Seaver , have installed solar systems .", "-LRB- Edward Norton runs a campaign in Los Angeles , encouraging his fellow celebrities to install solar panels on their homes and to make donations for systems in low-income housing . 
-RRB-", "The vogue began in earnest a year ago , when the state legislature approved the California Solar Initiative , one of the most ambitious solar programs in the world .", "The legislation took effect at the start of this month but was preceded by a stopgap measure with similar terms that ran throughout 2006 , offering homeowners a rebate on top of the federal tax credit of up to $ 2,000 that has been available nationwide since 2006 .", "The theory was that supplanting the year-to-year incentive programs in place since 1998 with the long-term certainty offered by the initiative 's 10-year , $ 3.2 billion program of rebates -LRB- one-third of which would likely go to homeowners -RRB- would stimulate the development of a robust solar sector -- which could then be weaned from subsidies as its growing scale brought down prices .", "If it works as planned , said J . P . Ross , the policy director for Vote Solar , an organization that advocates for large state-level solar projects , the initiative will stimulate the installation of 3,000 megawatts of solar electrical generating capacity in the state over the next decade .", "That would be an increase by a factor of more than 20 , Mr. Ross said , equivalent to 30 small natural-gas-fired power plants .", "Given the enthusiasm homeowners have shown for the initiative , filing nearly twice as many plans for solar systems with the California State Energy Commission in 2006 than in previous years , this goal may not be far-fetched .", "Other states are considering the future of their solar programs -LRB- several states in the Northeast and the Southwest have less ambitious ones in place , including New York , New Jersey and Connecticut -RRB- , and they are closely watching California 's .", "As the rebate program has made it less expensive to install a home solar system -- and as banks , which consider a solar system to be an improvement that increases a house 's value , have made financing readily available -- the solar industry has grown .", "There are now 434 companies registered to install solar systems by the state energy commission , which together installed just under 50 megawatts of solar electric generating capacity in 2006 , the most in a single year .", "-LRB- California 's total capacity by October was 180 megawatts , enough energy to power about 135,000 homes .", "At the end of 2005 the nationwide solar photovoltaic capacity was 425 megawatts . -RRB-", "While much of the total came from industrial and utility installations , more than 7,000 homeowners filed plans with the state energy commission in 2006 , up from about 4,000 in each of the previous two years .", "The companies are responding not only to an increase in demand , but also to a change in the type of consumers interested in going solar .", "Unlike the do-it-yourself tinkerers who once made up much of the home photovoltaic market , the people fueling the current growth spurt are interested in hands-off user friendliness .", "`` I more or less set it up and then I forgot about it , '' said Nicky Gonz\u00e1lez Yuen , an instructor in political science at De Anza College in Cupertino , who hired a company called NextEnergy to install the modest three-kilowatt system in his 100-year-old Berkeley duplex .", "`` I 'm a really busy person , and I did n't need to know that level of information . 
``", "Companies like NextEnergy provide homeowners with a complete package that includes system design , permit applications , rebate processing , installation , maintenance and warranty .", "`` It was a seamless , painless process , '' said Mr. Yuen , whose system cost $ 16,000 after the California rebate and the federal tax credit , which together saved him $ 10,000 .", "It was `` comparable to having a sprinkler system put in , '' he said .", "Mr. Yuen , 47 , was the first on his block to install a solar system : `` In my circle I 'm the eco-nut , `` he said .", "But , he said , less than a year later they are quite common in his neighborhood .", "`` A lot of people are really paying attention and beginning to think about the whole environmental cycle , '' he added .", "But even as these solar adopters re-evaluate their relationship to the power grid , virtually all of them remain connected to it , which is contrary to the go-it-alone image of the early solar pioneers .", "Though the connection means a house will lose power in a blackout , most home users find the ease of operation makes up for the loss of independence .", "`` All I see is an e-mail from the system once a month , '' said Robert Felton , chief executive of TenFold , a software company , of the report of how much power his mansion in the Oakland hills is using and producing .", "As recently as 10 years ago it was unheard of and , in fact , illegal for solar-powered houses in California to connect to the grid .", "Now power companies are legally required to credit their customers for the excess power they produce .", "The grid , in effect , serves to store power , replacing the bank of batteries that is a component of off-grid systems .", "At the end of the year , credits for solar power added to the grid are applied against charges for power taken from it , helping homeowners `` zero out '' their electricity bills .", "According to Borrego Solar Systems , the company that installed the long rows of solar panels on a hill next to Mr. Felton 's house , two-thirds of its customers manage to do so .", "Excess credits are lost at the end of the year , so homeowners , at least for now , can not make a profit from their solar systems .", "Even so , the savings can be substantial : in 2005 Mr. Felton paid Pacific Gas and Electric about $ 2,500 a month for electricity .", "-LRB- `` I have a whole bunch of fountains and water features and stuff like that , '' he said . -RRB-", "In California residential electricity rates are tiered , and large users like Mr. Felton pay rates about three times higher than more modest consumers , making solar power even more attractive .", "While the average home solar system is about five kilowatts , Mr. Felton 's is 45 kilowatts , and he seldom sees an electric bill .", "Borrego Solar estimated the system could save Mr. Felton almost $ 2 million over 30 years -- far more than the $ 255,000 the system cost him after a $ 134,000 rebate .", "Mr. Felton , 67 , said that a solar system did not make sense when he built his house in 2000 , but that the rebate , as well as rising electricity prices , persuaded him to install the system last year .", "His pragmatic concerns were also informed by broader issues .", "`` I 'm not a hippie greenie , `` he said , pointing out that with a background in nuclear engineering , he strongly supports nuclear power .", "`` But solar is certainly a way to get off foreign oil . ''", "As a member of the military who has been deployed to the Persian Gulf three times , Dr. 
Leininger has been affected by the nation 's foreign oil habits more than most .", "`` The need for stable oil supplies is the big reason that we spend so much time in the Persian Gulf , '' he said .", "`` Decreasing our national energy consumption is in my self-interest . ''", "His neighbors in the San Diego suburb of Escondido , most of them politically conservative , have responded well to the solar panels of the eight-kilowatt system that he and his wife , Suzann , a cartographer , installed last year on their roof .", "The neighborhood association , which was required to approve the plan by California law , did so happily , he said .", "Lately , the Leiningers have noticed at least one other photovoltaic system in the immediate area and a number of solar heating systems for swimming pools .", "-LRB- Meanwhile , in Orange County , which is known for its political conservatism , about 250 solar installations were approved from January to November last year , more than twice the 2005 figure . -RRB-", "The Leiningers , who paid Borrego Solar $ 39,000 for their system after a $ 24,000 rebate , figure their system will pay for itself in a dozen years -- assuming residential electricity rates do not increase , as they have by 37 percent since 1998 .", "Dr. Leininger estimated that his system had reduced his household carbon emissions by nearly 30 tons since it was installed in June , and that it was well on its way to zeroing out .", "`` It comes down to personal responsibility , '' he said .", "`` If I can go electricity-neutral on my house , that 's that much less coal we have to burn . ``", "And much less money .", "`` One of the most gratifying things is on a sunny day when the meter is spinning backward , '' Dr. Leininger said .", "`` We have a guaranteed return on the system because we know we 're not going to have an electric bill from now on . 
``", "Chasing Solar Power in the Northeast HOME solar systems in the Northeast can produce up to 90 percent of the electricity generated by those in California , said Michael Hall , chief marketing officer for Borrego Solar Systems , a company in California .", "It is not surprising , then , that some California installers are expanding nationally .", "They include PowerLight and Akeena Solar , which opened offices in New Jersey in the last two years .", "The New York State Energy Research and Development Authority 's Web site , powernaturally.org, explains the state 's incentive program , which has been in place since 2002 .", "It covers 40 percent to 70 percent of the cost of a home system , depending on its size and other factors .", "State-approved installers are listed at the site as well .", "New Jersey has been offering rebates to homeowners for photovoltaic systems since 2001 , and it is now the second-largest solar market in the country .", "The state offers rebates up to $ 3.80 per watt for home systems .", "While this is less than New York 's rate , New Jersey makes it possible for system owners to sell renewable energy credits and earn continuing revenues , unlike New York and most other states .", "Details and contact information for dozens of installers are at the New Jersey Clean Energy Program 's Web site , njcep.com.", "Connecticut 's rebate program pays up to $ 25,000 per solar photovoltaic installation , depending on the components used and how they are configured .", "Information is at ctinnovations.com/ funding / ccef / solar -- rebates.php.", "GREGORY DICUM ."], "summary": ["Solar industry has grown in California in year since state legislature approved homeowner rebates for installing solar electric generating systems .", "Banks , which consider solar systems to be improvement that increases house 's value , are making financing readily available .", "Architects are routinely designing solar systems in custom houses , and developers are offering solar systems and solar-ready wiring in new spec houses and subdivisions .", "Photos ."], "publication": "nyt50", "label": [5, 16], "tag": ["Home and Garden", "Style"]} -{"id": "1816254", "text": ["After the peppered beef carpaccio and before the pan-fried sea bass there were raucous toasts and the clinking of wine glasses in the V.I.P. room of New Heights , a jazzy restaurant in this city 's most luxurious location , overlooking the Bund .", "Wang Guangyi , one of China 's pioneering contemporary artists , was there .", "So were Zhang Xiaogang , Fang Lijun , Yue Minjun , Zeng Fanzhi and 20 other well-known Chinese artists and their guests , many of whom had been flown in from Beijing to celebrate the opening of a solo exhibition of new works by Zeng Hao , another rising star in China 's bubbly art scene .", "`` We 've had opening dinners before , `` said the Shanghai artist Zhou Tiehai , sipping Chilean red wine , '' but nothing quite like this until very recently . 
``", "The dinner , held on a recent Saturday night in a restaurant located on the top floor of a historic building that also houses an Armani store and the Shanghai Gallery of Art , was symbolic of the soaring fortunes of Chinese contemporary art .", "In 2006 Sotheby 's and Christie 's , the world 's biggest auction houses , sold $ 190 million worth of Asian contemporary art , most of it Chinese , in a series of record-breaking auctions in New York , London and Hong Kong .", "In 2004 the two houses combined sold $ 22 million in Asian contemporary art .", "The climax came at a Beijing auction in November when a painting by Liu Xiaodong , 43 , sold to a Chinese entrepreneur for $ 2.7 million , the highest price ever paid for a piece by a Chinese artist who began working after 1979 , when loosened economic restrictions spurred a resurgence in contemporary art .", "That price put Mr. Liu in the company of the few living artists , including Damien Hirst and Jeff Koons , whose work has sold for $ 2 million or more at auction .", "`` This has come out of nowhere , '' said Henry Howard-Sneyd , global head of Asian arts at Sotheby 's , which , like Christie 's , has just started a division focusing on contemporary Chinese art .", "With auction prices soaring , hundreds of new studios , galleries and private art museums are opening in big cities like Beijing and Shanghai .", "Chinese auction houses that once specialized in traditional ink paintings are now putting contemporary experimental artworks on the block .", "Western galleries , especially in Europe , are rushing to sign up unknown painters .", "Artists a year out of college are selling photographic works for as much as $ 10,000 each .", "Well-known painters have yearlong waiting lists .", "And the Solomon R . Guggenheim Museum and the Pompidou Center in Paris are considering opening branches in China .", "`` What is happening in China is what happened in Europe at the beginning of the 20th century , '' said Michael Goedhuis , a collector and art dealer specializing in Asian contemporary art who has galleries in London and New York .", "`` New ground is being broken .", "There 's a revolution under way . ``", "But the auction frenzy has also sparked debate here about whether sales are artificially inflating prices and encouraging speculators , rather than real collectors , to enter the art market .", "Auction houses `` sell art like people sell cabbage , '' said Weng Ling , the director of the Shanghai Gallery of Art .", "`` They are not educating the public or helping artists develop .", "Many of them know nothing about art . 
``", "But the boom in Chinese contemporary art -- reinforced by record sales in New York last year -- has also brought greater recognition to a group of experimental artists who grew up during China 's brutal Cultural Revolution -LRB- 1966-1976 -RRB- .", "After the 1989 government crackdown in Tiananmen Square , avant-garde art was often banned from being shown here because it was deemed hostile or anti-authoritarian .", "Through the 1990s many artists struggled to earn a living , considering themselves lucky to sell a painting for $ 500 .", "That has all changed .", "These days China 's leading avant-garde artists have morphed into multi-millionaires who show up at exhibitions wearing Gucci and Ferragamo .", "Wang Guangyi , best-known for his Great Criticism series of Cultural Revolution-style paintings emblazoned with the names of popular Western brands , like Coke , Swatch and Gucci , drives a Jaguar and owns a 10,000-square - foot luxury villa on the outskirts of Beijing .", "Yue Minjun , who makes legions of colorful smiling figures , has a walled-off suburban Beijing compound with an 8,000-square - foot home and studio .", "Fang Lijun , a `` Cynical Realist '' painter whose work captures artists ' post-Tiananmen disillusionment , owns six restaurants in Beijing and operates a small hotel in western Yunnan province .", "If China 's art scene can be likened to a booming stock market , Zhang Xiaogang , 48 , is its Google .", "More than any other Chinese artist Mr. Zhang , with his huge paintings depicting family photographs taken during the Cultural Revolution , has captured the imagination of international collectors .", "Prices for his work have skyrocketed at auction over the last two years .", "When his work `` Bloodline Series : Comrade No . 120 '' sold for $ 979,000 at Sotheby 's auction in March , many art insiders predicted the market had topped out and prices would plummet within months .", "But in October , the British collector Charles Saatchi bought another of Mr. Zhang 's pieces at Christie 's in London for $ 1.5 million .", "Then in November at Christie 's Hong Kong auction , Mr. Zhang 's 1993 `` Tiananmen Square '' sold to a private collector for $ 2.3 million .", "According to Artnet.com, which tracks auction prices , 16 of Mr. Zhang 's works have sold for $ 500,000 or more during the past two years .", "Are such prices justified .", "Uli Sigg , the former Swiss ambassador to China and perhaps the largest collector of Chinese contemporary art with more than 1,500 pieces , calls the market frothy but not finished .", "`` I do n't see anything at the moment that will stop the rise in prices , `` he said .", "`` More and more people are flocking to the market . ''", "Mr. Goedhuis insists that this is the beginning of an even bigger boom in Chinese contemporary art .", "`` I do n't think there 's a bubble , `` he said .", "`` There 's a lot of speculation but no bubble .", "That 's the paradox .", "In China there are only a handful of buyers -- 10 , 20 , 30 -- out of a billion people .", "You only need another 10 to come in and that will jack up prices . ``", "He added : `` Another astonishing fact is there is not a single museum in the West that has committed itself to buying Chinese art .", "It 's just starting to happen .", "Guggenheim , the Tate Modern , MoMA , they 're all looking . 
``", "Representatives from those museums , as well as others , have made scouting missions to China .", "A growing number of international collectors are looking at Chinese art too .", "`` After the 2005 Sotheby 's show I just jumped in , `` said Didier Hirsch , a French-born California business executive who has long collected American and European contemporary art .", "`` People said the next big run-up in prices would be at Sotheby 's in March so I said , ` Now or never . '", "`` Mr. Hirsch purchased nearly his entire collection -- about 40 works -- by phone after doing research on the Internet .", "He said he went first for what he called the titans -- the original group of post - ` 79 painters -- including Wang Guangyi and Liu Xiaodong .", "Some critics here say the focus on prices has led to a decline in creativity as artists knock off variations of their best-known work rather than exploring new territory .", "Some are even employing teams of workers in assembly-line fashion .", "Christopher Phillips , a curator at the International Center of Photography in New York , has become a regular visitor to China , scouting young artists for the center and other places .", "On a recent trip `` I went to visit the studio of a well-known Beijing painter , '' Mr. Phillips said .", "`` The artist was n't there , but I saw a group of canvases being painted by a team of young women who seemed to be just in from the countryside .", "I found it a little disconcerting . ``", "There are also complaints that some artists are ignoring international standards by selling works directly into the auction market , rather than selling first to collectors .", "And many experts here say that some gallery officials and artists are sending representatives to the auctions to bid on their own works to prop up prices , or `` protect '' the prices of some rising stars .", "But Lorenz Helbling , director of the ShanghART Gallery here , said Chinese artists continue to produce an impressive array of works , and that talk about the market being overrun by commercialism is exaggerated .", "`` Things are much better than they were 10 years ago , '' he said .", "`` Back then many artists were commissioned to simply paint dozens of paintings for a gallery owner , who went out and sold those works .", "Now these artists are thinking more deeply about their work because they 're finally getting the recognition they deserve . 
`` ."], "summary": ["Soaring prices for Chinese contemporary art have led way for opening of hundreds of new studios , galleris and private art museums in cities like Beijing and Shanghai .", "Chinese auction houses that once only dealt in traditional art have now begun to put contemporary pieces on block .", "European galleries are also clamoring to sign unknown Chinese artists , some of whom been able to sell their works for large sums .", "Auction and gallery frenzy has sparked debate in China about whether sales are artificially inflated to encourage speculators , rather than real collectors , to enter art market .", "There are also complaints that some artists are ignoring international standards by selling works directly into auction market , rather than selling first to collectors .", "Photos ."], "publication": "nyt50", "label": [63, 19, 10, 11], "tag": ["Travel", "Arts"]} -{"id": "1816258", "text": ["Christopher Wheeldon , one of the world 's most sought-after choreographers , who recently announced an end to his residency at New York City Ballet , has formed his own company , a major event in the dance world .", "The company , Morphoses the Wheeldon Company , is to make its debut in August at the Vail International Dance Festival in Colorado , followed by four to six performances at the Sadler 's Wells Theater in London in September and roughly the same number at City Center in New York in October .", "The company is named after a 2002 dance that Mr. Wheeldon created for City Ballet .", "In an interview yesterday Mr. Wheeldon said performances would include a mix of his own choreography and works by others , performed by stars on loan from City Ballet and the Royal Ballet in London .", "`` We 're still kind of at the hopes-and-dreams stage , `` he said , adding that he had approached no one to become a permanent member .", "`` I do n't really believe in poaching , `` he said .", "`` I do n't even know at this stage whether I can offer them the security they need . ``", "At Vail , he said , he planned to cast couples from the Hamburg Ballet , Pacific Northwest Ballet , San Francisco Ballet and City Ballet to dance the four pas de deux from his `` Polyphonia . ''", "Among the dancers tentatively scheduled to participate in his first season are Wendy Whelan , S\u00e9bastien Marcovici , Maria Kowroski , Sofiane Sylve and Edwaard Liang from City Ballet , and Darcey Bussell , Alina Cojocaru and Johan Kobborg from the Royal Ballet , where Mr. Wheeldon has danced and choreographed .", "`` None of these dancers reflect the dancers that are necessarily going to be in the company when it 's formed in a more concrete way , `` he said .", "Mr. Wheeldon said he would concentrate on the artistic side and focus on new works , including those by contemporary choreographers .", "For now Lourdes Lopez , a former City Ballet principal , is handling administrative matters .", "The company is expected to have about 20 members and an annual budget of about $ 5 million .", "Mr. 
Wheeldon declined to discuss specifics on how the company would be financed , saying only that some potential donors had shown interest .", "The initial performances are being presented by Vail , City Center and Sadler 's Wells , which will cover the dancers ' fees and serve as long-term partners of the company .", "He said he hoped those performances would attract interest from potential company members and donors .", "The arrival of a new dance company is always an unusual event , given the funds needed to support an essentially money-losing endeavor .", "It is even rarer for an individual to establish a private company , as opposed to one sponsored by a city .", "But Mr. Wheeldon 's stature and substantial body of work , especially at his tender age of 33 , appear to be enough to attract backers .", "Mr. Wheeldon said in November that he would leave City Ballet as its first resident choreographer when his contract expires in 2008 .", "He is scheduled to choreograph two more works for it .", "Peter Martins , the City Ballet 's ballet master in chief , said at the time that he was happy to see Mr. Wheeldon spread his wings .", "Mr. Martins was in rehearsals and could not be immediately reached for comment yesterday .", "Inevitably Mr. Wheeldon 's company will compete for attention , donations and dancers , something Mr. Wheeldon indirectly acknowledged .", "He said Mr. Martins gave his blessing , yet `` he understands also that this may mean some dancers will decide to come to me , '' Mr. Wheeldon said .", "`` That 's just the way life is and the way things go . ``", "He continued , `` I 'm sort of stepping into an area where people might think , ` Why does New York need another ballet company when we 've already got two .", "' `` -LRB- In addition to City Ballet , New York is home to American Ballet Theater . -RRB-", "Answering his own question , he said , `` Maybe it does n't , but I 'm going to do it , and we 'll see if I 'm foolish or not . ``", "Mr. Wheeldon said he wanted to give dancers a greater voice , which is sometimes difficult in large companies like City Ballet .", "Referring to leaders of large companies in general , he said that casting decisions were not `` always handled in a perfectly sensitive way . ''", "`` My mission is to create an environment that is collaborative in all respects , '' he said .", "In an earlier recent interview he said he could make a `` change for the better in the ballet world '' by starting a company from scratch .", "`` I want to be in complete control of my personal artistic vision and goals , '' he said , `` and am not really interested in inheriting a legacy , but rather taking the opportunity to forge my own . ''", "Starting fresh also meant bypassing the `` big politics '' and bureaucracy of a large company , he said .", "The idea began taking shape last summer when the San Francisco Ballet presented one of his pieces at the Lincoln Center Festival .", "The choreographer William Forsythe had a work on the same program , and the two spoke at length .", "`` He basically told me that I needed to take a step forward on my own and do something different , and coming from him -- he is a man who has continued to invent himself -- it was immediately resonant , '' Mr. Wheeldon said .", "Mr. 
Wheeldon received a boost from an old friend at City Ballet , Damian Woetzel , who had recently become artistic director of the Vail festival and agreed to give him a springboard there .", "`` We 're going to work together to further the ideas that we 've been kicking around all these years , `` Mr. Woetzel said .", "`` It 's a chance to make a real solid contribution . ``", "While mainly making dances for City Ballet -- itself long a vehicle for George Balanchine 's work -- the British-born Mr. Wheeldon has been sought out by companies around the world , including the San Francisco Ballet , the Royal Ballet , the Bolshoi Ballet and the Philadelphia Ballet .", "News of his plans have been percolating through the dance world but were not widely known .", "`` What a good idea , '' said George Steel , the executive director of the Miller Theater at Columbia University , which began expanding its dance offerings after an evening of Wheeldon choreography in the fall of 2005 .", "`` More dancing is wonderful , '' Mr. Steel said .", "`` It 's a huge opportunity for him to have a company he can run . ``", "Dance Correction : January 9 , 2007 , Tuesday An article in The Arts on Thursday about the formation of a new dance company by the choreographer Christopher Wheeldon misstated the name of one of the companies that asked him in the past to choreograph for it .", "It is the Pennsylvania Ballet -LRB- based in Philadelphia -RRB- , not the Philadelphia Ballet ."], "summary": ["Christopher Wheeldon , one of world 's most sought-after choreographers who recently announced end to his residency at New York City Ballet , has formed his own company Morphoses the Wheeldon Company .", "Company will make debut in August at Vail International Dance Festival in Colorado and will perform at Sadler 's Wells Theater in London and at City Center in New York City .", "Wheeldon comments .", "Photo ."], "publication": "nyt50", "label": [0, 1], "tag": ["Arts"]} -{"id": "1816262", "text": ["In a rare reprise to a rare trial , the jury that convicted a Long Island man of murder by depraved indifference last October in a drunken-driving crash that killed a limousine driver and a 7-year-old girl may be brought back to court for hearings on whether some members acted improperly during deliberations .", "In an answer submitted on Dec . 29 to a defense motion , Robert Hayden , an assistant Nassau County district attorney , agreed that a hearing should be held to determine if jurors discussed what they believed were prior drunken-driving charges against the defendant , Martin Heidgen , 25 .", "Such accusations were not included in the evidence introduced in the five-week trial , and could be considered a contamination of the jury proceedings .", "Beyond that , Mr. Hayden said in his answer , that the belief of a previous drunken-driving arrest was `` not true , '' as far as he knew .", "Mr. Hayden said the hearing should be limited to that issue , and rebuffed as `` irrelevant '' several other claims of impropriety raised by Mr. Heidgen 's lawyer , Stephen LaMagna , as causes for setting aside the guilty verdict .", "Mr. LaMagna also claimed that some jurors felt pressured to vote to convict Mr. 
Heidgen of the most serious charge , that some discussed the case outside the jury room , and that one said she planned to write a book about it .", "The Heidgen case was unusual because the defendant was prosecuted for a fatal drunken-driving crash under laws treating his crime as the functional equivalent of intentional murder .", "Such prosecutions , while not unprecedented , are considered rare and are usually brought only in the most horrific cases .", "Mr. Heidgen , whose blood-alcohol level was three times the legal limit , was driving the wrong way on the Meadowbrook Parkway when his pickup truck crashed head -on with a limousine taking a family home from a wedding .", "The crash killed the limousine driver , Stanley Rabinowitz , 59 , and Katie Flynn , who had been the flower girl .", "The girl 's parents and grandparents , who were also in the limousine , suffered minor injuries .", "After three days of deliberations , the jury told Acting Justice Alan R . Honorof of State Supreme Court that it was deadlocked , but after he sent them back for more deliberations , reached a verdict on the fifth day .", "Within days of the verdict , though , the jury forewoman said she had felt pressured to vote for the charge of `` murder by depraved indifference '' that Mr. Heidgen was convicted of , even though she believed he was guilty of the lesser crime of manslaughter .", "Based on the depraved indifference conviction , Mr. Heidgen faces a minimum of 25 years in prison .", "Manslaughter carries a minimum of 15 years .", "Mr. Heidgen 's lawyer , who could not be reached for comment on Wednesday , has said that based on interviews with other jury members , he believed that some jurors browbeat the last two holdouts who thought Mr. Heidgen was guilty of manslaughter , rather than murder .", "`` It is fairly rare for hearings like these to be held , '' said George Goltzer , vice president of the New York State Association of Criminal Defense Lawyers .", "`` The general rule is that you do n't let a jury impeach its own verdict .", "But if the charges are serious -- say that racism or serious misconduct was involved in the decision or something like that -- the courts have allowed it . ``", "Mr. Goltzer said he was not too familiar with the case , but added that evidence of `` extra-record information '' -- not part of the evidence introduced at the trial -- might qualify as serious enough to review a jury 's verdict .", "Justice Honorof is expected to decide in the next few days if and when hearings should be held .", "A spokesman for the district attorney , Kathleen Rice , said it was expected that with the district attorney 's assent , the hearings would be scheduled ."], "summary": ["Assistant Nassau County , NY , District Attorney Robert Hayden agrees that hearing should be held to determine whether jurors discussed what they thought were prior drunken-driving charges against Martin Heidgen before convicting him of murder .", "Heidgen crashed his vehicle into car , killing 7-year-old Katie Flynn and driver Stanley Rabinowitz .", "Photo ."], "publication": "nyt50", "label": [1, 9], "tag": ["New York and Region"]} -{"id": "1816274", "text": ["President Bush is all but daring Democratic leaders to attack his signature tax cuts as they take over Congress .", "But Democrats , perhaps to his frustration , are having none of it .", "In an opening salvo on Wednesday , Mr. 
Bush proclaimed that he would present a budget next month that manages to project a balanced budget by 2012 while permanently extending more than $ 1 trillion in tax cuts .", "`` It is also a fact that our tax cuts have fueled robust economic growth and record revenues , '' Mr. Bush wrote in an op-ed article for The Wall Street Journal .", "`` We met our goal of cutting the deficit in half three years ahead of schedule . ''", "The implicit message , which Republican lawmakers reinforced later , was that their tax cuts were popular with voters , that Republicans had proven the economic benefits of tax cuts and that Democrats would court disaster if they even hinted at rolling them back or repealing them .", "But even as Democratic leaders continue to accuse Mr. Bush of having a reckless fiscal policy , they have refused to discuss dismantling his tax cuts or even to engage in a debate with him about the best way to stimulate economic growth .", "`` It 's always the same old tired line with them -- ` Tax and spend , tax and spend , tax and spend , ' '' said Senator Kent Conrad , the North Dakota Democrat who is chairman of the Senate Budget Committee .", "`` We 're not going there . ``", "At least not now .", "Democratic leaders say they see no need to revisit Mr. Bush 's tax cuts for several years because they are not set to expire until the end of 2010 .", "And they contend that the government could increase revenues as much as $ 100 billion a year simply by closing the `` tax gap , '' taxes that are owed but not paid .", "When asked about their tax plans , as they have been again and again since Nov . 7 , Democratic leaders insist they want to preserve many of the middle-class tax cuts like the child tax credit and a reduction in the so-called `` marriage penalty '' for two-income households .", "The Democrats ' coyness is less than candid .", "Although it is true that Mr. Bush 's tax cuts have four more years of life , it is also true that Congress faces serious fiscal headaches that can not be postponed for even a full year .", "One of those challenges is the Iraq war .", "The conflict is likely to cost far more than $ 100 billion this year , even if the United States began reducing troop levels .", "So far , Mr. 
Bush has kept war spending out of the regular budget request that he sends to Congress each February , instead seeking the money on an expedited as-needed basis .", "Democrats say , however , that they will not accede to such requests , as their Republican predecessors did , because they sidestep the process by which Congress normally debates spending priorities .", "Perhaps the bigger and more enduring problem is the alternative minimum tax , a tax that was originally intended for the richest households but that has been not adjusted for inflation and is rapidly engulfing tens of millions of middle-class families every year .", "To prevent the alternative tax from becoming a de facto tax increase , Congress has been passing temporary fixes for the last several years .", "Those fixes now cost more than $ 50 billion a year , and the cost is expected to climb sharply for the rest of the decade .", "Repealing the alternative minimum tax , a goal shared by Democrats and Republicans , would reduce government revenues by about $ 1 trillion over the next 10 years and would be a huge drain on the Treasury that nobody in either party has figured out how to finance .", "Democrats , meanwhile , have a wish list of their own .", "House Democrats plan to reduce interest rates on student loans by half as part of their plan to pass measures that they have long favored in the first 100 legislative hours .", "Democrats also want to funnel more money to develop alternative energy sources , domestic security and the Children 's Health Insurance Program .", "House Democrats plan to reintroduce tough new `` pay-go '' budget rules as early as this Friday .", "Under `` pay-go , '' which governed spending measures in the 1990s but expired in 2002 , new entitlement programs or new tax cuts have to be matched by comparable spending cuts or tax increases .", "Politicians in both parties may be feeling a temporary reprieve from fiscal pressures because the budget deficit has indeed declined sharply in the last two years .", "Largely because of big back-to-back increases in tax revenue for the last two years , the deficit shrank to $ 248 billion in 2006 from a peak of $ 412 billion in 2004 .", "Measured as a share of the total economy , the approach economists view as more meaningful than the deficit size in dollars , the shortfall is 1.9 percent of the gross domestic product , modest by historical standards .", "But many budget analysts remain highly skeptical about Mr. Bush 's promise to eliminate the deficit , noting that the balanced budget would not occur until four years after he had left office .", "One reason for the skepticism is that the government would be renewing all of Mr. Bush 's tax cuts at precisely the time that the 76 million baby boomers have begun pouring into retirement and claiming government benefits from Social Security and Medicare .", "White House budget forecasts have also proven wildly wrong in both good years and bad , as tax revenue plunged far more than officials predicted from 2001 through 2004 and climbed faster than predicted in 2005 and 2006 .", "Despite the recent jump in tax receipts , moreover , federal tax collections are still low as a share of the total economy .", "Spending , on the other hand , climbed sharply as a share of the economy and remains high by historical standards .", "Mr. 
Conrad of the Budget Committee acknowledged on Tuesday that higher taxes , especially on wealthier families , would eventually have to be part of a comprehensive plan that included tough spending restraint and an overhaul of entitlement programs like Social Security and Medicare .", "`` I believe that if Americans are told the truth they will support actions necessary to address the budget , '' he said .", "`` If we 're going to be honest about this , it 's going to require both sides ' giving up some of their fundamental positions . ``", "But other Democratic lawmakers say it would be almost pointless to lead an unpopular fight over raising taxes until Republicans are willing to share some of the political pain .", "With Mr. Bush almost certain to fight almost any effort to revisit his tax cuts , and Republicans in Congress unlikely to rebel against the president , Democrats are inclined to wait until after Mr. Bush is gone .", "THE 110TH CONGRESS : NEWS ANALYSIS ."], "summary": ["News analysis notes even as Democratic leaders continue to accuse Pres Bush of reckless fiscal policy , they refuse to discuss dismantling his popular tax cuts or even engage in debate about best way to stimulate economy .", "Still , Congress faces serious fiscal problems that can not be postponed ."], "publication": "nyt50", "label": [6], "tag": ["U.S.", "Washington"]} -{"id": "1816278", "text": ["The sleek , bulbous-nosed new bullet trains here look like they are designed to whisk passengers across wide-open spaces .", "But on this congested island , they represent the start of a 180-mile-per-hour commuter train system .", "After a quarter century of planning and construction , the system is scheduled to open on Jan . 5 .", "It will tie together cities and towns where 94 percent of Taiwan 's population lives , offering an alternative to clogged highways and the air pollution the vehicles on them produce .", "For some urban planners and environmentalists , the project is an example of how Asia may be able to control oil imports , curb fast-rising emissions of global-warming gases and bring a higher standard of living to enormous numbers of people in an environmentally sustainable way .", "Passengers who travel on a fully loaded train will use only a sixth of the energy they would use if they drove alone in a car and will release only one-ninth as much carbon dioxide , the main gas linked to global warming .", "Compared with a bus ride , the figures are half the energy and a quarter of the carbon dioxide , train system officials said .", "But the system 's enormous cost -- $ 15 billion , or $ 650 for every man , woman and child on Taiwan -- has made it a subject of dispute .", "And a series of commercial disputes since the project began in 1980 has produced a remarkable hodgepodge : French and German train drivers who are allowed to speak only English with Taiwanese traffic controllers while operating Japanese bullet trains on tracks originally designed by British and French engineers .", "The system has become so complex that the leader of Taiwan 's consumer movement is calling for citizens to boycott it entirely until extensive safety data is released .", "`` Cherish your life , do n't be a guinea pig , `` Cheng Jen-hung , the chairman of the Consumers ' Foundation , said in an interview , repeating his group 's slogan .", "With 900 passengers on a fully loaded train , he warned , `` if there is an accident , there will be very heavy casualties . 
''", "Arthur Chiang , the vice president for administration at Taiwan High Speed Rail , said the system was completely safe .", "But he acknowledged that the project had been bedeviled by opposition .", "`` Pandora 's box has already opened and everything has come out except hope and mutual trust , `` he said during a recent test run on one of the new trains from the capital , Taipei , in the north , to the city of Taichung , in west-central Taiwan .", "`` We just wanted to make it simple , but we failed , '' he added .", "`` Politics is one of the factors . ''", "Using overhead electric lines instead of diesel locomotives , the trains will run from Taipei down through western Taiwan to Kaohsiung , the main industrial city in the south .", "That is a distance of 215 miles , about the same as between New York and Washington .", "The system will start with 19 trains in each direction daily and eventually will be able to handle 88 trains daily in each direction .", "Planning started in 1980 , when Taiwan was still under martial law .", "The route was preliminarily picked in 1991 , as Taiwan was starting on the path to become the vibrant , even tempestuous , democracy that it is today .", "Every large city and town along the route lobbied to have its own stop and new railway station , and a succession of governments agreed .", "Three trains a day will travel from Taipei to Kaohsiung in 90 minutes , with just one stop , in Taichung .", "But most of the trains will make six intermediate stops , lengthening travel time to two hours and seven minutes .", "That is still 38 minutes faster than Amtrak 's Acela Express between New York and Washington , which also has up to six intermediate stops but a lower top speed .", "But flights between Taipei and Kaohsiung take just 40 minutes .", "Enormous stations resembling state-of-the-art airport terminals have been built on the outskirts of each city along the route except Taipei , where the existing main rail station is being used .", "The new stations can not be in most downtown areas because of the difficulty in acquiring land for tracks : the high-speed trains travel almost entirely on specially built , 60-foot-tall viaducts to avoid the need to cross roads .", "Smaller trains and buses will link the new stations to downtown .", "Although many urban planners see systems like this one as positive for the environment , Lee Schipper , the research director at Embarq , an environmental transport research group in Washington , said the system could eventually increase the use of energy , rather than save it , if the ease of using the trains encouraged people to move farther away from work .", "The expectation in Taiwan is that the train system will attract a lot of users at first , notwithstanding Mr. Cheng 's call for a boycott .", "The consumer movement here is not as big or visible as it was even 10 years ago .", "A French train driver sporting a magnificent handlebar mustache , who declined to give his name , sent Mr. 
Chiang 's train hurtling down the tracks on the recent test run .", "The driver said the trains were actually simpler to operate than those in France .", "`` It 's easier , it 's all automatic , `` he said in French .", "But the requirement that all communications take place in English is a complication , he added .", "The electronic displays in the cabs of each train are also in English .", "The Taiwan High Speed Rail Corporation is training Taiwanese drivers to replace the European drivers and plans to switch the entire system to spoken Chinese and Chinese-language computer displays in about three years , Mr. Chiang said .", "The consortium had expected to hire experienced Japanese drivers , but the Japanese companies that made the trains were unable to persuade Japan 's rail system operators to transfer any of their drivers to Taiwan .", "Whether the train system becomes a commercial success will partly depend on how many people use its somewhat inconveniently located stations , how quickly the land is developed around these stations and how much the tickets cost .", "The initial price for a one-way , coach ticket from Taipei to Kaohsiung will be $ 44 , or two-thirds the price of a typical airline ticket .", "Riding the train is much like a very low-altitude flight , and very quiet .", "Chen Chi-cheng , a 5-year-old invited on the test run , watched with fascination as the rooftops of houses flashed past .", "`` It 's like a plane , `` he said breathlessly .", "TAIPEI JOURNAL ."], "summary": ["Taiwan 's new bullet train system is scheduled to open on Jan 5 .", "It will tie together cities and towns where 94 percent of its population lives and offer alternative to clogged highways and air pollution vehicles on them produce .", "System 's enormous cost -- $ 15 billion -- has made it subject of dispute .", "Some environmentalists worry that system could eventually increase use of energy , if ease of using trains encourages people to move farther away from work .", "Project 's decades of planning and construction recalled .", "Photos .", "Map ."], "publication": "nyt50", "label": [3, 7, 2, 30], "tag": ["World"]} -{"id": "1816286", "text": ["A soldier from the 101st Airborne Division is scheduled to plead guilty next Tuesday to a reduced charge for mercy killing in connection with the death of three unarmed Iraqi men shot by American infantrymen last spring , according to lawyers for other defendants in the case .", "The terms of the plea arrangement will allow the soldier , Specialist Juston R . Graber , originally charged with capital murder , to be convicted of aggravated assault and to receive a nine-month prison sentence in exchange for his testifying against three other members of his squad , the lawyers said .", "The three other soldiers , members of the same company as Specialist Graber in the division 's Third Brigade , still face courts-martial on premeditated-murder charges , making them eligible for the death penalty or , barring that , life in prison .", "Army prosecutors have accused them of carrying out an impromptu plan , following a raid on a marshy island northwest of Baghdad , to kill the three Iraqis after cutting off their plastic handcuffs and forcing them to run , still blindfolded , from the squalid hut where they had been discovered .", "The two soldiers accused of firing on the men as they fled -- Specialist William B . Hunsaker and Pfc . Corey R . 
Clagett -- have said they shot in self-defense after the three broke free from the thin `` zip-tie '' handcuffs and attacked them .", "A third soldier , Staff Sgt . Raymond L . Girouard , the squad 's ranking member , is accused of devising the plan to kill the men , then punching Private Clagett and cutting Specialist Hunsaker to give the appearance that they had been attacked .", "None of these three defendants have entered a formal pea .", "The guilty plea by Specialist Graber , to be entered before an Army judge at Fort Campbell , Ky . , and his testimony against the other soldiers will make gaining their acquittal far more difficult than it would have been had all four kept a unified legal front , lawyers for the three others said .", "`` It changes the complexion of the entire case and makes the case much stronger against Clagett , Hunsaker and Girouard , '' said Paul Bergrin , a lawyer for Private Clagett who said he had not expected Specialist Graber to get `` such an outstanding deal . ''", "`` It gives the government 's case credibility and corroboration where they did n't have it before , `` Mr. Bergrin added .", "`` It requires us to rethink our strategy . ''", "Specialist Graber 's two military lawyers , Capt . Shaun Lister and Capt . Will Suddeth , did not reply to e-mailed requests for comment or phone messages left with military officials at Fort Hood , Tex . , where both are based .", "Their client 's willingness to testify for the prosecution , though , was bitter news to the three other defendants .", "Michael Waddington , a lawyer for Specialist Hunsaker , said Specialist Graber 's plea arrangement felt like a betrayal , `` especially when we 've been disclosing our strategy `` to his lawyers for months .", "Specialist Graber 's legal team had joined with Mr. Waddington , Mr. Bergrin and Capt . Ted Miller , a lawyer for Sergeant Girouard , in a collaborative pact known as a joint defense agreement , which lawyers often create if their clients face similar charges in the same incident .", "Defense lawyers said Specialist Graber 's plea was not particularly surprising , given the disparity between the evidence against him and the charges he had faced .", "According to the evidence , Specialist Graber made a last-second decision to shoot the dying man after a squad medic declared him beyond help and , according to sworn statements from soldiers not charged in the case , after Sergeant Girouard said , `` Put him out of his misery . ''", "But Army prosecutors nonetheless charged Specialist Graber , 21 , with premeditated murder , creating an enormous incentive , defense lawyers for the three other soldiers said , for him to plead guilty to a lesser charge that more closely fit the crime .", "The first court-martial in the case will be Private Clagett 's , scheduled to begin Jan . 15 at Fort Campbell , Mr. Bergrin said .", "Two other soldiers , Sgt . Leonel Lemus and Pfc . Bradley Mason , are expected to testify for the prosecution that they heard Sergeant Girouard conceive a plan to free the Iraqi men from their handcuffs and have Private Clagett and Specialist Hunsaker kill them .", "But Mr. Bergrin said he would call a powerful witness of his own : Col . Michael Steele , the brigade commander , who , according to several soldiers ' sworn testimony , told some soldiers to `` kill all military-age males '' they encountered during the raid , on May 9 .", "Colonel Steele 's lawyer , Lt . Col . Raymond A . 
Jackson , said on Wednesday that Colonel Steele had never given such an order , a denial putting him at odds with several soldiers under his command .", "The Army has granted Colonel Steele immunity to testify , Mr. Bergrin said , and once on the stand and under oath , `` he 's going to have to tell the truth . ``", "Correction : January 5 , 2007 , Friday An article yesterday about a guilty plea by Army Specialist Juston R . Graber to a reduced charge of mercy killing in connection with the deaths of three Iraqi men misstated the original charge against him and the maximum punishment three other soldiers could receive if convicted .", "Specialist Graber was charged with noncapital murder , not capital murder .", "The three other soldiers scheduled for courts-martial face life in prison -- not the death penalty -- if convicted ."], "summary": ["Specialist Juston R Graber is scheduled to plead guilty to reduced charge for mercy killing in connection with death of three unarmed Iraq men shot by US soldiers in 2006 .", "Will testify against squad members Specialist William B Hunsaker , Pfc Corey R Clagett , and Staff Sgt Raymond L Girouard ."], "publication": "nyt50", "label": [0], "tag": ["U.S."]} -{"id": "1816293", "text": ["The Iraqi prime minister 's office on Wednesday mounted its first public defense of the way the government carried out the execution of Saddam Hussein , and said that Iraqi authorities had detained a guard who they believed was involved in recording the moment in a macabre and unauthorized video that has generated revulsion around the world .", "Iraqi officials , in their effort to dampen the video 's impact , tried to challenge the impression it conveyed that Mr. Hussein , for all his brutal crimes , had behaved with far more dignity in his final minutes than his seemingly thuggish executioners .", "`` The execution operation has been mischaracterized for political purposes , '' said Sadiq al-Rikabi , an adviser to Prime Minister Nuri Kamal al-Maliki , who was present at the execution .", "Mr. Rikabi asserted that it had been carried out properly .", "`` What has happened is not an insult or degradation , '' he said .", "But even as Mr. Maliki 's government tried to defend its actions , the United States military , which had held Mr. Hussein in custody until it transferred him to Iraqi authorities about an hour before he was hanged , sought to distance itself from any responsibility for the scenes revealed in the video .", "`` You know , if you 're asking me , ` Would we have done things differently , ' yes , we would have , '' said Maj . Gen . William B . Caldwell IV , an American military spokesman in Baghdad , at a news briefing on Wednesday .", "`` But that 's not our decision , `` he added .", "`` That 's an Iraqi government decision . ``", "The reaction from the American military seemed to widen a rift that has been opening recently between the Shiite-led government of Mr. Maliki and its American supporters on a range of issues .", "They include the government 's tolerance of militias , the recent discovery of the unofficial presence of Iranian military officers in Baghdad and the swiftness with which the Iraqis put Mr. Hussein to death after his appeals had been exhausted .", "Mr. Maliki 's office confirmed Wednesday that until Mr. 
Hussein 's final hours , the American Embassy had sought to delay the execution long enough to avoid having it on a Muslim holiday and to resolve some remaining legal issues .", "`` The Americans wanted to postpone it , '' said Maryam al-Rayas , a legal adviser to the prime minister .", "The decision to go ahead , Ms. Rayas said , was `` a victory for the Iraqi government . ''", "The prime minister had decided that beginning the new year with Mr. Hussein dead trumped all other considerations , including the advice of the embassy , said Ms. Rayas , who also characterized the time frame as reasonable .", "`` There was no rush , '' she said .", "The Iraqi government 's detention of one of the guards generated some skepticism , with some Iraqi officials suggesting that Iraq was seeking a low-level scapegoat to blame for the almost Gothic display of intimidation and death that the images depict .", "Mr. Rikabi refused to name or otherwise characterize the guard who had been arrested other than to say that he was being held in Baghdad after an investigation had determined that he had shot the video with a cellphone camera .", "But Munkith al-Faroun , who was the prosecutor at Mr. Hussein 's trial and was present at the execution , has said publicly that 2 of the 14 Iraqi officials and court representatives flown in by American helicopters to witness it were openly videotaping the event with cellphones .", "When asked about Mr. Faroun 's statements , Mr. Rikabi said , `` I do not have this information . ''", "On Wednesday , The New York Times erroneously quoted Mr. Faroun as saying that one of the officials he had seen holding up a cellphone during the execution was Mowaffak al-Rubaie , Mr. Maliki 's national security adviser .", "Mr. Rubaie , in a telephone interview from London , said that along with all the Iraqi officials who were flown to the execution block by American helicopter , he had been searched at the Green Zone helipad and that his cellphone and even his keys were taken from him .", "`` I did not have a cellphone in the execution chamber , '' he said .", "But , further undermining the assertion that only a single guard had videotaped the execution , Mr. Rubaie said he had seen `` two or three '' others in the official contingent who did have cellphones .", "He suggested that they might have been among officials who arrived at Camp Justice , the American camp in northern Baghdad where the hanging took place , by car .", "The failure to call more senior officials to account raised suspicions among some Iraqis .", "`` They want to blame it on a guard , '' one senior Iraqi official said .", "Mr. Rubaie told CNN that there could have been as many as two others in the guard contingent who were associated with that scheme .", "In the wake of the video 's release , there were continuing condemnations of the way justice was meted out to Mr. Hussein after he lost his case in a court specially set up to judge crimes committed during his rule .", "On Wednesday , the United Nations high commissioner for human rights , Louise Arbour , renewed a previous call for restraint in carrying out the executions of two of Mr. Hussein 's co-defendants who were also sentenced to death .", "The manner of Mr. 
Hussein 's execution appeared to give a boost to the remnants of his outlawed Baath Party .", "In the town of Huwaish , north of Baghdad , hundreds of people led by gunmen calling themselves the `` mujahedeen of the Baath Party '' marched in protest , and in the once prosperous Baghdad neighborhood of Monsour , a large black banner proclaimed that Mr. Hussein 's death would set off fighting against `` the Americans and their followers . ''", "The banner was signed , in nicely printed lettering , `` Baath Party . ''", "At the same time , one of Mr. Hussein 's most ruthless enforcers , Izzat Ibrahim al-Douri , who has eluded his American pursuers for nearly four years , was named the Baath Party leader on one of its Web sites .", "Although the claim could not be independently verified , Mr. Douri has long been considered a leader of the Baathist insurgency .", "Asked repeatedly to describe how the American military would have carried out the execution differently , General Caldwell declined to elaborate , saying that the question was hypothetical , since the Iraqis were in control once they received custody of Mr. Hussein outside the execution block .", "`` It was not our decision as to what occurred at that point , but we would have done it differently , '' General Caldwell said .", "Still , Mr. Rikabi , the prime minister 's political adviser , said that the government rejected all criticism of the execution , including the point at which one of the guards shouted , `` Moktada ! Moktada ! Moktada ! '' as Mr. Hussein stood on the trapdoor of the gallows -- a reference to Moktada al-Sadr , the radical Shiite cleric who leads the militia called the Mahdi Army .", "The exclamation came at the end of a standard Muslim prayer that both the guards and Mr. Hussein were saying aloud , Mr. Rikabi said .", "But the guards were from the Shiite south , where Mr. Sadr is popular , and the prayer there typically ends with the reference to him , Mr. Rikabi said .", "`` If you go to any mosque in Karbala or Najaf you will hear them shouting like that , '' he said .", "`` This is their habit . ''", "Seemingly contradicting his own government , Mr. Rubaie said he was ashamed of what had happened during the execution , which he described as `` unacceptable '' and `` disgusting . ''", "`` It is not professional , it 's the wrong thing to do , and it should not have happened , `` he said .", "`` But it should n't divert the mind of the people from the crimes that Saddam has been condemned to death for . 
``", "THE STRUGGLE FOR IRAQ ."], "summary": ["Iraqi Prime Min Nuri Kamal al-Maliki ` s office mounts its first public defense of way government carried out execution of Saddam Hussein , saying Iraqi authorities have detained guard who they believe was involved in recording moment in macabre and unauthorized video that has generated revulsion around world .", "Iraqi officials seek to challenge impression that Hussein , for all his brutal crimes , behaved with more dignity in his final minutes than his seemingly thuggish executioners .", "United States military , which had held Hussein in custody until it transferred him to Iraqi authorities hour before he was hanged , seeks to distance itself from any responsibility for scenes revealed in video ."], "publication": "nyt50", "label": [0, 5, 1], "tag": ["World", "Washington"]} -{"id": "1816302", "text": ["I have a dream , my friends .", "I have a dream that we are approaching the day when a ranch-owning millionaire Republican like George Bush will make peace with a vineyard-owning millionaire Democrat like Nancy Pelosi .", "I have a dream that Pelosi , who was chauffeured to school as a child and who , with her investor husband , owns minority shares in the Auberge du Soleil resort hotel and the CordeValle Golf Club , will look over her famous strand of South Sea Tahitian pearls and forge bonds of understanding with the zillionaire corporate barons in the opposing party .", "Furthermore , I dream of a great harmonic convergence among the obscenely rich -- between Randian hedge fund managers on the right and helipad environmentalists on the left .", "I dream that the big-money people who seem to dominate our politics will put aside their partisan fury and discover the class solidarity that Karl Marx always said they shared , and their newfound civility will trickle down to the rest of us .", "I dream that Berkeley will make peace with Buckhead , Streisand with DeVos , Huffington with O'Reilly .", "I have my dreams , but of course , I am realistic too , for I am aware that at present there is no peace among the secluded island villas .", "I look out across the second homes of America and its surrounding tropical regions and I see polarization among the Kate Spade devotees and bitterness among the Rolexes .", "And I know that both Bush and Pelosi are part of an upper-income whirlwind of strife .", "Some people believe that Pelosi is an airhead , but that is wrong .", "Some people believe she is a radical San Francisco liberal , but that , too , is wrong .", "The main fact to know about Pelosi is that she is a creature of the modern fund-raising system .", "Some politicians rise because they run political machines .", "Some rise because they are great communicators .", "Pelosi has risen because she is a master of the thousand-dollar-a-plate fundraising circuit .", "Living amid a web of investors , venture capitalists and West Coast technology tycoons , she raised heroic amounts of money for the Democratic Party before she ever thought of running for anything herself .", "In 1984 , she was the state party chairwoman .", "In 1986 , she was the national fund-raising chairwoman for the Senate Democrats .", "Since coming to the House , she has discovered what many a savvy pol has discovered -- that the fastest way to ascend in Congress is to raise a lot of money and give it to your peers .", "She paid her dues selecting party favors , arranging seating charts -LRB- after that , legislation is easy -RRB- , and laying thick dollops of obsequiousness on 
cranky old moguls and their helmet hair spa-spouses .", "She has done what all political fund-raisers do : tell rich people things they already believe , demonize the other side , motivate the giving with Manichaean tales of good versus evil .", "It is no wonder The Los Angeles Times calls her a `` rabid Democrat '' or that Time magazine calls her `` hyperpartisan . ''", "It is not a surprise , as The Washington Post reported this week , that despite campaign promises about changing the tone in Washington , Pelosi has decided to exclude Republicans from the first burst of legislation -- to forbid them to offer amendments or alternatives .", "She is part of the clash of the rival elites , with the dollars from Brookline battling dollars from Dallas , causing upper-class strife that even diminutive dogs , vibrant velvets and petite salades ca n't fully soothe .", "It pains me to see plutocrats fight , because it sets such a poor example for those of us in the lower orders who fly commercial .", "It pains me even more because politicians from the rival blueblood clans go to embarrassing lengths to try to prove they are most authentically connected with working Americans .", "Think of John Kerry visiting a Wendy 's or Bill Frist impersonating a Bible thumper .", "This week , witness Pelosi going on her all-about-me inauguration tour , which is designed to rebrand her as a regular Catholic grandma from Baltimore .", "Members of the middle classes never have to mount campaign swings to prove how regular they are , but these upper-bracket types ca n't help themselves , and they always lay it on too thick .", "So I harbor my dreams of reconciliation , but in the meantime , why oh why ca n't we have a decent overclass in this country -- a group of highly attractive dimwits who spread bland but worthy stability over our political scene .", "Why oh why do we have to have this endless canap\u00e9 war -- the people of the vineyard against the people of the ranch .", "Op-Ed Columnist ."], "summary": ["David Brooks Op-Ed longs for wealthy politicians to put aside their partisan fury and discover civility .", "Says House Speaker Nancy Pelosi is example of rich politician who rose to power by mastering fund-raising system .", "Says despite campaign promises to change tone in Congress , Pelosi has excluded Republicans from first burst of legislation ."], "publication": "nyt50", "label": [22, 4, 31], "tag": ["Opinion"]} -{"id": "1816308", "text": ["Robert L . Nardelli 's rich compensation and poor performance at Home Depot have long been cited by shareholder activists as a prime example of what they view as excessive executive pay .", "Some union members dressed up in giant chicken outfits to protest the board 's reluctance to clip Mr. Nardelli 's wings .", "What did all their outrage get them .", "Mr. Nardelli 's removal -- and at least a $ 210 million bill for a golden handshake on his way out the door .", "Yesterday , Home Depot 's board ousted Mr. Nardelli as chairman and chief executive in a surprising move that highlights the growing influence of investors pressuring boards to rein in executive pay .", "But it also illustrates another point : Even when their voices are heard , shareholders often wind up holding the bag .", "At Home Depot , Mr. 
Nardelli is expected to receive an exit package worth more than $ 210 million on top of the nearly $ 64 million he was paid during his six years at the helm .", "That equals about $ 45 million a year .", "Over that same period , Home Depot 's stock has fallen from over $ 50 a share early in his tenure to $ 41.16 just before Mr. Nardelli 's resignation was announced .", "By contrast , Home Depot 's chief competitor , Lowe 's , has paid its chief executives about one-third of what Mr. Nardelli made during the same time , while investors have enjoyed a healthy increase in their holdings .", "Mr. Nardelli 's compensation may have a prominent place in the pantheon of pay-for-failure , but his arrangement is by no means unique .", "`` The company is big , the underperformance is significant and the numbers are very large , '' said Lucian Bebchuk , a Harvard Law School professor who is an outspoken critic of executive pay .", "`` But each of the pieces that lead to the decoupling of pay from performance are very common to the executive compensation landscape . ''", "Across corporate America , chief executives have been walking away with lavish riches even when their companies fail to perform , according to an analysis by the Corporate Library .", "At Pfizer , Henry A . McKinnell left with an exit package worth $ 213 million , including an $ 82 million pension , after the pharmaceutical giant he ran for six years lost over $ 137 billion in market value on his watch .", "Jay Sidhu , the former chairman and chief executive of Sovereign Bank , received $ 44 million last fall when he was removed after a bitter proxy fight .", "Morgan Stanley 's board awarded Philip J . Purcell an exit package worth more than $ 95 million when he was forced out in July 2005 .", "Tom Freston , who was ousted at Viacom in September , and Carleton S . Fiorina , who was forced out by Hewlett-Packard in February 2005 , were handed tens of millions of dollars when they abruptly stepped down .", "`` You can call them pay-for-failure packages , '' said Jesse M . Fried , a law professor at the University of California , Berkeley , who has been critical of excessive compensation packages .", "`` You get what you pay for . ''", "The reason highlights the `` Heads I win .", "Tails I win `` nature of executive pay -- especially for chief executives hired to orchestrate a turnaround .", "Over the last decade , many boards and big investors bought into the belief that a celebrity C.E.O. or corporate savior was vital for success .", "There have certainly been a number of examples of chief executives who delivered outsize gains to their shareholders , enjoying big rewards themselves .", "As a result , most executives have been able to take advantage of the perception that they would deliver big rewards to negotiate employment contracts that guaranteed them bonuses when they arrived , paid them handsomely with stock options during their careers -- and in many cases , ensured severance contracts providing millions more once they left .", "`` The justification that is given for these big executive pay packages is that we have to give them very strong incentives to create shareholder value , '' Mr. Fried said .", "`` But if you are allowing them to walk away with hundreds of millions of dollars even if they do extremely poorly , you are undermining that case . ''", "That appears to be what happened at Home Depot .", "When Mr. 
Nardelli was hired in December 2000 , he was seen as a strong leader who could help restore Home Depot to the luster it enjoyed in its early years .", "Home Depot 's board offered him a contract that would pay him well if times were bad and even more if the company 's performance was better .", "Mr. Nardelli received $ 63.5 million in salary , bonuses , and other compensation , including $ 21 million in forgiven loans and company-paid taxes during his career at Home Depot .", "In that time , however , he failed to turn the company around .", "Yesterday , details of his $ 210 million exit package were released .", "As part of the negotiated arrangement , Mr. Nardelli is expected to take home a severance payment of $ 20 million , a $ 32 million pension , a $ 2 million 401 -LRB- k -RRB- and $ 139 million in deferred equity awards and stock options that he now will be able to cash early .", "That amount could grow if Home Depot 's shares rebound under a new leader .", "Mr. Nardelli also stands to receive another $ 18 million in `` other entitlements , '' which the company did not disclose , but will be paid over the next four years so long as he does not violate a noncompete contract .", "Under his employment contract , Mr. Nardelli was eligible for benefits like life insurance , and dental and medical coverage for several years after his departure .", "It is unclear whether he will still receive them .", "`` When you guarantee income to an executive regardless of performance , you end up paying and you rarely get performance , '' Mr. Hodgson said .", "`` The board could have saved themselves hundreds of millions of dollars and still have their reputations intact . ''", "Representative Barney Frank , the Massachusetts Democrat who is the new chairman of the House Financial Services Committee , called Mr. Nardelli 's exit package `` confirmation of the need to deal with a pattern of C.E.O. pay that appears to be out of control . ''", "Some investors were so happy to see Mr. Nardelli leave that they declared victory -- no matter how hollow .", "They are holding out hope that his ignominious exit will serve as an example to others .", "William C . Thompson Jr . , New York City comptroller , said he hoped `` all the attention that has been focused on him -- with his excessive pay and his underperformance -- will lead to change within this company and send a message to other companies . ''", "Still , he conceded that the $ 210 million exit package was something `` we would have preferred not happen . ''", "A CHAIRMAN 'S FALL ."], "summary": ["Robert L Nardelli 's $ 210 million exit package from Home Depot highlights expensive ` pay-for-failure ' golden handshakes executives tend to receive after being pressured to leave companies .", "Amounts seem unaffected by poor performance .", "Incentives to attract high-profile talent undermines shareholder value and do little to improve business .", "Recent exit packages of prominent executives are shown .", "Photos ."], "publication": "nyt50", "label": [0, 32, 27], "tag": ["Business"]} -{"id": "1816325", "text": ["Coming home from their respective tours of duty in Iraq , John Reid and Alina Gutierrez had never met but they had a lot in common .", "Both were sergeants in the 42nd Infantry Division of the Army and were deployed in Iraq from autumn 2004 to autumn 2005 .", "And both had a strong interest in running their own businesses when they got home .", "Ms. Gutierrez is an owner of a Glass Doctor franchise , and next month , Mr. 
Reid will open his own Glass Doctor in a neighboring New Jersey county .", "They are the beneficiaries of an innovative private-sector business plan aimed at encouraging and supporting military veterans .", "The Veterans Transition Franchise Initiative , or VetFran , a program sponsored by the International Franchise Association , offers veterans a discount on financing prospective franchises as a way of thanking them for serving the country .", "Nearly 200 participating franchise companies provide qualified veterans `` the best deal '' in acquiring a new franchise , a deal not available to other franchise investors , according to Dina Dwyer-Owens , president of the Dwyer Group , a franchise organization in Waco , Tex .", "Franchises have become an increasingly appealing route for many would-be entrepreneurs .", "According to a 2004 study by PricewaterhouseCoopers , franchise businesses employ more than 18 million Americans and generate more than $ 1.5 trillion , or nearly 10 percent of private-sector economic output .", "The study noted that there were more than 760,000 franchise businesses in the United States and franchising continues to be a fast-growing business opportunity .", "VetFran was conceived by Don Dwyer Sr . , Ms. Dwyer-Owens ` father , after the Persian Gulf war .", "When Mr. Dwyer died suddenly in 1994 , the VetFran program foundered .", "After 9/11 and the invasion of Afghanistan , Ms. Dwyer-Owens decided that the program ought to be revived and , in 2002 , she turned it over to the franchise association .", "VetFran is open to all veterans , not just those returning from Afghanistan and Iraq .", "Since the program began , more than 600 veterans have received discounts in starting franchise businesses .", "Among the participating franchise companies are Dunkin ' Donuts , Midas , the UPS Store , Gold 's Gym and Aamco Transmissions .", "Ms. Gutierrez , 26 , said she received a 10 percent discount on her Glass Doctor franchise fee , which saved her several thousand dollars in much-needed capital for her outlet in Mercer County .", "But as with Mr. Reid and other veterans who have participated in the VetFran program , the money was not as significant as the meaning of the gesture .", "`` I was very excited when I heard about the program , '' Ms. Gutierrez said .", "`` When you come home from your deployment , you are not sure if everyone appreciated what you had done .", "I thought it was very cool that they recognized that we risked our lives in Iraq and they appreciated our sacrifice . ``", "Mr. Reid , who views Ms. Gutierrez as a colleague , not a competitor , said that the VetFran discount was a big influence on his decision to pursue a Glass Doctor franchise .", "`` The less capital you have to lay out , the better , '' he said .", "`` You need as much start-up money as you can get . 
''", "He was also impressed that Glass Doctor , one of eight franchise businesses owned and operated by the Dwyer Group , was willing to be flexible with his deal , offering him an additional four months before he has to start repayments .", "Because of their military training , returning veterans are highly regarded as prospective franchise operators by franchise companies .", "As with the military , successful franchises operate within a specific system and a set of guidelines , created by the franchiser .", "`` Someone coming back from the military is a great fit for a franchise business , '' said Jim Blasingame , a small-business consultant and host of the `` The Small Business Advocate Show '' on radio .", "`` They not only know what it 's like to work hard but they know how to operate within a system .", "With a franchise , you ca n't think outside the box .", "You have to color inside the lines . ``", "The military is apparently good training for all types of entrepreneurs .", "According to William D . Elmore , associate administrator for veterans business development at the Small Business Administration , there are 25 million veterans in the United States and one in seven of them is successfully self-employed .", "`` Veterans have the highest rate of successful self-employment of any group of Americans , '' Mr. Elmore said , noting the 14 percent success rate in self-employment .", "He attributes this success to the discipline and training every soldier acquires during their service .", "`` There is an enormous resource going into the training and deployment of soldiers and out of that comes a skill set , discipline and worldliness that most citizens do n't have at a young age . ``", "Mr. Reid , 40 , says that Iraq gave him an opportunity to save $ 60,000 while he was deployed and he was intent on using that money to set up his own business .", "`` As a noncommissioned officer in charge of a logistics section , I was responsible for 45 people , '' he said .", "All 45 came back .", "`` So I thought , ' Let 's see how I do in business . '", "`` His Glass Doctor franchise in Somerset , N.J. , which is set to open Feb . 5 , gives him exclusive rights to all of Somerset County .", "`` They told me , ' We have a system .", "If you follow it , you 'll be successful . '", "That sounded like the business for me , `` he said .", "The VetFran program is allowing Brett Cooper to start a Lawn Doctor franchise in Boonville , Mo . , while keeping his full-time job as a sales representative for Graybar Electric .", "As an 18-year member of the Missouri Army National Guard , Mr. Cooper was called up for a tour of duty in Afghanistan in 2004 and spent a year deployed in and around Kabul .", "`` I looked into a Lawn Doctor franchise six years ago , but I was n't financially able to do it , `` Mr. Cooper said .", "`` While I was in Afghanistan , I saved my salary .", "I 'm a major so the pay was very good .", "and it gave me a cushion .", "I e-mailed Lawn Doctor from Afghanistan and was made aware of the VetFran program . ``", "He received a 5 percent discount on his start-up costs for the franchises , which is significant given that Lawn Doctor franchises cost $ 90,000 .", "As with other veterans starting franchises , Mr. 
Cooper does not hesitate to let customers know he is a veteran .", "`` I 've absolutely used my veteran status , `` he said .", "`` People are very grateful that you have served your country , especially in the current conflict .", "Doing that kind of service for your country builds trust and respect . ``", "Ms. Dwyer-Owens is pleased by the response to the VetFran program thus far .", "`` The veterans are so grateful that we are there to say thanks , '' she said .", "`` I 'm proud of it .", "There 's no government funding .", "It 's all done by private businesses . ``", "Mr. Elmore praised the VetFran program as a strong private-sector initiative that opened doors for veterans seeking small-business opportunities .", "He also cautioned that veterans should seek expert advice available through S.B.A. business development centers before signing any agreements -LRB- www.sba.gov / vets -RRB- .", "Jerry Pyle , a retired Marine sergeant in Mineola , Fla . , is among those who appreciate the VetFran gesture .", "After a 22-year career in the Marines ended in 1999 , Mr. Pyle tried his hand in corporate retail and found himself ill suited to that atmosphere .", "He began his own business as a handyman and home remodeler and when he moved from South Carolina to central Florida , he heard about the Dream Maker Bath and Kitchen Worldwide franchise , another Dwyer Group company .", "`` It resonated with me that a corporation would actually take my service to the country into consideration , '' he said .", "`` It felt like a belated payback for 22 years of service . ''", "The VetFran program emphasizes that veterans not only make excellent franchisers but employees as well .", "Mr. Pyle said that he gravitated toward the qualities and values of veterans and all of his five full-time employees are either veterans or wives of veterans .", "`` Their qualities of teamwork , ethics and enthusiasm make us a better company , '' he said .", "SMALL BUSINESS ."], "summary": ["Veterans Transition Franchise Initiative offers veterans assistance in setting up prospective franchises in deals not available to other investors .", "Business plan is aimed at encouraging and supporting military veterans .", "Study by PricewaterhouseCoopers shows franchises have become increasingly appealing route for entrepreneurs , employing more than 18 million Americans and generating more than $ 1.5 trillion .", "Photo ."], "publication": "nyt50", "label": [8, 7, 4], "tag": ["Business"]} -{"id": "1816327", "text": ["The dictator sat alone in his cell , three years in American custody .", "His beard had gone gray , his sons were dead and the gallows were being readied .", "Saddam Hussein in those final days turned to poetry , so often his source of solace in times of difficulty , inspired by his vision of himself as inseparably tied to those he led .", "The poem , `` Unbind It , '' is his rallying call to be sounded from the grave .", "It is a mixture of defiance and reflection , but no remorse .", "No mention of the tens of thousands of lives he was responsible for taking .", "No expression of guilt or sadness or regret .", "The poem , flush with florid phrases that were his trademark , begins with what sounds like a paean to the love between himself and his people , who were about to lose him .", "Unbind your soul .", "It is my soul mate and you are my soul 's beloved .", "No house could have sheltered my heart as you have .", "He moves quickly to more aggressive language .", "He refers to the foreigners who swept him from power and 
to the Iraqis who rose to rule here in his place .", "The enemies forced strangers into our sea And he who serves them will be made to weep .", "Here we unveil our chests to the wolves And will not tremble before the beast .", "The verses were written by Mr. Hussein after he was sentenced to death and , according to his relatives , are believed to be his last written words .", "A handwritten copy of the poem was passed along by the Iraqi authorities to his family in Tikrit , along with his final will and testament , according to Mr. Hussein 's cousin Muayed Dhamin al-Hazza .", "Mr. Hazza read the poem on the telephone , saying he hoped Mr. Hussein 's farewell would underline the manner in which the execution was carried out .", "Iraqi and American officials confirmed that a poem left among Mr. Hussein 's belongings at the American military detention center was delivered to his family .", "In the poem , Mr. Hussein praises those who continue to fight for the Iraqi nation and condemns the `` wolves '' who have brought ruin through their invasion .", "He portrays himself as a martyr .", "His poetry , like his speeches at decisive moments of his dictatorship , was often obscure , highly alliterative and difficult , even for Arabic speakers , to comprehend fully .", "At the height of his power , Iraqis brave enough to discuss the subject would shake their heads at his rambling speeches and convoluted verse .", "Some would suggest , with glances over their shoulders , that in his efforts to show himself as a scholar of Arab history and literature , he inadvertently revealed some of the darker recesses of his mind .", "According to news reports , Mr. Hussein even made gifts of his poetry to his American captors .", "Iraqis familiar with his style helped translate his death cell poem .", "Sections that would have been unintelligible in a literal translation have been interpreted loosely in an attempt to reveal the meaning Mr. Hussein intended .", "He is most clear when talking about how he sees himself in light of his impending death .", "I sacrifice my soul for you and for our nation Blood is cheap in hard times .", "Mr. Hussein told his official biographer that he cared little what people thought of him when he was alive , but that he hoped to be revered as one of the giants of history -- as a Nebuchadnezzar or Saladin -- 500 years from now .", "He ordered that one in every 10 bricks used in reconstructing the ancient palace at Babylon be stamped with his name or an eight-pointed star to symbolize the eight letters in his name in the Arabic alphabet .", "For a man whose vanity was in proportion to his brutality , he appears , from the poem , to have seen himself as dying for a greater good .", "It was a theme he returned to repeatedly in the courtroom where he was condemned to death for crimes against humanity , telling the judges that he was speaking to history .", "Many Iraqis viewed the thousands of portraits of Mr. Hussein erected around the country -- in business suits , as warrior , as Arab sheik -- as a sort of guidebook to his illusions about himself .", "Even as his secret police murdered tens of thousands , he sealed himself off with the conviction that he was widely loved .", "One of his favorite books was `` The Old Man and the Sea , '' but his style could not be mistaken for Hemingway .", "No short crisp sentences for Mr. 
Hussein .", "While still in power , he wrote , at least in part , two romantic novels .", "`` Zabibah and the King , '' which is set in a fanciful Arabia of long ago , tells of a lonely king who , while powerful , feels cut off from his liegemen .", "He encounters Zabibah and is entranced by her beauty and wisdom .", "But outsiders soon invade the kingdom , which is described as the cradle of civilization , and Zabibah is raped -- on Jan . 17 , a reference to the beginning of the Persian Gulf war of 1991 .", "When the book was released , Central Intelligence Agency analysts reportedly pored over it searching for clues into Mr. Hussein 's mind .", "But they could just as easily have turned to `` The Old Man and the Sea , '' which Mr. Hussein had first read as a young man in a different prison : `` A man can be destroyed not defeated . ''", "In that spirit , he urges his followers to be fierce and noble , saying : We never kneel or bend when attacking But we even treat our enemy with honor .", "THE STRUGGLE FOR IRAQ ."], "summary": ["Former Iraqi dictator Saddam Hussein in final days before his execution turned to poetry , often his solace in times of difficulty , inspired by vision of himself as inseparably tied to those he led .", "Final poem , ` Unbind It , ' compares Iraqi people to his soul mate .", "Florid poem is mixture of defiance and reflection .", "Photo ."], "publication": "nyt50", "label": [2, 4, 8], "tag": ["World", "Washington"]} -{"id": "1816340", "text": ["When boards of directors go shopping for a new chief executive , their first stop is often General Electric .", "After all , its disciplined approach to management , the `` G.E. Way , '' has been chronicled in a shelf 's worth of books , including `` Winning , '' by its former chief executive , John F . Welch Jr . , who gets much of the credit for his system of building a deep bench of talent .", "That system included rotating executives through many jobs , teaching them productivity and quality-control tools like Six Sigma , and training them at the company 's vaunted management school .", "G.E. executives were in such demand that just one week after Mr. Welch tapped Jeffrey R . Immelt as his successor in late 2000 , the two runners-up were immediately lured away .", "Home Depot recruited Robert L . Nardelli , and 3M took W . James McNerney Jr .", "But the ouster of Mr. Nardelli from Home Depot raises anew the question of whether G.E. executives are so bankable when they switch to new companies .", "The answer , many management experts say , is not necessarily .", "Though Mr. McNerney has succeeded at both 3M and then at Boeing , Mr. Nardelli joins Lawrence R . Johnston , another high-flying G.E. executive who left in 2001 for a troubled tenure at Albertson 's , among the alumni who did not live up to expectations .", "`` G.E. is still the best training ground on the planet , but that still does n't mean that everyone is going to succeed , `` said Noel M . Tichy , a professor at the University of Michigan Business School who has written extensively about G.E.", "If there is any pattern , experts say , it is that G.E. executives succeed when they switch to companies that are , well , a lot like G.E. -- big industrial firms that are dominant in their fields , or that need an infusion of manufacturing and research discipline , or that must grow by acquisition .", "By contrast , they tend to have more trouble in companies , retailers like Home Depot , that require more fingertip feel to manage .", "`` G.E. 
people are good at getting structure , system and strategy right , but they do n't always understand the soft issues like culture , `` said Boris Groysberg , an assistant professor at the Harvard Business School who recently studied 20 star G.E. managers who went on to run other companies .", "Many management experts say that the very command and control management style that has characterized many successful G.E. executives may have led to Mr. Nardelli 's downfall .", "They note that he shifted Home Depot 's emphasis to the commercial market , installed all sorts of productivity tools and otherwise `` G.E. - ified '' the company .", "But , they say , he did not recognize that sales associates react differently than white-collar managers to change , and thus need different incentives to embrace it .", "He closed stores and moved people around , which meant that many sales staff found themselves with new bosses .", "He insisted that shelves be stocked during off hours , and he instituted formal inventory control and performance evaluation systems .", "`` He was foisting all this change on people who are n't necessarily looking to rise to higher levels in the organization , and so did n't see any upside for themselves in any of it , `` said Batia M . Wiesenfeld , associate professor of management at the Leonard N . Stern School of Business at New York University .", "The style did not necessarily work for ambitious managers either , said Anthony J . Mayo , a lecturer at the Harvard Business School .", "`` He brought a classic G.E. top-down , autocratic command and control approach and style , which just did not work out of the context of G.E. , '' he said .", "Mr. Mayo includes Mr. Welch , Stanley Gault , Lawrence Bossidy and several other former G.E. executives in his coming book , `` In Their Time : The Greatest Business Leaders of the 20th Century . ''", "But he said that his research turned up many ex-G . E executives who ignored `` context-based management , '' his term for tailoring management approaches to specific situations .", "The G.E. way was particularly out of place in a retail operation , said James E . Schrager , clinical professor of entrepreneurship and strategy at the University of Chicago Graduate School of Business .", "`` Boards get overenthusiastic about the G.E. glow , '' he said .", "`` They forget that there 's a big divide between selling light bulbs and appliances to stores and running the stores that sell them to consumers . ``", "Mr. Nardelli is not the first G.E. alumnus who took on another company amid much fanfare , only to leave under a cloud .", "John B . Blystone left G.E. to run the SPX Corporation in 1995 .", "At first , shareholders applauded his cost-cutting methods .", "But revenue stalled .", "Mr. Blystone resigned in 2004 `` to spend more time with his family , '' amid controversy over his sale of SPX shares before the release of lackluster quarterly results .", "Shares of Conseco leaped in 2000 when Gary C . Wendt , then the head of GE Capital , became its new chief .", "He was unable to turn the company around , and two years later he relinquished the chief executive slot .", "Conseco soon filed for bankruptcy protection .", "Mr. Johnston left G.E. to run Albertson 's in 2001 .", "He closed stores and cut jobs , but still could not compete with the Wal-Mart juggernaut .", "Albertson 's sales and stock plunged , and the company was eventually sold .", "`` G.E. 
people bring a great tool kit with them , but they really need the global learning centers , the diverse product portfolio , the access to capital , all of the resources of G.E. to maximize its value , '' said Nicholas P . Heymann , who follows G.E. for Prudential Securities .", "There are many success stories too , of course .", "Mr. Bossidy , one of Mr. Welch 's hand-groomed executives , was considered a savior at Allied Signal -LRB- later Honeywell -RRB- , as was Mr. Gault at Rubbermaid .", "Kirk Hachigian continues to get high marks at Cooper Industries .", "John Trani ran into problems with Stanley Work 's unions toward the end of his tenure , but he nonetheless is credited with whipping the company 's costs and operations into shape before he retired in 2003 .", "And , of course , Mr. McNerney , who left G.E. to run 3M at the same time that Mr. Nardelli left for Home Depot , got 3M 's sales and stock price way up .", "He recently left to head Boeing , and most analysts expect him to do well there , too .", "The success stories , experts say , have several things in common : The companies were manufacturers that needed to cut costs , pump up product innovation and grow by acquisition -- all skills that are finely honed at G.E.", "In contrast , Albertson 's and Home Depot are retailers , an area that G.E. has not touched save for a brief and unrewarding stint owning Montgomery Ward .", "SPX needed to expand businesses it already owned -- something that was emphasized far less by Mr. Welch than it is by Mr. Immelt , G.E. ` s current chief .", "And Conseco was foundering -- a company G.E. would never have bought , and probably would have sold .", "`` The errors come in when G.E. people feel they learned all the secrets at G.E. , '' said Ms. Wiesenfeld of New York University .", "They become enamored of their own knowledge , she added , `` and do n't feel they have to learn about the business they are going into . ``", "Mr. Immelt , G.E. ` s current chief , has not been cheered by shareholders throughout much of his tenure .", "He took over from Mr. Welch in September 2001 , just in time for the terrorist attack of Sept . 11 to wreak havoc with G.E. insurance , aircraft leasing and other businesses .", "And he also suffered through two years of share price stagnation , which ended just last month .", "Shareholders did not clamor for his resignation , though .", "One reason , analysts say , was that Mr. Immelt seemed to understand the problems , and was installing innovation processes and product lines to fix them .", "Another reason was that his total compensation package -- $ 8,534,829 in 2004 and $ 3,400,769 million in 2005 -- was well below the sums paid to other chief executives like Mr. 
Nardelli .", "A CHAIRMAN 'S FALL ."], "summary": ["Ouster of Home Depot chairman-chief executive Robert L Nardelli , former executive at General Electric , raises anew question of whether GE executives are so bankable when they switch to new companies .", "Many management experts say answer is not necessarily .", "Nardelli joins Lawrence R Johnston , another high-flying GE executive who left in 2001 for troubled tenure at Albertson 's , among alumni who did not live up to expectations .", "Gary C Wendt tapped in June 2000 to run Conseco Inc after building GE Capital Services .", "Conseco filed for bankruptcy protection two months after Wendt stepped down in October 2002 .", "Jeffrey R Immelt started at GE in 1982 .", "He has sold off lackluster business and prodded managers to focus on ` organic growth ' since taking over as chief executive in 2001 .", "Photos ."], "publication": "nyt50", "label": [7, 5, 6, 30, 32, 33, 4], "tag": ["Business"]} -{"id": "1816344", "text": ["At first , the psychiatric drug Zyprexa may have saved John Eric Kauffman 's life , rescuing him from his hallucinations and other symptoms of acute psychosis .", "But while taking Zyprexa for five years , Mr. Kauffman , who had been a soccer player in high school and had maintained a normal weight into his mid-30s , gained about 80 pounds .", "He was found dead on March 27 at his apartment in Decatur , Ga . , just outside Atlanta .", "An autopsy showed that the 41-year-old Mr. Kauffman , who was 5 feet 10 inches , weighed 259 pounds when he died .", "His mother believes that the weight he gained while on Zyprexa contributed to the heart disease that killed him .", "Eli Lilly , which makes Zyprexa , said in a statement that Mr. Kauffman had other medical conditions that could have led to his death and that `` Zyprexa is a lifesaving drug . ''", "The company said it was saddened by Mr. Kauffman 's death .", "No one would say Mr. Kauffman had an easy life .", "Like millions of other Americans , he suffered from bipolar disorder , a mental illness characterized by periods of depression and mania that can end with psychotic hallucinations and delusions .", "After his final breakdown , in 2000 , a hospital in Georgia put Mr. Kauffman on Zyprexa , a powerful antipsychotic drug .", "Like other medicines Mr. Kauffman had taken , the Zyprexa stabilized his moods .", "For the next five and a half years , his illness remained relatively controlled .", "But his weight ballooned -- a common side effect of Zyprexa .", "His mother , Millie Beik , provided information about Mr. Kauffman , including medical records , to The New York Times .", "For many patients , the side effects of Zyprexa are severe .", "Connecting them to specific deaths can be difficult , because people with mental illness develop diabetes and heart disease more frequently than other adults .", "But in 2002 , a statistical analysis conducted for Eli Lilly found that compared with an older antipsychotic drug , Haldol , patients taking Zyprexa would be significantly more likely to develop heart disease , based on the results of a clinical trial comparing the two drugs .", "Exactly how many people have died as a result of Zyprexa 's side effects , and whether Lilly adequately disclosed those risks , are central issues in the thousands of product-liability lawsuits pending against the company , and in state and federal investigations .", "Because Mr. 
Kauffman also smoked heavily for much of his life , and led a sedentary existence in his last years , no one can be sure that the weight he gained while on Zyprexa caused his heart attack .", "Zyprexa , taken by about two million people worldwide last year , is approved to treat schizophrenia and bipolar disorder .", "Besides causing severe weight gain , it increases blood sugar and cholesterol in many people who take it , all risk factors for heart disease .", "In a statement responding to questions for this article , Lilly said it had reported the death of Mr. Kauffman to federal regulators , as it is legally required to do .", "The company said it could not comment on the specific causes of his death but noted that the report it submitted to regulators showed that he had `` a complicated medical history that may have led to this unfortunate outcome . ''", "`` Zyprexa , '' Lilly 's statement said , `` is a lifesaving drug and it has helped millions of people worldwide with schizophrenia and bipolar disorder regain control of their lives . ''", "Documents provided to The Times by a lawyer who represents mentally ill patients show that Eli Lilly , which makes Zyprexa , has sought for a decade to play down those side effects -- even though its own clinical trials show the drug causes 16 percent of the patients who take Zyprexa to gain more than 66 pounds after a year .", "Eli Lilly now faces federal and state investigations about the way it marketed Zyprexa .", "Last week -- after articles in The Times about the Zyprexa documents -- Australian drug regulators ordered Lilly to provide more information about what it knew , and when , about Zyprexa 's side effects .", "Lilly says side effects from Zyprexa must be measured against the potentially devastating consequences of uncontrolled mental illness .", "But some leading psychiatrists say that because of its physical side effects Zyprexa should be used only by patients who are acutely psychotic and that patients should take other medicines for long-term treatment .", "`` Lilly always downplayed the side effects , '' said Dr. S . Nassir Ghaemi , a specialist on bipolar disorder at Emory University in Atlanta .", "`` They 've tended to admit weight gain , but in various ways they 've minimized its relevance . ``", "Dr. Ghaemi said Lilly had also encouraged an overly positive view of its studies on the effectiveness of Zyprexa as a long-term treatment for bipolar disorder .", "There is more data to support the use of older and far cheaper drugs like lithium , he said .", "Last year , Lilly paid $ 700 million to settle 8,000 lawsuits from people who said they had developed diabetes or other diseases after taking Zyprexa .", "Thousands more suits are still pending .", "But Ms. Beik is not suing Lilly .", "She simply wants her son 's case to be known , she said , because she considers it a cautionary tale about Zyprexa 's tendency to cause severe weight gain .", "`` I do n't think that price should be paid , `` she said .", "Mr. Kauffman 's story , like that of many people with severe mental illness , is one of a slow and steady decline .", "Growing up in DeKalb , Ill . , west of Chicago , he acted in school plays and was a goalie on the soccer team .", "A photograph taken at his prom in 1982 shows a handsome young man with a messy mop of dark brown hair .", "But in 1984 , in his freshman year at Beloit College in Wisconsin , Mr. 
Kauffman suffered a breakdown and was found to have the most severe form of bipolar disorder .", "He returned home and , after medication stabilized his condition , enrolled in Northern Illinois University .", "He graduated from there in 1989 with a degree in political science .", "For the next year , he worked as a bus driver ferrying senior citizens around DeKalb .", "In a short local newspaper profile of him in 1990 , he listed his favorite book as `` Catch-22 , '' his favorite musician as Elvis Costello , and his favorite moment in life as a soccer game in which he had made 47 saves .", "A few months later , he followed his mother and stepfather to Atlanta and enrolled in Georgia State University , hoping to earn a master 's degree in political science .", "`` He wanted so much to become a political science professor , '' Ms. Beik said .", "But trying to work while attending school proved to be more stress than Mr. Kauffman could handle , Ms. Beik said .", "In 1992 , he suffered his most severe psychotic breakdown .", "He traveled around the country , telling his parents he intended to work on a political campaign .", "Instead , he spent much of the year homeless , and his medical records show that he was repeatedly admitted to hospitals .", "Mr. Kauffman returned home at the end of 1992 , but he never completely recovered , Ms. Beik said .", "He never worked again , and he rarely dated .", "In 1994 , the Social Security Administration deemed him permanently disabled and he began to receive disability payments .", "He filed for bankruptcy that year .", "According to the filing , he had $ 110 in assets -- $ 50 in cash , a $ 10 radio and $ 50 in clothes -- and about $ 10,000 in debts .", "From 1992 to 2000 , Mr. Kauffman did not suffer any psychotic breakdowns , according to his mother .", "During that period , he took lithium , a mood stabilizer commonly prescribed for people with bipolar disorder , and Stelazine , an older antipsychotic drug .", "With the help of his parents , he moved to an apartment complex that offered subsidized housing .", "But in late 1999 , a psychiatrist switched him from lithium , whichcan cause kidney damage , to Depakote , another mood stabilizer .", "In early 2000 , Mr. Kauffman stopped taking the Depakote , according to his mother .", "As the year went on , he began to give away his possessions , as he had in previous manic episodes , and became paranoid .", "During 2000 , he was repeatedly hospitalized , once after throwing cans of food out of the window of his sixth-floor apartment .", "In August , he was institutionalized for a month at a public hospital in Georgia .", "There he was put on 20 milligrams a day of Zyprexa , a relatively high dose .", "The Zyprexa , along with the Depakote , which he was still taking , stabilized his illness .", "But the drugs also left him severely sedated , hardly able to talk , his mother said .", "`` He was so tired and he slept so much , '' Ms. Beik said .", "`` He loved Shakespeare , and he was an avid reader in high school .", "At the end of his life , it was so sad , he could n't read a page . ``", "In addition , his health and hygiene deteriorated .", "In the 1990 newspaper profile , Mr. Kauffman had called himself extremely well-organized .", "But after 2000 , he became slovenly , his mother said .", "He spent most days in his apartment smoking .", "A therapist who treated Mr. 
Kauffman while he was taking Zyprexa recalls him as seeming shy and sad .", "`` He was intelligent enough to have the sense that his life had n't panned out in a normal fashion , `` the therapist said in an interview .", "`` He always reminded me of a person standing outside a house with a party going on , looking at it . ''", "The therapist spoke on the condition that her name not be used because of rules covering the confidentiality of discussions with psychiatric patients .", "As late as 2004 , Mr. Kauffman prepared a simple one-page r\u00e9sum\u00e9 of his spotty work history -- evidence that he perhaps hoped to re-enter the work force .", "He never did .", "As Mr. Kauffman 's weight increased from 2000 to 2006 , he began to suffer from other health problems , including high blood pressure .", "In December 2005 , a doctor ordered him to stop smoking , and he did .", "But in early 2006 , he began to tell his parents that he was having hallucinations of people appearing in his apartment .", "On March 16 , a psychiatrist increased his dose of Zyprexa to 30 milligrams , a very high level .", "That decision may have been a mistake , doctors say .", "Ending smoking causes the body to metabolize Zyprexa more slowly , and so Mr. Kauffman might have actually needed a lower rather than higher dose .", "A few days later , Mr. Kauffman spoke to his mother for the last time .", "By March 26 , they had been out of contact for several days .", "That was unusual , and she feared he might be in trouble .", "She drove to his apartment building in Decatur the next day and convinced the building 's manager to check Mr. Kauffman 's apartment .", "He was dead , his body already beginning to decompose .", "An autopsy paid for by his mother and conducted by a private forensic pathologist showed he had died of an irregular heartbeat -- probably , the report said , as the result of an enlarged heart caused by his history of high blood pressure .", "Ms. Beik acknowledged she can not be certain that Zyprexa caused her son 's death .", "But the weight gain it produced was most likely a contributing factor , she said .", "And she is angry that Eli Lilly played down the risks of Zyprexa .", "The company should have been more honest with doctors , as well as the millions of people who take Zyprexa , she said .", "Instead Lilly has marketed Zyprexa as safer and more effective than older drugs , despite scant evidence , psychiatrists say .", "Ms. Beik notes that Stelazine -- an older drug that is no longer widely used even though a federally financed clinical trial showed it works about as well as Zyprexa -- stabilized Mr. 
Kauffman 's illness for eight years without causing him to gain weight .", "`` He was on other drugs that worked , '' she said .", "Correction : January 6 , 2007 , Saturday A front-page article on Thursday about the side effects of the psychiatric drug Zyprexa misstated the name of a older drug tested against Zyprexa in a federally sponsored clinical trial .", "It was perphenazine -LRB- also called Trilafon -RRB- , not Stelazine ."], "summary": ["Mother suspects that weight gain from psychiatric drug Zyprexa may have contributed to heart disease that killed her 41-year-old son .", "John Eric Kauffman took drug for five years for symptoms of acute psychosis , and gained 80 pounds before being found dead last year at his home in Decator , Ga .", "Eli Lilly , which makes Zyprexa , issues statement noting that Kauffman had other medical conditions that could have led to his death .", "Claims Zyprexa is ` lifesaving drug ' .", "Weight gain is common side effect of Zyprexa .", "For many patients , side effects are severe .", "But connecting them to specific deaths can be difficult , because people with mental illness develop potentially life-threatening diseases more frequently than other adults .", "In 2002 , statistical analysis conducted for Eli Lilly found that compared with older antipsychotic drug Haldol , patients taking Zyprexa would be significantly more likely to develop heart disease .", "Whether Lilly adequately disclosed risks are central issues in thousands of product-liability lawsuits pending against company , and in state and federal investigations .", "Kauffman 's mother Millie Beik describes her son 's illness .", "Photos ."], "publication": "nyt50", "label": [16, 17, 5, 15, 0, 14, 12, 4], "tag": ["Front Page", "Health", "Business"]} -{"id": "1816345", "text": ["A laboratory that has tested most of the nation 's electronic voting systems has been temporarily barred from approving new machines after federal officials found that it was not following its quality-control procedures and could not document that it was conducting all the required tests .", "The company , Ciber Inc . of Greenwood Village , Colo . , has also come under fire from analysts hired by New York State over its plans to test new voting machines for the state .", "New York could eventually spend $ 200 million to replace its aging lever devices .", "Experts on voting systems say the Ciber problems underscore longstanding worries about lax inspections in the secretive world of voting-machine testing .", "The action by the federal Election Assistance Commission seems certain to fan growing concerns about the reliability and security of the devices .", "The commission acted last summer , but the problem was not disclosed then .", "Officials at the commission and Ciber confirmed the action in recent interviews .", "Ciber , the largest tester of the nation 's voting machine software , says it is fixing its problems and expects to gain certification soon .", "Experts say the deficiencies of the laboratory suggest that crucial features like the vote-counting software and security against hacking may not have been thoroughly tested on many machines now in use .", "`` What 's scary is that we 've been using systems in elections that Ciber had certified , and this calls into question those systems that they tested , `` said Aviel D . 
Rubin , a computer science professor at Johns Hopkins .", "Professor Rubin said that although some software bugs had shown up quickly , in other instances `` you might have to use the systems for a while before something happens . ''", "Officials at the commission and other election experts said it was essential for a laboratory to follow its quality-control procedures and document all its testing processes to instill confidence in the results .", "Commission officials said that they were evaluating the overall diligence of the laboratory and that they did not try to determine whether its weaknesses had contributed to problems with specific machines .", "Computer scientists have shown that some electronic machines now in use are vulnerable to hacking .", "Some scientists caution that even a simple software error could affect thousands of votes .", "In various places , elections have been complicated by machines that did not start , flipped votes from one candidate to another or had trouble tallying the votes .", "Until recently , the laboratories that test voting software and hardware have operated without federal scrutiny .", "Even though Washington and the states have spent billions to install the new technologies , the machine manufacturers have always paid for the tests that assess how well they work , and little has been disclosed about any flaws that were discovered .", "As soon as federal officials began a new oversight program in July , they detected the problems with Ciber .", "The commission held up its application for interim accreditation , thus barring Ciber from approving new voting systems in most states .", "Ciber , a large information technology company , also has a $ 3 million contract to help New York test proposed systems from six manufacturers .", "Nystec , a consulting firm in Rome , N.Y. , that the state hired , filed a report in late September criticizing Ciber for creating a plan to test the software security that `` did not specify any test methods or procedures for the majority of the requirements . ''", "The report said the plan did not detail how Ciber would look for bugs in the computer code or check hacking defenses .", "A spokeswoman for Ciber , Diane C . Stoner , said that the company believed that it had addressed all the problems and that it expected to receive its initial federal accreditation this month .", "Federal officials said they were evaluating the changes the company had made .", "Ms. Stoner said in a statement that although the Election Assistance Commission had found deficiencies , they `` were not because Ciber provided incomplete , inaccurate or flawed testing , but because we did not document to the E.A.C. ' s liking all of the testing that we were performing . ''", "She added that the test plan cited in New York was just a draft and that Ciber had been working with Nystec to ensure additional security testing .", "The co-chairman of the New York State Board of Elections , Douglas A . Kellner , said Ciber had tightened its testing .", "But Mr. Kellner said yesterday that Nystec and Ciber continued to haggle over the scope of the security testing .", "New York is one of the last states to upgrade its machines , and it also has created some of the strictest standards for them .", "Mr. Kellner said only two of the six bidders , Diebold Election Systems and Liberty Election Systems , seemed close to meeting all the requirements .", "Besides Ciber , two other companies , SysTest Labs of Denver and Wyle Laboratories , in El Segundo , Calif . 
, test electronic voting machines .", "Ciber , which has been testing the machines since 1997 , checks just software .", "Wyle examines hardware , and SysTest can look at both .", "The chairman of the Election Assistance Commission , Paul S . DeGregorio , said SysTest and Wyle received interim accreditations last summer .", "Mr. DeGregorio said two other laboratories had also applied to enter the field .", "Congress required greater federal oversight when it passed the Help America Vote Act of 2002 .", "Since then , the government also put up more than $ 3 billion to help states and localities buy electronic machines , to avoid a repeat of the hanging punch-card chads that caused such confusion in the 2000 presidential election .", "The commission was never given a substantial budget , and it did not finish creating the oversight program until last month .", "Until then , the laboratories had been at the heart of the system to evaluate voting machines , a system that seemed oddly cobbled together .", "While the federal government created standards for the machines , most of the states enacted laws to make them binding .", "The states also monitored the testing , and much of that work was left to a handful of current and former state election officials who volunteered their time .", "As a result , voting rights advocates and other critics have long been concerned about potential conflicts of interest , because the manufacturers hire the laboratories and largely try to ensure confidentiality .", "Michael I . Shamos , a computer scientist who examines voting machines for Pennsylvania , said about half had significant defects that the laboratories should have caught .", "Besides certifying the laboratories , the Election Assistance Commission will have three staff members and eight part-time technicians to approve test plans for each system and check the results .", "The manufacturers will be required to report mechanical breakdowns and botched tallies , and Mr. DeGregorio said those reports would be on the agency 's Web site .", "Dr. Shamos said , `` This is not the sea change that was needed . ''", "He said he was disappointed that the commission had hired some of the same people involved in the states ' monitoring program and that it never announced it had found problems with Ciber operations .", "Dr. Rubin of Johns Hopkins said the laboratories should be required to hire teams of hackers to ferret out software vulnerabilities .", "And the laboratories will still be paid by the voting machine companies , though a bill now in Congress could change that to government financing .", "A recent appearance in Sarasota , Fla . , by the SysTest Labs president , Brian T . Phillips , also raised eyebrows .", "After a Congressional election in the Sarasota area ended in a recount last month , the victorious Republican candidate hired Mr. Phillips as a consultant to monitor the state 's examination of whether there had been a malfunction in the voting machines .", "Several critics questioned whether Mr. Phillips should have taken such work , either because of its partisan nature or because it represented such a public defense of the industry .", "Mr. 
Phillips said he did not see any conflict because his laboratory had not tested the software used in Sarasota .", "And the project does not appear to have violated the ethics rules of the election commission ."], "summary": ["Federal Election Assistance Commission bars Ciber Inc from testing nation 's electronic voting systems after finding flaws in its quality-control procedures and lack of required documentation on tests it conducted .", "Ciber is largest tester of voting machine software and has tested most of nation 's electronic voting systems .", "It has also come under fire from analysts hired by New York State over its plans to test new voting machines for state .", "Experts say Ciber problems underscore longstanding worries about lax inspections in secretive world of voting-machine testing .", "Say deficiencies of Ciber laboratory suggest that crucial features like vote-counting software and security against hacking may not have been thoroughly tested on many machines now in use .", "Ciber says it is fixing its problems and expects to gain certification soon .", "Photo ."], "publication": "nyt50", "label": [8, 1, 7, 3], "tag": ["Technology", "Front Page", "U.S."]} -{"id": "1816348", "text": ["In 1997 , as the government listened in on their phone call , Adham Hassoun , a computer programmer in Broward County , Fla . , proposed a road trip to Jose Padilla , a low-wage worker there .", "The excursion to Tampa would be his treat , Mr. Hassoun said , and a chance to meet `` some nice , uh , brothers . ''", "Mr. Padilla , 36 , a Brooklyn-born Puerto Rican who had converted to Islam a few years earlier , knew Mr. Hassoun , an outspoken Palestinian , from his mosque .", "Still , according to a transcript of the conversation obtained by The New York Times , Mr. Padilla equivocated as Mr. Hassoun exhorted .", "`` We take the whole family and have a blast , '' Mr. Hassoun said .", "`` We go to , uh , our Busch Gardens , you know You wo n't regret it .", "Money-back guarantee . ``", "Mr. Padilla , laughing , suggested that they not discuss the matter over the phone .", "`` Why .", "`` Mr. Hassoun said .", "`` We 're going to Busch Gardens .", "What 's the big deal ! `` That conversation took place five years before Mr. Padilla , a United States citizen accused of plotting a '' dirty bomb `` attack against this country , was declared an enemy combatant .", "Given that Mr. Padilla and Mr. Hassoun are now criminal defendants in a terrorism conspiracy case in Miami , it sounds suspicious , as if Mr. Hassoun were proposing something more sinister than a weekend at the amusement park .", "He well may have been -- but maybe , too , he was sincere or joking about a Muslim retreat .", "Deciphering such chatter in order to construct a convincing narrative of conspiracy is a challenge .", "Yet , prosecutors say , the government will rely largely on wiretapped conversations when it puts Mr. Padilla , Mr. Hassoun , and a third defendant , Kifah Jayyousi , on trial as a `` North American support cell '' that sent money , goods and recruits abroad to assist `` global jihad . ''", "Tens of thousands of conversations were recorded .", "Some 230 phone calls form the core of the government 's case , including 21 that make reference to Mr. Padilla , prosecutors said .", "But Mr. Padilla 's voice is heard on only seven calls .", "And on those seven , which The Times obtained from a participant in the case , Mr. Padilla does not discuss violent plots .", "But this is not the version of Mr. 
Padilla -- Al Qaeda associate and would-be bomber -- that John Ashcroft , then the attorney general , unveiled in 2002 when he interrupted a trip to Moscow to trumpet Mr. Padilla 's capture .", "In the four and a half years since then , as the government tested the limits of its power to deal with terrorism outside the traditional law enforcement system , Mr. Padilla is the only accused terrorist to have gone from enemy combatant to criminal defendant .", "His criminal trial , scheduled to begin late this month , will feature none of the initial claims about violent plotting with Al Qaeda that the government cited as justification for detaining Mr. Padilla without formal charges for three and a half years .", "Those claims came from the government 's overseas interrogations of terrorism suspects , like Abu Zubaydah , which , the government said , Mr. Padilla corroborated , in part , during his own questioning in a military brig in South Carolina .", "But , constrained by strict federal rules of evidence that would prohibit or limit the use of information obtained during such interrogations , the government will make a far more circumscribed case against Mr. Padilla in court , effectively demoting him from Al Qaeda 's dirty bomber to foot soldier in a somewhat nebulous conspiracy .", "The initial dirty bomb accusation did not disappear .", "It quietly resurfaced in Guant\u00e1namo Bay , Cuba .", "The government filed the dirty bomb charges against Mr. Padilla 's supposed accomplice , an Ethiopian-born detainee , at about the same time it indicted Mr. Padilla on relatively lesser offenses in criminal court .", "A Change in Strategy The change in Mr. Padilla 's status , from enemy combatant to criminal defendant , was abrupt .", "It came late in 2005 as the Supreme Court was weighing whether to take up the legality of his military detention and the Bush administration , by filing criminal charges , pre-empted its review .", "In a way , Mr. Padilla 's prosecution was a legal maneuver that kept the issue of his detention without charges out of the Supreme Court .", "After apprehending him at O'Hare International Airport in Chicago in May 2002 , the Bush administration made a choice : to detain Mr. Padilla militarily , in order to thwart further plotting , rather than to follow him in order to gather evidence that might serve a criminal prosecution .", "Now that Mr. Padilla has ended up a criminal defendant after all , the prosecution 's case does not fully reflect the Bush administration 's view of who he is or what he did .", "Senior government officials have said publicly that Mr. Padilla provided self-incriminating information during interrogations , admitting , they said , to undergoing basic terrorist training , to accepting an assignment to blow up apartment buildings in the United States , and to attending a farewell dinner with Khaled Sheikh Mohammed , the suspected master planner of the Sept . 11 attacks , before he flew to Chicago in 2002 .", "But any confessions by Mr. 
Padilla while he was detained without charges and denied access to counsel -- whether or not he was mistreated , as his lawyers claim -- would not be admissible in court .", "And it is unlikely that information obtained during the harsh questioning of Al Qaeda detainees would be admissible , either -- and , further , the government is disinclined to expose sensitive intelligence or invite further scrutiny of secret jails overseas .", "Probably as a consequence , the current criminal case zeroes in on what the government sees as an earlier stage of Mr. Padilla 's involvement with terrorism .", "It focuses primarily on the other defendants ' support during the 1990s for Muslim struggles overseas , especially in Bosnia , Kosovo and Chechnya .", "Mr. Padilla , who was appended to their pre-existing case , in which he had been an unnamed co-conspirator , is depicted as their recruit .", "Although prosecutors have declined to discuss the government 's strategy , their filings and statements in court provide a picture of the case they are expected to present at trial .", "The most tangible allegation against Mr. Padilla is that in 2000 he filled out , under an alias , an Arab-language application to attend a terrorist training camp .", "That application is expected to be offered into evidence alongside the wiretapped conversations , but Mr. Padilla 's lawyers say they will contest its admissibility , challenging the government 's assertion that the `` mujahideen data form '' belonged to their client .", "Robert Chesney , a specialist in national security law at Wake Forest University , called the prosecution a pragmatic one , analogous to `` going after Al Capone on tax evasion . ''", "But Deborah Pearlstein , a lawyer with Human Rights First who has consulted with Mr. Padilla 's defense , said that his will never be an ordinary , pragmatic prosecution .", "`` If Jose Padilla were from Day 1 just charged and tried , then maybe , '' she said .", "`` But this is a case that comes after three and a half years of the most gross deprivation of human rights that we 've seen in this country for a long time . ``", "Further , Ms. Pearlstein noted , the government has reserved the option , should the prosecution fail , of returning Mr. Padilla to the military brig .", "This , she said , `` casts a shadow '' over the current prosecution .", "The Bush administration 's military case against Binyam Mohamed , 28 , the Ethiopian detainee at Guant\u00e1namo , put the current proceedings in a different light , too .", "In December 2005 , Mr. Mohamed was referred to the military commission in Guant\u00e1namo on accusations that he conspired with Mr. Padilla on the dirty bomb plot .", "It was little noticed at the time .", "But accusations against Mr. Padilla that are nowhere to be found in the indictment against him filled the pages of Mr. Mohamed 's charging sheet , with Mr. Padilla repeatedly identified by name .", "The sheet referred to the two men meeting in Pakistan after Sept . 11 , 2001 , studying how to build an improvised dirty bomb , discussing the feasibility of a dirty bomb attack with Al Qaeda officials and agreeing to undertake the mission to blow up buildings .", "Mr. Mohamed 's lawyer , Clive Stafford Smith , said that these charges were based on a forced confession by Mr. Mohamed , who , he said , was tortured overseas into admitting to a story that was fed to him .", "`` Binyam was told all along that his job was to be a witness against Padilla , Abu Zubaydah and Khaled Sheikh Mohammed , '' Mr. 
Stafford Smith said , adding that his client `` has no conscience knowledge that he ever met '' Mr. Padilla .", "The charges against Mr. Mohamed and other Guant\u00e1namo detainees who were headed for prosecution there have been suspended temporarily as a result of the Military Commissions Act passed by Congress in October .", "Those charges are likely to be reinstated , a Pentagon official said yesterday .", "That Mr. Mohamed faced dirty bomb charges and Mr. Padilla does not speaks to the central difference between being a terrorism suspect in Guant\u00e1namo and a criminal defendant charged with terrorism offenses in the United States .", "In Guant\u00e1namo , the military commission system that deals with foreign-born terrorism suspects is expected to allow , with some exceptions , the use of information obtained through coercion .", "`` Federal court rules are restrictive , '' Professor Chesney of Wake Forest University School of Law said .", "`` The very essence of why they 're trying to have that separate military system was to create rules to use information that is deemed by the intelligence community to be trustworthy but would n't make it under the federal rules of evidence . ``", "David Cole , a professor of law at Georgetown University and author of books on terrorism and civil liberties , sees the difference between the two systems more critically : `` What this says clearly is that they feel that they can get away with using tainted evidence in the military commission system that they ca n't use in the criminal court system . ``", "The Wiretapping Case The criminal case against Mr. Padilla has its roots in the prosecution of Sheikh Omer Abdel Rahman , the blind Egyptian cleric who was convicted in 1995 of conspiring to blow up the United Nations and other New York landmarks .", "In the early 1990s , Sheikh Rahman 's telephone was tapped , and Mr. Hassoun and Dr. Jayyousi , a Jordanian-born American citizen who holds a doctorate in civil engineering , came to the government 's attention through phone calls to or from his line .", "Then the government , under the Foreign Intelligence Surveillance Act , began to eavesdrop on them , which eventually pulled Mr. Padilla into their net , too .", "The government presents the three defendants as `` joined at the hip , '' as one prosecutor put it in a hearing last summer .", "But Judge Marcia G . Cooke of Federal District Court , noting that Mr. Padilla was appended to a case well under way , asked the government , `` If they are so joined at the hip , why is Mr. Padilla so late to the dance .", "`` Dr. Jayyousi , a former school system administrator in both Detroit and Washington , D.C. , never met Mr. Padilla , his lawyer , William Swor , said .", "It is Mr. Hassoun , the government said , who recruited Mr. Padilla .", "But both Mr. Hassoun 's and Mr. Padilla 's lawyers deny that Mr. Padilla was recruited .", "Seven Taped Phone Calls Mr. Padilla 's lawyers and relatives say that he left South Florida for Egypt in September 1998 on a spiritual journey .", "A former juvenile offender , he converted to Islam as part of an effort to straighten out his life , they say .", "His mosque in Fort Lauderdale sponsored his travel , he told friends , relatives and F.B.I. agents who interviewed him in 2002 .", "Mr. Hassoun belonged to that mosque , and the telephone transcripts seem to indicate that Mr. Hassoun helped , at the least , with Mr. Padilla 's travel plans .", "The seven taped phone calls that bear Mr. 
Padilla 's voice involve conversations with Mr. Hassoun from 1997 to 2000 .", "On those calls , Mr. Padilla , unlike some of the other defendants , does not employ what the government says is coded language .", "According to the government , other defendants refer to their jihad-related plans as `` getting some fresh air , '' `` participating in tourism , '' `` opening up a market , '' `` playing football , '' and so on .", "This leads to silly-sounding exchanges where `` the brothers '' discuss going on `` picnics '' in order `` to smell fresh air and to eat cheese '' or using $ 3,500 to buy `` zucchini . ''", "In contrast , Mr. Padilla 's seven conversations with Mr. Hassoun range from straightforward -- Mr. Hassoun tells Mr. Padilla that his grandmother has died .", "Mr. Padilla tells Mr. Hassoun that he has found himself an 18-year-old Egyptian bride who is willing to wear a veil -- to vaguely suggestive or just odd .", "In one phone call , the two men talked about a dream .", "It appeared to be the dream that Mr. Padilla , according to his relatives , cites as having played a crucial role in inspiring him to convert to Islam : the vision of a man in a turban , surrounded by the swirling dust of a desert .", "Mr. Hassoun brought it up and told Mr. Padilla that he himself had experienced the same vision .", "`` What do you mean you saw the same dream .", "`` Mr. Padilla asked .", "`` I saw the dream of the uh person with the turban , '' Mr. Hassoun said .", "Mr. Hassoun explained how , in his dream , the turban was wrongly wrapped and so he thought the man might be a spy , in which case , he was prepared `` to split his body apart . ''", "But then , he said , he understood that `` the brother was a good one . ''", "`` Yeah .", "`` Mr. Padilla said .", "In three of the seven conversations , Mr. Padilla made statements that the government has identified as `` overt acts '' in furtherance of the accused conspiracy .", "In the first , Mr. Hassoun asked , `` You 're ready , right .", "`` and Mr. Padilla said , '' God willing , brother , it 's going to happen soon . ``", "That was the summer of 1997 , a year before Mr. Padilla left South Florida for Egypt .", "In the second , Mr. Padilla told Mr. Hassoun , during a 1999 conversation from Egypt , that he had asked his ex-wife in the United States to arrange for him to receive an army jacket , a book bag and a sleeping bag , supplies that he had requested because `` there was a rumor here that the door was open somewhere . ''", "In the third , Mr. Padilla told Mr. Hassoun in April 2000 , that he would need a recommendation to `` connect me with the good brothers , with the right faith '' if he were to travel to Yemen .", "Prosecutors say Mr. Padilla is mentioned , although by his Muslim name Ibrahim or by another alias , on 21 additional tapes .", "One of them refers to Ibrahim as being `` in the area of Usama , '' which the government takes to mean that he was near Osama bin Laden .", "But Mr. Padilla 's lawyers contest that interpretation .", "`` That is just nonsensical , Your Honor , that these men who for years , according to the government , have been talking in code all of a sudden are going to throw Osama bin Laden 's name around , `` Michael Caruso , a federal public defender , said in court .", "Mr. Padilla has pleaded not guilty .", "But before his case goes before a jury , his fitness to stand trial will be evaluated .", "On the basis of Mr. 
Padilla 's lawyers ' assertion that he is mentally damaged as a result of his prolonged isolation and his interrogation in the brig , Judge Cooke has ordered a psychiatric evaluation by a Bureau of Prisons doctor to be completed this week .", "Friday in The Times : The only person on the American mainland still held asan enemy combatant .", "Correction : January 5 , 2007 , Friday A front-page article yesterday about wiretapped conversations in the case against Jose Padilla , who is accused of plotting a `` dirty bomb '' attack , omitted two lines at the continuation in some copies .", "The sentence should have read : `` In the four and a half years since then , as the government tested the limits of its power to deal with terrorism outside the traditional law enforcement system , Mr. Padilla is the only accused terrorist to have gone from enemy combatant to criminal defendant . '' ."], "summary": ["Prosecutors say government will rely largely on wiretapped conversations when it puts Jose Padilla , Adham Hassoun and Kifah Jayyousi on trial as ` North American support cell ' that sent money , goods and recruits abroad to assist ` global jihad ' .", "Say 230 recorded conversations form core of government 's case , including 21 that make reference to Padilla .", "But Padilla , who is American citizen , does not discuss violent plots on any of seven calls on which his voice is heard .", "Padilla 's criminal trial will feature none of initial claims about violent plotting with Al Qaeda that then-Atty Gen John Ashcroft cited as justification for detaining him without formal charges for three and half years .", "Those claims came from government 's overseas interrogations of terrorism suspects like Abu Zubaydah .", "Government claims that Padilla corroborated them , in part , during his own questioning , but strict federal rules of evidence prohibit or limit use of information obtained during such interrogations .", "Government will make far more circumscribed case against Padilla in court , effectively demoting him from Al Qaeda 's dirty bomber to foot soldier in somewhat nebulous conspiracy .", "Case recalled .", "Photos ."], "publication": "nyt50", "label": [24, 15, 22, 23, 17, 18], "tag": ["Front Page", "U.S."]} -{"id": "1816363", "text": ["In May , in a nearly empty basement ballroom at the Hotel du Pont in Wilmington , Del . , Robert L . Nardelli , the chairman and chief executive of Home Depot , stood between huge timers , intended to limit questions from the handful of shareholders present .", "After dismissing questions about his compensation or the independence of the board , Mr. Nardelli abruptly ended the meeting after only 30 minutes .", "Time ran out on Mr. Nardelli on Tuesday , after the board , at a hastily arranged meeting , decided that he should go -- with a $ 210 million exit package .", "It was a surprising turnaround for Home Depot 's board , which had publicly supported Mr. Nardelli as recently as two weeks ago even as questions about his compensation , business strategy and autocratic management style mounted .", "What ultimately divided the board and its chief executive appeared to have been Mr. Nardelli 's compensation .", "Over six years as chief executive , he had taken home $ 64 million and was on track to earn hundreds of millions more .", "In recent months , people close to the board say , directors sought to rein in the compensation under the terms of his current contract .", "Mr. 
Nardelli , these people said , had challenged that effort .", "`` He was brought down by his compensation , '' a person close to the board said .", "`` It was an erosion of relationships over several months .", "He lost the confidence of the board . ``", "In the end , the debate over cutting Mr. Nardelli 's pay may not have revolved around large numbers .", "It would have been possible , for example , for the board to give Mr. Nardelli a minimum bonus of $ 3 million under the terms of his contract , rather than the $ 7 million he received in 2005 .", "A director , however , speaking on the condition of anonymity , insisted that compensation was not the reason for Mr. Nardelli 's departure and that there was no `` smoking gun '' behind his resignation .", "The fall of Mr. Nardelli is the latest illustration of chief executives and boards coming under pressure when rich executive pay yields few apparent returns in the form of a rising stock price or improving profits .", "Outsize executive pay has increasingly become a flash point for investors , lawmakers and regulators .", "Still , critics of executive pay practices were incensed by Mr. Nardelli 's gold-plated exit , the terms of which were largely laid out in his 2000 contract .", "The pay package includes severance and retirement pay , restricted stock , stock options and other forms of deferred compensation .", "The package is `` further confirmation of the need to deal with a pattern of C.E.O. pay that appears to be out of control , '' said Representative Barney Frank , Democrat of Massachusetts , who will be chairman of the House Financial Services Committee .", "When he was hired , Mr. Nardelli was a prized former manager under John F . Welch Jr . at General Electric , running its $ 15 billion power systems division .", "Since he became chief executive , however , Home Depot 's stock price has languished and the company has lost market share to its chief rival , Lowe 's .", "But the seeds of Mr. Nardelli 's ouster were sown that day in May , according to several people close to the board .", "The angry outcry over how Mr. Nardelli conducted the annual meeting was compounded by the humiliation board members felt because they had been persuaded to stay away from the May meeting , departing from usual practice .", "That discontent grew in recent months as large shareholders threatened a revolt at Home Depot 's annual meeting this spring .", "Yesterday , Home Depot , the nation 's largest home-improvement chain , named Frank Blake as its new chairman and chief executive .", "Mr. Blake , another former General Electric executive , joined Home Depot in 2002 and helped develop business strategy at the company .", "Mr. Blake already appears to be distancing himself from Mr. Nardelli and his rigid style .", "In an address to Home Depot employees over an internal television system yesterday , Mr. Blake advised them to `` lighten up '' and `` have fun again , '' according to one worker .", "Mr. Blake 's current employment agreement is in force , Home Depot said , and the board is working out details of a new employment contract .", "Shares of Home Depot rose 2.3 percent , or 91 cents , to $ 41.07 yesterday .", "Messages for Mr. Nardelli were not returned yesterday .", "For Kenneth G . Langone , a director and co-founder of Home Depot , Mr. Nardelli 's resignation carries a particular sting .", "A onetime member of the G.E. board , Mr. Langone played an instrumental role in bringing Mr. 
Nardelli to Home Depot .", "And when it became clear that Mr. Nardelli would come aboard only as chief executive , he persuaded his old friend , Arthur Blank , then the chief executive , to step aside .", "Mr. Nardelli was the first chief executive plucked from outside of Home Depot since its founding in 1978 .", "He immediately drew fire over his lack of retail experience .", "Inside the company , his hands -on , severe management style rubbed against a more casual culture that gave store managers autonomy .", "By contrast , Mr. Nardelli was an obsessive workaholic who rose at 4 a.m. , logged 14-hour days and routinely worked through the weekend , splitting his time between Home Depot 's headquarters in Atlanta and shuttling from store-to-store in a chauffeured black Chevy Suburban .", "He earned the nickname `` general '' -- and , as if to underscore the point , he hired dozens of former military officers to run stores , a recruitment strategy he introduced at G.E.", "Mr. Nardelli never won over the chain 's store managers , current and former company executives said .", "After touring stores , he frequently inflamed managers by sending critical e-mail messages about cluttered aisles and poorly lighted displays .", "The result was a high level of manager turnover .", "In the last 18 months , three senior merchandise executives -- Tom Taylor , John Costello and Carl C . Liebert III -- left the company , leaving Mr. Nardelli with very few experienced retail executives in his top ranks .", "`` The long-term retailers never felt he integrated himself into the retailing piece of the business , '' said Harold Reiter , chairman and chief executive with Herbert Mines Associates , an executive recruitment firm .", "Outside of the company , many shareholders , angry at how the annual meeting in May was conducted , struggled to embrace the bold strategic course Mr. Nardelli was setting for Home Depot that steered it further away from its bread-and-butter business of selling hammers and nails to consumers .", "For instance , Mr. Nardelli was betting big on Home Depot Supply , a new unit that supplied professional contractors with lumber , cement and pipes .", "He spent $ 7 billion in recent years to buy about 40 companies , turning Home Depot Supply into a $ 12 billion business , whose sales now account for 13 percent of the company 's $ 90 billion in revenue .", "Mr. Nardelli also planned to move aggressively overseas , particularly into China , where the company acquired a retail chain in December .", "During his tenure , Home Depot doubled its sales and sharply increased its earnings per share .", "Yet while the board backed his vision for the company , shareholders were less enthused .", "The stock has barely budged over his tenure .", "Because of that lackluster performance , critics sought to make Mr. Nardelli Exhibit A for chief executives who receive millions and millions of dollars in pay for weak performance .", "Many of those critics took aim at Home Depot 's board , which they argued was cozy and tight-knit , with close ties between one another and with G.E. Sitting at the center of Home Depot 's board is Mr. Langone , who has been the longstanding chairmanof the board 's nominating committee and who some critics say has loaded up the board with many of his friends and former business associates .", "A proponent of paying chief executives well for their performance , Mr. Langone has long supported Mr. 
Nardelli 's generous contract .", "As chairman of the compensation committee at the New York Stock Exchange , Mr. Langone also signed off on Richard A . Grasso 's largest pay days as chairman and he remains embroiled in a civil suit over Mr. Grasso 's $ 139.5 million pay package .", "Mr. Langone did not return calls for comment .", "The lawyer who represented the New York exchange was the same lawyer who represented the board of Home Depot : Martin Lipton , the takeover lawyer who founded Wachtell , Lipton , Rosen & Katz .", "Mr. Lipton , a longtime friend of Mr. Langone , was originally hired to help defend Mr. Nardelli and the board against Relational Investors , an owner of Home Depot shares that has challenged Mr. Nardelli 's strategies in regulatory filings .", "Mr. Lipton has often found himself in the middle of sticky situations where the chief executive is ousted and paid an enormous golden parachute .", "Mr. Lipton also worked for Morgan Stanley 's board when Phillip Purcell was fired .", "He did the same for Disney when Michael D . Eisner was terminated .", "While many of Home Depot 's critics were elated that Mr. Nardelli was leaving the company , they expressed shock at the size of his departure pay package .", "`` We 're aghast at the level of compensation that Nardelli is walking away with -- this is money directly out of shareholders ' pockets , `` said Richard Ferlauto , the director of pension investment policy for the American Federation of State , County and Municipal Employees , whose pension fund owns shares in the company .", "Furthermore , if Home Depot is trying to distance itself from Mr. Nardelli , Mr. Blake is a strange choice , some Wall Street analysts said .", "Like Mr. Nardelli , Mr. Blake has little retail experience .", "He spent much of his career at G.E. and helped develop the Home Depot supply business .", "`` There will be some pushback to his appointment , '' said Stephen C . Chick , an analyst with J . P . Morgan Securities .", "Analysts said the performance of Home Depot 's retail business , which has lagged behind that of its main rival , Lowe 's , for several years , lay at the heart of Mr. Nardelli 's troubles .", "Mr. Nardelli 's original strategy of sharply cutting costs -- by reducing employees ' hours and hiring more part-timers -- alienated loyal customers , who relied on experienced store staff for advice .", "`` Home Depot was managing the company for Wall Street 's earnings expectations and that came at the expense of the investment needed in their retail business , `` Mr. Chick said .", "Mr. Nardelli `` has to take the blame for that , '' Mr. Chick said .", "Far from cooling down shareholders angry over Mr. Nardelli 's pay and other corporate governance issues , Mr. Nardelli 's departure seems to have instead inflamed them .", "`` It 's not enough to shuffle the deck chairs .", "They have n't changed their strategy and there is n't any fresh blood on the board , `` said Ralph Whitworth , who heads Relational Investors , which owns about $ 1 billion of Home Depot 's stock , or about a 1.2 percent stake .", "About three weeks ago , Mr. Whitworth notified Mr. Nardelli in a letter that he was intending to call on shareholders to create a committee to study Home Depot 's direction and performance and planned to submit at least two candidates for the board .", "Yesterday , Mr. 
Whitworth said those plans had not changed .", "`` There 's clearly some big , big corporate governance issues here and I do n't expect that to change without fresh blood on the board , `` he said .", "A CHAIRMAN 'S FALL Correction : January 6 , 2007 , Saturday A picture in Business Day on Thursday of the home of Robert L . Nardelli , who was ousted as chairman and chief executive of Home Depot , carried an incorrect credit .", "It was by Chris Rank of Bloomberg News , not by Home Depot ."], "summary": ["Home Depot board dismisses chairman and chief executive Robert L Nardelli with $ 210 million exit package .", "Surprise move comes only two weeks after board publicly supported him amid mounting questions about his compensation , business strategy and autocratic management style .", "Nardelli reportedly challenged efforts of directors who sought in recent months to rein in his compensation under terms of his current contract .", "Over six years as chief executive , he has taken home $ 64 million and was on track to earn hundreds of millions more .", "His fall is latest illustration of chief executives and boards coming under pressure when rich executive pay yields few apparent returns in form of rising stock price or improving profits .", "Outsize executive pay has increasingly become flash point for investors , lawmakers and regulators .", "In Nardelli 's case , critics are also incensed by his gold-plated exit .", "Photos ."], "publication": "nyt50", "label": [14, 5, 3, 15, 6], "tag": ["Front Page", "Business"]} -{"id": "1816366", "text": ["Drivers crossing the George Washington Bridge must contend with 18-wheelers , infuriating delays and noxious exhaust .", "Soon , they will also have advertisements from Geico .", "The Port Authority of New York and New Jersey is expected to announce an arrangement with Geico , the auto insurance giant , that will include the posting of a huge billboard on top of the toll plaza in Fort Lee , N.J. , that says `` Geico Drive Safely . ''", "Drivers will also see Geico signs with the company 's mascot , a gecko , on the tollbooths and electronic signs on the approach roads .", "Geico 's message will also be integrated into the Port Authority 's direct mailings and its Web site , and costumed gecko mascots will appear at Port Authority bus stations .", "The arrangement , first reported in The Wall Street Journal , will provide the agency with $ 3.2 million over two years .", "It is the first of its kind for the Port Authority , which has been trying to find new sources of revenue to offset rising costs , said Stephen Sigmund , the agency 's chief of public and government affairs .", "Geico is not the first company to think about buying a bridge -- or at least the advertising rights to one .", "And other public agencies have been exploring unconventional ways to bring in more money , even if it means toying with long-held taboos about commercializing public spaces .", "The Golden Gate Bridge , Highway and Transportation District , for instance , is exploring how to sell sponsorships in San Francisco .", "Mr. 
Sigmund said the Port Authority had been looking for the right advertisers since at least 2005 , adding that Geico was a natural since it is one of the country 's largest and best-known auto insurers .", "In a world where consumers can fast-forward through television advertisements , subscribe to commercial-free satellite radio and block pop-up ads on the Internet , companies like Geico are continually looking for fresh ways to get in front of consumers .", "In that context , getting in front of about 57 million eastbound drivers who cross the George Washington Bridge each year could amount to a gold mine , especially because it is not uncommon for cars to spend 15 or more minutes creeping toward the toll plaza in New Jersey .", "`` This is more than just eyeballs .", "It 's about reinforcing a message about a bridge that people have an endearing feeling to , `` said Drew Sheinman , chief executive of Axcess Partners Worldwide , a marketing company hired by the Port Authority to develop new advertising .", "Mr. Sheinman said that under the agreement , no other signs can compete with Geico 's at the bridge , which may allay fears that the toll plaza will become overrun with ads .", "`` This is not going to be left field at Yankee Stadium , '' he said .", "Still , the new signs could irritate drivers who view their cars as a refuge from media messages , as well as preservationists who see the bridge , a landmark , as unfit for commercial advertisements -- even if they appear only on the tollbooths .", "`` It 's a city icon that should not be tampered with in this way , `` said Vanessa Gruen , of the streetscape committee at the Municipal Art Society , which deals with street and sidewalk advertising .", "`` It 's not really worth the amount of money they 'll get out of it to block the view of the span . ``", "Brand experts also questioned whether Geico , which has won plaudits for its imaginative advertising campaigns , may turn off customers and dilute its name by making it too prominent .", "`` Since advertisers have generally lost some degree of control over what messages get into people 's lives , consumers resent getting them foisted on them , `` said Robert Passikoff , the president of Brand Keys , a brand consultant .", "But , Mr. Passikoff added , consumers may warm to the Geico ads as well as others on prominent public buildings and bridges if they know the proceeds are being used to improve service .", "The Port Authority has set a goal of generating $ 100 million in advertising and sponsorships , about three times more than it currently brings in , Mr. Sigmund said .", "He said , though , that the new advertising must match the surroundings and not jeopardize safety .", "Last month , for instance , the Port Authority announced an agreement with Samsung , the electronics giant , to install power outlets at Kennedy International Airport that travelers with laptop computers will be able to use ."], "summary": ["Auto insurer Geico will sign advertising deal with Port Authority of New York and New Jersey to post billboard at toll plaza on New Jersey side of George Washington Bridge and electronic signs on approach roads .", "Deal is worth $ 3.2 million over two years .", "It is agency 's first attempt to find new sources of revenue .", "Agreement permits no other signs to compete with Geico 's .", "Photo ."], "publication": "nyt50", "label": [2, 5, 3], "tag": ["New York and Region"]} -{"id": "1816367", "text": ["Gov . 
Eliot Spitzer vowed yesterday to make health insurance available to all children and enroll all eligible adults in Medicaid .", "If carried out fully , his pledges would cut the number of uninsured New Yorkers in half .", "But those promises may not turn out to be as ambitious as described .", "How far they go will depend on details the governor and his staff said would not be made public until late this month .", "At their most far-reaching , the governor 's plans would give New York the lowest percentage of uninsured people in the country , and would mark a sharp increase in coverage for the poor .", "Even so , such promises would not be as hard or as expensive to carry out as most people would guess , according to people who study the politics and economics of health care .", "People who have no health insurance tend to be young and healthy , cheaper to care for and cheaper to cover than those who are currently covered .", "As a result , the United Hospital Fund , a policy research group , has estimated that covering all of the uninsured in New York would cost $ 4.1 billion a year -- less than one-tenth of what the state already spends on Medicaid , the health plan for the poor .", "Most of the uninsured are also poor , and already qualify for Medicaid or a similar program , Child Health Plus , so experts said that easier enrollment and more aggressive recruitment in those programs would almost certainly be at the center of Mr. Spitzer 's plans .", "The biggest obstacle , they said , could be new federal requirements that people signing up for Medicaid produce birth certificates or other proof of citizenship .", "In his first address to the Legislature , Mr. Spitzer said , `` We will introduce a budget that in the very first year , guarantees access to health insurance for all of New York 's 500,000 uninsured children . ``", "But to guarantee access to insurance and to guarantee actual coverage could be two very different things .", "Many people have access to government programs , at least in theory , but do not take advantage of them .", "Again , the governor did not say precisely what he meant by his promise .", "In New York , children qualify for free health insurance through either Medicaid or Child Health Plus , in households that earn up to 160 percent of the federal poverty line -- about $ 21,000 a year for a family of four .", "Above that limit , up to 250 percent of the poverty line -- about $ 33,000 for a family of four -- they are eligible for Child Health Plus for a premium of $ 15 a month or less .", "About two-thirds of New York 's uninsured children already qualify for one of the programs on those terms .", "Danielle Holahan , senior health policy analyst at the United Hospital Fund , said , `` That , plus the fact that children are pretty cheap to insure , '' suggests that getting most of them enrolled `` is something we could tackle pretty easily . ''", "State law also says that any child , no matter how high the family income , can be enrolled in Child Health Plus by paying a premium that averages less than $ 200 a month , much less than a commercial insurance policy .", "Officials in the Pataki administration argued that that means New York already guarantees access to coverage for all children .", "Ms. Holahan said the state could draw middle-income children into Child Health Plus by putting the premiums on a sliding scale as incomes rise , rather than having an abrupt jump up from $ 15 a month .", "Mr. 
Spitzer said that within four years , `` we will enroll the 900,000 Medicaid-eligible adults '' by simplifying the paperwork .", "That promise could be tougher to keep , experts said , because he is promising not mere access but actual coverage .", "Each year , about one-third of the people on Medicaid in New York fail to renew their enrollment , though they remain eligible , so policy analysts say that an easier renewal process would keep some people enrolled .", "They also proposed tying Medicaid enrollment to food stamps , welfare , schools and other public services .", "But some people are reluctant to sign up , particularly immigrants who mistrust the government or just can not be bothered .", "`` The only way you ever get to really universal coverage is with some sort of individual mandate requiring people to enroll , '' said Diane Rowland , executive director of the Kaiser Commission on Medicaid and the Uninsured .", "Almost 14 percent of New Yorkers are uninsured , according to the Census Bureau , below the national average of nearly 16 percent but well above Minnesota , with the lowest rate , less than 9 percent .", "If Mr. Spitzer succeeds in insuring all children and all people eligible for Medicaid , the number of uninsured in New York would drop below 7 percent .", "Medicaid spends more than $ 45 billion a year to insure 4.2 million New Yorkers , but most of those costs are spent on older and disabled patients .", "An estimated 1.2 million people qualify for Medicaid or Child Health Plus but are not enrolled .", "They are disproportionately healthy young adults and children .", "`` Low-income people who have serious health needs end up in a hospital emergency room , and they enroll in Medicaid at that time , if they 're not enrolled already , `` Ms. Rowland said .", "`` So the remaining uninsured tend to be healthier than those who are insured . 
''", "CHANGEOVER IN ALBANY ."], "summary": ["New York Gov Eliot Spitzer vows to make health insurance available to all children and enroll all eligible adults in Medicaid .", "United Hospital Fund , policy research group , estimates that covering all uninsured people in state would cost $ 4.1 billion annually , less than one-tenth what is already spent on Medicaid .", "Health care experts say Spitzer plan will include easier enrollment and more aggressive recruitment for Medicaid and Child Health Plus program .", "Say biggest obstacle would be federal requirement that people seeking Medicaid produce proof of citizenship ."], "publication": "nyt50", "label": [7, 0, 9], "tag": ["Health", "New York and Region"]} -{"id": "1816369", "text": ["The 20-year building boom on Staten Island , long the city 's fastest-growing borough , is decelerating drastically , thanks largely to a reining-in of the island 's freewheeling zoning laws , officials say .", "According to city figures released yesterday , permits for new buildings plunged by 43 percent last year .", "So substantial was the decline that Staten Island almost single-handedly accounted for a 10 percent dip in building permits citywide last year , which was the first drop in the city in a decade .", "In the other four boroughs , permits fell by 1.6 percent , the figures from the City Buildings Department show .", "Of course , the real estate market has been cooling all over the city and the country , but city officials and real estate professionals attributed the disproportionate chill on Staten Island to zoning and tax changes .", "New zoning rules passed in 2004 , which were aimed at stemming a flood of town house developments that residents complained were disfiguring communities and overloading streets with traffic , require larger lots and yards and more parking .", "A tax abatement for new one - and two-family homes was also rescinded last year , further discouraging construction .", "`` It 's one of these perfect-storm situations , `` said Sandy Krueger , chief executive officer of the Staten Island Board of Realtors .", "`` Everything kind of came together at once : the downzoning , the tax abatement removal and the change in the market .", "I think everybody is sort of taking a breath here and waiting this out a little bit to see what happens . ``", "Last year , 826 permits for new buildings were issued in Staten Island , compared with 1,441 in 2005 and nearly 2,000 in 2003 .", "Permits for new units of housing also declined more than 40 percent through November , compared with a 1 percent drop elsewhere in the city , according to census statistics .", "The downturn was broadly welcomed in a place where for years development appeared to proceed with pile driver and shoehorn .", "A one-acre parcel where 18 units of housing could once be built is limited to 7 .", "`` I do n't want Staten Island to stop growing , `` said the borough president , James P . Molinaro , who helped lead the drive for the zoning changes , '' but I want proper growth . 
``", "The decline in new housing permits issued on Staten Island affected multifamily dwellings as well as smaller buildings .", "Permits for one-family homes fell 49 percent .", "City officials pointed to several developments planned for the island that they said would be more carefully managed , including a 700-unit waterfront complex in Stapleton , north of the Verrazano-Narrows Bridge , which is expected to bring new life to a vacant port .", "As for the slight citywide drop in new building and housing permits outside of Staten Island , the city 's deputy mayor for economic development , Daniel L . Doctoroff , said he saw little cause for concern .", "New housing construction is slowing dramatically across the country , he said , and total construction spending in New York City is increasing as builders focus on commercial projects .", "`` Particularly relative to the overall national housing market , '' Mr. Doctoroff said , `` and taking into account the fact that through much of the year interest rates were rising , I think by every indicator the market here was very resilient . ''", "Frank Naso , one of Staten Island 's biggest builders and the former president of the Building Industry Association of New York City , a trade group , said that zoning changes had been long overdue .", "`` We needed some kind of reform , '' he said .", "`` We 'll continue to build , but instead of building 10 you might build 6 .", "It 's not the end of the world . ``", "Mr. Naso was more upset about the loss of the tax abatement , which cut a homeowner 's property taxes for the first eight years after purchase and allowed many first-time buyers into the market .", "Jeff Gallo , an opponent of sprawl and a member of the Preservation League of Staten Island , who lives in a turn-of-the-century Victorian house in Stapleton , said he had noticed a slight calming in the air .", "`` There 's a feeling out in Staten Island now that you just ca n't get away with everything , `` said Mr. Gallo , a real estate broker who works in Brooklyn .", "Ever since the Verrazano-Narrows bridge was completed in 1964 , linking Staten Island to the rest of New York City , people have flocked to the island , developers have worked frenetically to accommodate them , and longtime residents have bemoaned the disappearance of open space and the strain on the island 's infrastructure .", "During the 1990s , Staten Island 's population grew by 17 percent and civic discontent skyrocketed .", "Eventually the clamor brought results .", "Before the zoning change , you were permitted to build a house with a 4-foot backyard , `` Mr. Molinaro said .", "`` Now you have to have a minimum of 30 feet .", "You were able to buy a house on Staten Island where your bottom step was the street -- there was no sidewalk .", "That was legal ! You ca n't do it no more . 
``", "Staten Island , still by far the least-populous borough in the city , was the fastest-growing county in the state , according to the 2000 census .", "It is not even the fastest-growing borough in the city anymore , having fallen into a virtual tie with Manhattan .", "From 2003 to 2005 , the population in both boroughs grew by about 0.9 percent , according to census estimates .", "Helen Siegel , 52 , a schoolteacher who lives in New Springville , in the center of the island , thought back yesterday to the days when cows and pigs from nearby farms used to stroll across her yard .", "`` It might have been better had they done this sooner , '' she said .", "`` It 's all built up on every spare inch of land already . `` ."], "summary": ["New York City officials say 20-year building boom on Staten Island has drastically declined , with new building permits falling by 43 percent in 2006 .", "Say Staten Island decline accounted for 10 percent drop in building permits citywide .", "Declines in other boroughs noted .", "Officials attribute Staten Island decline to zoning and tax abatement changes .", "Graph ."], "publication": "nyt50", "label": [2, 0, 1], "tag": ["New York and Region"]} -{"id": "1816482", "text": ["PATTI LuPONE was playing Mrs. Lovett in `` Sweeney Todd '' early last year when she looked out from the stage of the Eugene O'Neill Theater and saw something she had never seen before in a Broadway theater : a popcorn war .", "`` There were two people in the front row sharing a bag of popcorn , '' she said recently .", "`` When she stuck her hand in , he immediately stuck his hand in .", "They were wrestling for the last few kernels of popcorn while we were performing .", "Everyone around them was distracted . ``", "While eating at your seat at a Broadway theater used to be universally forbidden , theaters are increasingly allowing patrons to take their drinks , candy and even crunchy munchies to their seats during a show .", "This let-them-eat-snacks philosophy has been embraced at the Helen Hayes , Hilton , New Amsterdam , Eugene O'Neill and Walter Kerr Theaters , as well as at all nine houses owned by the Nederlander Organization -LRB- the Brooks Atkinson , Gershwin , Lunt-Fontanne , Marquis , Minskoff , Nederlander , Neil Simon , Palace and Richard Rodgers -RRB- .", "`` It 's a reflection of changing audience habits , `` said Jim Boese , the organization 's vice president .", "`` As the audience for Broadway expands , there are changing audience needs .", "This is part of a broader attempt to enhance the audience experience . ``", "It also helps the bottom line for theater owners , who profit from sales at the concession stands .", "And when people can eat at their seats , they tend to buy more .", "To make purchases easier , machines to process credit-card sales -- provided by Visa as part of its sponsorship of Broadway -- were recently installed at bars in the New Amsterdam and Nederlander Theaters .", "There are also plans to create cafe areas with tables and chairs and higher-end products , like high-quality chocolates , in some theaters .", "One has just been constructed at the Minskoff .", "Rosa Hires , the general manager of concessions for the Hilton Theater , owned by Live Nation , said most audience members seemed delighted by the new rules .", "`` If anything , people want more food , '' she said .", "`` They 're asking for wraps and salads `` to be available at the concession stands , she added .", "`` Recently we 've had people asking for hot dogs . 
``", "Concession sales there have more than doubled since the Hilton began allowing products other than bottled water to the theater 's seats about three years ago , Ms. Hires said .", "The other theaters mentioned above have seen significant increases as well .", "Some of them have taken steps to minimize distractions and guard against spills .", "The New Amsterdam , home to `` Mary Poppins , '' serves all drinks -- even Champagne -- in spillproof keepsake plastic cups that muffle the sound of clinking ice , Some Nederlander theaters have begun using similar items , though at these you have to pay for the privilege of taking beverages back to your seat .", "Wine at the Richard Rogers , where `` Tarzan '' is playing , costs $ 7 if you drink it in an open cup in the lobby , but is $ 12 for a spillproof commemorative cup that you are allowed to take back to your seat .", "All the theater owners whose houses serve food said they were investigating packaging that would reduce wrapper noise .", "There is no hiding the smell and sound of freshly popped popcorn at the the Neil Simon or the Hilton however .", "In West End of London , where eating and drinking in the theater has long been allowed , quieter foods , including ice cream , are on the menu .", "Baz Bamigboye , an arts writer for The Daily Mail , said in an e-mail message that in his experience London theatergoers `` try and be considerate , '' while New York audience members often take a contrasting stance .", "`` I think the view is , ' If we 're paying $ 100 or so for a ticket , we can do what we like , ' `` he said of Broadway 's audiences .", "At a recent performance of the child-friendly musical `` Hairspray , '' most of the 50 people interviewed said they were unaware of the rule change .", "They thought bringing food to a seat had always been allowed .", "Theatergoers at `` Grey Gardens , '' which attracts a metropolitan crowd , were more familiar with past rules and did n't like the present ones .", "Only 4 out of 50 audience members surveyed said they thought allowing food inside the theater was a good idea .", "`` I came to hear Christine Ebersole , not a Skittles wrapper , '' Jacquelyn Zimbowne of Newark said after the show .", "Gerald Schoenfeld , the chairman of the Shubert Organization , agreed with Ms. Zimbowne .", "`` It annoys many patrons , '' he said .", "`` It causes a refuse problem .", "It damages our carpets .", "It can be disconcerting to the performers . ``", "No midshow snacking is allowed at any of the Shuberts ' 17 Broadway theaters .", "They restrict drinks and snacks to the lobby area .", "And that 's the way it should be , Ms. LuPone said .", "`` Broadway is about a theatrical experience , '' she said .", "`` It 's not about pulling out Marie Callender 's chicken pot pie and a Sterno .", "Would you go to church and pull out a ham sandwich .", "I do n't think so .", "Then why would you do it at the theater .", "`` ."], "summary": ["Many Broadway theaters have changed rules to allow audience members to bring food to their seats as attempt to cater to changing audience desires and also bring in more profit for theater .", "Actors and many seasoned theatergoers object .", "Chart of snacks with ratings on package noise , chewing noise and odds of dropping on floor .", "Drawings ."], "publication": "nyt50", "label": [5, 32, 20, 8, 10, 35], "tag": ["Movies", "Arts", "Theater"]} -{"id": "1816485", "text": ["`` Mothers of America , let your kids go to the movies ! 
'' Always good advice , but the exhortation has dated a bit since 1960 , when Frank O'Hara made it the first line of his poem `` Ave Maria . ''", "`` Going to the movies '' has a quaint ring in the age of the plasma-screen home entertainment system , the iPod and video-on-demand .", "The movies are more than willing to come to us , which has inspired some sages , in and outside the film industry , to prophesy the obsolescence , or at least the increasing marginality , of paper tickets , bags of popcorn and big dark rooms lighted by a projector beam : the cultural ritual known dispassionately in the business as `` theatrical distribution . ''", "According to this vision , children are leading the slow exodus from the theaters .", "From an essay in the current issue of The New Yorker , for example , one learns that , when it comes to visual entertainment , kids these days are `` platform agnostic , '' perfectly happy to consume moving pictures wherever they pop up -- in the living room , on the laptop , in the car , on the cellphone -- without assigning priority among the various forms .", "David Denby , the author of the article and one of The New Yorker 's film critics , is an unapologetic adherent to the old-time religion , as am I , and his survey of the current technological landscape is colored by nostalgia for the old downtown movie palaces and the studio system that fed them .", "Of course , as Mr. Denby acknowledges , children have hardly disappeared from the movie audience .", "On the contrary , adolescents and their younger siblings are the most sought-after segments of the demographically segmented universe of potential viewers .", "The movies that make the most money , and therefore those on which the most production and advertising money is spent , are the ones that simultaneously reach down into the primary grades and up into the ranks of young adults .", "And there is special commercial potency in those movies that parents will be eager to see -- sometimes more than once -- with their children .", "Usually animated , always featuring big stars and wholesome lessons , these films are the ones pointedly aimed at the whole family : cute animals -LRB- and product tie-in toys -RRB- for the little ones , semi-naughty humor and exciting action for their older brothers and sisters , enough in the way of topicality or sophistication to keep mom and dad from losing their minds .", "In many ways the ascension of these movies is an encouraging development .", "Because entertainment aimed at children occupies a bigger share of the marketplace , the level of quality tends to be higher than it was , say , back in the heyday of Walt Disney live-action comedies .", "And the phenomenon of family viewing -- the mothers and fathers of America taking their children to the movies -- has become a central cultural activity consistent with the highly participatory style of parenthood currently in vogue .", "I would not wish it otherwise , but I also worry that the dominance of the family film has had a limiting , constraining effect on the imaginations of children .", "The point of Mr. 
O'Hara 's poem is that the movies represent a zone of mystery and cultural initiation : `` it 's true that fresh air is good for the body , `` he writes , '' but what about the soul / that grows in darkness , embossed by silvery images `` .", "Never mind that he also reminds his mothers that their offspring `` may even be grateful to you for their first sexual experience , which only cost you a quarter and did n't upset the peaceful home . ``", "How are they going to grow , if the images they see are carefully vetted for safety and appropriateness by the film industry .", "In other words : Parents of America , take your children to the movies you want to see ! Within reason , naturally .", "I cringe at the sight of strollers at `` Apocalypto '' or `` Saw III . ''", "But I also cringe at the timidity and cautiousness -- the hypersensitivity -- that confines family viewing to movies with a plush toy or fast food advertising tie-in .", "At their best , movies not only offer glimpses of fantastic imaginary worlds , but also inklings of what is , for children , the most intriguing and enigmatic world of all : the world of adulthood .", "For the last six months or so , in the guise of a civilian moviegoer , I have been conducting -LRB- with the sometimes unwitting assistance of my wife -RRB- a cautious , intermittent experiment .", "Ignoring the advice of the Motion Picture Association of America and the studio marketing departments about what my children , who are 10 and 7 , should see , I have taken them to revival houses and museums as well as to multiplexes .", "To musicals and subtitled films as well as to risqu\u00e9 action blockbusters and not-too-explicit love stories .", "This experiment has proceeded along two tracks , with two distinct but complementary intentions .", "I want them to learn to appreciate the varieties of this incomparably rich art form , which means learning to endure and even enjoy being occasionally bored , confused or scared .", "I also hope they will develop a taste for the act of moviegoing .", "The recent releases they have seen include `` Casino Royale , '' `` The Illusionist '' and `` The Pursuit of Happyness , '' none of which bored them or troubled their sleep and all of which seemed to me to be much better suited for their age cohort than mine .", "`` The Illusionist , '' in particular , with its star-crossed romance , its theatrical effects and its carefully turned plot twists , struck me as the kind of thing that would delight a bookish , intellectually curious fourth or fifth grader , and I was startled to see it in a theater full of adults .", "But I was gratified to hear my own children , on the subway ride home , puzzling out the intricacies of the plot and arguing about its ambiguities , just as the grownups did .", "Our house is full of DVDs , many of them acknowledged classics , reissued and remastered for rediscovery .", "And we have watched Charlie Chaplin and John Wayne , `` Casablanca '' and `` Frankenstein . 
''", "But we also comb through the weekend movie listings in search of those dwindling numbers of screens that will show us old movies the old-fashioned way .", "On Saturdays we frequently find ourselves in sparsely peopled rooms staring at ancient shadows .", "Some adjustment of expectations is required : in the old movies people frequently talk faster and move more slowly .", "They burst into song without self-consciousness .", "There are fewer cuts , longer scenes and occasional visual anomalies .", "`` Why is he purple .", "`` my daughter asked in the middle of '' West Side Story , `` noticing the effects of an aging Technicolor print on Tony 's face .", "In `` The Man Who Shot Liberty Valance '' the tint would periodically switch from sepia to silver and back again .", "My son , noting each shift , wanted to know why it was happening : a question about aesthetics that I could only answer with a whispered lecture about chemistry .", "Most of the old movies he had seen were delivered by means of new technology .", "This one was old in the physical as well as the cultural sense .", "What he made of it I do n't know .", "-LRB- He was amused that Lee Marvin , as the titular villain , calls Jimmy Stewart 's character `` dude . '' -RRB-", "But he watched with an unusual intentness , the same quality of attention he brought to `` Monty Python and the Holy Grail , '' `` Oliver '' and `` Samurai Rebellion , '' some of the other stops on our haphazard tour of movie history .", "I 'm convinced that these films ' beguiling strangeness was magnified by the experience of seeing them away from home and its distractions , with the whir of the projector faintly audible in the background and motes of dust suspended in the path from projector to screen .", "Moviegoing , though unlikely to disappear , will probably never again be the universal rite it once was .", "This is not a catastrophe , just a change of habit .", "Going to the movies may survive as an acquired taste , and also , therefore , as an activity through which taste is acquired .", "FILM ."], "summary": ["A O Scott encourages bringing children to films other than family friendly movies approved by Motion Picture Association .", "Asserts that allowing children to view adult films , within reason , aids in cultivation of their imagination and helps them to become life-long moviegoers .", "Describes experience of taking his children to film The Illusionist and screenings of West Side Story and The Man Who Shot Liberty Valance .", "Photos ."], "publication": "nyt50", "label": [40, 18, 39], "tag": ["Movies", "Arts"]} -{"id": "1816495", "text": ["A survey by researchers at Villanova University has found that 85 percent of Roman Catholic dioceses that responded had discovered embezzlement of church money in the last five years , with 11 percent reporting that more than $ 500,000 had been stolen .", "The Catholic Church has some of the most rigorous financial guidelines of any denomination , specialists in church ethics said , but the survey found that the guidelines were often ignored in parishes .", "And when no one is looking , the cash that goes into the collection plate does not always get deposited into the church 's bank account .", "`` As a faith-based organization , we place a lot of trust in our folks , '' said Chuck Zech , a co-author of the study and director of the Center for the Study of Church Management at Villanova .", "`` We think if you work for a church -- you 're a volunteer or a priest -- the last thing on your mind is to do 
something dishonest , `` Mr. Zech said .", "`` But people are people , and there 's a lot of temptation there , and with the cash-based aspect of how churches operate , it 's pretty easy . ``", "Specialists in church ethics said they believed this was the first study to assess the extent of embezzlement in a denomination .", "Officials at the United States Conference of Catholic Bishops said they had seen the study , which was released just before Christmas and was first reported in the National Catholic Reporter , and were considering ways that parishes could tighten their financial controls .", "`` The Villanova study does not come as a surprise , '' said Bishop Dennis M . Schnurr , treasurer of the bishops ' conference .", "`` This is something that the bishops in this country have been looking at for some time .", "They are aware of a need to look for mechanisms that can assist parishes in accountability and transparency . ``", "Mr. Zech and his co-author , Robert West , a professor of accounting at Villanova , did not set out to look for embezzlement .", "They were conducting a study of internal financial controls in Catholic dioceses and sent a battery of questions to chief financial officers in the nation 's 174 Catholic dioceses .", "78 responded .", "Mr. Zech said he was surprised that so many dioceses had detected embezzlement .", "In 93 percent of those cases , police reports were filed .", "He said the survey did not ask who stole the church money .", "But it did ask who detected the theft , and found that it was most often the parish priest , followed by the bookkeeper , an internal auditor or the parish finance council .", "In October alone , three large cases of embezzlement surfaced , including one in Delray Beach , Fla . , where two priests spent $ 8.6 million on trips to Las Vegas , dental work , property taxes and other expenses over four decades .", "In the survey , 29 percent of the dioceses reported thefts of less than $ 50,000 .", "Most denominations have had cases of embezzlement , sometimes by top officials .", "In June , the Presbyterian Church U.S.A. fired its second-ranking financial officer , Judy Golliher , after she admitted stealing money that church officials put at more than $ 132,000 .", "Many nonprofit organizations that accept cash donations experience theft , and churches are particularly vulnerable , said John C . Knapp , director of the Southern Institute for Business and Professional Ethics , at Georgia State University in Atlanta .", "`` Churches have a tendency to be in denial about the potential for this conduct in their midst , '' Mr. Knapp said .", "`` When ethics seminars or ethics codes are proposed in churches , they are often met with resistance from people who say , ' Why in the world would we need this .", "After all , this is the church . '", "Whereas in business , people readily recognize that this sort of thing can happen . 
``", "The Salvation Army is widely considered exemplary among nonprofits in handling cash collections .", "The red buckets in which bell ringers collect donations are covered and locked , and all buckets must be returned to a central location , where at least two people count the number and type of bills , coins and checks , said Major George Hood , the charity 's national spokesman .", "The money must be deposited in the bank within 24 hours , and different people reconcile the initial tallies with bank records , Major Hood said .", "In the Catholic Church , parishes and high schools handle many cash transactions , making them vulnerable to theft , the Villanova report notes .", "Canon law requires each parish to have a finance council to provide oversight .", "But Bishop Schnurr , who heads the diocese in Duluth , Minn . , said there were no standards for how finance council members were chosen or whether they should have any expertise in accounting or finance .", "Only 3 percent of the dioceses said they annually conducted an internal audit of their parishes , and 21 percent said they seldom or never audited parishes , the survey found .", "This lack of scrutiny is at the core of the problem , said Francis J . Butler , president of Foundations and Donors Interested in Catholic Activities , a nonprofit organization independent of the church .", "`` You 're taking a lot of risk , `` Mr. Butler said , '' and these days the church can not afford to take these kinds of risks . ``", "Bishop Schnurr said the study 's findings on lack of parish oversight contradicted his experience .", "But both he and Kenneth W . Korotky , chief financial officer for the bishops ' conference , said a committee could soon consider writing guidelines for the composition of parish finance councils and how often dioceses should audit parishes .", "But they cautioned that the bishops ' conference could not make guidelines mandatory , because each bishop was in charge of administering his own diocese .", "Jack B . Siegel , a tax lawyer and expert on nonprofit management who has commented on church fraud on his blog , charitygovernance.com, said he kept a tally of church frauds and was surprised by how many occurred at Catholic churches .", "`` I got interested because I thought , wait , I 've heard a lot about pedophilia , why are n't I hearing about these financial problems , `` Mr. Siegel said .", "He said he was impressed with the guidelines that the bishops ' conference and other Catholic organizations have offered .", "But he said , `` How those standards and guidelines get put into practice is what really matters . 
'' ."], "summary": ["Survey by researchers at Villanova University finds 85 percent of Roman Catholic dioceses that responded discovered embezzlement of church money in last five years , with 11 percent reporting more than $ 500,000 had been stolen .", "Specialists in church ethics claim Catholic Church has some of most rigorous financial guidelines of any denomination .", "But survey finds guidelines were often ignored in parishes .", "Officials at United States Conference of Catholic Bishops say they are considering ways that parishes could tighten their financial controls .", "Survey did not ask who stole money ."], "publication": "nyt50", "label": [0, 1, 7, 16], "tag": ["U.S."]} -{"id": "1816506", "text": ["Book publishers braced themselves for a financial blow this week after a bankruptcy filing by Advanced Marketing Services , a book distributor .", "The company filed for bankruptcy protection last Friday , reporting more than $ 200 million in debt to dozens of publishing companies .", "Its creditors included publishers large and small , among them Random House , which is owed $ 43.3 million , and Good Books of Intercourse , Pa . , which specializes in books about the Amish and the Mennonites and is owed nearly $ 1 million .", "A publishing executive said that while authors and readers were unlikely to be affected by the bankruptcy filing , many publishers might not recover much of what they were owed .", "`` This is a huge disruption in this business , '' said the executive , who declined to be further identified because he was not authorized to speak for his company .", "`` The publishers are going to end up taking a big loss . ''", "According to the bankruptcy filing , Advanced Marketing Services has been looking for additional financing or a buyer for the last 18 months , enlisting the investment bank Jeffries & Company in the search .", "Advanced Marketing Services also owns Publishers Group West , a distributor for a consortium of publishers including Grove / Atlantic , which may be hit hardest by the bankruptcy filing .", "`` It 's a mess , `` said Morgan Entrekin , the publisher of Grove / Atlantic , adding that he had been meeting with lawyers for days and was scrambling , trying to figure out how to deal with the situation .", "Advanced Marketing Services was granted $ 75 million of debtor-in-possession financing by creditors so it could continue to function for the next several months .", "Simon & Schuster immediately suspended shipments to Advanced Marketing Services , a Simon & Schuster spokesman , Adam Roth-berg , said .", "Publishers would not give details on their business relationships with Advanced Marketing Services , but the company accounts for as much as 10 percent of some publishers ' sales .", "`` We 're exploring ways to keep working with them , `` a spokeswoman for HarperCollins , Erin Crum , said .", "A spokesman for Random House declined to comment .", "The bankruptcy filing came over the New Year 's weekend , catching many publishing executives by surprise .", "The book industry is largely shut down the week between Christmas and New Year 's Day , and some executives were still trickling back to work late this week .", "Advanced Marketing Services , founded in 1982 , has headquarters in San Diego .", "It acts as a middleman between publishers and booksellers , obtaining books directly from the publishers and distributing them to retailers like Sam 's Club , Costco and BJ 's Wholesale Club .", "The company operates three distribution centers , in 
Indianapolis .", "Hanover , Md .", "And Woodland , Calif .", "Advanced Marketing Services ' financial difficulties were widely known in the industry , after an accounting scandal in 2003 resulted in the ouster of several senior managers .", "A lawyer for the company did not return calls seeking comment .", "The distributor has near-exclusive access to the discount retailers known as price clubs , including Costco and Sam 's Club .", "The price clubs can produce lucrative returns for the right books , like cookbooks and mass-market reading , and can allow publishers to reach people who do not shop at bookstores .", "Books are a relatively small part of the price clubs ' total business , but they have one attribute that retailers love : the suggested retail price is printed directly on the book jacket .", "Because the books are then discounted by at least 20 or 30 percent , this promotes the feeling among buyers that the stores are offering them a bargain ."], "summary": ["Bankruptcy filing by book distributor Advanced Marketing Services deals financial blow to book industry .", "Advanced reports more than $ 200 million in debt to dozens of publishers , both large and small , such as Random House and Good Books .", "Filing means that publishers might not recover much of what is owed .", "Photo ."], "publication": "nyt50", "label": [2, 1, 0, 3], "tag": ["Business", "Books"]} -{"id": "1816507", "text": ["One of the biggest proposed law firm mergers of the last year has been called off , the victim of too many insurmountable issues .", "The two firms , Dewey Ballantine and Orrick , Herrington & Sutcliffe , announced the news yesterday .", "The two had been in talks since last fall to join their specialties in mergers and acquisitions and debt financing .", "The new firm would have been one of the nation 's largest legal advisers , with 1,500 lawyers based in New York and San Francisco .", "Unlike most merger talks , which are conducted in secrecy , the firms announced in October that their top committees had given preliminary approval to the proposal .", "At every step in the process after that , the firms expressed confidence that the deal would go through .", "The two had already decided upon the name Dewey Orrick for the combined firm .", "Dewey 's chairman , Morton A . Pierce , and Orrick 's chairman , Ralph H . Baxter Jr . , would have served as co-chairmen , with Mr. Baxter taking on the additional title of presiding partner .", "Partners were to have voted on the deal last month .", "But as several obstacles proved intractable , those elections were postponed to this month .", "`` While both firms tried their best to work through these challenges , we were unable to bring the merger to completion , '' the two firms said in a statement .", "`` No one issue led us to this point . ''", "Among the most visible distress signals was the departure of 10 partners from Dewey since merger talks began .", "Most ominous were the exits of three lawyers from Dewey 's mergers and acquisitions team , including Michael J . Aiello and Jack S . Bodner , who had been considered prot\u00e9g\u00e9s of Mr. Pierce .", "People familiar with the negotiations said the talks had continued as recently as last weekend .", "The chairmen of both firms said in interviews that the decision to part ways was mutual .", "`` We reached a point where we decided it was in the best interest of both organizations to go our separate ways , '' Mr. 
Pierce said .", "Both chairmen said that they would look for future growth opportunities , but that neither was looking for another merger partner right now .", "`` When you look at how law firms grow , while mergers have great advantages , it 's the statistically least likely way to do it , `` Mr. Baxter said .", "Unlike mergers in other sectors , law firm consolidation involves the joining of companies whose main assets are their people .", "Orrick is acquainted with such situations , having undertaken merger talks with several firms in recent years , including Coudert Brothers in 2005 and Donovan , Leisure , Newton & Irvine in the late 1990s .", "None of those talks came to fruition , though Orrick subsequently hired lawyers from those firms .", "The dilution of brand names , unclear lines of authority and unfunded pensions are among some of the issues that may have played a role in the talks ' demise , said Peter Zeughauser , a law firm consultant who did not work on the deal .", "Unfunded pensions have long been a concern in the merger of law firms because partners inevitably bristle at taking a pay cut to help new colleagues ' retirement plans .", "Several consultants and lawyers at other firms previously questioned whether Dewey 's unfunded pension system would pose problems for the deal .", "`` It 's not at all uncommon for an unfunded retirement plan to be a deal breaker , `` Mr. Zeughauser said .", "A person familiar with the negotiations also pointed to questions over management authority .", "Though the deal had been labeled a merger of equals , it was clear that Orrick would retain control over crucial matters , the person said .", "Both Dewey and Orrick have long histories .", "Dewey is a storied New York adviser whose name comes from Gov . Thomas E . 
Dewey .", "Orrick is one of San Francisco 's oldest law firms , founded in 1863 .", "Both have prominent corporate deal-making and litigation practices .", "Dewey , a 550-lawyer firm , is known as a specialist in mergers and acquisitions , having advised on major deals like the hospital operator HCA 's $ 33 billion leveraged buyout .", "And Orrick , with more than 950 lawyers , is considered a leader in debt financing and capital market transactions and restructurings .", "Last year proved profitable for the two firms , with both claiming profits of about $ 1.4 million per equity partner .", "Dewey earned $ 408 million in gross revenue and Orrick pulled in $ 666 million for the 2006 fiscal year , both showing improvement over 2005 ."], "summary": ["Law firms Dewey Ballantine and Orrick , Herrington & Sutcliffe end talks for proposed merger .", "New firm would have been one of country 's largest legal advisors , with 1,500 lawyers .", "Firms say they could not agree on variety of issues and were unable to agree on terms of merger ."], "publication": "nyt50", "label": [3, 1, 8], "tag": ["Business"]} -{"id": "1816510", "text": ["For deal makers , does it get any better than this .", "A heady cocktail of rapidly growing emerging markets , floods of private equity and hedge fund money , buoyant chief executive confidence and a hungry debt market that seems willing to snap up nearly everything that banks can dish out has created an environment so fertile that mergers in 2006 surpassed all records .", "Globally , there were $ 3.79 trillion worth of deals last year , up 38 percent from 2005 , according to data from Thomson Financial .", "Deals were even up 11 percent from the heights of 2000 , the year of the AOL-Time Warner merger that has come to symbolize the dangers of a mergers and acquisitions bubble .", "Many of the deals of that year left a hangover of bad debt and broken companies .", "Records were also broken last year for hostile deals and European deals .", "Dealogic , a competing statistics firm , says that 2006 deal flow was even higher , at $ 3.98 trillion .", "Gavin MacDonald , head of European mergers and acquisitions at Morgan Stanley , acknowledged that he had `` seen a few cycles '' in more than 20 years as a banker , but never one like this .", "`` We thought in 2000 we might not see its like again , but we 've surpassed even that , `` he said .", "No one can agree on what happens next .", "Steven A . Baronoff , the global head of mergers and acquisitions at Merrill Lynch , said , `` The question a lot of people are asking themselves is , ' Last time it ended relatively badly , so will we see the same kind of problems in the future .", "' `` Many bankers , perhaps not surprisingly , insist the answer is no .", "After all , deals during this boom have been happening throughout many industries , not concentrated in just telecommunications and new media as they were in 2000 .", "They are being financed by fast-growing cash flows and forgiving capital markets , not overpriced stock .", "`` This time around it is a very different environment , '' Mr. Baronoff said .", "`` It 's a much more balanced boom . 
``", "And although deal volume is up , the prices that companies are paying are still well below what they were in 2000 , said David Kirshenbaum , chief operating officer for global mergers and acquisitions at Citigroup .", "`` We 're seeing lower premiums and much higher identifiable synergies , and a positive reaction from the stock market , `` he said .", "Others , however , see a slowdown in deals coming this year .", "Phil Isherwood , an equity strategist with Dresdner Kleinwort , said to `` prepare for a peak in 2007 . ''", "One of the symptoms , he said , is the prevalence of cash deals .", "Through mid-December , some 70 percent of announced mergers were cash instead of stock , he said , compared with a 10-year average of 40 percent .", "`` This use of cash is a symbol and symptom of a bubble , not an argument against it , '' he said .", "Too much money is chasing too few deals , driving up prices , Mr. Isherwood said .", "`` Bigger deals , bigger borrowing and bigger prices equals bigger risk , not bigger fun . ''", "Indeed , the ever-bigger buyouts of 2006 have raised concerns about the amount of debt used to do the deal .", "`` Given current leverage levels and recent developments in the economic / credit cycle , the default of a large private equity backed company or a cluster of smaller private equity backed companies seems inevitable . ''", "the Financial Services Authority , the British markets regulator , said in a November report .", "Private equity buyers accounted for nearly a fifth of all deals in 2006 , according to Thomson .", "The volume of these deals doubled in the United States in 2006 , as they aggressively outbid corporate buyers .", "For example , a group including Blackstone and the Texas Pacific Group bought the medical device company Biomet for $ 10.9 billion last month , snatching it from the jaws of a larger rival , Smith & Nephew of Britain , which had courted the company for months .", "To win the day , they are taking out larger and larger loans .", "Over all , $ 183.3 billion in high-yield debt was issued in 2006 , according to Thomson , up 52 percent from a year before .", "The key to keeping the merger mania going is liquidity -- or the willingness of banks to lend money .", "So far , the door has been wide open for most borrowers , thanks in part to the exponential growth of new products like credit derivatives and credit default swaps , which allow lenders lay off some of the risk to other buyers .", "That door , of course , could slam shut if a deal goes sour , predict bankers and investors .", "But so far , there is no sign the party is ending : Private equity funds raised about $ 400 billion in 2006 , and as of December had about $ 700 billion available to do deals .", "Morgan Stanley predicts that 2007 will bring even more large leveraged buyouts , including some huge European deals in the $ 25 billion to $ 35 billion range .", "If there is a slowdown on the horizon , many bankers predict it will not happen until at least the second half of the year .", "`` You 've seen a lot of activity in the fourth quarter , and that is a good harbinger for the first half of 2007 , `` Mr. Kirshenbaum of Citigroup said .", "There will continue to be a lot of activity in buyouts , emerging markets , and in Japan and the Middle East , he added .", "New market participants are adding to the trend , helping to smooth the way for hostile deal makers .", "`` Hedge funds have become the kingmakers , '' deciding at what price they will sell out , Mr. 
MacDonald of Morgan Stanley said .", "Chief executives can no longer rebuff takeover offers easily , because once one is made , their stock is often snapped up by hedge funds looking for a quick buck .", "This impact has been keenly felt in Europe , where such investors helped Mittal Steel take over a reluctant Arcelor , and may be pushing the London Stock Exchange into the hands of Nasdaq .", "That may not be the case in 2007 , though .", "`` The hedge funds have a lot of money , but it is yet unclear that they are getting great returns out of this , '' Mr. Baronoff of Merrill Lynch said .", "`` If they find other higher return activities , maybe they will focus a little less on activism , '' he said .", "Yet as Michael Tory , head of British investment banking at Lehman Brothers , said : `` As long as the liquidity is there and the sources of liquidity remain healthy , there is no reason this ca n't continue . ``", "STREET SCENE Correction : January 11 , 2007 , Thursday A chart on the Street Scene page of Business Day last Thursday that listed the leading merger and acquisition advisers misstated the value of the deals in 2006 .", "For Goldman , Sachs , the sector leader , it was $ 1.088 trillion , not $ 108.8 trillion .", "For the industry as a whole , the value of the deals was $ 3.805 trillion , not $ 380.5 trillion .", "A corrected chart can be found at nytimes.com/ business ."], "summary": ["Thomson Financial reports that 2006 mergers surpassed all records with $ 3.79 trillion worth of deals , up 38 percent from 2005 .", "Deals were up 11 percent from heights of 2000 mergers and acquisitions bubble that left much bad debt and many broken companies .", "Analysts are uncertain what future holds for fast-growing markets , but insist current boom is more balanced .", "Photo .", "Graphs ."], "publication": "nyt50", "label": [2, 3, 4, 15], "tag": ["Business"]} -{"id": "1816520", "text": ["Democrats realized their political and legislative dream Thursday .", "Now they must face reality .", "As they take control of the House and Senate , members of the new majority must reconcile diverse ideological factions within their ranks and make a fundamental choice .", "They can spend their energy trying to reverse what they see as the flaws of the Bush administration and a dozen years in which conservative philosophy dominated Congress .", "Or they can accept the rightward tilt of that period and grudgingly concede that big tax cuts , deregulation , restrictions on abortion and other Republican-inspired changes are now a permanent part of the legislative framework .", "The competing drives were on display amid the constitutional hoopla Thursday and the emotion surrounding Representative Nancy Pelosi 's election as speaker , a position filled until now by the likes of Sam Rayburn , Joseph Cannon and Nicholas Longworth -- men whose names adorn nearby House office buildings .", "`` We have broken the marble ceiling , '' Mrs. Pelosi said after she was handed the gavel .", "In a meeting with the Congressional Black Caucus earlier in the day , Mrs. 
Pelosi made her own allusions to the competing tugs on Democrats , noting the party was rooted in its traditions but not hostage to the past .", "She promised a new direction `` for all the people , not just the privileged few , '' a reflection of the leadership 's political and policy calculation that Democrats need to champion the average guy .", "`` The agenda we have is about restoring economic security to a very vulnerable middle class , '' said Representative Rahm Emanuel of Illinois , chairman of the Democratic Caucus .", "`` The real activity will be in those areas . ''", "Yet many Democrats contend that President Bush and the Republican-led Congress that was his partner moved the dial too far to the right in many cases .", "And they believe it will be the work of Democrats to make a significant course correction .", "`` I think there are a lot of things the people of America want changed , '' said Senator Patrick J . Leahy , Democrat of Vermont , the new chairman of the Judiciary Committee and a tough critic of some Bush policies .", "Mr. Leahy and others made clear that the new direction had to begin with American policy in Iraq .", "But their domestic legislative agenda suggests that they are picking selected fights rather than going for wholesale change .", "On the economy , they will move swiftly to increase the minimum wage .", "On social policy , they will challenge Mr. Bush by calling for expanded stem cell research .", "They will try to pass legislation increasing college aid for the middle class .", "All of those issues have the twin advantages of broad popular appeal tied to measurable economic impact on individuals .", "But Democrats are in no rush to engage in a fight with Mr. Bush over the ideological centerpiece of his domestic policy , his tax cuts .", "And they have showed no inclination to wade back into the abortion issue , despite its potency among many of their supporters .", "`` We have to keep our eye on the average American family and sort of push aside the interest groups left , right and center , '' said Senator Charles E . Schumer , Democrat of New York .", "`` The world has changed , and it demands new solutions , not the old Democrat and Republican nostrums . ''", "But there is no dispute that Mr. Bush 's legislative and executive record will get a microscopic examination via a renewed emphasis on oversight , a Congressional function Democrats say was all but abandoned in recent years .", "And the results of those inquiries could determine what policies Democrats try to unravel if they uncover a strong case against them .", "`` The Bush administration has passed an entire architecture of laws that are going to be reviewed , '' said Representative Dennis J . 
Kucinich of Ohio , one of the most liberal members of the House .", "Republicans are waiting to see what develops , uncertain if Democrats sincerely want to join hands and produce some consensus on public policy .", "Or , as one senior Republican asked , will Democrats hostile to the Bush administration be more like the scorpion in the fable with the frog , unable to resist the urge to sting even if they hurt themselves .", "Democrats acknowledge that with their minuscule majority in the Senate and one in the House that is not much larger , they lack the political muscle to go too far in reversing Bush policy even if that was their chief goal .", "And they already have their hands full with delivering on their own ambitious legislative agenda , following through on their pledges of bipartisanship and ethics overhaul and avoiding anything that costs the party its chance at the White House in 2008 .", "Leading Democrats say their best direction is forward , concentrating on establishing a new party legacy rather than obsessing with the perceived failings of Republican rule .", "The test for the party 's newly empowered leadership and the Congressional membership will be whether they can stick to that path .", "THE 110TH CONGRESS : NEWS ANALYSIS ."], "summary": ["News analysis : Democrats taking control of Congress must reconcile diverse ideological factions and make fundamental choice .", "They can focus new power on undoing Republican policies , or concede that GOP changes are permanent part of legislative framework , and move to set new policies .", "Leading Democrats say best direction is forward , concentrating on establishing new party legacy ."], "publication": "nyt50", "label": [31, 2, 4], "tag": ["U.S."]} -{"id": "1816523", "text": ["Prime Minister Ehud Olmert met President Hosni Mubarak of Egypt on Thursday evening in an effort to give momentum to the Israeli-Palestinian peace negotiations .", "But the meeting was overshadowed by an Israeli raid in the West Bank in which four Palestinians were killed and 20 wounded .", "Mr. Mubarak was clearly embarrassed by the timing of the raid , hours before the meeting of the two leaders in an Egyptian Red Sea resort town , Sharm el Sheik .", "He called it a hindrance to peace efforts and told Mr. Olmert that Egypt `` rejects and is indignant at the military operation . ''", "`` Israel 's security can not be achieved through military force but by serious endeavors toward peace , `` Mr. Mubarak added at his news conference with Mr. Olmert .", "Mr. Olmert said that he was sorry that innocent Palestinians were hurt , but that Israel would defend itself and was acting to arrest `` terrorists who had killed Israelis . ''", "He gave no explanation for the timing of the daylight raid , a vain attempt to arrest a wanted militant , which used unusual force in normally quiet Ramallah .", "He said Israeli troops returned fire , but did not initiate it .", "`` Things developed in a way that could not have been predicted in advance , '' he said .", "`` If innocent people were hurt , this was not our intention . ''", "The Palestinian Authority president , Mahmoud Abbas , with whom Mr. Olmert met last week , condemned the raid , saying in a statement that it `` proved that the Israeli calls for peace and security are fake . ''", "Palestinians have been calling for an extension of a Gaza truce with Israel to the West Bank .", "`` The continued aggression will only lead to the destruction of all efforts aimed at realizing peace , '' Mr. Abbas said .", "Mr. 
Olmert and Mr. Mubarak said Egypt was continuing its efforts to secure the release of an Israeli corporal captured in June by militants , offering in exchange several hundred Palestinian prisoners held by Israel .", "There had been speculation that the two men might be able to announce more concrete progress on the matter , which is blocking more substantive discussions with Mr. Abbas .", "Egyptian officials said that they were discussing another Olmert-Abbas meeting with Egyptian and Jordanian leaders present .", "Secretary of State Condoleezza Rice is to visit the region this month .", "It was an unusual and emotional day for Mr. Olmert , during which he commemorated two legendary Israeli political figures , both of whom he succeeded in their jobs .", "Earlier , in Jerusalem , he spoke at the state funeral for the city 's fabled former mayor , Teddy Kollek , who died Tuesday at 95 and was buried in the area of the Mount Herzl cemetery reserved for Israel 's leaders .", "Mr. Olmert defeated Mr. Kollek to become Jerusalem 's mayor in a bitter political fight in 1993 .", "He said Israel 's first leader , David Ben-Gurion , had declared Jerusalem to be Israel 's capital in 1949 , but `` Teddy Kollek made it so . ''", "Thursday also marked a year since Prime Minister Ariel Sharon suffered a second extensive stroke , which left him in a deep coma from which he is not expected to awaken .", "Mr. Olmert , Mr. Sharon 's deputy , was chosen to lead the Kadima Party in his stead and became prime minister , and he acknowledged Mr. Sharon 's tragedy at his news conference in Egypt .", "In general , Mr. Olmert has been regarded as inferior to Mr. Sharon and Mr. Kollek .", "In the latest opinion poll , conducted by the Dahaf Institute and reported Thursday in the newspaper Yediot Aharanot , nearly 70 percent of Israelis polled said they disapproved of Mr. Olmert 's performance as prime minister .", "Israelis are unhappy with the way he managed the summer 's war against Hezbollah in southern Lebanon and do not much like his personality , contrasting him with the more stolid and experienced Mr. Sharon .", "Still , Mr. Olmert has proved himself a skilled politician , and early on Thursday Parliament quietly passed his $ 68 billion budget , a crucial test of his government 's ability to survive .", "The budget battle was always a great drama under Mr. Sharon , but Mr. Olmert , who has expanded his governing coalition to include parties ranging from Labor to the right-wing Israel Beiteinu Party of Avigdor Lieberman , had little difficulty .", "Mr. Olmert 's office is being rocked , though , by a new corruption scandal .", "He has not been touched by this scandal , though he has been under investigation for smaller cases of improper use of influence .", "But his longtime office director , Shula Zaken , has been put under house arrest and ordered not to contact Mr. Olmert as the police investigate whether she and the director of the Tax Authority , Jackie Matza , tried to help Ms. Zaken 's brother and two other businesspeople through the appointment of cronies to key jobs in return for tax breaks .", "About 30 people are under some form of detention in the spreading scandal .", "The operation in Ramallah by Israeli forces , with armored vehicles , bulldozers and helicopter support , was carried out hours before Mr. Olmert met with Mr. 
Mubarak .", "Palestinians and news reports said the operation was to arrest Rabiah Hamad of Fatah , a senior member of Al Aksa Martyrs Brigades .", "The army would not confirm that Mr. Hamad was the target , but said the man it wanted was armed and was wounded , but escaped .", "The army arrested four other wanted Palestinians , a spokesman said .", "It was a rare incursion into the heart of Ramallah , and it prompted an angry response .", "Youths on rooftops threw stones , metal trash barrels , a refrigerator and blocks of concrete at the Israeli Army vehicles .", "There were mortars fired at the Israeli soldiers , and an exchange of gunfire .", "Palestinian medics said four men -- Yussef Abdel Khader , 23 .", "Khalil al-Badawi , 20 .", "Jamal Jawela , 29 .", "And Ala al-Himran , in his 20s -- died at a Ramallah hospital from gunshot wounds .", "In Gaza there was continuing violence , with three more men killed in fighting between Fatah and Hamas .", "The fighting centered around the Jabaliya refugee camp in northern Gaza after gunmen surrounded the houses of known political or security figures .", "In Beit Lahiya , a senior Fatah commander , Col . Muhammad Gharib , was killed and his wife severely wounded when Hamas militants attacked his home with rifle fire and rocket-propelled grenades .", "Two of his bodyguards were also killed .", "About 25 others were wounded in Jabaliya , and local radio was filled with appeals to the gunmen to restore calm .", "On Wednesday five Palestinians , including a woman who was a passer-by , were killed and 12 wounded in Fatah-Hamas gun battles , which broke a weeklong lull .", "Other militant factions arranged a truce overnight and kidnapped members of Hamas and Fatah were released by the other sides , but the truce broke down on Thursday .", "The Palestinian prime minister , Ismail Haniya , returned to Gaza on Thursday from hajj , his pilgrimage to Mecca , Saudi Arabia .", "His entourage underwent a special inspection in El Arish , Egypt , to ensure that he was not carrying large amounts of cash back to Gaza , which has been deprived of most Western aid since Hamas won parliamentary elections last January .", "-LSB- Early Friday , Mr. Haniya said he and Mr. Abbas had agreed at emergency talks to keep gunmen from their parties off Gaza 's streets . 
-RSB-", "For the fourth day there was no word about a kidnapped Peruvian photographer for Agence France-Presse , Jaime R\u00e1zuri , 50 , who is said by colleagues to be lacking a required heart medication .", "Palestinian security services have warned foreign journalists to keep out of Gaza for now , because of the kidnappings and chaos .", "A leader of the Palestinian People 's Party , Bassam al-Salhi , said the security chaos could be resolved only with political agreement .", "`` The weakness of the Palestinian political system and the situation of the current Hamas-led government have contributed to this state of lawlessness , '' he said ."], "summary": ["Israeli Prime Min Ehud Olmert 's meeting with Pres Hosni Mubarak of Egypt in effort to advance Israeli-Palestinian peace negotiationsis is overshadowed by Israeli raid in West Bank that kills four Palestinians and wounds 20 others .", "Mubarak , clearly embarrassed by timing of raid , tells Olmert that Egypt ` is indignant ' at military operation .", "Olmert voices regret that innocent Palestinians were hurt , but says Israel was acting to arrest ` terrorists who had killed Israelis ' .", "Palestinian Authority Pres Mahmoud Abbas also condemns raid .", "Olmert and Mubarak say Egypt is continuing its efforts to secure release of Israeli corporal captured in June by militants .", "Earlier in day , Olmert speaks at state funeral for Jerusalem 's fabled former mayor Teddy Kollek , who died at age of 95 .", "Photo ."], "publication": "nyt50", "label": [13, 5, 18, 1, 0, 3], "tag": ["World"]} -{"id": "1816545", "text": ["The foot was balanced on a shopping bag after being scooped up off the dirty street by a man in a track suit .", "There was no person to go with the limb .", "Nearby a charred body was still smoldering , smoke coming off the black corpse 45 minutes after the attack .", "For 50 yards , the dead were scattered about , some in pieces , some whole but badly burned .", "This violence on Thursday involved two bombs timed to go off one after another in the formerly upscale neighborhood of Mansour , which continues to be ripped apart by sectarian violence .", "Thirteen people were killed and 22 wounded , just a small fraction of the civilians killed across the country this week .", "The first device went off at 10:15 a.m. 
, probably a roadside bomb set on a timer , officials said .", "The attack was apparently aimed at a gasoline station .", "Cars were lined up around the block waiting for fuel , and dozens of people , grasping large plastic jugs , hoped to buy heating fuel .", "Just moments after the first explosion , a second , larger , car bomb detonated .", "The neighborhood has traditionally been a mixed Sunni and Shiite one .", "Although the Abu Jaffar gas station , where the attack was centered , is in what is considered a Sunni area , the method of the attack -- multiple bombs timed to explode in succession -- is usually thought of as a trademark of Al Qaeda in Mesopotamia , a Sunni insurgent group .", "An hour after the explosion , there was still a strong stench of burning gasoline and fire .", "The road was slick with sludge from the water used to douse the fire .", "Blood pooled in areas .", "Scores of armed men were running about , including members of the Iraqi Army and the police .", "Some of those with machine guns had no uniforms at all .", "Shots rang out , mostly in warning .", "Neighbors gathered outside , oddly calm and seemingly accustomed to such carnage .", "A tanker truck filled with fuel was parked near the station , having escaped the blast .", "Not surprisingly , residents living near the area blamed everyone from the government to the Americans to terrorists for what had happened .", "`` We are just innocent people , '' said Nafia Abdul Jabbar .", "`` The people killed were poor , in need of kerosene that they can not afford to buy on the black market because the price is 10 times more than it is at the station . ''", "Elsewhere , a mortar attack was directed at the Shiite neighborhood of Huriya , wounding three people , officials said .", "Clashes on the outskirts of the Sunni neighborhood of Ghazaliya left two people dead and 25 people wounded , Iraqi officials said .", "A grenade attack in the Amin neighborhood killed five people .", "Across the city on Thursday , officials said , 47 bodies were found mutilated -- 4 of them with their heads cut off .", "An interview with the family of a man recently mutilated and killed , a prominent sheik considered to be the prince of the Tamim tribes , gives a glimpse into the complicated underworld that is , in part , responsible for the trucks full of bodies collected around this city every day .", "The man , Sheik Hamid Mohammed al-Suhail , 75 , was found Wednesday in the Shuala neighborhood of Baghdad , a Shiite redoubt , by members of his tribe , which is mixed Shiite and Sunni , who were searching for him .", "He disappeared last Sunday , and his mutilated body was found wrapped in a blanket , covered in blood .", "The search party recognized his body by the distinctive way the beard was trimmed .", "He had been an outspoken critic of the sectarian fighting and participated in a recent conference in Cairo on national reconciliation .", "The kidnappers , whom his relatives hinted they knew but would describe only as `` militiamen '' for fear of reprisal , initially called his family asking for $ 100,000 , said a nephew , Sheik Ali Sammi al-Suhail .", "The family told the kidnappers they did not have the money , the nephew said .", "`` The body was mutilated in a brutal way , '' he said .", "`` They used a drill on him and perhaps other tools . 
''", "One hand and one leg were almost completely severed .", "The nephew said he had been told by people who said they witnessed the killing that after his uncle was tortured , his body was thrown from a two-story building .", "He survived the fall but was brutalized further before finally being killed .", "Another prominent Iraqi figure , Sheik Akram al-Zubeidi , was killed Thursday in Karbala , a Shiite holy city where there has been little sectarian strife .", "Sheik Zubeidi was assassinated when he was stopped at a fake checkpoint , a local hospital official said .", "Three other people in the car with him were also killed by the gunmen , whose motive was unclear .", "There was continued fallout Thursday from the execution of Saddam Hussein , as Sunnis , from Kashmir to Libya , used his death as a rallying point .", "The Libyan government announced that it would erect a statue of him to stand next to one of Libya 's own national heroes , news agencies reported .", "At least nine people were hurt in the Indian-controlled part of Kashmir when the police fired rubber-coated bullets to break up a large group of people protesting the execution , Reuters reported .", "Two Iraqi officials involved in the investigation of the distribution of a graphic video of the hanging said Thursday that a second guard was being held for questioning .", "Officials announced the arrest of the first guard on Wednesday .", "There is increasing pressure , including from the White House , on the Iraqi government to proceed with caution in carrying out the execution of Mr. Hussein 's two co-defendants , Barzan Ibrahim al-Tikriti , Mr. Hussein 's half brother , and Awad al-Bandar , a former judge .", "Despite the international reaction directed at the government of Prime Minister Nuri Kamal al-Maliki , Mr. Maliki 's popularity among Shiites in southern Iraq seems to have increased .", "In Basra , Iraq 's second largest city , hundreds of demonstrators representing Islamist parties rallied in the streets , praising Mr. Maliki and setting photos of Mr. Hussein on fire .", "THE STRUGGLE FOR IRAQ ."], "summary": ["Two bombs go off in formerly upscale Baghdad neighborhood of Mansour , killing 13 people and wounding 22 others .", "Method of attack -- multiple bombs timed to explode in succession -- is seen as trademark of Al Qaeda in Mesopotamia , Sunni insurgent group .", "Elsewhere , mortar attack is directed at Shiite neighborhood , wounding three people .", "In Baghdad , trucks full of bodies are being collected every day .", "Photo ."], "publication": "nyt50", "label": [11, 23], "tag": ["World", "Washington"]} -{"id": "1816553", "text": ["Responding to the shooting death of an unarmed Queens man by police officers nearly seven weeks ago , New York City 's Police Department has commissioned a six-month independent review of its firearms training , of instances in which officers have fired their guns and of the phenomenon of so-called contagious shooting , the department 's top official said yesterday .", "The study , which police officials said was the first of its kind commissioned by the department , will be done by the RAND Corporation , a private nonprofit organization that has reviewed police practices in other major cities , Police Commissioner Raymond W . Kelly said .", "The announcement came amid continuing bitterness since the death of Sean Bell , who was killed in a hail of 50 police bullets in Jamaica , Queens , hours before his wedding on Nov . 
25 .", "`` Questions have arisen as to the quality and effectiveness of our training '' Mr. Kelly said at a news conference .", "`` We thought it would be appropriate to bring in a recognized world-renowned nongovernment organization to take a look at all of our firearms training . ''", "Mr. Kelly said a team from RAND would assess five aspects of firearm use by police officers .", "They include initial , continuing and tactical firearms training , investigations of police-involved shootings , and situations in which shots fired by one officer spur other officers to shoot , a phenomenon that may have played a role in Mr. Bell 's death .", "The organization will delve into the details of shootings in which the police are involved , examining an officer 's experience , the nature of the threat , the environment in which they fired their guns and other issues .", "Comparisons will be drawn to firearms training in other law enforcement agencies , Mr. Kelly said .", "RAND will not be looking into the death of Mr. Bell , which is under review by a grand jury in Queens , Mr. Kelly said .", "K . Jack Riley , the acting director of the RAND Center on Quality Policing , said the organization had previously evaluated training in the Los Angeles Police Department , investigated racial profiling among Oakland police officers , and examined ways to improve recruitment and retention among the police in New Orleans after Hurricane Katrina .", "`` This is a very proactive step the N.Y.P.D. is taking , '' Mr. Riley said .", "`` I think there is an honest interest in having a dispassionate third party take an objective look at their training , and see if there 's anything they 're not getting right -- or can improve -- with regards to firearms training . ``", "News of the New York study , which will cost about half a million dollars and be paid for by the New York City Police Foundation , a charity that supports the Police Department , drew mixed responses from experts in firearms and police tactics .", "Thomas A . Reppetto , a former president of the Citizens Crime Commission , said that bringing in an outside organization like RAND would likely inspire more public confidence , which has wavered of late , than an in-house Police Department review would .", "But Dean Speir , who writes about firearms and tactics , asked why the Police Department had not undertaken such a review before , citing the death of Amadou Diallo , the unarmed West African immigrant whom officers shot at 41 times .", "Another expert suggested that the money would be better spent on hands -on tactical training .", "`` If you talk to cops , they 'll tell you people on the front line are starved for operational training , `` said Eugene O'Donnell , a professor of police studies at John Jay College of Criminal Justice .", "But Paul J . Browne , a spokesman for the department , said that one purpose of the review was to determine whether the current training needed to be revamped .", "He also noted that Mr. Kelly was not the police commissioner during the Diallo shooting .", "`` What the commissioner wants is an independent assessment , '' Mr. Browne said .", "`` If the training needs to be improved , he wants an independent assessment of what improvements are needed . 
'' ."], "summary": ["New York City Police Dept commissions Rand Corp to do $ 500,000 independent review of its firearms training .", "Will study instances when officers fire their guns , focusing on ` contagious shootings , ' or why one officer 's shot spurs others to shoot .", "Police Comr Raymond W Kelly hopes to improve quality and effectiveness of training .", "Photo ."], "publication": "nyt50", "label": [0, 3], "tag": ["New York and Region"]} -{"id": "1816555", "text": ["For decades , activist shareholders were an entertaining , but largely ignored , Wall Street sideshow .", "Disgruntled investors would attend annual meetings to harangue executives , criticize strategies -- and protest that their complaints were being ignored .", "One agitator appeared in face paint and a red nose after executives called him a clown .", "Today , however , it seems that activists have captured the center ring and are directing the main event .", "On Wednesday , shareholder advocates could claim one of their biggest prizes yet when Home Depot announced the resignation of its chairman and chief executive , Robert L . Nardelli , long a target of shareholder ire for his large compensation and the company 's flagging stock price .", "The main investor who pressed for the overthrow at Home Depot might at first glance seem an unlikely rebel : Ralph V . Whitworth , a lawyer educated at Georgetown and a former campaign worker for President Ronald Reagan who in December announced he had bought about $ 1 billion of the retailer 's stock , or a 1.2 percent stake , through his fund , Relational Investors .", "But the rapid success of Mr. Whitworth 's campaign against the management and strategy of Home Depot demonstrates how thoroughly activists have moved into Wall Street 's inner sanctum .", "Mr. Whitworth has said he still intended to nominate himself and at least one other candidate to Home Depot 's board at its shareholder meeting in the spring .", "`` There 's a lot more respect for investors like me now , `` said Mr. Whitworth , 51 .", "`` I still have to make threats , but now everyone wants to deal with us fast .", "They realize we 've got real power and we 're here to stay .", "`` The shake-up at Home Depot may be just a taste of things to come as shareholders and management at a number of companies , including Brink 's , the Borders Group and Applebee 's International , square off for battle at annual meetings this spring .", "As those fights begin , expect few clown noses .", "`` Activist shareholders have a power and audience beyond what they 've ever enjoyed , `` said Howard Steinberg , a lawyer who advises corporate boards and deal makers .", "`` They 're developing a credible track record , and as a result , more and more managers are forced to engage with them .", "Activists ' time has come . 
``", "Since July , activists have pressed successfully to push out chief executives at Pfizer and Sovereign Bank .", "Institutional investors and mutual funds have set aside hundreds of millions of dollars to invest in underperforming companies with the intention of demanding new board seats or alternative strategies .", "Much of that newfound influence is owed to recent legal changes and heightened attention to issues like executive compensation .", "But it also draws on the fact that many activists have now amassed the wealth , knowledge and networks critical for success .", "`` For years these guys were seen as politically motivated oddballs or annoying attention seekers , '' said Nell Minow , editor of the Corporate Library , a research group that rates corporate governance practices .", "`` Now some of those same people control hundreds of millions of dollars and have been around longer than many C.E.O. ' s '' Before the 1980s , much shareholder activism was directed by ideological agitators , including unions , religious organizations and populist figures like Ralph Nader , who bought shares in companies as platforms for urging social , environmental or political changes .", "The United Shareholders Associations , which Mr. Whitworth helped found in 1986 with the corporate raider Boone Pickens , was one of the first major efforts to organize shareholder activists around profit-minded goals .", "Every spring the group would select about 50 companies and begin asking embarrassing questions at shareholder meetings .", "In 1993 , the last year of the group 's operation , 25 of 43 of the companies , including I.B.M , reached agreements with Mr. Whitworth .", "`` The head of General Mills once called me a socialist , '' Mr. Whitworth said in an interview .", "`` I told him I was the ultimate capitalist .", "Business people would return my calls for the first time , and it was giving me an entree into a world I otherwise could n't access . ``", "Foremost among the goals successfully sought by United Shareholders were regulatory changes that made it possible for dissidents to nominate only one or two directors to a board , rather than an entire slate .", "By the mid-1990s , corporate raiders realized the rule offered a less expensive way to stage a takeover rather than buying a company outright .", "When the California Public Employees ' Retirement System began looking for a fund that would use activism to increase returns in 1996 , Mr. Whitworth and a colleague , David H . Batchelder , founded Relational Investors .", "The fund today oversees about $ 7 billion , all invested in only nine companies that are chosen with an eye toward activist intervention .", "In most cases , Mr. Whitworth forces his way onto a company 's board , either by threatening or staging a proxy fight .", "Investors say the fund has averaged a return of about 25 percent annually over the last nine years .", "Activist shareholders like Relational were further aided by changes in the law .", "In particular , they say that regulations passed in 2004 requiring money managers and mutual funds to disclose how they vote in proxy elections have forced once-passive managers to become activists .", "`` There was a sense before that mutual funds in particular just voted with management , '' said Ann Yerger , executive director of the Council of Institutional Investors .", "`` But post-Enron and Tyco , investors expect money managers to justify their votes now , and to be listening to anyone warning about dangers . 
''", "Moreover , money managers and hedge funds are beginning to advertise their activist intentions , bragging that aggressiveness gives them an edge .", "In November , the noted investor Robert A . Olstein announced formation of an activist fund .", "`` We want to have an edge , '' said Eric R . Heyman , co-manager of the new Olstein Strategic Opportunities Fund .", "`` Our skill is approaching companies and persuading management to adopt certain decisions , sometimes aggressively , and that 's why people invest with us . ``", "Another important shift is a court ruling last year making it easier for shareholders to challenge directors nominated by companies .", "`` Shareholders ' toolboxes are getting more and more robust every year , `` said Ms. Minow of the Corporate Library .", "Moreover , as the number of activist investors grows , a vast ad hoc network has formed that makes it more difficult for companies to win a fight .", "The universe of activist investors has expanded to include hedge funds like Pirate Capital , which is fighting to add its executives to the board of the security company Brink 's , and investors like Carl C . Icahn , who failed to gain board seats after a public spat with Time Warner 's chief executive , Richard D . Parsons .", "When two activist investors , Pershing Square Capital Management and Nelson Peltz , bought large amounts of stock in Wendy 's International in 2005 , many other like-minded buyers jumped in , according to analysts .", "As a result , the board had no choice but to accept activists ' suggestions , said James V . Pickett , Wendy 's chairman .", "Ultimately , three representatives of Mr. Peltz 's fund joined the board .", "`` Most of the issues the activists raised at Wendy 's were things we were already dealing with , `` Mr. Pickett said .", "`` They just increased the intensity .", "But when they own that much of the company , you have to listen to them even if you do n't want to . ``", "For Mr. Whitworth , who has grown so wealthy that he once gave a charity $ 1 million in exchange for PaulMcCartney 's playing at his wife 's 50th birthday party , that kind of access has increased his effectiveness , he says .", "After forcing his way onto the board of Mattel , Mr. Whitworth was given a gold-plated Barbie in appreciation by other directors when he left .", "But such shifts have also increased his ambitions .", "`` I was a firebrand .", "Now I 've mellowed a lot , `` he said .", "`` But I 'm still young .", "I look forward to helping a lot more companies become more efficient , whether they like it or not . 
`` ."], "summary": ["Rapid success of activist shareholder Ralph V Whitworth to get Home Depot 's former chief executive Robert L Nardelli to resign demonstrates how thoroughly activists have moved into Wall Street 's inner sanctum .", "Whitworth has said he still intends to nominate himself and at least one other candidate to Home Depot 's board at its shareholder meeting in spring .", "Shake-up at Home Depot may be just beginning as shareholders and management at many companies battle at annual meeting this spring .", "Photo ."], "publication": "nyt50", "label": [7, 6, 11], "tag": ["Business"]} -{"id": "1816557", "text": ["For a half-century , the blue oval logo atop Ford Motor 's headquarters has served as a kind of beacon for the American automobile industry , a signpost for local motorists on area highways and a guidepost for pilots landing at Detroit 's airport nearby .", "But lately , blue is the only word for the malaise surrounding Ford , which is rapidly seeing its position in the industry and in its hometown eroded by its failure to combat foreign competition .", "This year , Ford expects to permanently lose its grip on second place in the American market to Toyota , which passed it twice in monthly sales in the last six months , including December .", "As many as half its blue - and white-collar employees have decided to take voluntary buyouts from a company where jobs were long seen as lifetime guarantees .", "Ron Cimino , 44 , used to regret not landing a job at Ford , where his father worked for 52 years .", "Turned down in 1984 because he lacked a master 's degree , Mr. Cimino of Dearborn wound up starting his own import-export business .", "`` I 'm glad I did n't end up working at Ford , `` he said .", "`` I never thought I 'd say that . ``", "To be sure , General Motors and Chrysler have their own problems that are weighing on this city .", "Their headquarters are here , too , as are some of their car plants , and thousands of their employees have their own worries as each company struggles financially .", "But Ford 's troubles , arguably , are felt more acutely here .", "After all , G.M. placed a good bulk of its manufacturing 60 miles north , in Flint , Mich . 
, and once had factories from Massachusetts to California as well as the Midwest .", "Chrysler , though still a big local employer , has had German owners for nearly a decade .", "Ford , by contrast , is woven more into the fabric of Detroit , with the Ford name on office buildings , museums , high schools and highways , as well as the football stadium , Ford Field , which opened downtown in 2002 .", "Now , as the annual Detroit auto show prepares to open to the media this weekend , much of this city 's bedrock is unexpectedly at risk .", "In November , Ford disclosed it had pledged nearly all its assets , including the trademark on its 100-year-old logo , as collateral against $ 25 billion in loans needed to fund its restructuring .", "Ford has generated a particular sense of sympathy , bewilderment and fear here that its crosstown rivals have not .", "There is sympathy , analysts say , because more of a human element is involved , namely the Ford family .", "Puzzlement because Ford seemed to be the one Detroit auto company that had an answer for Japanese competition in the 1980s and 1990s with its profitable sport utility vehicles .", "And fear that if a once-powerful company like Ford could falter , no one in Detroit may be safe .", "The sentiment is readily voiced in places like Miller 's Bar , where the red leather barstools and colored Christmas lights evoke the watering hole in `` It 's A Wonderful Life . ``", "It has been a Dearborn gathering spot since 1941 .", "`` Dearborn , '' said the owner , Mark Miller , `` is Ford . ''", "Judy Dolan , a secretary for the Detroit Building Trades Council , said during an interview at her home , `` I feel horrible about the situation they 're in . ``", "She drives only Ford vehicles in support of her hometown company .", "Concern over Ford 's future bubbled up late last month in conversation at the holiday cocktail parties held by the Detroit and foreign car companies and their parts suppliers .", "Given the attention to Ford 's situation , the company 's new chief executive , Alan R . Mulally , who arrived in September from the Boeing Company , is already recognized wherever he goes .", "Mr. Mulally said in an interview that he was recently lost in a local supermarket when another shopper came up to him .", "`` Oh , Mr. Mulally , we 're so glad you 're here , `` he recalled her saying , before she helped him find shampoo .", "His predecessor was William Clay Ford Jr . , who became the face of the company in television ads over the last four years as he struggled to fix Ford 's problems -LRB- he remains chairman -RRB- .", "The family is the reason many company employees past and present refer to the automaker as Ford 's .", "It is not a grammatical error , as visitors here might assume , but a nod to the fact that it is a company built by the original Henry and all the Fords that followed him .", "`` The Fords have a relationship with the American people , '' said James P . Womack , the author and expert in manufacturing efficiency who advised Mr. Mulally at Boeing .", "They include Kevin Boyle 's retired father , also named Kevin , who immigrated to Dearborn from Ireland in the 1950s , drawn by the chance to work for a company founded by the most famous man in the auto business .", "`` People came to Detroit with an intense connection with that name , and then their lives became tied up with that name , '' said Mr. 
Boyle , a professor of history at Ohio State who has written extensively about the automobile industry .", "`` It was a badge of honor to work for Ford . ''", "Numerous families here boast multiple generations of Ford employees , some who worked on the assembly line , others -- fathers and sons , mothers and daughters -- who toiled in company offices .", "`` I started there when I was 19 , and my whole family worked there , '' said Karen Kenniburg , who manages the vehicles driven by Ford executives .", "The Fords , for their part , have given back , not just in jobs but also in charities .", "Guests at a Nissan holiday party last month at the New Detroit Science Center drank wine and munched crab cakes beneath a wall of donors dotted with names of various Fords .", "The same was true for Honda , whose event took place in an atrium outside Orchestra Hall .", "That seeming omnipresence , however , makes the departures from Ford more poignant .", "Last month , more than 30,000 blue-collar workers accepted deals to give up their jobs .", "About half were ready to retire , but the rest were workers who did not have enough seniority to begin drawing pensions .", "They accepted payments of up to $ 140,000 apiece to leave .", "Ford 's white-collar employees are not exempt .", "The company has already cut 4,000 managers , and 10,000 more are set to go by spring .", "Last month , one of every three salaried employees was called in by their boss to be told Ford was giving them the opportunity for a buyout .", "If they did not accept , their jobs might not be safe , some were told .", "Altogether , more than 45,000 people , or half the number employed by the company at the beginning of the year , will not be working at Ford in 2007 .", "That is likely to hurt business at Miller 's Bar , where 60 percent of the patrons work for Ford , and 18 of 25 cars in the parking lot on a recent afternoon bore Ford logos .", "Others will be affected , as well .", "At a Starbucks on Michigan Avenue , a mile away from Ford 's headquarters , and once a hangout for Mr. Ford , one customer , Shelley Boda , 32 , said she was unable to land a job at the company , where her father , grandfather and numerous relatives worked .", "Instead , she chose the health care field , now less of a safe haven than she had hoped .", "With Ford cutting so many jobs , and the rich benefits that went with them , `` there wo n't be anyone here who can afford health care , `` she said .", "Professor Boyle said Ford employees and the community were now realizing that what they thought was a `` safe bet '' was not .", "`` The funny thing is , people did n't see it as a bet , `` he said .", "`` You would have parents who worked the line and have enough to send kids to college .", "They would go off and get engineering degrees , marketing degrees and work for the companies .", "`` That has crumbled .", "There are few things more terrifying than that . ``", "And yet , many still believe in the power of the Fords themselves to save their company .", "Ms. Kenniburg said they gave Ford an advantage over G.M. and Chrysler as they devise a comeback plan .", "`` The Ford family , '' she said , `` they 're going to do everything in their power to keep this company going . 
`` ."], "summary": ["Much of bedrock of Detroit , Mich , is unexpectedly at risk as annual Detroit auto show prepares to open to media .", "Ford in November disclosed it had pledged nearly all its assets , including trademark logo , as collateral against $ 25 billion in loans needed to fund its restructuring .", "Ford has generated particular sense of sympathy , bewilderment and fear in Detroit that its crosstown rivals have not .", "Analysts say there is sympathy because of human element is involved , namely Ford family .", "Puzzlement because Ford seemed to be one Detroit auto company that had answer to Japanese competition in 1980s and 1990s with its profitable sport utility .", "Fear that if once-powerful company like Ford could falter , no on in Detroit may be safe .", "Some residents comment .", "Photos ."], "publication": "nyt50", "label": [15, 18, 16, 19, 17, 14], "tag": ["Business"]} -{"id": "1816558", "text": ["Everyone who knows him says it : In a tough fight , Kenneth G . Langone is a guy you want in your corner .", "But when Robert L . Nardelli , the former chief executive of Home Depot , refused to bend to the will of an exasperated board and accept a pay cut , he lost the crucial support of a friend who as lead director not only recruited him but presided over his compensation contract that would later draw so much fire .", "For Mr. Langone , a voluble man whose passions include Italian meatballs , golf and the Roman Catholic Church , it was a surprising reversal .", "Mr. Langone prides himself on his loyalty to his friends and , as a director who has served on the compensation committees of General Electric , Yum Brands and the New York Stock Exchange , he has never been shy about paying his chief executives well .", "Now , people who have spoken with him in recent days say , he is angry with Mr. Nardelli over his unwillingness to have his pay reduced .", "In an era of escalating pay , with chief executives on Wall Street taking in $ 50 million a year and former General Electric executives like David Calhoun getting $ 100 million compensation deals from private equity investors , being paid well was a way for Mr. Nardelli , a scrappy former football player , to keep score .", "In light of Mr. Langone 's vocal and unyielding support for the $ 190 million pay package that was awarded to Richard A . Grasso , the former chief executive of the New York Stock Exchange , he might have been expected to fight to the bitter end to save Mr. Nardelli .", "But as friends who have served on boards with him say , beneath the bluster and occasional histrionics lies a more pragmatic man well aware of his fiduciary duties as a director -- especially at Home Depot , a company that he helped found and that has made him a billionaire .", "`` Ken does not back down , but he also has a strong sense of propriety , '' said Gary E . Earlbaum , a real estate executive who introduced Mr. Langone to Bernard Marcus , a founder of Home Depot , more than 30 years ago .", "`` His loyalty is to the enterprise . ''", "Mr. Langone declined to comment .", "Mr. Nardelli did not return messages left for him .", "At age 71 , when many corporate executives start slowing down a bit , Mr. Langone continues to manage a full schedule of corporate , political , legal and philanthropic activities .", "He is a top financial supporter of Rudolph W . Giuliani , who is weighing a possible run for the White House , and he is the chairman of New York University Medical Center .", "Mr. 
Langone also still devotes considerable time and energy to contesting the lawsuit brought against him and Mr. Grasso by Eliot Spitzer , the former attorney general for New York who is now the state 's governor .", "That 's all in addition to his day job , running Invemed Associates , the small investment bank he founded .", "But it has been his passionate and unstinting defense of Mr. Grasso 's pay , much of which was set during his time on the exchange 's compensation committee , that has come to define Mr. Langone 's public image .", "His detractors see him as a living symbol of the excesses of runaway executive compensation , a bullying old-school titan whose tendency to befriend his chief executives blinds any objective ability he might have had to keep a ceiling over their pay .", "Mr. Grasso 's huge compensation package , and , according to the lawsuit brought by Mr. Spitzer , his efforts to keep other directors in the dark about it , are perfect examples of this , they say .", "`` Ken Langone is the root of the problem .", "His philosophy is that you can never pay a C.E.O. too much money , `` said Richard Ferlauto , the director of pension investment policy at the A.F.L. - C.I.O.", "`` That was revealed in his defense of Grasso .", "Now we are focusing on the role of Langone `` at Home Depot , he said .", "`` We blame him and his cronies for the original contract . ''", "His defenders , on the other hand , point to his long experience as a director and his acute knowledge of a director 's responsibilities , honed from more than 30 years ' experience as an independent investor .", "Yes , he has a big heart and an inclination to overpay at times , they say , but his ethics are beyond dispute .", "`` I do n't care what kind of a battle I 'm in but I would want Ken Langone on my side , because he is on the side of honor and righteousness , `` said Frank Borman , a former director at Home Depot .", "Mr. Langone 's pursuit of Mr. Nardelli , whom he met in 1999 , when he became a G.E. director , was infused with a sense of glee at the prospect of landing such a corporate star .", "Mr. Nardelli , well aware of the value of the G.E. pedigree , drove a hard bargain .", "A result was the 2000 contract , which , with its guarantees and perks , may well be an anachronism today as rich pay packages come under increasing scrutiny .", "At Home Depot , Mr. Langone is a particularly involved director .", "He is chairman of the nominating committee and has played a significant role in shaping the current board .", "He is also a member of the executive and audit committees .", "He would frequently start his mornings with a call to Mr. Nardelli , and he thrilled in digesting the latest sales data as well as updates about Lowe 's , the retailer 's main competitor .", "And , while walking the floors at store outlets was a responsibility for directors , Mr. Langone did more than his share , as well as inspiring the Home Depot sales force at the frequent pep rallies he attended .", "Still , when it became clear that a combination of the poor performance of Home Depot 's stock and Mr. Nardelli 's obstinate demeanor had made his position untenable , Mr. Langone threw his support with the rest of the board .", "In a 2004 interview for an article about Mr. Langone and his role in the Grasso controversy , Mr. Nardelli said : `` Change is difficult .", "But it is the only constant in today 's environment .", "It takes courage and leadership . ``", "`` I think Ken stands by good business judgment . 
'' ."], "summary": ["Home Depot 's former director Robert L Nardelli 's refusal to bend to will of exasperated board and accept pay cut lost him crucial support of friend Kenneth Langone , co-founder and director of company , who not only recruited him but presided over his compensation contract that would later draw so much fire .", "Langone is believed to be angry over Nardelli 's unwillingness to have his pay reduced .", "Photo ."], "publication": "nyt50", "label": [1, 30], "tag": ["Business"]} -{"id": "1816559", "text": ["Gap 's board , it appears , has had enough .", "After a dismal holiday season -- the third in a row -- the chain 's directors are participating in a broad review of the company 's strategy , intensifying pressure on the chief executive , Paul S . Pressler , to pull it out of a protracted sales slump .", "Gap said yesterday that sales plunged 8 percent in December after a return to the simple fashions that fueled the chain 's meteoric rise in the 1990s , like T-shirts and hooded sweaters , failed to lure customers back into stores .", "In an admission that the back-to-basics approach has not worked , the company cut its earnings forecast for 2006 , blaming deep discounts -- like $ 100 off a fur-lined parka -- required to sell left-over piles of winter clothing .", "`` We are clearly disappointed , '' Mr. Pressler said in a statement .", "Gap 's results in December contrasted sharply with that of its competitors .", "The nation 's retailers reported respectable sales increases for the month , according to several industry groups , even as warm weather hurt winter clothing sales and higher fuel prices pinched consumer spending .", "Sales at stores open a year rose 3.1 percent in December , compared with the period in 2005 , according to the International Council of Shopping Centers , which predicted a 2.5 percent gain .", "For November and December combined , sales rose 2.8 percent , within the group 's forecast .", "As they have for much of the year , department stores like Saks -LRB- up 11.1 percent -RRB- , Federated -LRB- up 4.4 percent -RRB- and J . C . Penney -LRB- up 2.6 percent -RRB- posted strong gains .", "But smaller clothing chains like Abercrombie -LRB- down 1 percent -RRB- .", "Express , a unit of Limited -LRB- down 5 percent -RRB- .", "And Ann Taylor -LRB- down 5.3 percent -RRB- struggled to stand out .", "`` A lot of these retailers limped across the finish line , '' said John D . Morris , a senior retail analyst at Wachovia Securities , who speculated that bargains on flat-screen TVs captivated consumers in a season when there were few must-have fashions .", "Wal-Mart , which heavily promoted flat-screen TVs , said sales rose 1.6 percent , beating its own predictions of a meager 1 percent increase .", "`` Flat-screen TVs flogged fur-lined hoodies , '' Mr. Morris said .", "There were plenty of those hoodies at Gap , which heavily discounted much of its winter clothing in the days immediately before and after Christmas .", "-LRB- The fur-trimmed parka , originally $ 169 , was marked down to $ 68 several days before Christmas . 
-RRB-", "As a result of the deep discounting , Gap profit margins for the fourth quarter will be `` significantly below '' those of last year , said Sabrina Simmons , senior vice president for corporate finance at the chain .", "The board 's decision to intervene in a review of Gap 's brand strategies suggests that the directors are fed up with the chain 's poor results and the executives who have failed , season after season , to turn it around .", "`` This is a clear judgment coming down on the leadership of the Gap brand , '' Mr. Morris said .", "Revenue in December dropped 4 percent , to $ 2.34 billion , from $ 2.44 billion a year ago .", "The worst performance was at the Gap and Old Navy brands , where monthly sales dropped 9 percent and 10 percent , respectively .", "As a result , Gap yesterday reduced its earnings forecast to 83 cents to 87 cents a share , down from $ 1.01 to $ 1.06.", "Even at Gap , which often describes its sales as `` disappointing , '' the December results appeared to jolt executives .", "The word `` disappoint '' was used at least four times yesterday in a prerecorded phone message and news release describing December sales .", "Mr. Pressler said that he and the board would review brand strategies at Gap and Old Navy and that he was `` committed to making the necessary changes to improve performance . ''", "Banana Republic will not be included in the review because that chain 's sales are improving .", "As the division introduced pricier , preppier fashions , like an $ 88 silk-cashmere half-zip pullover , December sales rose 2 percent , making Banana a bright spot for the company .", "The strategic review would be the second in the last year for the beleaguered Gap division , which has struggled to recapture business after several years of changing missions and fluctuating fashions .", "Over the summer , after months of brainstorming , Gap 's brand president , Cynthia Harriss , introduced the back-to-basics strategy to investors and analysts , even bringing back the lower-case Gap logo used for the chain 's first store in 1969 .", "But the trip down memory lane did not win over shoppers .", "`` We did not gain the traction we had expected , '' Mr. 
Pressler said ."], "summary": ["Gap says sales dropped 8 percent in December .", "Cuts its earnings forecast for 2006 .", "International Council of Shopping Centers says sales at stores open one year rose 3.1 percent in December compared with period in 2005 .", "Combined sales rose 2.8 percent for November and December within group 's forecast .", "Department stores posted strong gains in December , while smaller clothing chains struggled .", "Wal-Mart says sales rose 1.6 percent , beating its own predictions of 1 percent increase .", "Graphs .", "Photo ."], "publication": "nyt50", "label": [7, 8, 14], "tag": ["Business"]} diff --git a/reproduction/Summarization/Baseline/test/testdata/train.jsonl b/reproduction/Summarization/Baseline/test/testdata/train.jsonl deleted file mode 100644 index 3e70cad6..00000000 --- a/reproduction/Summarization/Baseline/test/testdata/train.jsonl +++ /dev/null @@ -1,100 +0,0 @@ -{"id": "1453065", "text": ["Rita Bank helped run her family 's real estate business and , as a hobby , collected antiques , always dreaming of having an antiques shop .", "She died before that could happen , but her son , Ted , opened one in her honor , Rita 's Antiques Cafe , 167 Eighth Avenue -LRB- 18th Street -RRB- .", "It is filled with his mother 's crystal , china , paintings , light fixtures and furniture , and you can also sip a coffee or a glass of wine with a sandwich -LRB- $ 6 to $ 7 -RRB- or pastry -LRB- $ 1.75 to $ 5.50 -RRB- in the brick-walled store .", "And if you like the table you 're sitting at , you can buy it .", "Most of the food comes from purveyors like Chelsea Souk in the Chelsea Market .", "FOOD STUFF ."], "summary": ["Rita 's Antiques Cafe serves light food in antiques shop .", "Photo ."], "publication": "nyt50", "label": [1], "tag": ["Dining and Wine", "Style"]} -{"id": "1453069", "text": ["Pissaladi\u00e8re , the onion and anchovy tart that is a specialty of Nice , is usually made with a simple bread dough enriched with olive oil .", "But the Oliviers & Company shops sell a flour specially for making pissaladi\u00e8re : spelt flour , or farine de petit \u00e9peautre .", "It 's $ 6 for 8.9 ounces , enough for a generous crust for six to eight people .", "The recipe on the package , requiring eggs , cornstarch , olive oil and salt , in addition to the spelt flour , makes a sturdy , if not particularly elastic , dough .", "The pastry that results has a nutty , rustic flavor to balance the sweet onions , salty anchovies and olives in the topping .", "FOOD STUFF ."], "summary": ["Oliviers & Company shops sell flour specially for making rustic tart pissaladiere .", "Photo ."], "publication": "nyt50", "label": [1], "tag": ["Dining and Wine", "Style"]} -{"id": "1453070", "text": ["I am not a big fan of soy-protein meat pretenders , but as the French say , `` You must n't die an idiot . 
``", "So I tried Karma ' Licious Veggie Bolognese pasta sauce , and was pleasantly surprised .", "It tastes like a rich meat sauce , with no soy flavor , and the texture could fool any card-carrying carnivore .", "Tama Herman , with tongue in cheek , named her company Mad Cow , and took a year to develop the sauce .", "She also makes a mushroom version , but the evidence of mushrooms was fleeting .", "The sauces are low in sodium as jarred sauces go -LRB- 170 milligrams in a half-cup , less than half that of a typical jar -RRB- .", "They are $ 7 for a 24-ounce jar at Westerly Natural Market , 911 Eighth Avenue -LRB- 54th Street -RRB- .", "LifeThyme Natural Market , 410 Avenue of the Americas -LRB- Ninth Street -RRB- .", "And the Amish Market at 49th Street and Ninth Avenue .", "The sauces are $ 5.99 plus shipping from www.veganstore.com.", "A new product for those who like the taste of Worcestershire sauce but not its hidden ingredient , anchovies , is Annie 's Naturals Organic Worcestershire Sauce .", "It has the sauce 's sweet-sour-spicy flavor but no fish .", "It is sold at many supermarket chains for $ 3 for 6.5 ounces .", "FOOD STUFF ."], "summary": ["Pasta sauces using meat substitutes by Karma ' Licious and Worcestershire sauce by Annie 's Naturals offer meaty taste suitable for vegans .", "Photo ."], "publication": "nyt50", "label": [10, 1], "tag": ["Dining and Wine", "Style"]} -{"id": "1453083", "text": ["IN professional kitchens there is a longstanding rule : there can be only one head chef .", "In French , the word chef means chief .", "And having two seems as harmonious as having two kings -LRB- remember the War of the Roses : two kings , one crown , much bloodletting . -RRB- .", "Yet there are teams of executive chefs around the country who bravely go where others fear to tread : into small restaurant kitchens , full of sharp , heavy and hot objects that would make good projectiles .", "These bravehearts include Mary Sue Milliken and Susan Feniger , who seem more like sisters than co-workers , though they have worked together at Border Grill in Santa Monica , Calif . , since they opened it in 1981 .", "One of the first chef teams since the country 's restaurant scene greatly expanded in the 70 's , Ms. Milliken and Ms. Feniger typify the overworked , overachieving chef : from 1995 to 1999 , they were the hosts of `` Too Hot Tamales '' and `` Tamales World Tour , '' on the Food Network .", "They have opened five more restaurants and written five cookbooks .", "As the job description of executive chefs grows beyond the kitchen -- building restaurant empires , being television hosts , writing cookbooks -- chefs are finding that sharing the glory as well as the toil can be a beneficial exchange .", "That is , for the right pair of chefs -LRB- micromanagers and control freaks need not apply -RRB- .", "Co-chefing is not simply a matter of two people sharing brilliant culinary insights while stirring the same pot .", "It 's a complicated balancing act based on mutual respect , compromise , a clear-eyed assessment of both parties ' strengths and weaknesses and a whole lot of trust .", "`` We clicked ever since we first met , working in a Chicago restaurant in 1978 , '' Ms. Milliken said of Ms. Feniger .", "`` For some reason my weaknesses and her strengths and vice versa were very compatible , and it felt natural to work as a team . ''", "`` She 's persistent and positive and hopeful , `` Ms. 
Milliken said , '' and I get discouraged and am skeptical and worry a lot .", "She helps get us through that way .", "But she 's also pretty disorganized and scattered , and I 'm a little better that way .", "I think neither of us separately would have built what we have together , nor even half of what we have . ``", "Sometimes , they concede , they wonder if their co-chef relationship borders on co-dependence .", "After all , Ms. Milliken married Ms. Feniger 's ex-boyfriend , and Ms. Feniger was in the delivery room when Ms. Milliken had her first child .", "`` No matter what happens , what tensions arise , we work it out , '' Ms. Milliken said .", "For them , working it out has meant couples therapy at times .", "Odd .", "`` We are in California , '' Ms. Milliken said .", "Sharing the kitchen duties at Blue Hill in Manhattan works well for Dan Barber , allowing him to divide his time between the restaurant , catering and planning other projects .", "But Alex Ure\u00f1a , his co-chef when the restaurant opened , left after a year to become the executive chef of Marseille restaurant .", "Now Michael Anthony fills that slot .", "`` I learned a lot working with Alex , '' Mr. Barber said , `` and I can now apply that to working with Mike . ''", "One key lesson was about communication : the more of it , the better .", "This means being open to criticism and suggestion , and taking the other chef 's feelings and ego into account .", "`` I 'm a little sensitive , `` Mr. Barber acknowledged .", "`` If Mike 's forgotten to tell me that he put yellow watermelon in the tomato coupe on my night off , and I come in the next day and find out from one of the cooks , it stings me .", "I think , ` What did he go and change it for .", "` But ultimately if it improves the dish , we both win . ''", "But not everyone is enthusiastic about being a member of such a team .", "Pino Maffeo worked as Patricia Yeo 's chef de cuisine at AZ .", "She promoted him to co-chef when she opened Pazo in Midtown a few months ago .", "`` The whole thing was her idea , and I go along to support her , '' Mr. Maffeo said , `` but I do n't like to put myself out front .", "At this point , I 'm happy helping to propel someone else 's career . ``", "Ms. Yeo prefers to give credit where it is due , unlike many of her peers , who simply hire chefs de cuisine or sous-chefs to run their kitchens when they are not there .", "`` Anyone who thinks they can run more than one restaurant at a time is either kidding themselves , or they just are n't there , `` she said .", "`` The sous-chefs do all the work but do n't get the credit . ``", "One recipe for happy co-chefdom is separate but equal spheres of responsibility .", "Ms. Yeo comes up with the dishes , and Mr. Maffeo executes them .", "He 's also the enforcer , firing people if need be , and making sure the kitchen 's workings are as smooth as cr\u00e8me fra\u00eeche .", "For Diane Forley and Michael Otsuka , co-chefs of Verbena for two years and also husband and wife , sharing credit was never an issue .", "The part that gave them pause was whether their marriage could survive the stress .", "`` That 's the first thing people say to us , `` Mr. Otsuka said , '' ` I could never work with my husband , we 'd be divorced in a year , ` but it 's been good for us . ``", "The best part , for them , is coming up with dishes together , bouncing ideas off each other and melding their two distinct styles .", "In their signature gravlax , the cured salmon is something Ms. 
Forley has made for years but the blinis now have buckwheat flour at Mr. Otsuka 's suggestion , along with a very Otsukian garnish : shredded daikon and tobiko caviar .", "`` That dish is a perfect example of our two styles complementing each other , '' Ms. Forley said .", "`` But we do n't go overboard with that .", "We 're not going to put soy sauce on ratatouille just to prove a point . ``", "Mr. Otsuka works in the kitchen during meals , doing the prep lists and ordering .", "Ms. Forley trains much of the staff , takes care of the phone calls , the press , events and , lately , the couple 's young daughter , Olivia .", "But neither would agree that being co-chefs gives them more free time .", "`` Now , we are actually able to see projects to completion , '' Ms. Forley said .", "`` Before , we tried to do everything at once , and there was always something we would have to let drop . ''", "Mr. Otsuka added , `` We 're not necessarily working less , but we are getting more done . ``", "Mr. Barber of Blue Hill noted : `` Most people would think it 's easier to work with a co-chef because you would only have to do half the work .", "But it does n't actually happen that way .", "When you 're working by yourself , you can quickly make all the hundreds of decisions that need to be made every day without having to stop and discuss every little thing .", "It takes more thought and energy than working by yourself . ``", "Not all co-chefs share that view .", "Part of the appeal for Lee Hanson and Riad Nasr , co-chefs of Balthazar , is being able to have a life beyond the kitchen .", "They both work only five days a week instead of six like many chefs , giving Mr. Hanson more time with his wife and daughter .", "`` It 's a very stable , sane way to work in a kitchen , `` he said .", "But , as in marriage , not all partnerships work out .", "Raphael Lunetta and Josiah Citrin had been best friends since they were teenagers but after three years as co-chefs at JiRaffe in Santa Monica , Calif . , more than their restaurant was at stake .", "It was unclear whether their friendship would survive .", "`` It was tough when we realized it was n't working , `` Mr. Lunetta said .", "`` There were a few big blowouts .", "Once I threw a piece of medium rare salmon right out of the frying pan and onto the wall behind him , and he probably threw some salmon back at me , but it never really got violent . ``", "The two have since made up , with Mr. Lunetta remaining at JiRaffe and Mr. Citrin moving on to his own restaurant , Melisse .", "Now , they are even godparents to each other 's children .", "`` It was a great thing to have done , '' Mr. Citrin said .", "`` We were best friends who wanted to open a restaurant together , so for us it was a childhood dream come true .", "But I do n't understand why anyone else would do it . ``", "Golden Guidelines HERE is a guide to successful co-chefdom from those who have been there .", "DO Adopt a state of Gandhian nonviolence .", "Be diplomatic , even when presented with a menu suggestion that you would normally veto .", "Become comfortable saying `` we '' instead of `` I . 
''", "Be realistic about your strengths and weaknesses .", "Learn to let go .", "DO N'T Season the ragout when your partner 's back is turned .", "Take the other person for granted , or forget to say thanks .", "Play the martyr .", "Let the staff pit the two of you against each other ."], "summary": ["Mary Sue Milliken and Susan Feniger of Border Grill in Santa Monica , Calif , find that sharing executive chef responsibilities can be beneficial .", "Other successful duos noted , as well as team that decided to call it quits when kitchen partnership did not work out .", "Photo ."], "publication": "nyt50", "label": [4, 66], "tag": ["Dining and Wine", "Style"]} -{"id": "1453084", "text": ["When Joanna Sherman took her alternative theater company to Pakistan this spring to entertain Afghan refugees by dancing on stilts , one young boy in the crowd was not amused .", "`` He stared at us earnestly and asked , ' What is the meaning of this .", "' `` she recalled .", "Ms. Sherman reflected upon this incident as she embarked on her latest tour , dubbed the Balkan Peace Project .", "`` I gave him some glib response , but the question stuck in my mind , '' she said .", "`` It 's a tough one to answer . ``", "Since 1976 , when she founded Bond Street Theater in New York , her troupe has traveled through Asia , Europe , the Middle East and Latin America , seeking what she calls a `` universal physical language '' to amuse and enlighten .", "But her experience in Pakistan was a reminder that not everyone gets it .", "`` Sometimes it 's such a dire situation that you just want to go in and make a gesture , to give people some simple pleasure , `` she said .", "`` These kids did n't have access to anything fun , anything that would make them laugh .", "So we tried to offer them something . ``", "Trained as a dancer , Ms. Sherman , 48 , has created a performance style that incorporates elements of mime , martial arts , juggling and acrobatics .", "To her , Bond Street Theater is a `` cultural Peace Corps '' conveying a message of tolerance .", "It has traveled to Israel to train a street theater company of both Arabs and Jews , to Northern Ireland for workshops with Catholic and Protestant children , and most recently to the Balkans .", "Theater critics might question the merits of the play Ms. Sherman has brought to Albania : a circus-style production of `` Romeo and Juliet , '' stripped of Shakespeare 's poetic dialogue .", "But the result was an instant hit at the Fourth International Theater Festival in Elbasan , largely because everybody could understand it .", "`` We have always tried to reach the broadest and most diverse audience possible , '' Ms. Sherman explained .", "`` Originally , this meant a local audience across the United States , but over the years we 've become more interested in going to areas of conflict or recent conflict . ``", "Bond Street 's version of `` Romeo and Juliet '' reduces the play 's multiple themes to a simple core .", "`` We 're coming with a message that you can have peace if you choose , `` Ms. Sherman said .", "`` All the deaths in ' Romeo and Juliet ' are unnecessary , even the suicides .", "Nothing is inevitable unless you make it so . 
``", "The slapstick comedy and rhythm of her production , reinforced by a live percussion accompaniment throughout the show , struck a chord with her audience of several hundred Albanians in Elbasan .", "Laughter and applause punctuated the performance .", "`` People are almost more impressed by the fact that we 're there , trying to help , than they are by the product or its message , `` she conceded .", "`` We 're the outsiders .", "We 're Americans .", "But we can facilitate , stimulate and encourage , even if we ca n't change things . ``", "The desire to become a catalyst for action has brought her back to the Balkans three times since her first visit in 1999 , when she sought to distract Kosovo Albanian children from the squalor of Macedonia 's refugee camps with a circus performance .", "Bond Street Theater has now performed in every country in southeastern Europe .", "It has a partnership with Theater Tsvete of Bulgaria , just one of many , which helped with the puppetry in `` Romeo and Juliet . ''", "`` Artists ' power to communicate on many levels has been put to the test in the field , `` Ms. Sherman said , '' and proven to be an essential component of any strategy to rebuild morale , re-establish cultural connections and heal the psyche of a society . ``", "Bond Street Theater 's commitment to humanitarian work has won it a MacArthur Foundation award and financing from the Trust for Mutual Understanding .", "Although the company gave up its original theater in Lower Manhattan 20 years ago , it has not turned its back on the United States .", "It regularly performs plays with strong social themes , like the provocatively titled `` Nightmare on Wall Street , '' and Ms. Sherman plans to take `` Romeo and Juliet '' on a nationwide tour next year .", "`` It 's been a tremendous learning process for me , because in the United States we 're really quite closed off from international news , `` she said .", "`` People in America need to learn more about the rest of the world .", "And they need to think more about why the rest of the world perceives us the way they do . ``", "But Ms. Sherman has few illusions about her power to do much to change that .", "`` It 's a very humbling experience doing this work , `` she said .", "`` You see the limitations of what you 're doing very clearly , that you can only go so far .", "But it 's a lot more than nothing . `` ."], "summary": ["Interview with Joanna Sherman , head of Bond Street Theater , alternative company that incorporates mime , martial arts , juggling , acrobats and slapstick in search for ` universal physical language ' .", "Troupe has performed in various locations around world since its founding in 1976 .", "Photo ."], "publication": "nyt50", "label": [11, 29, 2], "tag": ["Arts", "Theater"]} -{"id": "1453085", "text": ["IMAGINE for a moment the following scene : Sometime tonight , beginning about 6 , between 30 and 100 people come to your home .", "They arrive in a steady stream throughout the evening until midnight or 1 in the morning .", "They expect dinner .", "And you will make it .", "Alone .", "No one will help you wash the salad greens , whisk the vinaigrette , dice the vegetables , roast the chicken , reduce the sauce or torch the cr\u00e8me br\u00fbl\u00e9e .", "Now imagine doing this as many as six days -LRB- and nights -RRB- a week .", "O.K. 
, you can stop hyperventilating now .", "This was just an exercise to get you in touch with the world of the solo chef , a rarity in restaurants .", "Most chefs share cooking responsibilities with a seasoned team of knife-wielding colleagues .", "Yet there are , around the country , stalwart individualists who go it alone , and like it .", "Dominic Giuliano is one of them .", "A softspoken 30-year-old with a strong build , dark eyes and a tidy goatee , he whirls around his 7-by-9-foot kitchen at Punch & Judy , a wine bar on Clinton Street in Manhattan .", "The space is leanly equipped with a sink -LRB- he washes all the dishes there -RRB- , an induction burner , a convection oven and a panini press .", "As orders pile up , he morphs into a Chef Superhero , able to dress an arugula salad with a pancetta vinaigrette , coat a disk of goat cheese in crushed cashews , sear a rectangle of ruby-red tuna , press a smoked duck and fig panino , fan out slices of rosy beef carpaccio , warm a chocolate fondue and prepare a cheese plate in a single bound .", "It is a wonder he does not wear blue tights .", "Customers would never know there is no staff behind the kitchen door .", "Plates are garnished with care , pristine ingredients shine and little details , like the selection of homemade infused oils and butters that arrive with a basket of warm bread , are never overlooked .", "Mr. Giuliano has a soul mate in Chicago , where diners line up for one of the 28 coveted seats at Lovitt , the domain of Norman Six .", "In his cozy kitchen , he pirouettes from six-burner stove to flattop griddle to tiny grill while turning out salads of tomatillo , tomato and queso fresco , plates of maple-glazed salmon with squash and zucchini , and stacks of crisp , golden scallop and salmon cakes .", "At Lovitt , B.Y.O.B. is the order of the day , which eliminates -LRB- at least -RRB- the hat of sommelier .", "And in Cobble Hill , Brooklyn , in the 10-by-10-foot kitchen of That Bar , Brent Erickson 's arms seem to multiply as he flips back and forth between his stove , flattop , panini press and a basket fryer .", "Ditto for Sean Kelly , the chef at Clair de Lune , an eight-table bistro in Denver .", "Mr. Kelly 's company in the kitchen are a small pizza oven , a standard stove and a mini-fryer .", "Why do these Lone Ranger chefs do it .", "For Mr. Kelly , the answer is simple .", "`` After 9/11 , I was looking at a lot of projects with a lot of investors , but it was n't the right time to do a big space with a lot of debt , `` he said .", "`` This was a safe rest stop .", "I own it by myself .", "To start a restaurant with no debt is unique .", "This place is all about freedom .", "It 's not about money .", "I get to cook what I want to cook and do what I want to do . ``", "Indeed , independence -- complete culinary reign over the menu -- is the solitary kitchen 's biggest draw .", "`` Cooking alone makes me the only person to depend on , '' Mr. Giuliano said .", "`` When it 's just you coming up with the idea for the dish , prepping it and making it all the way through to the table , that gives you a very personal attachment to the food . 
``", "But doing it all yourself takes discipline , planning skills and resilience .", "The first rule of the solo chef is making sure the components of each dish are ready in advance , so that the time it takes from the moment the chef starts making the order to the moment it is on the plate -LRB- called the `` fire time , '' in chef lingo -RRB- is cut as thin as a potato chip .", "`` It 's like Lego , `` Mr. Erickson of That Bar explained .", "`` All my building blocks have to be ready to go at dinner . ''", "He spends his days butchering and slicing meats , chopping and blanching vegetables , whisking sauces and making a night 's worth of soup , risotto , short ribs and couscous .", "With his blocks in order , he will have only one or two steps left before the dish is on the table in front of the diner .", "Mr. Giuliano follows the same strategy .", "For his sushi-style caprese salad , he makes mozzarella from scratch a few times a week .", "He rolls it up with chopped tomatoes and basil so that it resembles a long and puffy marshmallow log .", "He wraps it in plastic until someone orders it , then simply slices it into pinwheels and finishes it with coarse salt , freshly cracked pepper and basil oil .", "Fire time : 30 seconds .", "`` Only 25 percent of this job is cooking , '' Mr. Giuliano said .", "`` Timing is everything else . ''", "He has taken a shine to soups and other dishes that can be heated up while he works on others .", "Dishes that customers can share also work to the solo chef 's advantage .", "Mr. Giuliano serves a Mediterranean platter -- hummus , tzatziki and tomato salad with feta -- that takes less than a minute to arrange and can serve six .", "`` The sharing buys me time , '' he said .", "`` I can put out one plate , and that keeps them happy and gives me time to work on a couple of other dishes . ''", "Mr. Kelly banks on his raw bar and his antipasto platter , piled high with homemade bresaola , house-cured sardines , Parmigiano-Reggiano , figs , caper berries , olives and crisp fried artichokes .", "`` I am putting out one plate that will serve three or four people , and that 's perfect , `` he said .", "Solitary chefs rely heavily on dishes that do not mind being cooked in advance , then simply and speedily reheated .", "During the day Mr. Kelly slow-roasts whole ducks , which he later halves and crisps in the high-heat pizza oven for 10 minutes , then moves to a gentler oven for a few minutes , along with a syrupy reduction of diced turnips , apples , yams , leeks and apple cider .", "`` It 's a quick pick-up that is easy for me to do , `` he said .", "In Chicago , Mr. Six pointed out another trick of the one-chef trade .", "`` The most important thing is to cook what you know , '' he said .", "`` You need to be really comfortable with what you are doing , because you do n't have an army of prep cooks to help you with food you are unfamiliar with .", "You want to focus on simplicity , and try not to overreach and get fancy with plates of perfectly julienned vegetables . ``", "But at the end of a 16-hour solitary day , nothing can change the fact that this is one demanding gig .", "Steven Hall , an owner of That Bar , went through two other chefs in a few months before finding Mr. Erickson , an energetic 27-year-old .", "Mr. 
Erickson said he finds the rewards outweigh the high-anxiety nights .", "`` I am probably working harder than I have in other restaurants , '' he said .", "`` But it 's my menu and my food , and I cook everything myself .", "That makes it a lot less harsh . ``", "And harsh it can be .", "`` I am cooking five mornings , five afternoons , and five nights a week , '' Mr. Kelly said .", "`` The mental pressure of doing it alone is very challenging . ''", "Yet , he added , `` I always had the dream of doing what I am doing .", "I can say that every plate served is mine , and that is tremendously rewarding . ``", "Mr. Six also finds satisfaction in the fact that his signature is on every dish .", "`` It 's a lot more personal , `` he said .", "`` The restaurant is so small that I can overhear conversations , so if someone says something nice about the food , it 's really gratifying .", "`` So far what I have heard has been really gratifying .", "I do n't know what I 'll do if I do n't like what I hear . ``", "A Tall Order HERE are some tips from chefs who cook alone : Take care of yourself physically and mentally .", "You ca n't call in sick .", "Be careful .", "If you slice your finger and have to go to the E.R. , the restaurant has to close .", "Make sure the food is good , because you are the one to blame if it is n't .", "Monitor your beverage consumption .", "There is no time for bathroom breaks ."], "summary": ["Most restaurants have chef and army of cooks to fill diners ' orders , but several stalwarts prefer kitchen all to themselves .", "Dominic Giuliano , sole chef at Punch & Judy in Manhattan has developed intricate kitchen choreography .", "Solo chefs have independence and complete culinary control .", "Photos ."], "publication": "nyt50", "label": [12, 11, 33], "tag": ["Dining and Wine", "Style"]} -{"id": "1453087", "text": ["Early in the 19th century , as a flood of pioneers began rolling across the American West , the painter George Catlin resolved to travel there and document the unspoiled landscape and native peoples .", "Before the era of photography , few white people had ever seen the faces of Indians .", "So Catlin 's paintings created a stir when he showed them in Eastern cities and in Europe .", "He went on to become a fervent defender of Indians , even urging that the Great Plains be preserved as a `` nation 's park `` where they could continue to live their traditional lives .", "Catlin described the Indians he met as `` nature 's proudest , noblest men `` and said he hoped '' to rescue from oblivion their primitive looks and customs , `` which were being '' blasted and destroyed by the contemporary vices and dissipations introduced by the immoral part of civilized society . ``", "Many of Catlin 's portraits present Indians as stately and even regal .", "Some modern critics , however , also find troubling aspects of his art and life .", "A show at the Renwick Gallery of the Smithsonian American Art Museum here puts Catlin 's contradictions on display .", "The exhibition , which runs through Jan . 20 , is the most complete display of his work in more than a century , with more than 400 paintings and artifacts .", "Indians are among those who have taken newly critical looks at Catlin 's work .", "W . 
Richard West , a Southern Cheyenne who is director of the National Museum of the American Indian here , wrote in this show 's catalog , `` A native person is challenged , I think , not to feel on some level a profound resentment toward Catlin .", "His obsession with depicting Indians has an extremely invasive undertone to it . ``", "Mr. West called him `` a cultural P . T . Barnum , a crass huckster trading on other people 's lives and life ways . ``", "The show raises questions about the ability of outsiders to interpret foreign cultures , and also about whether it is possible to judge the attitudes of 19th-century Americans by modern standards .", "Catlin financed his five trips to the West himself , and after each of them he tried various means to make money from his experiences .", "He staged shows of his paintings and artifacts , some of which experts now believe were of dubious authenticity , and he exaggerated his exploits .", "His celebrated claim to have been the first white person to visit the sacred quarry where Plains Indians mined stone for their pipes , for example , is now widely considered false .", "Catlin was a product of the same romantic imagination that produced European masters like Caspar David Friedrich and J . M . W . Turner .", "Like them , he worked from nature , and his works have a sense of immediacy and intimacy .", "Sometimes he produced as many as three portraits in a single day .", "Romanticism shaped Catlin 's view of the Indian as a noble savage whose life in the state of nature was endangered by the encroachment of civilization .", "His trips ranged from one in the Southeast , where he met tribes that had intermingled with whites for more than a century , to some in the Northern Plains , where he found groups that had rarely if ever been in contact with non-Indians .", "The Indians in Catlin 's paintings are often adorned with scalps or bear claw necklaces .", "Some have richly painted faces and bodies .", "There are also works depicting various forms of hunting and religious rituals .", "Scholars say that while some are highly accurate , others are suspect .", "One of Catlin 's most powerful paintings is `` Wi-jun-jon , Pigeon 's Egg Head -LRB- The Light -RRB- , Going to and Returning From Washington . 
``", "It shows two views of the same man .", "On the left , emerging from the prairie , he looks dignified , carrying a pipe and wearing a long feathered headdress and a richly decorated buckskin cloak .", "On the right he staggers back toward home in a dandy 's suit and top hat , complete with high leather boots and an umbrella .", "He is smoking a cigarette , and there are bottles of whiskey in his back pockets .", "Although Catlin 's Indian Gallery at first attracted crowds in cities like Pittsburgh and New York , and later in London , Paris and Brussels , he did not sell many pictures , and attendance soon dropped .", "He tried unsuccessfully to sell his collection to the United States government .", "To make the money he staged Wild West shows that foreshadowed those of Buffalo Bill decades later .", "Catlin 's shows included performing Indians , staged battles and even live grizzly bears .", "He was bankrupt and exhausted when he died in 1872 at 76 .", "Seven years later his widow donated the bulk of his collection to the Smithsonian .", "The exhibition here includes some evocative artifacts .", "Among them are `` peace medals '' bearing the portraits of American presidents , intended as gifts for Indian chiefs , and a mask made from a buffalo head that was used in ceremonial dances .", "There is also a room with four screens , on which video scenes of the West as Catlin knew it , including sequences of wild rivers , buffalo herds and a prairie fire are projected , complete with sound effects .", "From Washington this show will travel to the Nelson-Atkins Museum of Art in Kansas City , Mo .", "The Autry Museum of Western Heritage in Los Angeles .", "And the Museum of Fine Art in Houston .", "Catlin was a self-taught artist whose fame comes as much from the unusual subjects he chose as from his painterly skill .", "He was working as a lawyer in the 1820 's when he saw a group of Indians who visited Philadelphia .", "Eager to learn more about their culture , he traveled to St . Louis and met William Clark , governor of the Missouri Territory , who had made the famous trek West with Meriwether Lewis .", "Clark encouraged Catlin 's interest in Indians and took him on his first trip up the Mississippi , where he saw what he called `` soul-melting scenery '' that would inspire him for years to come .", "Over the next half-century Catlin traveled thousands of miles and painted hundreds of portraits of Indians from nearly 50 tribes , including the Pawnee , Omaha , Mandan , Sioux , Cheyenne , Crow , Blackfoot , Osage , Choctaw and Kiowa .", "He kept a meticulous journal that is still considered an important source of information about Indian life , and he is remembered not only as a painter and showman , but also as an ethnographer , geologist and cartographer .", "His experiences led him to conclude that `` the North American Indian in his native state is an honest , hospitable , faithful , brave , warlike , cruel , revengeful , relentless -- yet honorable , contemplative and religious being . 
''", "ARTS IN AMERICA ."], "summary": ["Stephen Kinzer Arts in America column on 19th century American painter George Catlin , whose works are on exhibit at Renwick Gallery of Smithsonian American Art Museum , Washington , DC .", "Photo ."], "publication": "nyt50", "label": [7], "tag": ["Arts"]} -{"id": "1453089", "text": ["Loan exhibitions from one big city to another tend to beat the drum from start to finish until we can hear it sounding , triple forte , through the windows .", "There is nothing like that about the exhibition of 75 French master paintings from the Pushkin State Museum of Fine Arts in Moscow that opened here at the Museum of Fine Arts in mid-December and runs through March 9 .", "This is one of the subtlest and most seductive shows of its kind .", "From Poussin and Lorrain in the 17th century all the way to Bonnard and Matisse , just before 1914 , it has surprises to spring .", "A civilized curiosity is applied to everything from Jupiter 's seduction of Callisto -LRB- as interpreted by Boucher in 1744 in terms of a same-sex embrace -RRB- to the prison yard in St . - R\u00e9my , complete with exercising prisoners , that van Gogh painted in 1890 .", "-LRB- The subject of painting by van Gogh is still today 's news around the world . -RRB-", "Nothing in this show falls flat or goes on too long , and some of the best things in it are small .", "Room has been found , for instance , for the delicate wit with which Louis-L\u00e9opold Boilly handled scenes of domestic drama in the last years of the 18th century .", "Renoir is at his very best in his `` In the Garden '' of 1876 , where the convivial scene is stolen by a young woman in a dress that is made up from head to foot of radiant pink and blue stripes .", "Among the five C\u00e9zannes , the `` Mardi Gras '' of 1888 is a family affair , in that the model for the Harlequin was C\u00e9zanne 's 16-year-old son Paul and the Pierrot was Paul 's friend Louis Guillaume .", "C\u00e9zanne brought all his magic to the white costume of Pierrot , which turns out to harbor a host of colors derived from the curtains behind him .", "Harlequin 's sloping tread and neutral expression , though impressive , suggest that he was more attentive to his father than to the antics of the commedia dell ' arte .", "Where the pictures in this show are really big , it is for a reason .", "A place of honor is reserved for a very large painting -LRB- 76 by 70 inches -RRB- , `` The Village Lovers . 
''", "It was painted by Jules Bastien-Lepage in 1882 , when the Impressionists were changing forever the way that a painting can look .", "Bastien-Lepage had been trained as a traditional painter , and he saw no reason to change .", "Style and subject were perfectly matched in this case .", "The picture is about country people and old-fashioned country ways .", "A young man loves a young woman and tells her so across a fence .", "They do not even touch hands .", "Nearby a man -- perhaps her father -- is working in the garden and can see what is going on .", "It is a painting that -- down to the last beanstalk -- speaks for an identifiable village and a specific way of life .", "Its detailing is superabundant .", "This is not , and never was , the `` cutting edge , '' but it has not outlived its uses , either .", "Among perennial favorites , Corot is in good form as a weather reporter in three studies of the French countryside .", "But it is in quite another guise that in `` Morning in Venice , '' he takes us with him to the Riva degli Schiavone at an hour when the light is perfect , the air is exhilarating and we have a nonpareil among townscapes all to ourselves .", "And Corot does it to perfection in just 10 by 15 inches .", "In the end , with just three great paintings by Matisse , all dated from just before 1914 and shown together in a room of their own , the show has a finale that no one who sees it will forget .", "The pictures in question are `` Nasturtiums and the Dance , '' `` Calla Lilies , Irises and Mimosas '' and `` Goldfish . ''", "All came from the holdings of the visionary collector Sergei Shchukin , and they have a collective exhilaration that bounces off the wall when we look at them .", "This broad , clear , comprehensive panorama of French painting is all the more remarkable because although the Pushkin Museum was opened by Czar Nicholas II in 1912 , it was not until after the death of Stalin in 1953 that it had any freedom in what could be put on the wall .", "In Stalin 's later years Soviet diehards had been heard to say that any museum director who showed Picasso or Matisse should be dismissed , flogged or shot .", "The single greatest task of museum professionals in Moscow was to preserve the inspired collections formed between 1897 and 1914 by Shchukin and his fellow collector Ivan Morozov .", "Between them , they drew a map of the advanced European art of the day that could have been drawn by a reborn Mercator .", "Shchukin owned 43 Matisses , 8 C\u00e9zannes , 5 Degas , 16 Derains , 16 Gauguins , 4 van Goghs , 13 Monets , 5 Marquets and 50 Picassos .", "Morozov owned 13 Bonnards , 18 C\u00e9zannes , 5 Derains , 11 Gauguins , 5 van Goghs , 6 Marquets , 11 Matisses and 6 Renoirs .", "Shchukin in all this was the leader , and Morozov was a younger enthusiast who had ideas of his own .", "Both wanted their collections to stay in Moscow forever .", "But in 1918 the Soviets seized both the collections and the houses in which Shchukin and Morozov had installed them .", "In 1948 the works were divided between the Pushkin Museum in Moscow and the Hermitage Museum in what was then Leningrad , on condition that nothing from either collection should be put on show .", "Advanced art was , in effect , under house arrest .", "Irina Antonova , the ever-valiant director of the Pushkin Museum since 1961 , worked consistently to get permission to put the forbidden pictures on view .", "-LRB- The Pushkin was the first museum in Russia to hang a painting by Kandinsky -RRB- .", "She 
succeeded , and since the 1960 's the pictures have been where Shchukin and Morozov wanted them to be -- in a place of honor in Moscow .", "ART REVIEW ."], "summary": ["John Russell reviews exhibition of French master paintings loaned by Pushkin State Museum of Fine Arts , Moscow , to Houston Museum of Fine Arts .", "Photo ."], "publication": "nyt50", "label": [1], "tag": ["Arts"]} -{"id": "1453090", "text": ["Facing a sinking economy , opera companies and orchestras large and small across the United States report severe deficits and are starting to panic .", "As the directors of many of these institutions ponder what to do , one of their number , Pamela Rosenberg , the tenacious general director of the San Francisco Opera , is absolutely clear about what not to do .", "`` We are not going to get through this economic downturn and come out the other end by replacing quality with mediocrity , '' Ms. Rosenberg , who has set the company on course to becoming America 's most adventurous opera house , said in a recent telephone interview .", "`` If we are going to be worthy of support , we have to make a difference in people 's lives . ``", "Companies that typically react to economic uncertainty by playing it safe and catering to subscribers would be wise to heed Ms. Rosenberg 's example .", "Despite recent announcements from her office of dire budget deficits and cutbacks in some programming , she is determined to implement her bold artistic vision .", "To understand why her supporters are betting that she will prevail , consider how she came to this prominent post in the first place .", "She did n't want the job when the opera 's board approached her more than three years ago .", "Though an American , Ms. Rosenberg had spent most of her career , some 35 years , running houses in Europe .", "At the time she was winning praise and creating a buzz at the Stuttgart Opera in Germany for innovative programming rich in modernist fare , as well as for championing experimental directors and younger singers .", "For all its past glory , the San Francisco Opera had become stodgy , as Ms. Rosenberg said publicly .", "No matter what the board members were telling her , she doubted they truly wanted a shake-up .", "Moreover , accustomed to running a government-subsidized house , she did n't relish the prospect of fund-raising .", "But in the end the board convinced her .", "On taking charge last year Ms. Rosenberg , 57 , implemented `` Animating Opera , '' a five-year creative plan to present series of works related to broad themes like `` Outsiders or Pioneers .", "`` And '' Seminal Works of Modern Times . ``", "She reconstituted the rehearsal schedule to allow more time to prepare challenging operas , notably Messiaen 's visionary , complex , five-hour `` St . Fran\u00e7ois d' Assise , '' which received its American stage premiere by the company in September in a hauntingly modern production .", "An impressive cast and inspired orchestra were under the direction of Donald Runnicles .", "Far from intimidating audiences `` St . Fran\u00e7ois '' drew the strongest attendance figures of any San Francisco Opera production so far this season , even beating out `` Turandot '' with Jane Eaglen .", "The last three performances were sold out .", "And who was its biggest booster .", "Karl Mills , the president of the board .", "An investment adviser , Mr. Mills is an ebullient opera buff in his early 40 's who , company officials report , attended five out of six performances of `` St . Fran\u00e7ois . 
''", "`` Actually , it was four and a half , '' he admitted in a recent interview .", "`` I was so moved .", "Lots of people who had apprehensions were just blown away , not only by its artistic impact , but its spiritual impact .", "It left a void that is hard to imagine filling . ``", "Asked whether convincing audiences to spend five hours at a modern opera was difficult , Mr. Mills responded , `` It 's better than five hours of watching CNBC these days .", "It 's better than a lot of what people spend five hours on . ``", "No sooner had the production closed when the announcement came that the company was saddled with a $ 7.7 million deficit on an operating budget of some $ 60 million for the year .", "Looked at in a national context , the gap was hardly out of line .", "For the first time in a decade the acclaimed Cleveland Orchestra is in the red , by some $ 1.3 million , roughly the same as the deficit at the Philadelphia Orchestra .", "Despite showing profits for 14 out of the last 17 seasons , the Chicago Symphony is now $ 6 million in the hole .", "Some smaller institutions are facing catastrophe .", "The San Jose Symphony , which tried to stave off bankruptcy by canceling its 2002-3 season , recently announced that there was no choice but to dissolve after operating for 123 years .", "Still , at the San Francisco Opera it did not help Ms. Rosenberg 's mission when , after the deficits were made public , Mr. Mills was widely quoted as saying that the company would have to take `` a more conservative view '' in the future .", "But Mr. Mills said he was referring to budget projections and administrative costs , not to artistic plans and commissions .", "Ms. Rosenberg asserts that the cost of mounting the Messiaen had nothing to do with the shortfall from last season 's budget .", "The debt , it would seem , is truly a case of `` It 's the economy stupid ! `` San Francisco , once a center for blue chip corporations , was pummeled by the dot-com bust .", "Most foundations in the area reported 50 percent declines in their endowments , said Elizabeth Connell Nielsen , the opera company 's director of public relations .", "The state 's deficit forced a 60 percent cut in the California Arts Council budget .", "And Sept . 11 was especially debilitating for the San Francisco Opera because of the company 's unusual schedule .", "Its house is shared with the San Francisco Ballet , and in 2001 , 9 of 11 opera productions were performed between Sept . 9 and the end of November .", "Ms. Rosenberg rightly maintains that putting on challenging and unfamiliar recent works does not necessarily cost more than sticking with the standards .", "Naturally , the rehearsals involved eat up funds .", "But so would a grandly traditional production of `` Aida . ''", "Out-of-town critics and opera buffs have been flocking to the city for intriguing offerings like the recent daring production of Janacek 's `` Kat ' a Kabanova `` with the soprano Karita Mattila in the title role .", "In an effort to keep the house full and attract new audiences , Ms. Rosenberg also instituted a student rush ticket policy that students have rushed to embrace : two hours before the performance any unsold seat goes for $ 15 .", "`` What we are doing here is not just good for the San Francisco Opera , '' Mr. Mills said .", "`` It puts San Francisco artistically on the world stage .", "It 's worth fighting for . ``", "Still , something had to give .", "So , reluctantly , Ms. 
Rosenberg has shelved two works for next season that would have been company firsts .", "A production of Rimsky-Korsakov ` s '' Coq d' Or `` from the Th\u00e9\u00e2tre du Chatelet in Paris , which would have had to be boosted a bit with added choristers for San Francisco 's larger War Memorial Opera House stage , will be replaced with the company 's familiar David Hockney production of Mozart 's `` Magic Flute . ''", "And Achim Freyer 's wildly inventive Stuttgart production of Weber 's `` Freischutz '' is being canceled .", "These moves should save some $ 2 million .", "Even with these cutbacks the season will still offer some unusual operas : Virgil Thomson 's iconoclastic `` Mother of Us All , '' Busoni 's monumental `` Doktor Faust , '' Janacek 's wistful `` Cunning Little Vixen '' and Shostakovich 's gritty `` Lady Macbeth of Mtsensk . ''", "This is not status-quo repertory .", "Ms. Rosenberg is staunchly committed to the `` Animating Opera '' series , which in the next few seasons will offer two daunting Berlioz works , the complete `` Les Troyens '' and `` Benvenuto Cellini , '' as well as Janacek 's seldom-heard `` From the House of the Dead . ''", "`` The Faust Project , '' a series of operas that reflect the Faust legend , still includes a commissioned work from the composer Lewis Spratlan , who won the 2000 Pulitzer Prize in Music for a concert version of Act II of his opera `` Life Is a Dream '' -LRB- which has yet to be staged -RRB- .", "And on Dec . 11 Ms. Rosenberg announced the commissioning of `` Doctor Atomic , '' an opera centered on J . Robert Oppenheimer , from the composer John Adams , the librettist Alice Goodman and the director Peter Sellars , the team that created `` Nixon in China '' and `` The Death of Klinghoffer . ''", "The new work is scheduled for a 2005 premiere .", "In comparison the Lyric Opera of Chicago , a company with a larger endowment and a smaller deficit , is also dropping two works that would have been house firsts in a `` pre-emptive strike , '' according to its recent announcement .", "But the Lyric is playing it safe , replacing `` Benvenuto Cellini '' with Gilbert & Sullivan 's `` Pirates of Penzance '' and scrapping a Montemezzi rarity , `` L' Amore dei Tre Re , '' for a Gounod war horse , `` Faust . ''", "One can only hope that the Lyric Opera is n't turning away from its bold recent history that includes presenting the premieres of two operas by William Bolcom .", "Giving a second chance to Martin David Levy , whose `` Mourning Becomes Electra '' fizzled at its 1967 Metropolitan Opera premiere but sizzled in its revised version in 2000 at the Lyric .", "Giving John Harbison 's `` Great Gatsby '' a second hearing after its Metropolitan Opera premiere .", "And presenting a knockout production of Britten 's `` Billy Budd '' last season .", "It 's interesting to note that the San Francisco Symphony , while having to contend with the same struggling economy as its neighboring opera company , is operating in the black .", "Why the difference .", "For one thing the orchestra is a less expensive enterprise .", "Last season 's budget was $ 45.5 million .", "But other factors are involved , said Karen Ames , the director of communications .", "Opera companies must plan four and five years in advance in order to secure the desired singers , directors and conductors .", "`` We 're not as hurt by an underperforming concert on an off week , `` Ms. 
Ames said .", "The next week may bring a surprise success , as happened recently when four performances of Wynton Marsalis 's oratorio `` All Rise '' were a `` massive sell out , '' Ms. Ames said .", "And the San Francisco Symphony 's endowment , which is three times the size of its current annual budget , provides a cushion .", "The opera company 's endowment is precariously low , just half the size of its operating budget .", "Most important , the orchestra has a dynamic public persona in its conductor , Michael Tilson Thomas , who has won audiences to innovative programming that balances compelling performances of established repertory with lots of edgy , hip and important contemporary music .", "And Mr. Thomas , who arrived in 1995 , had a crucial period during the boom years in which to introduce himself .", "Ms. Rosenberg always expected it would take her three or four years to reanimate audiences along with the company 's repertory .", "In the aftermath of 9/11 , people everywhere , including some often-obtuse arts institutions , were compelled to ask critical questions : What is this organization here for .", "How is what we do relevant .", "The economic downturn may compel company heads to ask these questions anew .", "`` Especially in times like these , '' Ms. Rosenberg said , `` challenging art can give people real sustenance .", "It can make people come together and think . ``", "CRITIC 'S NOTEBOOK ."], "summary": ["Article on financial difficulties faced by opera companies focuses on determination of San Francisco Opera general director Pamela Rosenberg to retain quality of performances , with support of board president Karl Mills .", "Compares problems of San Francisco with those of smaller Lyric Opera of Chicago .", "Photos ."], "publication": "nyt50", "label": [1, 21, 33], "tag": ["Arts"]} -{"id": "1453095", "text": ["To the Editor : Verlyn Klinkenborg 's Editorial Observer about H . L . Mencken -LRB- `` Remembering the Permanent Opposition of H . L . Mencken , '' Dec . 30 -RRB- made me reflect on how outrage serves our democracy .", "Senator Trent Lott 's record on race has finally raised the ire of the Republican Party .", "But Senator Bill Frist shows that a sexist may still wander where racists fear to tread .", "Mr. Frist 's opposition to sex education , international family planning and emergency contraception demonstrates an attitude toward women that also deserves a vehement outcry .", "The discrimination against women still practiced by the Augusta National Golf Club , which did n't admit a black member until 1990 , suggests that misogyny may have even deeper roots than racism .", "I hope that I live long enough to see insensitivity to women by our leaders get the anger it deserves .", "LYNN PARRAMORE New York , Dec . 30 , 2002 ."], "summary": ["Lynn Parramore letter comments on Verlyn Klinkenborg 's Dec 30 article about H L Mencken ."], "publication": "nyt50", "label": [0], "tag": ["Opinion"]} -{"id": "1453097", "text": ["To the Editor : Republicans ' `` race neutral '' goals -LRB- `` The Republicans Try to Redefine Civil Rights , '' Week in Review , Dec . 
29 -RRB- are both laudable and painfully unrealistic .", "While the worst forms of racial discrimination were dealt with in the 1960 's and 70 's , their effects remain with us today , because of practices that made it disproportionately difficult for African-Americans to amass and pass down both real wealth and cultural capital .", "If the Republicans do not alter their principled opposition to race-sensitive policies , an alternative might be to support actions that help lower-income Americans across the board , as discussed in the article .", "If they fail to do so , it will be hard to believe that the principles that lie behind this opposition are not ones that were better left behind in 1948 .", "DAN SOLOMON Philadelphia , Dec . 29 , 2002 ."], "summary": ["Dan Solomon letter holds Republicans must alter principled opposition to race-sensitive policies -LRB- Dec 29 article -RRB- ."], "publication": "nyt50", "label": [2, 4], "tag": ["Opinion"]} -{"id": "1453098", "text": ["While architects have publicly proclaimed the World Trade Center site proposals displayed at the Winter Garden in Lower Manhattan as the greatest architecture show ever , many have privately expressed reservations about the designs ' details , the handling of the competition and even the spotlight in which the contestants now stand .", "`` Architecture is finally having a visible presence , perhaps too visible , '' said Ricardo Scofidio of Diller & Scofidio in Manhattan .", "The popular image of the architect as a creative genius whipping up great designs on a cocktail napkin is at odds with the reality .", "More often , architects say , great design is the result of constant , sometimes painful give-and-take between the architect and the client .", "Letting the public in on the process from the start , even as spectators , has pulled back the veil on a ritual that is most often conducted in the hush of boardrooms and private offices .", "By contrast , the Lower Manhattan Development Corporation announced that its design priorities for the site would be determined `` by conducting the most comprehensive public outreach campaign ever undertaken . ''", "The power of public opinion to sway the process was amply demonstrated in July when six initial site plans were universally rejected .", "In this , the second round , the public has been treated to front-row seats : the presentations by the seven competing architectural teams were televised live for more than three hours , and an exhibition of their models , renderings and video walk-throughs was open to the public almost immediately .", "Several architectural institutions have stepped in quickly to arrange their own forums , discussion groups and exhibitions on the process , and television networks have devoted unusual amounts of air time to explaining site plans and computer-animated design .", "Architects `` presenting on TV has never happened before , '' Mr. Scofidio added .", "`` But at this phase , letting the public say what it likes and does n't like will only make the water muddier , `` he said , explaining that what may be a great spectacle was no way to select a design .", "Bill Lacy , a design consultant and adviser to the jury on architecture 's highest honor , the Pritzker Prize , said that the Lower Manhattan redevelopment was `` far too important to be judged by public opinion poll . 
''", "`` I feel sorry for these architects designing in a fish bowl , '' he continued .", "`` The first team did a credible job but was crucified by being exposed to the public prematurely .", "People are so eager for something positive to happen , but land use and massing studies are never exciting .", "You ca n't design for seven million clients . ``", "Mindful of the effort involved in preparing such complex and historically significant designs in just eight weeks -LRB- and with fees of only $ 40,000 -RRB- , the 16 architects interviewed for this article were loath to single out any team 's design .", "But they did not hesitate to criticize the process as too exposed and the requirements as too vague .", "The attention and its intensity are mixed blessings , said some architects , who worried that some of the more implausible designs might be taken literally , leaving the entire profession open to ridicule and condemnation .", "`` There is something a little grotesque in the interpretation of ground zero as a lucky break for art , '' Leon Wieseltier , literary editor of The New Republic , said last September in a debate with Daniel Libeskind , one of the competing architects , at Columbia University .", "The development corporation has frequently said that the object of the competition , a master land-use plan , is not to `` include the detailed architecture of individual structures . ''", "But many architects worry that the teams ' detailed models and impressively realistic video presentations will encourage the public to perceive them as concrete plans .", "Bernard Tschumi , a semifinalist in the competition and the dean of the Columbia Graduate School of Architecture , Planning and Preservation , described the process as backward .", "`` They are starting with a design and hope to arrive at a program , '' he said .", "`` It strikes me as unusual .", "And since each design is based on its own premises , you really ca n't compare them to each other at all .", "The ambiguity is not right . ``", "While some architects championed the competition as a way to educate the public about the importance of architecture , many faulted the proposals for the way the buildings met the ground and integrated with the city .", "`` There should be more talk about activities , not buildings , '' said the architect Denise Scott Brown of Venturi , Scott Brown & Associates in Philadelphia .", "`` A great deal of money will be spent quickly on the transit system , and that will affect what else happens .", "All those people coming up out of the subway will surely affect the design . ``", "She said she was n't sure that factor was reflected in the proposals , `` while , in fact , it should be the generator of these designs . ''", "Other architects said too much creative vision was expended on towers and not enough on street-level elements .", "`` The ground plan and infrastructure are surprisingly conservative in contrast to the boldness of the architecture , '' said Ralph Lerner , a Princeton , N.J. , architect and former dean of the Princeton University School of Architecture , who is now working on the design of several areas adjacent to the World Trade Center site .", "`` There were n't many new thoughts on how to treat ground transportation . ``", "Many architects , however , commended the building proposals for incorporating the latest innovations in energy efficiency .", "`` This will be the first time that European daring in ecological issues has been introduced at such a scale in the U.S. 
, '' said Raymond W . Gastil , executive director of the Van Alen Institute , a nonprofit organization devoted to increasing awareness of public architecture , `` but it will create new standards for all skyscrapers . ''", "The Van Alen Institute recently published a report , `` Information Exchange : How Cities Renew , Rebuild and Remember , '' exploring how seven cities , including Beirut , Sarajevo and Berlin , rebuilt themselves in the wake of both natural and political disasters .", "As for building height , architects ' opinions varied about what was appropriate for structures that would stand not in , but next to , the footsteps of the lanky twin towers .", "`` I 'm offended by everyone reaching to the sky again , `` said Will Bruder , an architect in Phoenix who focuses on environmental and recycling issues .", "Of the tall designs , he found Mr. Libeskind 's 1,776-foot tapering tower the most convincing .", "`` At least he reached up to the sky with sculpture instead of a bulky mass , '' Mr. Bruder said .", "Did any of the competitors succeed at reinventing the skyscraper for a new era .", "Only if you 've never seen Hong Kong , Mr. Lerner said .", "United Architects ' tall , angled structures , which combined into a single public floor high in the sky , were the only proposals suggesting a new way of thinking about large buildings in groups , he added .", "Hugh Hardy of Hardy Holzmann Pfeiffer in Manhattan , who did not participate in the competition , said he was not convinced that a new kind of skyscraper was possible at this time .", "The circumstances that created landmarks like the Chrysler and Empire State buildings were different , he said .", "`` Not in our lifetime has anyone been able to figure out what New York should be , '' Mr. Hardy explained .", "`` We 're all out of practice , and there 's no powerful leadership .", "Without someone in charge , it 's all going to have to be worked out each step of the way . ``", "All the architects wondered how the development corporation would proceed .", "The interested public , already well informed on the issues , has still more opportunities to learn .", "On Monday the Architectural League will open an exhibition that is like a continuing public tutorial .", "It will display a range of documents connected to the design proposals , from the architects ' video presentations to the reactions of the European news media .", "The exhibition is intended to be `` an archive of the process , '' said Rosalie Genevro , the league 's executive director , and it will be updated as more materials become available .", "`` The first round was so bland , there was nothing to talk about , '' she said .", "`` Now there 's so much more to look at and to sort out .", "And there 's more emotion . ``", "The exhibition will run through the end of February , when , the development corporation announced , it will adopt a final master land-use plan and undertake a competition for a ground zero memorial .", "On Tuesday Architectural Record magazine is sponsoring a forum of architects and architectural critics , including Mr. 
Tschumi and Richard Kahan , the former chief executive of the Battery Park City Authority , who oversaw the creation of the master plan for Battery Park City in the 1980 's .", "All the architects in the competition have been invited , along with representatives of the development corporation and Port Authority .", "`` It 's an intellectual exercise , `` said Robert Ivy , the editor in chief of Architectural Record .", "`` Have there ever been so many wonderful ideas to discuss , such depth of feeling to explore .", "My great fear is that they are trying to make a camel with three humps . ``", "But fears and criticism pale beside the excitement that most architects said they felt at the opportunity to see so much world-class architecture on display .", "`` This is a fantastic show of talent , '' said Cesar Pelli , the architect of the World Financial Center and the Winter Garden , who estimated that the architects involved must have spent as much as $ 4 million on their combined presentations .", "`` The community is getting a huge gift from these architects , '' Mr. Pelli said , adding , `` Of course , the architects are also getting phenomenal P.R. '' ."], "summary": ["Architects privately note difficulties resulting from power of public opinion in choosing design for World Trade Center site .", "Note unheard-of live TV broadcast presenting six initial site plans , which resulted in rejection of all designs .", "Interviews reveal variety of opinions among architects on unusual selection process .", "Photo ."], "publication": "nyt50", "label": [6, 9, 15], "tag": ["Arts"]} -{"id": "1453100", "text": ["It 's Sunday morning and the breakfast buffet at the Sofitel Suites -- one of five international hotels at this luxury 420-acre coastal resort -- is heaving with Brazilian families , all eagerly scooping scrambled eggs onto golden brown toast .", "Just hours later , at lunchtime , the restaurant is empty .", "Most of the families have headed home , leaving the hotel -- and most of the resort on the sun-drenched Bahian coast , some 50 miles north of the state capital , Salvador -- to the handful of guests who are staying longer than a weekend .", "To a tourist , that might sound like paradise : Imagine having Sau\u00edpe 's 15 restaurants , 18-hole golf course , riding , sailing and surfing spots , exotic mangrove gardens and miles of empty beaches all to yourself .", "But for Thomas Humpert , president of Sau\u00edpe S.A. , the company that must fill the beds at the two-year-old resort , it is far from heaven .", "`` For 2002 , we will probably have a 41 percent occupancy rate , '' he said .", "`` Obviously that 's not what we would ideally like . ``", "Worse still , only 15 percent of guests are foreigners , well off the 50 percent forecast by 2005 in the resort 's initial business plan , which had the local news media hailing Sau\u00edpe as Brazil 's Canc\u00fan , a resort that would bring millions of tourist dollars to Bahia .", "In many ways , Sau\u00edpe reflects the problems faced by Brazil 's tourism industry as a whole .", "Serious investments have been made , $ 6 billion over the last eight years , and $ 200 million in the Bahian resort by the Marriott Group , Accor of France and SuperClubs of Jamaica , yet occupancy rates are way below their potential .", "Brazil still ranks only 20th among the world 's most popular destinations .", "Despite the slump in global tourism after the Sept . 
11 attacks , Brazil , by rights , should have been one of the world 's hottest destinations in 2002 , said Xavier Veciana , general director of SuperClubs here .", "With the currency , the real , off more than 30 percent against the dollar in 2002 , a night at Sau\u00edpe 's luxurious Sofitel can be cheaper than a London bed and breakfast .", "Two of Brazil 's rivals as beach resorts -- Southeast Asia and Africa -- have both been hit by terrorist attacks recently , in Bali and Kenya .", "By comparison , laid-back Bahia looks positively safe .", "The problem is there are few direct flights to beach destinations here , crisis-hit tour operators are reluctant to risk adding new charter routes , and Brazil , in general , spends little abroad on promoting its coastline , historic cities and vibrant culture .", "`` Now , just when Bahia has its best chance to attract more foreign tourists , airlines and tour operators are in the deepest crisis they have seen for years , '' Mr. Veciana said .", "`` If the global economic situation were better , we 'd have airlines and charter companies waiting in line at the airport here begging for slots . ``", "Brazil 's tropical northeast coast , potentially the country 's most valuable tourist asset , is starved of scheduled international flights .", "Only TAP Air Portugal and Brazil 's troubled carrier , Varig , fly from outside Brazil directly to the regional capitals of Salvador , Recife and Natal .", "`` We have an airlift problem to Brazil in general and to Salvador in particular , '' said Eduardo Farina of the Bahian Entertainment , Culture and Tourism Cluster , a business association uniting 14 companies and institutions involved in local tourism .", "`` All the airlines are concentrating their flights in Rio de Janeiro and S\u00e3o Paulo , because they can make more profit from business travelers , '' he said .", "Not that Mr. Farina has anything against pursuing profit .", "That is exactly what his organization hopes to do , by improving the infrastructure and training in Bahia and diversifying what its resort areas can offer visitors .", "`` The main idea is to raise the quality and level of tourism here -- to show that Bahia is not just beach and sun , '' Mr. Farina said .", "`` Visitors who come here to enjoy cultural , nature and sport tourism tend to stay longer and spend more money . ''", "Strapped for cash , Brazil 's government is slashing spending on tourism promotion .", "The advertising budget for the government agency Embratur has slipped from $ 10 million in 1997 to $ 3.5 million in 2002 .", "Tiny Panama , by comparison , is spending $ 15 million a year on promotion in the United States alone .", "While President-elect Luiz In\u00e1cio Lula da Silva has promised to give the tourism industry its own ministry , the Bahian group is not waiting for government help .", "It plans an international campaign for Bahia in 2003 , and Costa do Sau\u00edpe itself has contracted with T.H.R. , the agency behind Spain 's acclaimed strategic marketing for tourism , to pinpoint its best international markets .", "`` Bahia is going to communicate aggressively all its products to specific target markets for the first time ever , '' Mr. Veciana said .", "`` We hope it will bear fruit soon . 
''", "So far , Bahia 's experience in marketing itself has been restricted to small niches , like African-Americans , mainly from the Northeast , keen to learn more about their ethnic roots in a place closer to home and more familiar than Africa .", "Luciane Leite , international marketing director for state tour operator , Bahiatursa , estimates that 60 percent of the 45,000 Americans visiting Bahia in 2002 were African-Americans .", "Bahia 's population is largely descended from slaves from Mozambique , Angola and West Africa brought to work on sugar plantations .", "Ceremonies mixing African rites with Catholic tradition , like the Feast of the Good Death in Cachoeira , a Baroque town outside Salvador , are performed regularly , attracting increasing numbers of tourists .", "`` They 're looking for a bit of Africa here in the culture , the music , the religious traditions , many of which have died out in Africa but survived here , `` said Paula Santos , an ethnologist and tour guide for Tatur , a Salvador operator that caters to African-American tourists .", "Adam Carter , managing director of Brazil Nuts , a Florida-based operator , said he saw a huge potential marketing Bahia to the African-American market .", "`` In Bahia you have a fantastic product , '' he said .", "`` Salvador is easily branded as the most African city on the Western hemisphere , or the New Orleans of Brazil . ''", "African-Americans can be reached best by radio and specialty magazines like Essence , and through church communities , he said , though he added that Bahia so far has only used small-scale `` guerrilla marketing tactics . ''", "`` This is a market that 's in its infancy , `` he said .", "`` Even if you say only 5 percent of the U.S. population could be interested in such trips , it 's a market of millions . `` ."], "summary": ["Occupancy rate at luxury beach resorts at Costa do Sauipe , Brazil , is disappointing 41 percent , and only 15 percent of guests are foreigners .", "Problem is that there are few direct international flights to airports in Bahia .", "Another problem is that Brazil , in general , spends little abroad on promoting its coastline , historic cities and vibrant culture .", "Photo ."], "publication": "nyt50", "label": [15], "tag": ["Business"]} -{"id": "1453101", "text": ["Russia announced today that it would shut down the mission in Chechnya of the Organization for Security and Cooperation in Europe , ending any permanent international monitoring in the republic after the mission 's mandate expires tonight .", "Also today , a Russian military court acquitted an army colonel , Yuri D . 
Budanov , who was accused of murdering an 18-year-old woman in Chechnya nearly three years ago .", "The move ended a long , contentious trial that was considered a test of the country 's willingness to prosecute abuses by the military .", "Representatives of the 55-nation Organization for Security and Cooperation , which is devoted to managing conflicts and other crises across Europe and Central Asia , negotiated intensely to renew the mission .", "But Russian officials insisted that the mandate be limited to providing relief aid .", "The small mission , in the northwestern Chechen city of Znamenskoye , has overseen dozens of relief and economic projects , but the group has also criticized Russian forces for abuses against civilians .", "Human Rights Watch in New York said the Russian decision , following the closure of Chechen refugee camps in the Russian region bordering Chechnya , `` raises serious human rights concerns . ''", "`` Closing down the O.S.C.E. mission is part of Russia 's strategy to cut off scrutiny of human rights conditions in Chechnya and portray the situation as normalizing , `` said Elizabeth Andersen , executive director of the Europe and Central Asia division of Human Rights Watch .", "Rights advocates in Russia were similarly dismayed over the acquittal of Colonel Budanov , which followed a finding by psychiatrists that he was temporarily insane when he seized Elza Kungayeva from her home , took her to his quarters , cut off her clothes with a knife , beat her and then strangled her before ordering her body hidden .", "The court in Rostov , in southern Russia , ordered Colonel Budanov to undergo psychiatric treatment , but it was not clear for how long .", "`` What can be said about justice in Chechnya .", "`` Arsen Sakalov , a leader of the Chechnya Justice Initiative , a legal advocacy organization , said in an interview from the neighboring republic , Ingushetia .", "`` Everything that happens proves that there is no justice there . ''", "In a case that had attracted intense scrutiny , the verdict appeared timed to minimize public attention .", "The court announced its decision late this afternoon after many Russians had already left work to devote themselves to New Year 's Eve , the most thoroughly observed of holidays here .", "Wednesday and Thursday are public holidays .", "The parents of the dead woman did not attend the proceedings today .", "They now live in a Chechen refugee camp near Ingushetia 's capital and could not be immediately reached for comment .", "The family 's lawyer , Abdullah Khamsayev , said he would continue to press for Colonel Budanov 's prosecution , appealing to Russia 's highest court and , possibly , to the European Court of Justice .", "Russian forces have been repeatedly accused of grave abuses during the war in Chechnya , but few have ever been seriously investigated , let alone prosecuted , according to Russian and international rights groups .", "Colonel Budanov was the highest-ranking officer tried on criminal charges stemming from those abuses .", "The case against him dragged on for more than two years .", "The colonel and his lawyers acknowledged that he had killed Ms. Kungayeva but said he did so in an emotional rage , believing that she was a sniper who had killed members of his unit .", "He had been drinking heavily .", "Charges that he had also raped her were dropped early on , even though an initial autopsy concluded that she had been raped .", "Late last week , an assistant to the chief military prosecutor , Col . 
Aleksandr A . Derbenev , questioned the psychiatric evaluations , saying he believed that Colonel Budanov was in control of his actions .", "The Kungayev family 's lawyer also argued that the fact he ordered subordinates to bury Ms. Kungayeva 's body indicated that he knew he had committed a crime .", "Colonel Derbenev called on the court to sentence Col . Budanov to 12 years in prison for murder .", "If convicted , he faced a maximum of 20 years .", "Despite public outrage over the killing , Colonel Budanov won broad sympathy and support , especially from active and retired military leaders , who argued that he was unfairly prosecuted for extremes that occurred in war .", "Asked how an officer suffering from insanity could have been allowed to serve in the army , Arkady G . Baskayev , a former commander now in Parliament , told the Ekho Moskvy radio station today , `` The fact is that in Chechnya and the war there you have particular conditions in which very different things happen . '' ."], "summary": ["Russia will shut down mission in Chechnya of Organization for Security and Cooperation in Europe , ending any permanent international monitoring in republic after mission 's mandate ends tonight ."], "publication": "nyt50", "label": [0], "tag": ["World"]} -{"id": "1453102", "text": ["Any Bulletproof Cue Cards .", "Things can be confusing for an entertainer who relies on the nightlife when there is no night .", "Such is the problem faced by the Scandinavian singer SISSEL , when she is visiting the northern part of her homeland , Norway .", "In the summertime , there is only an hour or so of darkness .", "`` It gets all mixed up , '' she said .", "But with the debut of her album , titled simply Sissel , she has been traveling too much to dwell on the darkness .", "Speaking from her home in Norway , she said she had just returned from Moscow , where she sang a Christmas concert with PL\u00c1CIDO DOMINGO and JOS\u00c9 CARRERAS .", "Following the recent takeover of a Moscow theater by Chechen guerrillas , security for the concert was extremely tight , Sissel said .", "`` I saw that Pl\u00e1cido and Jos\u00e9 had these small glass things in front of them , '' she said .", "Thinking it was some kind of strange security measure , she asked Mr. Domingo why he had one and she did n't .", "He explained that it was a prompter and then asked Sissel , `` Do you think I sing so bad I need protection .", "`` She blushed .", "Before the Moscow Christmas special , Sissel , whose full name is Sissel Kyrkjebo , was serenading JIMMY CARTER at a concert to celebrate the Nobel Peace Prize in Oslo .", "She said the former president was very sweet and had only one request .", "`` He asked for WILLIE NELSON , '' she said .", "So Willie Nelson he got , singing a Carter favorite , `` Georgia on My Mind . ''", "The Empire Strikes Out To the ears of an American , being named Commander of the British Empire by Her Majesty , the queen , sounds as if it would carry with it some great power .", "To wage war , perhaps , or at least to levy an unwanted tax on some far-flung subjects .", "Not so , says the actor BRIAN COX .", "`` When you are Commander of the British Empire , you are commander of an empire that does n't exist . ``", "This week , QUEEN ELIZABETH honored the Scottish-born Mr. Cox for his long career in the dramatic arts .", "Mr. 
Cox , who has roles in several movies out in theaters at the moment , including `` Adaptation '' and `` 25th Hour , '' said he initially did not want to accept the honor .", "`` But when I discussed it with friends , they convinced me that I should , '' he said .", "His hesitation grew out of his often outspoken criticism of the state of political affairs in Britain , particularly what he called `` a diminution of values '' and the poor condition of the national health service .", "Mr. Cox is also quick to speak out against what he sees as a dangerous brand of realpolitik .", "`` I think there is a lot of posturing going on regarding Iraq , '' he said .", "`` We do n't seem to have an independent mind on this . ``", "Mr. Cox was referring to his belief that Britain is just following America 's lead .", "As for America , he said it was often misunderstood overseas .", "`` And it is not doing itself any favors with the way it has been behaving recently , '' he said .", "`` Which is unfortunate , because I love America . ''", "Kvetching for Columbine Being MICHAEL MOORE is draining .", "Mr. Moore , who is known for ambushing his subjects , often bigwig corporate types , with cameras and crews , is tired of picking up the slack for , well , everyone .", "`` I think it 's an embarrassment that it 's somebody in a ball cap with no college education that 's going in to ask these questions , `` he said .", "`` It 's disgraceful .", "I ca n't stand to look at myself on the screen cause I look at that and I think , ` Boy if that shows there 's anything wrong with the American media , it 's that the job is left up to this guy . '", "You know .", "`` The director of '' Bowling for Columbine , `` a documentary about America 's gun culture , continued his rant .", "`` I 'm just waiting for the media to start doing their job so I can go back home and watch more sports .", "This is way too much work for me .", "I 'm a very lethargic person . ``", "But he would be up for some more TV time .", "Asked if it upset him that the conservative pundit ANN COULTER gets so much television time while he hardly gets any , he said that it was strange , considering how well his book had sold .", "`` I 've been on a total of two network shows in nine months , `` he said .", "`` What is going on with that .", "`` Has he ever spoken to Ms. Coulter .", "`` No .", "For some reason , BILL MAHER kept trying to get her on ` Politically Incorrect ' with me .", "She would never come on when I was on , `` he said .", "So does he think all the attention Ms. Coulter is getting has something to do with her looks .", "He sighed : `` I 've got good legs too . 
``", "Boldface Names ."], "summary": ["Boldface Names column .", "Norwegian singer Sissel , who has released her first album , returns home from Moscow where she sang Christmas concert with Placido Domingo and Jose Carreras .", "Queen Elizabeth honors actor Brian Cox by naming him Commander of the British Empire .", "Director Michael Moore says he would like to make more television appearances .", "Photos ."], "publication": "nyt50", "label": [6, 19, 51, 41], "tag": ["New York and Region"]} -{"id": "1453103", "text": ["CONGRESS and the Bush administration are promising Americans tax cuts in the new year .", "What form those cuts take will spark fierce debate .", "Tax policy is not just an economic tool .", "It 's a partisan weapon .", "And its power , whether to improve economic performance or slay political opponents , depends on the details .", "Political considerations are already distorting two good economic ideas .", "The permanent campaign is transforming potentially significant tax reforms into flashy favors that enhance press releases more than economic growth .", "The first good idea is ending the `` double taxation '' of dividends .", "Under current law , a publicly held company calculates its pretax profit , pays its taxes , and then pays any dividends out of after-tax profit .", "Shareholders then pay income taxes on the dividends .", "This tax treatment creates all kinds of distortions .", "Because interest payments are deductible and dividends are not , the tax code encourages companies to raise capital by borrowing rather than issuing dividend-paying stock .", "The dividend tax also leads businesses to pile up retained earnings in cash -- and sometimes to use that extra cash for unwise acquisitions -- rather than turn over profits to shareholders .", "With fewer companies paying dividends , investors who want steady incomes are pushed into bonds .", "Those who want to avoid high taxes look for capital gains rather than dividends .", "This , in turn , skews the market toward growth stocks rather than steady earners .", "And by discouraging payouts that require cash profits , the tax code implicitly rewards financial funny business that only looks good on paper .", "Correcting the dividend-tax distortion is not simple , because the tax code affects both public companies and their shareholders .", "But many stockholders do not pay taxes on dividends , because their shares are in tax-sheltered pensions or retirement accounts .", "The biggest distortions are on the corporate side , where the tax code biases decisions about how to raise capital .", "To spur better decision-making and , hence , economic growth , lawmakers should make dividends , like interest payments , tax deductible for corporations .", "But getting rid of a big distortion requires a bigger tax cut than getting rid of a small one .", "Thanks to all those tax-sheltered accounts , exempting dividend income from individual taxes could cost only half as much as making companies ' dividend payments tax-deductible .", "Exempting only part of the dividend from taxation , as the Bush administration is expected to propose , would cost the Treasury even less .", "And most policy makers would rather keep the money in Washington .", "Giving individual investors a break on their dividend income already subjects politicians to demagogic attacks as friends of the rich .", "Cutting corporate taxes , even to eliminate distortions , is politically dangerous .", "In a campaign , `` my opponent supported tax breaks for giant 
corporations '' is even nastier than `` my opponent supported tax breaks for wealthy investors . ''", "Sounder tax policy hardly seems worth the trouble .", "The second good idea gone bad is cutting the payroll tax , which takes 6.2 percent of everyone 's paycheck from the first dollar of income up to a limit , set in 2002 at $ 84,900 .", "Employers pay an equal amount for the privilege of employing people .", "The employer tax does not show up on the FICA line of your pay stub , but it nonetheless increases the gap between what you take home and what you cost the company .", "Reducing the payroll tax would give every worker an immediate tax break and encourage companies to hire -LRB- or retain -RRB- employees .", "It 's a winning idea whether you 're looking for a Keynesian jolt to consumer spending or a supply-side boost to hiring .", "And it would particularly benefit low-income workers , who pay little or nothing in federal income taxes but still owe payroll taxes .", "The problem arises in defining what it means to `` cut the payroll tax . ''", "A permanent rate reduction , the ideal reform , is not what lawmakers have in mind .", "The most common suggestion is a one-year exemption on the first $ 10,000 or $ 15,000 of income .", "This idea has two problems .", "To spur either spending or long-term hiring , income tax cuts need to be permanent , not short term .", "Of course , employers and workers would appreciate even a one-year break , especially a large one .", "The idea is a political winner , but a temporary cut does little to encourage hiring or spending .", "Companies are n't likely to expand their permanent work force if the after-tax cost of new workers is going to shoot up a year later .", "And consumers generally base their spending on what they expect to earn over the long term .", "They save windfalls , including tax rebates , and borrow to cover temporary shortfalls .", "This principle , which is known in economics as the permanent income hypothesis , may explain why the 2001 tax rebates do n't seem to have stimulated spending as much as boosters had hoped .", "A temporary rate cut might not do much to help the economy , but at least it would n't do much harm .", "A lump-sum exemption , by contrast , could actually hurt low-wage workers .", "Instead of hiring one full-time person for $ 20,000 a year , employers would suddenly find it much cheaper to hire two half-time employees for $ 10,000 each .", "There would probably even be less paperwork -- and fewer health benefits to cover .", "A temporary lump-sum exemption may win some short-term good will among middle-income voters , who are less likely to lose their jobs to part-time workers .", "Lawmakers who care about spurring full-time employment , increasing consumer spending , or helping low-income workers would instead make a long-term commitment to lower payroll tax rates .", "But politicians , like the rest of us , respond to incentives .", "Until voters reward subtle but sound tax reforms , lawmakers will keep turning out the flashy favors that pay .", "Economic Scene Virginia Postrel is the author of `` The Future and Its Enemies . 
''", "Her new book , `` Look and Feel : How Style Became Substance , '' will be published in June by HarperCollins .", "E-mail : vpostrel@dynamist.com ."], "summary": ["Congress and Bush administration are promising Americans tax cuts in New Year .", "What form those cuts take will spark fierce debate .", "Tax policy is not just an economic tool .", "It is partisan weapon .", "And its power , whether to improve economic performance or slay political opponents , depends on details .", "Photo of 1040 income tax form ."], "publication": "nyt50", "label": [4, 1, 2, 0, 3], "tag": ["Business"]} -{"id": "1453105", "text": ["Salvatore Pepe , a major developer of commercial real estate in Westchester County , died on Sunday at a hospital in Greenwich , Conn .", "He was 93 and lived in Bronxville , N.Y.", "Mr. Pepe was the president of Bianco & Pepe Inc . , based in Scarsdale , N.Y. , which he founded decades ago in a partnership with his father-in-law .", "It began as a construction company and evolved into a construction , development and real estate management company .", "It built the Vernon Hills shopping center in Eastchester , N.Y.", "Bianco & Pepe is the parent company for the family 's real-estate activities .", "The Pepe assets include office buildings , shopping centers and other retail and industrial sites .", "The Pepe family 's holdings include Westchester One , a building that is one of the main commercial structures in White Plains .", "Built in the 1970 's by Mr. Pepe and his son Nicholas , who has succeeded his father as president of the company , the building has 850,000 square feet of office space .", "Salvatore Pepe began in the construction business in 1931 in Mount Vernon , N.Y.", "He had been a construction worker , was laid off during the Depression and founded Bianco & Pepe .", "The company built a number of industrial structures in Connecticut and lower Westchester County .", "A native of New York City , he lived with his family in the Bronx , Mount Vernon and Los Angeles and returned to the New York City area in the late 1920 's .", "In addition to his son , his survivors include his wife , Catherine Bianco Pepe .", "Two other sons , Eugene and William .", "10 grandchildren .", "And 16 great-grandchildren ."], "summary": ["Salvator Pepe , major developer of commercial real estate in Westchester County , dies at 93 ."], "publication": "nyt50", "label": [0], "tag": ["Obituaries", "New York and Region"]} -{"id": "1453106", "text": ["Florida State Coach Bobby Bowden stepped off the plane here the day after Christmas and was immediately fitted for a blindfold , handed a cigarette and asked if he had any last words .", "One word always suffices for Bowden when things are a little prickly around his program : dadgummit .", "Bowden , the Seminoles ' 73-year-old coach , has used up a bag full of dadgummits the last six days while pulling people off the pile that is his 9-4 team .", "Bowden is dismayed he has had to defend himself and his program after another uncharacteristically poor season on the field and several major mistakes by his players off the field .", "Suddenly , there are whispers that he should retire because he has lost his grip after consecutive four-loss seasons .", "`` This is a ballgame , '' he said with exasperation this week , `` not a guillotine . ''", "The ballgame is the 69th Sugar Bowl here Wednesday night against No . 
4 Georgia -LRB- 12-1 -RRB- and Bulldogs Coach Mark Richt , the former Florida State assistant coach who Bowden says `` is like a son to me . ''", "If Florida State , No . 16 in the Associated Press poll and ranked No . 19 by the New York Times computer , had to worry about only the Bulldogs and the hype around Bowden 's facing another of his star pupils , that would be enough itself .", "But the Seminoles have other issues .", "* Quarterback Adrian McPherson was suspended indefinitely from the team over allegations he stole a blank check belonging to a Tallahassee businessman , a check that was later forged .", "* The Tallahassee police said McPherson was part of an inquiry into illegal gambling at Florida State .", "* The starting defensive tackle Darnell Dockett was suspended for the Sugar Bowl after an incident at a shopping center that the police are investigating .", "University officials and the police will not disclose the nature of the incident .", "* Quarterback Chris Rix , expected to be the Sugar Bowl starter , was suspended when he missed a final exam , as per university rules , Bowden said .", "Florida State players have sensed some urgency around their legendary coach in preparing for the Sugar Bowl .", "The senior offensive tackle Montrae Holland said practices had been very intense because a victory over Georgia would restore some of the Florida State luster .", "`` We have to win , '' Holland said .", "`` It 's a big deal .", "It will kind of lift the cloud over Florida State 's head to win this game .", "Right now , it feels like someone is bearing down on us . ``", "`` He 's made it very clear he wants to win this game , `` Holland said of Bowden .", "`` He 's demanding harder work at practice .", "He 's let us know how important this game is to him . ``", "Indeed , Bowden will not deny the significance of the game , especially with the swirl of controversy around his team .", "`` The game means a whole lot to me because it can help regain some of the prestige that we 've lost , `` he said .", "And if Florida State does n't win .", "`` If we lose , '' quarterback Fabian Walker said , `` there are a lot of doubts in people 's minds . ``", "After back-to-back four-loss seasons , there are doubts that Florida State is still able to churn out N.F.L. - caliber athletes .", "In 2000 , the Seminoles had seven players drafted by N.F.L. teams .", "In 2001 , there were nine N.F.L. draftees .", "In 2002 , only two Florida State players were drafted .", "`` It was like a mini pro team running around out there , '' said Holland , who has been in the program since 1998 .", "`` The truth is , we do n't have those type of players anymore .", "I know it 's hard to say , but it 's the truth .", "We lost a lot of great players to the N.F.L. before last season . ``", "Bowden disagrees .", "He admits the team 's quarterback situation was `` never stable , '' but he says there is talent to get back into the national championship picture in 2003 .", "`` Put Charlie Ward at quarterback with this team and we might be undefeated , '' Bowden said .", "`` It 's not like the other material is not there . 
``", "Bowden said his program should be allowed room to rebuild , like other top-shelf programs of the 70 's , 80 's and 90 's .", "He can not conceal his dismay over the stubbornness of Florida State faithful to accept a slight downturn in a program that produced 14 consecutive seasons of top-five finishes to go with two national championships -LRB- 1993 and 1999 -RRB- .", "`` I look at Alabama , my school , and I ask myself , ' Whatever happened to Alabama .", "' `` Bowden said .", "`` I look at Southern Cal .", "Whatever happened to Southern Cal .", "I look at Texas , Notre Dame .", "The schools that used to be every year in the top four or five , what happened to them .", "They 've been in a down cycle .", "Some of them are pulling themselves back up .", "You do n't expect us to be in a cycle where we never lose , do you .", "`` Richt , who coached under Bowden for 15 seasons and patterns the Georgia program after Florida State 's , is almost amused by the sudden pressure on his former boss .", "`` This will drive him further from retirement , '' Richt said .", "`` When he leaves , he wants to go out in a blaze of glory . ''", "At least Bowden can still maintain his sense of humor .", "Asked about the program 's turmoil he said things could be worse .", "`` That could be Spurrier over there , '' he said referring to Steve Spurrier , his former rival Florida as the opposing coach .", "COLLEGE FOOTBALL ."], "summary": ["Florida State University prepares to face Georgia University in Sugar Bowl , hoping potential victory could compensate for what has been very difficult season fraught with controversy .", "Photos ."], "publication": "nyt50", "label": [14, 16], "tag": ["Sports"]} -{"id": "1453109", "text": ["Waving reams of fiscal data , a small man in a gray suit charged the barricades of Palestinian reform here today .", "It may have looked like the arid presentation of an annual budget , to the legislature of the Palestinian Authority .", "But it amounted to something more revolutionary .", "First , because there was a 2003 budget proposal at all .", "Second , because it was being disclosed publicly , in detail .", "Third , and most striking , because it included seemingly bland provisions that if enacted would have major consequences for the conduct , not only of Palestinian governance , but also of the Palestinian uprising .", "Take , for example , one apparent yawner : direct deposit of police salaries .", "This measure would strip Palestinian security chiefs of the control they now have over their forces ' pay , which some have used to build unaccountable fiefs .", "It would also twist shut what students of the Palestinian Authority say is a tap for financing some Palestinian militants .", "The man in the gray suit , Salam Fayyad , has been praised by both American and Israeli officials since he was appointed to his post as Palestinian finance minister by Yasir Arafat in June .", "That is something of a problem for the finance minister , a former official of the International Monetary Fund who was educated in Lebanon and Texas .", "Palestinians consider the Bush administration biased in favor of Israel .", "The 27-month-long conflict has made them suspicious if not contemptuous of any change -- and any Palestinian -- conforming to American and Israeli demands .", "When these concerns about outside influence were mentioned to Mr. 
Fayyad after his presentation , he became so exercised that he dropped his black satchel and began jabbing the air with one index finger .", "`` I 'm talking about public funds -- public money -- the people 's money ! `` he said in rapid-fire English .", "`` We should manage the funds in an honest way .", "Tell me if this is not inspired by the Palestinian people .", "Tell me if the Palestinian people do not benefit from this . ``", "Palestinian legislators , many of whom have chafed at corruption in the Palestinian Authority , said they expected the budget to pass within a month , and then for the real fight to begin : to enact its provisions against officeholders vested in the status quo .", "`` Either he will submit to their demands , or resign , '' Jamal Shobaki , the chairman of the legislature 's economic committee , said of Mr. Fayyad , expressing what he called a common fear among reformers .", "`` What we want is a third way -- that he neither submit , nor resign . ''", "Mr. Fayyad , who is 50 , smiled when told of the comment , and said he knew a third way .", "`` When you hit a wall , '' he said , `` you go through it . ''", "Mr. Fayyad , who has presented Mr. Arafat with his budget , is careful to emphasize his loyalty to the Palestinian leader .", "Yet his proposals , if enacted , would remove levers of patronage and contracting that Mr. Arafat has relied on for years to run the Palestinian Authority , the Palestine Liberation Organization , and his dominant faction , Al Fatah .", "As a result , Israeli officials reacted to the proposed budget with skepticism , along with rare praise for a Palestinian minister .", "`` Everything that Fayyad is trying to do is well appreciated and is the right thing , '' said Raanan Gissin , the spokesman for the Israeli prime minister , Ariel Sharon .", "`` But it 's like full gas in neutral , as long as the money eventually reaches Arafat . ``", "But Mr. Arafat appears to be somewhat boxed in by his finance minister .", "It would be a serious blow to the Palestinian Authority 's remaining credibility abroad -- including in Europe , which has given the Palestinians considerable funds -- if Mr. Fayyad felt forced to resign .", "Although he said he supported reform and democracy , Mr. Arafat did not speak about the budget as he addressed supporters here today , the anniversary of Al Fatah 's founding in 1965 .", "A Western diplomat said Mr. Fayyad had `` thrown down a gauntlet in front of the security services , '' adding that this `` puts the authority in a very difficult position because of Fayyad 's international credibility . ``", "In this diplomat 's appraisal , Mr. Fayyad appeared to be recruiting sidelined Palestinian institutions -- the budget , the legislature -- for wide-ranging reform , using the budget proposal to lend precision to popular but rather amorphous ideas for change .", "No budget was issued for 2002 , and before that , the details of the budgets were not disclosed .", "Mr. Fayyad made a point of posting his $ 1.28 billion proposal on a new Web site , which was also started today -LRB- www.mof.gov.ps -RRB- .", "He said his budget assumed that the Palestinian economy would contract by about 7 percent next year .", "He said the Palestinian G.D.P. 
now amounted to slightly under $ 2 billion .", "While he referred to the devastating impact of Israel 's offensives into Palestinian areas , he spoke more about what Palestinians could do to take control of their fiscal destiny , peppering his presentation with words like `` transparency , '' `` audit , '' and , repeatedly , `` credibility . ''", "Mr. Fayyad , who was raised in the West Bank town of Tulkarm , was for several years the representative of the I.M.F. to the Palestinian Authority , and he appears to have been storing up ideas of what he would do if he was in charge .", "Among other changes , he said he would ensure compliance by all ministries with Palestinian laws about procurement .", "He specifically referred to the purchasing body for the security services , which Palestinians call the Rock .", "Experts say the Rock is used to generate kickbacks and patronage for security officials .", "Mr. Fayyad called its relationship to the security agencies `` a clear violation of the law '' and said it `` should not , and will not , be allowed to continue . ''", "The proposal would advance several efforts he has already undertaken , like putting auditors in all the ministries and consolidating multiple investment and commercial operations in one closely watched fund .", "Mr. Fayyad , who has a doctorate from the University of Texas , served as a scholar at the Federal Reserve Bank in St . Louis and as an I.M.F. official in Washington .", "He has little popular support , but he gained some credibility in August , when he found himself trapped by a renewed Israeli siege of Mr. Arafat 's compound .", "The Israelis came in so swiftly , after back-to-back suicide bombings killed seven people besides the bombers , that he lost his cellphone and briefcase .", "They were crushed by a tank inside the taxi in which he had arrived at the compound .", "Before American pressure forced the Israelis to withdraw , Mr. Fayyad spent 10 days in the compound , sharing a small , airless bedroom with Mr. Arafat and two other officials .", "At one point , he recalled , the Israelis demanded that everyone evacuate the compound , saying they were about to demolish it .", "`` I thought to myself , ' This is it , ' '' he said .", "Contrary to Palestinian rumor , Mr. Fayyad is not an American citizen .", "Chain-smoking , as usual , in Mr. Arafat 's compound today , he said he knew some Palestinians viewed him as `` this guy who came from the I.M.F. 
''", "But he shrugged that off .", "`` I know who I am , '' he said .", "`` Our people deserve respect , and if I can contribute to that , what more can I ask for .", "`` ."], "summary": ["Salam Fayyad , Palestinian finance minister who has been praised by both American and Israeli officials since he was appointed by Yasir Arafat in June , presents 2003 budget proposal to legislature of Palestinian Authority .", "Some of its seemingly bland provisions , if enacted , would bring greater accountability to Palestinian governance and , by seeing that money goes where it is supposed to go , cut of financing for some Palestinian militants .", "Photo of Fayyad ."], "publication": "nyt50", "label": [9, 8, 3, 5], "tag": ["World"]} -{"id": "1453110", "text": ["The nation 's Roman Catholic priests will not miss the year 2002 , their annus horribilis .", "A year ago , few could have imagined the disrepute into which the priesthood would slip following hundreds of sexual abuse cases involving clergy and a clueless response by bishops who misidentified exactly whom they were supposed to be shepherding .", "The anger was intense enough to destroy not just a few ecclesiastical careers but also the goodwill of parishioners and the public that priests used to take for granted .", "Almost forgotten are my former colleagues , the hard-working core of priests who are not malefactors .", "These men remain trapped in a system where they have next to nothing to say about the shape of Catholic leadership or its response to the crisis .", "Little wonder that priests ' numbers are dwindling .", "Their experience , their personal holiness and their spiritual insight often do n't seem to count .", "The hierarchy seeks only their silence and deference .", "As priests see one bishop after another imposed from above to put in place policies without input from the clergy or the laity , they become resigned , disgusted and just plain tired .", "At the same time , a smaller group of clergy ambitious for higher office have long brought all their skills to the challenging task of pleasing their omnipotent superiors rather than responding to the promptings of their subordinates or of the laity .", "In the more than two decades I spent as a priest -LRB- I left the clergy a decade ago over the issue of celibacy -RRB- , I had many opportunities to observe the ways priests are required to grovel to their superiors .", "Once , back in the seminary , as a hundred or so of us stood around waiting for His Eminence the cardinal to appear for an event , a student approached one of the monsignors .", "`` So , it seems the cardinal is late .", "`` he asked .", "`` Excuse me , young man , '' he was told , `` the cardinal is never late .", "Everyone else is early . ``", "Some years later , when I was head of the seminary 's student body , I found myself seated next to the archbishop at a dinner .", "Our student council had recently completed a study of issues affecting seminary life and our future as priests and human beings .", "I was eager to share its results with the authorities , and here I was , sitting at dinner next to Himself ! 
But as soon as I broached the topic , the cardinal silenced me .", "I was not to approach him directly , he said , but only through the appropriate channels so that the chain of authority would be unbroken .", "He had no desire to know firsthand what his future priests were thinking .", "Bishops anointed `` by the favor of the Apostolic See , '' as the Vatican terms it , are deferred to not because of their competence or learning , but because of that favor .", "This is true today , 40 years after the Second Vatican Council sought to encourage a more collegial style of leadership -- one seeking input from clergy and parishioners and even acknowledging the laity as part of the priesthood .", "Accustomed to this deferential thinking , today 's mismanagers of the clerical abuse scandals do not see themselves as ill-intentioned .", "Ignoring the victims of abuse grows out of an ideology that holds that clergy are different from ordinary people .", "Accountability is for lesser mortals .", "The culture of deference to the clerical mystique is deep-rooted .", "A dozen years ago , I was at a conference for priests on preaching and worship in the context of Vatican II , and the curriculum was suspended one afternoon to make room for an impromptu address by the archbishop .", "By the end of his hour-long monologue , he had effectively dismissed the newer approaches the faculty had been promoting .", "Waxing eloquent on the unique power of priests to accomplish things that not even kings and queens could do , he reminded us that even God obeys the words of a priest when he consecrates the bread and wine at mass .", "There was no rebuttal from the assembled priests .", "The trouble with deference and silence , of course , is that they encourage ignorance and denial about issues that need to be addressed .", "A few months ago , a group of New York clergy were told by a high-ranking official that he was open to discussing issues directly .", "However , some of those present told me , it was stressed that this was to be a so-called Roman dialogue , which means : I 'll do the talking , you listen .", "The seeds of the present crisis were really sown in 1968 , the year of the papal encyclical known as Humane Vitae , which began the undoing of Vatican II .", "The encyclical reasserted the church 's opposition to artificial contraception and to the principle that church teaching grows and develops .", "Catholics were not to decide for themselves , as a matter of conscience , whether to use contraception .", "After the encyclical , thousands of priests remained silent about this teaching on birth control -- one that was out of sync with the life the faithful lived .", "Many decided -LRB- as the laity had begun to do -RRB- that the church 's teaching was no real guide for their own sexual lives .", "Many resigned and sought happiness elsewhere .", "Others stayed but made their own decisions about licit and illicit sexual relationships -- and were silent about it .", "Is it possible that this silence -- combined with a culture that already valued suppression -- fostered the idea among some bad priests that they could get away with predatory behavior .", "Over the last year , however , the silence has been shattered by public outcry and the flock 's rediscovery of its voice .", "What remains to be seen is whether these voices will be joined by others from within the clergy -- and if they will be allowed to influence the course of Catholic teaching and policy .", "Paul E . 
Dinter , author of the forthcoming `` The Other Side of the Altar : One Man 's Life in the Catholic Priesthood , `` is development director of Care for the Homeless ."], "summary": ["Op-Ed article by Paul E Dinter , former Catholic priest , comments on crisis enveloping church over child sexual abuse by some priests .", "Says during his two decades as priest , he had many opportunities to observe ways priests are required to grovel to their superiors .", "Argues that trouble with deference and silence is that they encourage ignorance and denial about issues that need to be addressed ."], "publication": "nyt50", "label": [31, 10], "tag": ["Opinion"]} -{"id": "1453111", "text": ["The fare on city-subsidized private express buses will rise to $ 4 a ride from $ 3 in April as part of a deal to preserve the service on weekends and holidays , Mayor Michael R . Bloomberg announced yesterday .", "The city also won a 90-day reprieve from a Bronx bus company that had threatened to suspend express service on Jan . 10 , a situation that would have stranded 15,000 riders a day .", "Mr. Bloomberg said the company , New York Bus Service , would keep its buses rolling while the city tried to get the Metropolitan Transportation Authority to take over the private lines .", "The fare increase surprised some City Council officials , who had reached an agreement with the mayor in November to preserve the $ 3 fare .", "The authority said `` complicated issues '' would have to be resolved before it would take over the private bus lines .", "The city spends about $ 98 million a year subsidizing seven private bus companies that operate in areas not served by the authority .", "Mayor Bloomberg said the authority , which runs its own express buses in addition to its regular city buses and the subways , is better suited to run all bus service in the city .", "The mayor has spoken of trying to persuade the authority to take over the private bus lines for months , but yesterday he appeared to be jump-starting what is likely to be a set of complicated negotiations by giving himself 90 days to close a deal .", "`` I would hope that 90 days is more than adequate , '' the mayor said .", "Mr. Bloomberg said it does not make sense for the city to continue subsidizing the private bus companies , which depend on the subsidies to turn a profit .", "`` The economics are such that you ca n't make enough money from the fares to cover all your costs and have a profit , `` the mayor said .", "`` And when it comes to the city or the state subsidizing private companies , you then have an odd mixture of public-private divided authority and responsibility .", "It has clearly not worked in this city to have the private bus companies provide service . ``", "The sticking point in the negotiations to get the transportation authority to take over the bus lines is likely to be the amount of the city subsidy .", "Mayor Bloomberg said yesterday that he hoped to negotiate a deal that would reduce the subsidies .", "But the transportation agency , a state-controlled authority , did not appear eager to assume an additional liability of $ 98 million a year .", "The authority is weighing a subway and bus fare increase to offset its own deficit .", "`` The M.T.A. is always interested in improving transportation services in the region , '' said Tom Kelly , a spokesman for the authority .", "`` There are complicated issues that must be resolved .", "Any takeover would have to be done so that it would not add a further burden to our current customers . 
``", "Putting the seven bus companies under the control of the authority could yield administrative savings .", "The top executives at the seven city-subsidized companies earn a total of more than $ 1.8 million a year , according to an article in Newsday in 2002 .", "Under a takeover by the authority , most of those salaries could be dispensed with .", "Transportation experts said the authority might be able to use its purchasing power to win better deals on expenses like gas and maintenance than small companies could .", "The city has some assets it could use in the negotiations : it owns most of the 4,600 buses that the seven private companies use , and some of the bus depots as well .", "City officials and officials for the authority declined to discuss what elements might be involved in a deal to get the authority to take over the bus lines .", "Edward Arrigoni , the chairman of New York Bus Service , which serves the Bronx , said he had planned to suspend service on Jan . 10 if he could not reach an agreement with the city on covering drivers ' pensions .", "He said that he had agreed to keep the buses rolling for 90 more days at the request of the mayor .", "In the Bronx , many commuters expressed relief that the threatened shutdown had been avoided , at least for now .", "`` It 's just a relief knowing they 'll be here , `` said Evelyn Sanchez , 47 , who was waiting for a bus yesterday in Co-op City .", "`` It would 've messed up my life big time if they 'd have stopped running .", "It may sound stupid saying it , but nobody seems to understand how important one little bus can be . ``", "Daniesha Craig , 29 , who was standing next to her and was headed into Times Square for the New Year 's Eve festivities , said she would gladly pay more money to keep the buses running .", "`` That money 's in my pocket now , `` she said .", "`` I would have done almost anything to keep these buses going .", "I use them like the subway : every day , two times a day .", "I would 've been sunk if they had shut them down . ``", "In November , as part of the budget negotiations , the City Council reached an agreement with the mayor to hold express bus fares steady at $ 3 .", "The mayor said yesterday that the fare increase was needed to preserve weekend service on the express lines .", "The higher $ 4 fares will be charged every day on all seven private bus lines , city officials said , even though two of the lines do not provide weekend service .", "Fares could also go up in April for the authority 's express buses , officials said , as part of a proposed fare increase for subways , buses and express buses run by the authority .", "Christopher Policano , a spokesman for the City Council , said : `` We 're glad that weekend service and New York Bus Service will continue .", "But we have serious concerns about raising fares , as we are opposed to an overall M.T.A. fare increase .", "We 're going to be fighting that fare hike , and , if we 're successful , discussing with the mayor ways of avoiding this particular fare hike . 
``", "Correction : January 3 , 2003 , Friday An article on Wednesday about plans to increase the fare on subsidized express buses included an erroneous figure provided by the city for the number of buses used by the seven private companies .", "It is about 1,325 , not 4,600 ."], "summary": ["Fare on city-subsidized private express buses will increase to $ 4 from $ 3 in April as part of deal to preserve service on weekends and holidays .", "City also wins 90-day reprieve from New York Bus Service , Bronx company that had threatened to suspend express service , stranding 15,000 riders daily .", "Mayor Bloomberg says company will keep buses moving while city tries to get MTA to take over private lines .", "Photo ."], "publication": "nyt50", "label": [0, 1, 2], "tag": ["New York and Region"]} -{"id": "1453112", "text": ["We 're beginning the new year in a deep fix .", "The Bush administration 's decision to refer North Korea 's revival of its nuclear-weapons program to the United Nations is a reasonable but transparent effort to sidetrack the issue in hopes of avoiding another military crisis on the eve of war with Iraq .", "It is unlikely that the United Nations will take meaningful action in this situation , since no power other than the United States possesses the means to back up words with action .", "Even if the administration 's strategy of isolating North Korea works , at best it would amount to a partial tightening of sanctions against a country whose economy is already moribund .", "The only additional threat available is the denial of food aid for the people of North Korea , an act that would take the United States into new moral territory .", "The administration now is in the awkward position of choosing to give war with Iraq priority over the most serious threat to stability in Asia since the last North Korean nuclear crisis a decade ago .", "Moreover , the North Koreans are moving to develop their nuclear stockpile with such dispatch that the administration 's delaying tactics appear to have little chance to succeed .", "With the last of the international inspectors ejected yesterday and the possibility of a mothballed plutonium reprocessing facility coming back on line in the next month or two , North Korea is giving itself the means to produce ever-greater numbers of nuclear weapons , and no subsequent agreement will be able to reverse that fact .", "There is still a lingering hope that all this will turn out to have been an attempt by North Korea to get the Bush administration to make major concessions .", "If that 's the case , either the United States or North Korea will have to give way .", "Unfortunately neither of these scenarios looks likely .", "And absent either outcome , North Korea is on course to becoming a nuclear power .", "If the North Koreans are successful , the consequences will be severe .", "North Korea already is in a position to provide nuclear technology to other states or to terrorist groups .", "In any event , we should expect that it will continue to develop the ability to deliver nuclear weapons by ballistic missile .", "And no long-term comfort can be found from the relatively limited capabilities of North Korea 's current missiles , which can still threaten our allies , including Japan .", "What 's more , North Korean weapons engineers can gradually develop longer-range rockets and lighter warheads , giving the country true intercontinental ballistic-missile capability .", "While it 's uncertain how far North Korea 's missiles will be able to travel 
, it is certain that the Bush administration now faces an immediate loss of credibility .", "Its report on National Security Strategy , released in September , claims the right of pre-emption as a means to deal with the type of threat that Iraq is said to represent by virtue of its efforts to build weapons of mass destruction .", "There is no sign , however , that the administration plans to use this doctrine against North Korea , which poses a danger to the vital interests of the United States by virtue of what it has already accomplished .", "The administration 's special addendum to its National Security Strategy , the `` National Strategy to Combat Weapons of Mass Destruction , '' published in December , states on its opening page : `` We will not permit the world 's most dangerous regimes and terrorists to threaten us with the world 's most destructive weapons . ``", "But there is no sign that this new unconditional doctrine will be directed against North Korea .", "Another line in the addendum states that `` effective interdiction is a critical part '' of the American strategy to prevent the spread of weapons of mass destruction and the missiles that deliver them .", "But , again , the administration , after seizing a North Korean vessel in the act of smuggling North Korean ballistic missiles into Yemen , elected to release the ship and its cargo .", "American officials cited reverence for international law , but such a justification , so unusual during the administration 's first weapons-proliferation case , takes the teeth out of its tough pre-emption policy .", "With what lesson for North Korea .", "So on the way to war with Iraq , the United States has been caught out by North Korea -- which apparently saw its opportunity in our distraction and seized it .", "This drama is far from over , but with each day North Korea moves closer to its goal of either forcing the administration to negotiate or enhancing its ability to produce weapons of mass destruction .", "Either way , the balance of power in the Far East is likely to be upset .", "If the president negotiates , he will send a message that the key to respectful attention from his administration is blackmail .", "If he ca n't stop North Korea from pursuing its nuclear ambitions , the only effective remedy would be military action .", "War on the Korean peninsula is almost too horrible to contemplate , although the Clinton administration certainly confronted it when dealing with North Korea 's nuclear program in the early 1990 's .", "-LRB- Then , as now , the North Koreans were preparing to begin a process that would give them enough plutonium to build nuclear weapons serially . 
-RRB-", "If North Korea proceeds today , we would then be faced with a ruthless government in a position to increasingly threaten its region .", "This threat could cause a number of states , including South Korea and possibly Japan , to question whether American security guarantees are still the most reliable means for their defense and survival .", "One political reminder from this episode is the danger that can come from tough talk .", "When using words as weapons , a leader must be prepared to back up his rhetoric with force .", "The president 's nomination of North Korea as a member of the `` axis of evil '' in his last State of the Union message now looks like a bluff that is being called .", "And the outcome of the administration 's diplomacy is that we are preparing to fight a war with a country that might eventually acquire nuclear weapons , while another country is closing in on the ability to go into mass production .", "Like it or not , the administration needs to test the theory that North Korea is trying to force the United States into negotiations .", "That would be bitter medicine for the administration to swallow , but in view of the alternatives it would be wise for the administration to reverse course and engage with North Korea .", "However , if such a process does n't stop the North Korean nuclear enterprise , and quickly , then the administration must either accept a monumental blow to the security of the United States , or prepare for a second major military enterprise in Korea -- one that would take place simultaneously , or nearly so , with action against Iraq .", "Leon Fuerth , national security advisor to Vice President Al Gore from 1993 to 2000 , teaches international relations at George Washington University ."], "summary": ["Leon Fuerth Op-Ed article contends Bush administration 's decision to refer North Korea 's revival of its nuclear-weapons program to United Nations is reasonable but transparent effort to sidetrack issue in hopes of avoiding another military crisis on eve of war with Iraq .", "Holds it is unlikely that UN will take meaningful action in this situation , since no power other than United States possesses means to back up words with action ."], "publication": "nyt50", "label": [1, 2], "tag": ["Opinion"]} -{"id": "1453114", "text": ["It 's Christmas all over again at Jets headquarters , where the gifts are rolling in from teams the Jets helped with their victory Sunday over the Green Bay Packers .", "From the Cleveland Browns , who got into the playoffs as a wild card , Coach Herman Edwards received a bottle of Dom P\u00e9rignon and a congratulatory letter .", "And Edwards does n't even drink .", "There are flowers and fruit baskets from members of the Tampa Bay Buccaneers , who got a bye .", "Edwards has also heard from friends in Philadelphia , where he spent most of his playing career .", "The Eagles have the home-field advantage throughout the playoffs .", "`` With our win , we helped a lot of people , '' Edwards said .", "`` A lot of people stay in the sun , Philly 's headed home .", "It 's all good for them . 
``", "Edwards and Dungy Reminisce Herman Edwards spoke with Indianapolis Colts Coach Tony Dungy , his friend and mentor , for about 20 minutes Monday night , the last time they will speak until they see each other at the Meadowlands on Saturday .", "There was no pregame trash-talking -- that 's not their style -- but there was plenty of reminiscing .", "`` We talked about how we started , when we were in Hawaii , how we first met , '' Edwards said .", "`` There 's a lot of water under that bridge . ``", "Edwards said the two also discussed Dungy 's firing by Tampa Bay last season .", "Edwards spoke to Dungy last year when rumors he would lose his job began even as the Bucs were preparing to play Philadelphia in the playoffs .", "`` We talked about how he was going to handle it , '' Edwards said .", "`` I figured he was going to handle it that way , like a gentleman .", "Then we had a good conversations at the White House , for the Martin Luther King celebration .", "It was a funny period for me .", "Denny Green had just gotten fired and Tony had just gotten fired .", "I 'm looking at these guys who I 've looked up to and said , ` This is crazy . '", "I was trying to convince Tony to go where he went .", "` If you go to Indianapolis , you get an e-ticket . '", "And the next thing was he was n't in the conference , so I do n't have to play him .", "That 's a good thing . ``", "Edwards remains disturbed by how Tampa Bay handled Dungy 's exit .", "`` There 's a lot of times when coaches are going to be dismissed , you can wait until the season is over to do that , `` Edwards said .", "`` Before you 're a coach , you 're still a man and your dignity is very , very important .", "When you do n't show a man dignity , after he 's been there , that bothers me a little bit . 
``", "PRO FOOTBALL : NOTEBOOK ."], "summary": ["New York Jets Notebook discusses New York Jets win over Green Bay Packers , which helped many other teams clinch playoff berths or home-field advantages .", "Coach Herman Edwards and former mentor Indianapolis Colts coach Tony Dungy reminisce as friends , not coaches who will oppose each other in playoff game ."], "publication": "nyt50", "label": [9, 0], "tag": ["Sports"]} -{"id": "1453116", "text": ["The Rockefeller Group was most famous as the manager of Rockefeller Center , the landmark cluster of buildings that includes Radio City Music Hall in the heart of Midtown Manhattan .", "That came to an end , though , in February 1997 , when investors led by Tishman Speyer Properties bought the complex from the group 's Japanese parent , Mitsubishi Estate .", "But that did not mean the end of the Rockefeller Group -- it just moved a little to the west .", "Not included in the sale were four skyscrapers on the west side of the Avenue of the Americas between 48th and 51st Streets , which the group managed and in which it held differing levels of ownership .", "A surface parking lot at 745 Seventh Avenue .", "And some land in New Jersey .", "Since then , the parking lot has been turned into a one-million-square-foot headquarters for the Lehman Brothers investment banking company and the land in New Jersey has sprouted office buildings , stores and a distribution center .", "In the last year , some of the New Jersey buildings have been sold , and building sites have been acquired in places like Northern California and eastern and central Florida .", "And the company is in the middle of a new cycle of development .", "`` We have taken advantage of a seller 's market to find outside investors to take our positions in several properties , `` said Jonathan D . Green , president and chief executive of the Rockefeller Group .", "`` We have reaped the fruits of our developments and are about to start new buildings . ''", "He said most of the new projects would be built for specific tenants , although he did not rule out building on speculation if market conditions were suitable .", "He said the company was close to signing an agreement for an 800,000-square - foot distribution center in Central New Jersey .", "One area the company is developing is in Florham Park , an area about 40 miles west of Manhattan that was once a playground of the rich , with grand houses and expansive estates .", "Three of the New Jersey buildings were sold from this parcel to finance further development .", "They were bought by Investcorp , which represents investors from the Middle East , for an undisclosed price .", "The Rockefeller Group started the Florham Park project in 1997 by buying 140 acres in what had been a research park for the Exxon Corporation .", "The property had an existing building , occupied by AT&T , and two more were built .", "One was for AT&T Laboratories .", "The other one was built on speculation and eventually was leased by Novartis , a pharmaceutical company .", "`` We did that building without signing a tenant first , and Novartis came along in midconstruction , '' Mr. 
Green said .", "He said there was enough land remaining on the parcel , which was once the estate of Florence and Hamilton Twombly -- hence the township name -- to build another 220,000-square - foot building .", "The group formed a joint venture with the Gale Group of New Jersey in the autumn of 2000 to buy 473 acres in Florham Park , next to the 140-acre parcel , to be developed as an office park .", "The site will support two million square feet of office space , although no projects are under way .", "The group is also developing industrial property near Exit 8A of the New Jersey Turnpike , about 50 miles southwest of New York City , near Cranbury .", "This has been a popular area for distribution centers because it is close to major markets .", "And because it was farmland , there are few neighbors to complain about heavy truck traffic .", "The first development in the group 's 150-acre foreign trade zone there was a 900,000-square - foot distribution center for Pearson Education , a publishing company that has a large textbook operation .", "The distribution center has since been sold to a group of investors based in London , Mr. Green said , for a reported $ 68 million .", "The site can support two million square feet of development , so if the agreement is signed for the 800,000-square - foot building , only about 200,000 square feet of development will remain .", "The Rockefeller Group has another foreign trade zone in Mount Olive Township , in far western Morris County , that is nearing completion and will have 2.9 million square feet of space .", "Room for one building remains .", "The group sold land for 800,000 square feet of retail space on the south side of the tract to AIG Baker , a developer based in Alabama .", "The center includes stores like Wal-Mart , Sam 's Club and Lowe 's .", "The Rockefeller Group is placing much of its focus on the development of foreign trade zones , in which companies receive favorable import duty treatment until a product is sold in the United State or is re-exported .", "It has bought land for such zones in West Palm Beach and Homestead , Fla . , and in suburban St . Louis .", "`` Our strategy for the next few years is to do low-risk , build-to-suit projects mostly in suburban markets , '' Mr. Green said .", "COMMERCIAL REAL ESTATE ."], "summary": ["Rockefeller Group , which once ran Rockefeller Center , now has real estate operations across country and is in middle of new cycle of development .", "Rockefeller Group 's New Jersey projects noted .", "Jonathan D Green , president and chief exec of company , says most of new projects will be built for specific tenants , although he does not rule out building on speculation if market conditions are suitable .", "Photo ."], "publication": "nyt50", "label": [11, 8, 9, 5], "tag": ["Technology", "Business"]} -{"id": "1453119", "text": ["Agnes Eisenberger , a manager of classical musicians who was the president and owner of Colbert Artists Management , died on Thursday at Lenox Hill Hospital in Manhattan .", "She was 79 and lived in New York City and Pawling , N.Y.", "The cause was cancer , said Charlotte Schroeder , who succeeds Ms. Eisenberger as president of Colbert Artists .", "Ms. 
Eisenberger worked with Sir Georg Solti and the Chicago Symphony Orchestra , the soprano Joan Sutherland , the Juilliard String Quartet , the flutist Jean-Pierre Rampal and the bass James Morris , among many others .", "She was also the manager of the pianist Alfred Brendel , starting early in his career , and was instrumental in his success in the United States .", "Ms. Eisenberger was born in Vienna to a musical family and came to the United States as a child .", "She graduated from Adelphi University and in the early 1960 's began to work for Colbert Artists as an assistant to the company 's founders , Henry and Ann Colbert .", "She became the owner and president after the retirement of Mrs. Colbert in 1991 .", "-LRB- Mrs. Colbert died in 2001 . -RRB-", "Ms. Eisenberger had recently completed `` Brahms 's Notebooks , `` a translation of a collection of poetry that Brahms admired , published in German in 1909 .", "The book , with annotations and an introduction by Ms. Eisenberger , is to be issued by Pendragon Press in 2003 .", "Her marriage to the violist Paul Doktor ended in divorce .", "There are no immediate survivors ."], "summary": ["Agnes Eisenberger , manager of classical musicians who was president and owner of Colbert Artists Management , dies at 79 ."], "publication": "nyt50", "label": [0], "tag": ["Arts", "Obituaries"]} -{"id": "1453120", "text": ["Pushing in toward glittering window displays , holiday crowds are elbowing and gaping six-deep at intriguing depictions of what so far amount to nothing more than a shrewd bet on New York 's ability to prevail , not just survive .", "`` Bad men knocked down the two big buildings and we ca n't put them back , but we 're trying to find something better , `` a mother gently explains to a child .", "The lad stares through the window glass in confusion but obvious wonder , too .", "The seasonal mechanical elves and snow scenes may be missing .", "But look : there are delightfully glowing toy buildings , nine versions of them towering elegantly and defiantly enough above a miniature Manhattan to tease anyone with an imagination , particularly the city 's young at heart .", "These are the worthy architect renderings of what might yet be in Lower Manhattan , on display just across the street from where the World Trade Center towers perished so woefully from the skyline .", "What must yet be , to judge from the enthusiasm and considered comments of the public 's reaction as they crowd in daily , from 7 a.m. to 11 p.m. , at the Winter Garden .", "The glow and buzz to be witnessed at the displays signal an empowering by the people even beyond the thoughtful ideas of the architects .", "This vital hubbub amid our dim solstice days stands as a fresh attraction in itself , so near yet so far from the elegiacal footprints of the obliterated towers .", "`` It 's all bittersweet , `` a young woman says to a man equally touched and stricken as their faces glow with the idealized light of future possibilities .", "He answers , `` The site is a graveyard , but these ideas can grow on us . ''", "Amid the crowd , one theme is : Remember how we never appreciated the old towers until they fell .", "Another is : Let 's show the world a tower at least as tall -LRB- although it might take a younger office generation to dare work that high again -RRB- .", "Primal Stonehenge facets in the proposals honor the tragedy 's ineffability .", "One would use glass structures to allow no shadow to fall each Sept . 
11 on where the towers stood .", "Another would create rectilinear groves of trees precisely in the towers ' final shadows .", "Visitors parse and top the architects word for word , debating the poignancy of `` The Void '' in one rendering and `` The Sacred Precinct '' in another .", "And let 's not forget `` The Stability System '' in a third , as if pre-9/11 security , like crushed city innocence , might ever be regained .", "People have until Feb . 2 to jot down their honest reactions and advice for the next planning phase in the task of resurrection .", "Daily throngs are serving notice the city will not tolerate this as a feel-good exercise .", "We are on a serious search for some post-necrotic bulwark badly needed by New York .", "There is imaginative energy and honest emotion at the windows .", "Even Rudolph Giuliani 's curmudgeonly dismissal of the initial renderings seems forgivable , a healthy sign perhaps that the city is intent on regaining its old edge .", "FRANCIS X . CLINES The City Life ."], "summary": ["Francis X Clines Editorial Observer expresses belief , as New Yorkers gaze at architects ' renderings for future of Lower Manhattan , that city is determined to regain its health and its old edge ."], "publication": "nyt50", "label": [22, 21], "tag": ["Opinion"]} -{"id": "1453121", "text": ["The Zapatista rebels , after long silence , were stirring today , the eve of the ninth anniversary of their armed uprising against the Mexican government and its free trade accord with the United States .", "The North American Free Trade Agreement eliminates tariffs on most agricultural goods on Jan . 1 .", "Across Mexico , people fear that a flood of cheaper , government-subsidized American imports will ruin hundreds of thousands of Mexican farmers .", "A national coalition of farm groups called off plans today to blockade the border with the United States after the secretary of the economy , Luis Ernesto Derbez , raised the possibility of renegotiating elements of the pact .", "Mexican government officials in Tuxtla Guti\u00e9rrez , the capital of Chiapas , where the Zapatistas maintain their strongholds , said reports of planned rebel actions were rife .", "They included threats to seize a tourist ranch outside the town of Ocosingo , the scene of the heaviest fighting between the group and the Mexican Army in January 1994 , said the American owners , Glen Wersch and Ellen Jones , in telephone interviews .", "Officials from the American , British , German and Dutch Embassies in Mexico City have advised their guests to leave the site , Rancho Esmeralda , which lies a mile from both a Mexican Army base and a Zapatista community .", "`` We 're about to have everything in the world taken away from us , `` Ms. Jones said .", "`` The police are saying they ca n't come until our ranch is trespassed on .", "There is no law here . ``", "She said she and her husband were summoned to a meeting with their Zapatista neighbors 18 days ago and told that their ranch would fall `` under the authority of the Zapatistas , '' who wanted tourism banned in the area .", "The Zapatistas , she said , demanded the title to the ranch , a popular destination for American and European travelers listed as one of the top 10 places to stay in Mexico by the Lonely Planet travel guide .", "Mr. Wersch said he would sit tight .", "`` We 'll be here tonight , `` he said .", "`` I have n't seen any word from the army .", "They have a front-row seat on the problem . 
``", "Other threats , unconfirmed , included plans to seize a bridge across the Usumacinta River .", "The bridge , at Boca de Cerro , near the Chiapas-Tabasco state line , is the site of a proposed hydroelectric dam that , if built , would flood peasants ' farms and unexplored sites of Mayan ruins .", "The Zapatistas also plan a march on the town hall in Ocosingo at dawn on New Year 's Day .", "The Zapatista movement rose up on Jan . 1 , 1994 , and fought pitched battles with the Mexican Army before retreating into isolated communities in the mountains and jungles of Chiapas .", "The group has said and done little since its leaders came to Mexico City last year seeking passage of an Indian rights law .", "Though President Vicente Fox strongly backed the law , the Mexican Congress eventually passed a diluted version of the bill that satisfied few .", "The group 's leader , who calls himself Subcommander Marcos , has alienated many of his supporters outside Chiapas by publishing a long and , in many eyes , bizarre attack on a Spanish judge , Baltasar Garz\u00f3n .", "The judge is best known for his legal pursuit of the Chilean dictator Gen . Augusto Pinochet , and for his drive on the Basque separatist group known as ETA .", "Subcommander Marcos called Judge Garz\u00f3n `` a grotesque clown '' in a letter published on the Internet and in a Mexico City newspaper , and ridiculed his attempts to prosecute members of ETA , which has carried out bombings and other attacks in the name of a Basque homeland for four decades .", "The judge replied with a letter accusing the Zapatista leader of standing for `` false rebellion , violence , lies , ignorance . ''", "What this spat has to do with the poverty of people in Chiapas remains obscure .", "`` The Zapatistas have nothing to gain from picking so many fights , '' said Carlos Monsiv\u00e1is , a Mexico City writer and historian sympathetic to the movement ."], "summary": ["Zapatista rebels are stirring on eve of ninth anniversary of their armed uprising against Mexican government and its free-trade accord with United States .", "North American Free Trade Agreement eliminates tariffs on most agricultural goods on Jan 1 .", "Across Mexico , people fear that flood of cheaper , government-subsidized American exports will ruin hundreds of Mexican farmers ."], "publication": "nyt50", "label": [0, 1, 2], "tag": ["World"]} -{"id": "1453123", "text": ["An American soldier wounded in the head during a border patrol on Sunday was shot by a Pakistan border guard , and the United States responded by calling in a coalition plane that bombed the area , the United States military said today .", "The Pakistani guard was part of a unit cooperating with American forces on border control .", "It was not clear why he opened fire , but it appears he strayed over the border into Afghanistan .", "When the American patrol ordered him to move back into Pakistan , he retreated with several others to the cover of a building and opened fire , grazing the American soldier 's head , said a statement from the press center at the United States air base at Bagram , north of Kabul .", "The American patrol called in air cover after the shooting , and a coalition plane dropped a 500-pound bomb on the area , according to the statement .", "`` We are working with the Pakistanis for an accurate battlefield damage assessment of the incident , '' the statement said .", "There was no word from Pakistani officials about the incident .", "The Pakistani news media reported that planes had 
dropped two bombs , one hitting a checkpoint and one damaging an abandoned religious school building in a place called Bormol .", "There were no reported injuries .", "The American soldier was evacuated and later flown to Germany for further treatment .", "His condition was described as stable .", "Afghan and United States military officials say there has been an increase in cross-border attacks by small groups intent on launching rockets at United States military positions along the border areas , and then retreating into Pakistan .", "An American soldier was killed in a firefight 10 days ago in the same border area , near Shkin , in the eastern province of Paktika .", "The American patrol fired back and thought they had killed one of the attackers .", "Others fled over the border into Pakistan , an American military spokesman said the next day .", "United States forces have mounted joint operations with Pakistani forces along the border , coordinating at times when suspects flee across it .", "But there are frustrations among Pakistani officials in the border areas who have watched local tensions rise while the operations have produced little .", "Pakistan has deployed its own forces to try to stem infiltration of armed groups either way , but they are not seen as particularly effective because of the strong anti-American and pro-Taliban feeling of the local population , which allows the armed groups free movement .", "Several dozen low-level members of Al Qaeda are believed to be in South Waziristan , in Pakistan 's tribal territories , near where the border clashes have occurred , according to Pakistani officials .", "But efforts to arrest them have been thwarted as local people have tipped them off before government forces move in , they said .", "THREATS AND RESPONSES : BORDER INCIDENT ."], "summary": ["American soldier on border patrol in Afghanistan is shot and wounded in head by Pakistani border guard .", "United States responds by calling in coalition plane that bombs area .", "It is not clear why Pakistani guard opened fire , but it appears he strayed over border into Afghanistan .", "Map ."], "publication": "nyt50", "label": [2, 0], "tag": ["World", "Washington"]} -{"id": "1453125", "text": ["Richard Horner , a Broadway theater owner and producer who won a Tony Award for the 1974 revival of Eugene O'Neill 's `` Moon for the Misbegotten , '' died on Saturday at his home in Palm Springs , Calif .", "He was 82 .", "By himself and with his business partner Lester Osterman , Mr. Horner produced dozens of Broadway and Off Broadway shows during the 1960 's and 70 's , including `` Butley , '' Albert Innaurato 's `` Passione '' and `` The Crucifer of Blood , '' a Sherlock Holmes story adapted from Sir Arthur Conan Doyle 's `` Sign of Four . ''", "He was also a general manager for `` Da , '' the 1969 original production of `` 1776 '' and other shows .", "`` A Moon for the Misbegotten '' won three Tony Awards in 1974 , including a special award presented to the producers .", "Mr. Horner and Mr. Osterman 's Coronet Theater Corporation owned several Broadway theaters , including the 46th Street Theater -LRB- now the Richard Rodgers -RRB- , the Helen Hayes and the Morosco , which closed in 1981 .", "During the 1980 's , Mr. Horner was the executive director of the American Shakespeare Company in Stratford , Conn . , and staged productions at Jones Beach Marine Theater .", "Born in Portland , Ore . 
, he graduated from the University of Washington at Seattle and served in the Navy during World War II .", "Mr. Horner is survived by his wife , the former actress Lynne Stuart .", "Two sons , Lindsey Horner of New York City and Randall Horner of Portland , Ore .", "Two daughters , Robin Horner of San Francisco and Anne Cameron of Albuquerque .", "A sister , Joy Rich of Roseburg , Ore .", "And three grandchildren ."], "summary": ["Richard Horner , Broadway theater owner and producer , dies at 82 ."], "publication": "nyt50", "label": [0, 1], "tag": ["Arts", "Theater", "Obituaries"]} -{"id": "1453128", "text": ["On one point , Michael Zeoli and local health officials agree : the large swimming pool in the side yard of Mr. Zeoli 's house on Victory Street here is most certainly not suitable for swimming -- at least not for humans .", "Bullfrogs and snakes , on the other hand , seem to like it just fine .", "What the officials and Mr. Zeoli disagree about is whether the fetid water in the 30-foot-long in-ground pool is a public health hazard , a sizable breeding ground for mosquitoes that could carry the potentially deadly West Nile virus .", "After a standoff of many months , during which Mr. Zeoli refused to clean the pool , officials decided that enough was enough .", "On Friday , Mr. Zeoli , 50 , was arrested and charged with violating the state 's public health code .", "He surrendered to the Shelton police after they telephoned him to say that a warrant had been issued .", "In doing so , he may have become the first person charged under laws that were tightened after the West Nile virus became a major public health concern in 1999 .", "Capt . Joel Hurliman of the Shelton Police Department said it had never before arrested anyone on a charge of `` maintaining an in-ground pool containing stagnant water serving as a possible breeding ground for mosquitoes , '' as the charge was worded .", "`` This is our first experience with this , '' Captain Hurliman said today .", "`` It 's not like this was a plastic wading pool left in the yard and the water turned green .", "This is an in-ground pool that has been like that for quite some time .", "It has frogs in it . ``", "The local health agency , the Naugatuck Valley Health District , obtained a a warrant in State Superior Court last week , and the police asked Mr. Zeoli to surrender .", "After his arrest , he was released and ordered to appear in court on Jan . 13 .", "If convicted , he could face up to 90 days in prison .", "Mr. Zeoli did not respond to telephone messages left at his home today , and a knock on his door went unanswered .", "But in an interview with The New Haven Register , which reported his arrest this morning , he denied that the pool posed a health risk .", "He noted that there were a dozen insect-eating bullfrogs in the pool and said he used pesticide .", "He said he could not afford to drain the pool for cleaning .", "Health officials first ordered Mr. Zeoli to clean the pool last summer after complaints from neighbors .", "Lenny Walker , 40 , who lives next door to Mr. Zeoli in a middle-class neighborhood of tidy single-family houses , said he had been complaining for years .", "Mr. Walker , a mechanic for Sikorsky Aircraft , said his three children could not use their own pool in the summer because of the mosquitoes from next door .", "`` There are trees growing out of the pool , '' Mr. Walker said .", "`` There are snakes , frogs , ducks .", "There 's millions of mosquitoes . ``", "Mr. 
Walker rejected the theory that bullfrogs could control the mosquitoes .", "`` That 's a hell of a job for a frog , `` he said .", "`` That frog 's on overtime . ``", "The pool is about 12 feet wide by 30 feet long and surrounded by a wooden deck covered in dead leaves and branches .", "It appears to be four to seven feet deep .", "The water , partly frozen , is black .", "The plastic pool liner is torn in places and covered in mold .", "Part of the pool 's frame appears to be collapsing .", "Last winter , a neighbor 's dog walked onto the frozen water and nearly drowned after falling through the ice , Mr. Walker said .", "While Mr. Zeoli 's house seems to be well-maintained , with Christmas lights twinkling out front , the rest of the property looks like a junkyard , with several rusted and apparently abandoned vehicles along with a couple of motorboats and other detritus .", "On the street out front is a Dumpster filled with other refuse .", "`` It 's definitely a case of blight , `` Mr. Walker said ."], "summary": ["Police in Shelton , Conn , arrest Michael Zeoli , charging that his swimming pool is breeding ground for mosquitoes that can cause West Nile virus .", "Health officials first ordered Zeoli to clean pool last summer after complaints from neighbors , but he did not comply ."], "publication": "nyt50", "label": [19, 2], "tag": ["New York and Region"]} -{"id": "1453129", "text": ["The Giants ' kicking units are like an improvisational comedic troupe , the characters changing constantly .", "Dan O'Leary , the long snapper , is out after tearing a thumb ligament , and Trey Junkin , a 41-year-old with 19 years of experience , is in just days before the playoff game Sunday against San Francisco .", "Junkin will become the fifth -- or is he the 50th .", "-- long snapper used by the Giants since the beginning of training camp .", "He will snap on punts , and Coach Jim Fassel indicated yesterday that he would consider using Junkin on field goals and extra points as well .", "Chris Bober , the center , has been snapping on field goals and extra points .", "He had a poor snap on an extra point in Indianapolis and another on the game-winning field goal against Philadelphia .", "On that play , Matt Allen dug out the low snap and set it in time for Matt Bryant to kick .", "Junkin has played 281 regular-season games for Buffalo , Washington , the Raiders , Seattle and Arizona since beginning his career in 1983 .", "`` I was with him Arizona , '' Fassel said .", "`` I have a lot of faith in him .", "We need a guy with experience to step in and help us right now . ``", "Junkin follows O'Leary , who replaced Bob Jones after Jones snapped a ball over Allen 's head in a game against Houston , a mistake that cost the Giants a safety in a 2-point defeat .", "The Giants have also used three holders in the regular season : Allen , Tom Rouen and Jesse Palmer before Allen got the job back .", "Bryant became the kicker the day before the regular season after a knee injury sidelined Owen Pochman .", "Confident 49er San Francisco center Jeremy Newberry produced the first bulletin board material for the playoff game .", "Using colorful language , Newberry told reporters that the 49ers would clobber the Giants .", "`` I 'm not worried about it , `` he said of the Giants ' potential reaction .", "`` There 's nobody on this team that thinks anything different . 
``", "Newberry indicated that the reason he felt strongly was that all of the 49ers who had been nagged by injury would be in the lineup .", "Nevertheless , the 49ers have more physical problems than the Giants .", "Right guard Ron Stone is recovering from a sprained ankle , Terrell Owens did not play Monday because of a groin injury , and cornerback Jason Webster will probably be listed as questionable for the game because of an ankle injury .", "Mike Rumph , a rookie , could replace Webster .", "PRO FOOTBALL : NOTEBOOK ."], "summary": ["New York Giants Notebook discusses Giants injured long-snapper Dan O'Leary , who will be replaced by Trey Junkin in playoff game .", "San Francisco 49ers center Jeremy Newberry tells media team will rout Giants ."], "publication": "nyt50", "label": [1, 15], "tag": ["Sports"]} -{"id": "1453131", "text": ["The machinists ' union at United Airlines said yesterday that it opposed the airline 's demand for short-term wage concessions because executives had not provided enough financial information for the union to make an informed decision and because the two sides had not engaged in back-and-forth negotiations .", "The union also said that United needed to return to profitability by developing a comprehensive business plan rather than by asking its employees for piecemeal cuts .", "The union , the International Association of Machinists and Aerospace Workers , made its statements in a filing yesterday in a bankruptcy court in Chicago .", "The union , which represents 35,000 workers at United , has refused to go along with a 13 percent wage cut sought by the airline , which has obtained tentative agreements on short-term concessions from its four other big unions .", "United has asked the bankruptcy judge , Eugene R . Wedoff , to force the machinists to accept the cuts .", "The airline , a unit of the UAL Corporation , is seeking concessions that would save it $ 70 million a month while it negotiates with its unions for longer-term cuts worth $ 2.4 billion a year from 2003 to 2008 .", "The leaders of the Air Line Pilots Association and the Association of Flight Attendants have approved their shares of the cuts , which amount to pay reductions of 29 percent and 9 percent respectively .", "They would also forgo raises .", "Members of those unions are voting on the proposed cuts through Jan . 8 .", "United is also asking two smaller unions , representing meteorologists and flight dispatchers , to accept wage reductions of 13 percent .", "The machinists , though , are the most militant of the airline 's unions .", "In late November , when United was trying to wrest $ 5.2 billion in concessions from its unions to obtain a $ 1.8 billion federal loan guarantee , the machinists voted to reject their share of the cuts .", "In its filing yesterday , the union said it `` has not yet had the opportunity to engage in bilateral negotiations with United and has not yet been provided with all of the financial information needed to evaluate United 's business plan , particularly as it relates to the I.A.M. members . 
``", "United has said that if it does not obtain the $ 70 million in monthly concessions from its unions , it will ask the court for the power to void its labor contracts .", "In that case , United would have to prove that it had engaged in good-faith negotiations with its unions .", "The filing by the machinists argues that the company has not been open in its talks .", "United is also under pressure to show that the union contracts are deadweights on its day-to-day business operations .", "The machinists ' filing said there was no proof of that .", "The filing said that `` the evidence , if any , does not establish that the proposed reductions are necessary or even appropriate on an interim basis . ''", "An airline spokesman , Rich Nelson , said the company had no comment on the filing .", "United is expected to file a response by next Wednesday , and Judge Wedoff has said he will make a decision by Jan . 10 .", "United is seeking the short-term concessions because it must meet monthly cash flow requirements set by its four lenders to maintain its access to financing .", "The company said the concessions were needed before a mid-February deadline set by the lenders .", "If the unions agreed to the cuts , United said it would have enough breathing room until May 1 to negotiate for long-term cuts .", "The company 's chief financial officer , Jake Brace , said in court on Monday that United expected to have a net operating loss of $ 3.2 billion in 2002 , compared with a loss of $ 2.97 billion in 2001 .", "Those figures , which are reported for income tax purposes and which take into account asset depreciation and other matters , are not the same as the net loss reported in financial statements .", "United had a net loss of $ 2.1 billion in 2001 , the largest in airline history .", "Correction : January 3 , 2003 , Friday Because of an editing error , an article in Business Day on Wednesday about opposition by the machinists ' union at United Airlines to a demand for short-term wage concessions referred incorrectly to the employees who rejected a previous request for cutbacks , in November .", "They were the mechanics , who are part of the machinists ' union , not the union 's members over all ."], "summary": ["Machinists ' union at United Airlines says it opposes airline 's demand for short-term wage concessions because executives have not provided enough financial information for union to maker informed decision and because two sides have not engaged in back-and-forth negotiations .", "Union also says United needs to return to profitability by developing comprehensive business plan rather than by asking its employees for piecemeal cuts ."], "publication": "nyt50", "label": [0, 1], "tag": ["Business"]} -{"id": "1453134", "text": ["PricewaterhouseCoopers , the nation 's largest accounting firm , has taken a risky public stance in favor of better , more thorough and more detailed audits .", "In recent advertisements , the firm has promised to take a tougher stance with clients and resign if it can not resolve concerns about a particular audit .", "It is a gamble strongly favored by those who want accounting firms to be more aggressive with their corporate clients , to weed out fraud before investors suffer catastrophic losses .", "And it distinguishes the firm from its three most important competitors among the largest accounting firms .", "But it is still a gamble .", "PricewaterhouseCoopers has begun to outline a yardstick by which its own performance will be judged , and if it falls short , 
the firm could find itself singled out for special criticism in a profession that came under heavy fire in 2002 .", "`` The talk is great , and if they walk the talk , it will be a tremendous move that will clearly differentiate them from any of the other big firms , '' said Lynn Turner , the former chief accountant for the Securities and Exchange Commission .", "`` It will be a tremendous gain for investors as well .", "But let 's see the walk first . ``", "The talk has been evident in recent full-page newspaper advertisements in which PricewaterhouseCoopers has stated its willingness `` to ask the tough questions and tackle the tough issues . ''", "The firm further pledges , `` In any case where we can not resolve concerns about the quality of the information we are receiving or about the integrity of the management teams with whom we are working , we will resign . ''", "But PricewaterhouseCoopers faces a significant challenge from continuing public scrutiny of its past work .", "For instance , it approved financial disclosures at Tyco International despite the company 's use of `` aggressive accounting that , even when not erroneous , was undertaken with the purpose and effect of increasing reported results above what they would have been if more conservative accounting were used , '' according to a report filed by Tyco on Monday with the S.E.C.", "Tyco also said it was reducing previously reported earnings by $ 382 million .", "The approval of technically permissible -- but perhaps misleading -- `` aggressive accounting '' shows the difficulty the firm faces in bridging what John J . O'Connor , a vice chairman at PricewaterhouseCoopers , called the `` expectations gap '' between what investors want from audits and what auditors do .", "Mr. O'Connor said the firm planned to close that gap .", "`` We are looking at the type of qualitative reporting that we can do , '' he said , so that investors would be informed of just how aggressive or conservative the assumptions behind a company 's financial disclosures were .", "For now , he said , `` we are clearly starting with the audit committees and management . ''", "The firm has also put together ethical guidelines that , while not new , have not been codified before .", "The code of conduct tells employees confronted with difficult judgment calls to consider , among other things , `` Does it feel right .", "`` , `` How would it look in the newspapers .", "`` and '' Can you sleep at night .", "`` The questions illustrate that many of the decisions auditors are called on to make are not strictly dictated by the rules .", "`` That 's the issue , `` said Charles A . Bowsher , a former comptroller general of the United States and head of the Public Oversight Board that used to supervise ethics and disciplinary issues for the accounting profession .", "`` In each case , unfortunately , you 've got to look at the facts .", "I 'm a great believer that if you have a client who 's pushing the envelope too far too many times -- and I believe that is how Arthur Andersen got into trouble -- then the auditor should resign from the account . ``", "Mr. 
O'Connor says that if the firm 's accountants ask themselves these questions and are uncomfortable with the answers -- even if a client 's preferred accounting complies with generally accepted principles -- they should not sign off on the books .", "In recent months the company has resigned from several clients , he added , but he would not identify them .", "Auditors do not often resign .", "According to Auditor-Trak , a service of Strafford Publications , an Atlanta-based publisher of legal and business information services and accounting industry data , 348 accounting firms resigned from clients in 2002 through Monday , with firms indicating in 59 cases that the reasons were concerns about independence or a company 's practices or concerns by a company about the auditor 's standards .", "The four largest firms resigned from 80 clients , and PricewaterhouseCoopers accounted for 13 of those .", "In 2001 , there were 286 resignations , 88 of them by the four largest firms and 22 by PricewaterhouseCoopers .", "But the data may understate how often companies and auditors part ways over accounting disputes because it is in neither side 's interest to make such disagreements public .", "Executives do not want their companies to suffer the increased scrutiny and decline in stock price that would probably follow an auditor 's resignation , and accounting firms do not want to attract the attention of lawyers looking for grounds for securities lawsuits .", "If PricewaterhouseCoopers does provide audits that give more information to investors , Mr. Turner said , it may actually help shield the firm from such lawsuits .", "`` The way an accounting firm has to manage its risk if it 's going to be successful -- and none of them have been in the last three or four years -- is you have to be sure that whoever you have out there on the audit team is identifying the problems , `` he said .", "Finding the problems will be easier the more thorough the audit is , he added .", "The other large accounting firms have responded to general criticism of the accounting profession with marketing campaigns of their own , but none have gone as far as PricewaterhouseCoopers and some oppose the firm 's proposals , Mr. Turner said .", "`` Will PWC be able to bring the profession along .", "`` he asked .", "`` The proof is going to be in the pudding . '' ."], "summary": ["PricewaterhouseCoopers , nation 's largest accounting firm , takes risky public stance in favor of better , more thorough and more detailed audits .", "In recent advertisements , firm has promised to take tougher stance with clients and resign if it can not resolve concerns about particular audit .", "It is gamble strongly favored by those who want accounting firms to be more aggressive with their corporate clients , to weed out fraud before investors suffer catastrophic losses ."], "publication": "nyt50", "label": [2, 1, 0], "tag": ["Business"]} -{"id": "1453135", "text": ["Bill Parcells never coached a Tampa Bay Buccaneers game and never took a dollar from the team .", "He gave them nothing but agita when he suddenly backed out of a four-year deal last January .", "`` We lost some credibility that we worked very hard to get , '' Tampa Bay 's general manager , Rich McKay , said at the time .", "`` We 'll get it back . 
``", "Given Parcells 's lack of contribution to the Buccaneers , does the team deserve compensation if the Dallas Cowboys sign him , as is expected .", "Or is Tampa Bay 's motivation solely revenge .", "The Bucs think they are entitled to compensation , and have asked the National Football League office for just that .", "The league will hold a hearing tomorrow , during which Commissioner Paul Tagliabue will determine the validity of the contract that Tampa Bay said Parcells signed and determine if his signing by Dallas constitutes damages .", "Meanwhile , Dallas did not announce Parcells 's signing yesterday , and a Cowboys spokesman said nothing was expected today .", "`` Later in the week , if anything , '' Rich Dalrymple , the team spokesman , said .", "Parcells did not return a call seeking comment .", "All contracts must be filed with the league , but Parcells 's never was .", "This creates a murky situation , and sports lawyers interviewed yesterday had various opinions about what Tampa Bay deserved .", "Alan Vickery , a lawyer at Boies , Schiller & Flexner in Manhattan , said , `` As a matter of contract law , Tampa Bay abandoned that contract by signing someone else . ''", "When Parcells pulled out , Tampa Bay signed Oakland 's Jon Gruden , who led the Bucs to a 12-4 record -LRB- a three-victory improvement over last season -RRB- and the National Football Conference South title .", "The price for Gruden was steep : four draft choices and $ 8 million over three years .", "Kenneth Munoz , the former general counsel for Madison Square Garden , witnessed the argument for compensation from the management side after Pat Riley resigned as coach of the Knicks and Mike Keenan left the Rangers .", "In both cases , the leagues ' commissioners ordered compensation .", "`` If the facts are , indeed , that he signed an agreement with Tampa Bay for his exclusive head coaching services , then chose not to perform , then he goes to work for another employer , then Tampa Bay has a leg to stand on , '' said Munoz , now a lawyer in the Manhattan office of Sidley , Austin , Brown & Wood .", "Paul C . Weiler , the Henry J . Friendly Professor of Law at Harvard Law School , compared the situation to that of a young player who signs a pro contract but decides to return to college without playing for that team .", "`` If he decides to come back , he is not considered a free agent , '' Weiler said .", "But this is a singular case , because Parcells never worked for Tampa Bay , and never earned a cent on a four-year contract that would have paid him $ 4.25 million or $ 6 million annually .", "News reports on the salary varied .", "The situation is vastly different from when Tagliabue awarded the New England Patriots four Jets draft choices for the Jets ' right to sign Parcells in 1997 .", "Parcells was entering the final year of his contract and had just taken the Patriots to the Super Bowl but had a fractious relationship with Robert K . Kraft , the Patriots ' owner .", "Tagliabue is the judge and jury in these disputes , and he does not rely on court precedent .", "`` I suspect that he would look at the contract Parcells supposedly signed and the circumstances for why he did n't go to work for Tampa Bay , `` Gary Roberts , deputy dean of the Tulane Law School , said .", "`` But you ca n't predict what the commissioner will do .", "There are too many things we do n't know : the language of the contract , why he backed out , the relationship between Parcells and the team . 
``", "How much , if any , compensation , Tagliabue may award Tampa Bay is uncertain .", "Roberts said a low-round draft pick might be appropriate .", "Weiler suggested that if Parcells , in Tagliabue 's judgment , is more valuable now than he was to Tampa Bay last January , the compensation might be a little more generous .", "Vickery said a court remedy might have meant awarding Tampa Bay the financial difference between what it paid to hire Gruden -LRB- $ 17.5 million over five years -RRB- if it was more than what Parcells would have received .", "But Parcells 's contract was more lucrative .", "Vickery added , `` Damages are mitigated by the fact that they hired someone else who did a good job . ''", "If that becomes a factor in Tagliabue 's thinking , he will have to speculate on whether Parcells might have equaled Gruden 's success .", "PRO FOOTBALL ."], "summary": ["Tampa Bay Buccaneers file complaint with NFL , seeking damages if Dallas Cowboys hire Bill Parcells as coach , to make up for significant amount of time spent pursuing and signing Parcells to coaching contract , which he abruptly backed out of ."], "publication": "nyt50", "label": [7, 4, 1], "tag": ["Sports"]} -{"id": "1453136", "text": ["Latest fortune for Ying Liu , a Queens fortuneteller : You will be arrested , hauled into court and accused of making expensive predictions that did not come true .", "The Buddha 's eye sees many things , according to Ms. Liu , 45 , who has been peddling her psychic powers to Chinese immigrants from a storefront in Flushing for several years .", "She has predicted marriages , career advancements and births , and she has also made hypertension , cancer and other illnesses disappear , according to her advertisements .", "Among her grander forecasts is that New York City will be the host of the 2012 Summer Olympics .", "But it seems unlikely that Ms. Liu saw handcuffs in her future when she agreed to help Chung Chen Ling , 30 , who sought her supernatural services in June 2001 , according to court papers .", "Ms. Liu told Mr. Chung that for a fee of $ 8,000 she would grant him the same psychic powers that she herself had found so useful , according to investigators , who said Ms. Liu 's case was among the more unusual ones to show up in the Queens courts .", "Fortune-telling is illegal , according to the New York State penal code .", "Under the law , a fortuneteller is defined as anyone who `` claims or pretends to tell fortunes , or holds himself out as being able , by claimed or pretended use of occult powers , to answer questions or give advice on personal matters or to exorcise , influence or affect evil spirits or curses . ''", "The law does not apply to people who tell fortunes `` as part of a show or exhibition solely for the purposes of entertainment or amusement . ''", "Mr. Chung paid the $ 8,000 fee and waited almost a year , according to prosecutors , who said they were looking into whether Ms. Liu left other victims with fortunes that were paid for but never came true .", "The district attorney 's office says that even in a city where soothsayers abound , particularly in many immigrant neighborhoods , fortunetellers are seldom arrested .", "`` Prosecutions for the crime of fortunetelling are not unheard of , but they are still exceedingly rare , '' said Patrick Clark , a spokesman for the Queens district attorney , Richard A . Brown .", "It may well be that most of those who find themselves swindled out of promised rose gardens are reluctant to come forward .", "But Mr. 
Chung , who lives on Long Island and visited Ms. Liu at her Flushing office , said that when he finally realized he had been had , he had to do something .", "After more than 11 months , there was still nothing psychic about him , he said .", "No special powers , no ability to predict the future , no Buddha 's eye .", "So he went to the authorities , first to the state attorney general , who referred the case to the Economic Crimes Bureau of the district attorney 's office .", "After an investigation into Mr. Chung 's lack of psychic powers , detectives arrested Ms. Liu on Dec . 20 and she was charged with grand larceny in the third degree -- in effect , stealing money from Mr. Chung by not delivering the promised goods , the psychic powers -- and fortunetelling .", "She was arraigned on Dec . 21 and is due back in court on Monday .", "If convicted , Ms. Liu could be sentenced to up to seven years in prison on the grand larceny charge and up to 90 days in jail on the fortunetelling charge .", "Ms. Liu , who also runs a beauty salon in Flushing , refused to discuss the case yesterday .", "She referred calls to her lawyer , who was out of town and could not be reached .", "Last week , Ms. Liu told The World Journal , the Chinese-language daily newspaper , that she was innocent of the charges , and she has told reporters that she would hold a news conference to explain it all ."], "summary": ["Queens fortuneteller Ying Liu is scheduled to appear in court on charges of larceny .", "She promised Chung Chen Ling that for $ 8,000 she would grant him same psychic powers she has ."], "publication": "nyt50", "label": [5, 18], "tag": ["New York and Region"]} -{"id": "1453137", "text": ["Stocks in the United States plunged in 2002 amid fears of war and terrorism , a weak economy , rising oil prices and dozens of corporate scandals .", "It was the third consecutive annual decline , the first time that has happened in 60 years .", "Making matters worse , the average stock fell further in 2002 than it had in either of the previous two years .", "The Standard & Poor 's 500-stock index posted its worst year since 1974 .", "Many big technology stocks slipped into the single digits , and some dropped to prices below $ 1 .", "And Wall Street 's mood is bleak .", "At the end of 2000 , most investors were optimistic that a return to quick gains could not be far off .", "A year ago , many remained resolute , feeling that stocks and the United States economy had withstood the Sept . 11 attacks and were poised for recovery .", "Now that optimism has disappeared almost entirely .", "After two decades in which stocks suffered only brief and mild downturns -- interruptions in profitable years -- the depth of the bear market has shocked many investors , especially those who were not shareholders in the 1970 's , the last time stocks endured a prolonged setback .", "But even longtime professional investors say they have been dismayed by how much stocks , which were mostly flat yesterday on the last day of the year , have fallen from their peak in March 2000 .", "Since then , the Wilshire 5000 , the broadest market index , has dropped 43 percent , leaving investors $ 7.4 trillion poorer -- a potential loss in wealth of $ 26,000 for every American .", "This year , the Wilshire dropped 22.1 percent .", "The S . & P . 
500 index dropped 23.4 percent .", "The Dow Jones industrial average finished down 16.8 percent , and the Nasdaq composite index -- most punished among the major gauges -- fell by 31.5 percent .", "`` To my way of thinking , this bear market , traversing almost three years , is certainly worse than the 1973-74 break , '' said James M . Weiss , former chief investment officer of State Street Research .", "Mr. Weiss , who now runs his own money management company , began working on Wall Street in 1972 .", "`` We 're at a point of maximum frustration here , `` he said .", "Hugh Mullin , co-manager of the $ 20 billion Putnam Growth and Income mutual fund , said , `` I think people are much more pessimistic than they have been in a long time . ''", "Mr. Weiss said he thought the bear market could be divided into three phases .", "The first , in 2000 , was a necessary correction to the bubble in technology stocks in the late 1990 's .", "The second , he said , in 2001 , came as a result of the broader recession and profit slowdown .", "At the end of 2001 , Mr. Weiss and many other professional investors were expecting that 2002 would be a positive year .", "`` There was every reason to believe that the factors that led to the onset of the bear market had pretty much run their course by a year ago at this time , '' he said .", "But the optimists did not anticipate the scandals that threw into question the integrity of many corporate financial statements .", "`` We have concluded a three-phase bear market and we did n't need to have the third phase , `` Mr. Weiss remarked .", "Enron had already collapsed and filed for bankruptcy protection by the beginning of 2002 .", "But despite complaints from short sellers that corporations had used accounting gimmickry to inflate their profits , many investors thought the crisis at Enron was an isolated case .", "Then , as the spring wore on , journalists , analysts and regulators began to question the accounting used by dozens of other companies , including General Electric , a market bellwether .", "The crisis peaked in June , when WorldCom announced that it had inflated its cash flow by $ 3.9 billion , a figure that has since been raised to an estimate of $ 9 billion .", "James Chanos , a hedge fund manager who played a major role in unearthing the overstated profits at Enron , said WorldCom 's announcement in June ensured that 2002 would be another losing year .", "`` The scale of the fraud which was disclosed was so massive and so blatant that it really gave rise to the idea that one could n't even rely on cash flow statements anymore to ascertain corporate health , `` Mr. Chanos said .", "`` Prior to WorldCom , I think Wall Street always felt that with enough due diligence in the cash flow statements and financial analysis , one could always be certain where the financial health of a company might lie .", "Post-WorldCom , I think we all had to reassess that . ``", "In the month after WorldCom 's disclosure , the S . & P . 500 dropped 195 points , or 20 percent , to 797.70 on July 23 , its third-lowest close of the year .", "Over the next month , stocks rallied , but by then the economy -- which had showed strong momentum during the first part of the year -- slowed sharply , in part because the market 's plunge discouraged companies from raising capital for new investments .", "Projections for corporate profits dropped quickly , and stocks followed .", "On Oct . 9 , the S . & P . 
500 fell to 776.76 , its lowest close of the year .", "Since then , stocks have rallied , but only slightly .", "And Charles L . Hill , the research director for Thomson First Call , which compiles analysts ' profit estimates , said he thought that 2003 could be another disappointing year .", "In 2002 , Mr. Hill was prescient , repeatedly warning that estimates were too high and that investors would be disappointed if they expected that a big earnings recovery would support a rally .", "Looking at forecasts for 2003 , he said earnings growth would once again be weaker than investors hoped .", "Analysts are continuing to cut forecasts for the first half of this year , Mr. Hill said .", "Six months ago , they expected that corporate profits would grow 25 percent , year over year , in the first quarter of 2003 .", "Now , they have cut that forecast to 12 percent .", "By the time quarterly earnings are announced in April , Mr. Hill said , growth could be as low as 5 percent .", "Considering that profits fell 17 percent in 2001 and rose only 1.6 percent in 2002 , that level of recovery is anemic , he said .", "`` A lot of talk at the beginning of the year was , ' We do n't have three years in a row of down markets , so the odds of having a down third year are minimal , ' `` he recalled .", "`` But we 've never in our lifetime had a bubble as big as this one .", "You should n't be surprised that we had three down years , and you should n't be surprised if it turns out to be four . ``", "THE MARKETS : STOCKS ."], "summary": ["US stocks plunge in 2002 amid fears of war and terrorism , weak economy , rising oil prices and dozens of corporate scandals .", "It is third consecutive annual decline , first time that has happened in 60 years .", "Making matters worse , average stock fell further in 2002 than it had in either of previous two years .", "Standard & Poor 's 500-stock index posts its worst year since 1974 .", "Wall Street 's mood is bleak .", "Wilshire 5000 drops 22.1 percent , S & P 500 index drops 23.4 percent , Dow Jones industrial average finishes down 16.8 percent , and Nasdaq composite index falls by 31.5 percent .", "Graphs .", "Photo ."], "publication": "nyt50", "label": [2, 0, 1, 3, 14, 13, 5, 12], "tag": ["Business"]} -{"id": "1453138", "text": ["An influx of investment capital from both Arab and Israeli investors , in roughly equal proportion , has made the Middle East the second-largest source of foreign investment in United States commercial real estate this year after Europe , where changes in German pension fund laws contributed to a surge in investment in American properties .", "Within the last year , seven apartment complexes in San Antonio and six more throughout Texas were purchased by Alon U.S.A. 
, based in Dallas , a subsidiary of the Alon Israel Oil Company , best known to American consumers for its Fina gasoline stations .", "The company paid about $ 180 million to United Dominion Realty Trust for the complexes , which were built from 1983 to 1995 and range from 140 units to 596 .", "Oil companies are major sources of the investment in American commercial real estate , as are wealthy families that have a long history of quietly investing in commercial real estate in the United States .", "Increasingly , though , more money is coming from smaller investors and groups of investors .", "That shift is a product of a decade or so of efforts by financial institutions and money managers to create investment vehicles that can be marketed in the Middle East , especially to observant Muslims .", "Low borrowing costs and the poor performance of world stock markets have contributed to an increase in investment in American real estate from many parts of the world .", "Australian and Canadian investors , in fact , each had greater increases than those from the Middle East or even Germany on a percentage basis last year , but from much smaller bases .", "Real Capital Analytics , a research firm based in Manhattan that monitors property transfers in excess of $ 5 million , tracked almost $ 1.4 billion in commercial property sales to Middle Eastern investors in the last year , three times the $ 436 million tracked in 2001 .", "The figure for this year , which was based on closed deals through November and those in contract , accounted for about 23 percent of foreign investment in commercial real estate .", "`` These numbers are likely conservative because many foreign investors , particularly from the Middle East , operate quietly and confidentially , '' said Robert M . White , the president of Real Capital Analytics .", "Manhattan and the Washington area , typically among the most popular markets for foreign investors because they are easy to enter and manage from a distance , appear on the list of properties that Middle Eastern buyers acquired in the last 12 months , but they do not dominate it .", "The list includes St . Louis .", "Kansas City , Mo .", "Woodland Hills , Calif .", "Boulder , Colo .", "And Chicago .", "The Kuwait Finance House closed on a $ 13.7 million purchase of an industrial park in Lebanon , Tenn .", "Part of the reason these investors are spreading their money farther afield is the intense competition for premium office properties in Washington and New York , brokers and analysts said .", "The bidding wars among German pension funds are playing no small part in that .", "`` We have n't seen them very active as buyers in this market , `` said James S . Luck , a senior director of the real estate services firm Cushman & Wakefield who handles investment sales in Washington and its suburbs .", "Without the restrictions on their investments that pension fund managers face , Mr. Luck said , wealthy families and oil companies from the Middle East `` have the ability to go where they think the deals are . ''", "`` They tend to be more entrepreneurial and opportunistic , '' Mr. Luck said .", "`` They 're clearly looking for higher yields than the stuff the Germans and the pension funds are bidding on . 
``", "From Israel , the most active investor was Alon U.S.A.", "Others that made the list from Real Capital Analytics were the Red Sea Group , a real estate investment company best known for hotel management and shopping mall development in Israel , and one listed as an `` unidentified Israeli family . ''", "From the Arab countries , transactions that listed Investcorp as the buyer dominate the list .", "Several major international financial institutions , including UBS and Citibank , manage money for Middle Eastern families or buy properties on their behalf as a nominee , as do specialty firms like the Carlyle Group and Investcorp , according to international real estate brokers , money managers and lawyers .", "Investcorp , founded in 1982 , has offices in New York , London and Bahrain .", "Its real estate team began operations about seven years ago and oversees a portfolio with a current value of about $ 2.2 billion , according to the firm .", "Its clients , like those of the private banking divisions of multinational financial institutions and of the boutique investment firms , tend to be wealthy , private and secretive families .", "But through new investment vehicles , smaller investors , doctors and lawyers as opposed to scions of royalty and oil magnates , have become a force in commercial real estate markets now , too .", "`` A lot of these funds , the fund manager is a Western , United States company , '' said Michael J . T . McMillen , a lawyer with King & Spalding in New York whose practice concentrates on Islamic financing and project financing .", "`` They 're looking at this area , the Middle East , as a new market , and these investors are trying to get into the United States , so it 's working very well . ``", "Mr. McMillen , who has worked with institutions like HSBC Amanah Global Properties Income Fund and the Gulf Investment House of Kuwait , said Middle Eastern investors were starting to buy into offerings that require five to seven years to pay off as opposed to two - or three-year deals .", "They are focusing on commercial buildings with a single tenant and a purchase price of $ 15 million to $ 20 million , he said .", "Among the obstacles to selling investment vehicles to Muslim investors is the prohibition in Muslim law against paying or receiving interest .", "This has been approached in a variety of ways to sell real estate investments , including the formation of a broad partnership known as a mudarabah , where some investors put in effort and others put in money .", "Mr. McMillen said some funds are now working to sell debt as well as equity investments , a more difficult proposition where interest payments are forbidden by religious custom .", "`` If we went to a bank in the Middle East that does n't pay its depositors , `` Mr. McMillen said , '' we could offer the bank the ability to pay investors 3 percent , keep 1 percent , and the cost of funds to U.S. developers would only be 4 percent . ``", "He said that arrangements like the mudarabah have become particularly difficult to execute and potentially precarious since the advent of the USA Patriot Act .", "The legislation , adopted after the terror attacks of Sept . 
11 , lays strict controls on financial institutions .", "The legislation is intended to spread regulation gradually across a wide swath of industries , and real estate companies are not considered first priorities .", "Still , several big domestic commercial real estate investors , especially the more sophisticated pension funds , are beginning to examine their new responsibilities and are casting a warier eye on foreign investors , especially those from the Middle East .", "Under the coming regulations , said J . William Codinha , a partner with Nixon Peabody in Boston who specializes in white-collar crime , `` they are n't going to be able to just take money in that is held in a trust .", "They have to know who the beneficiaries of the trust are . ``", "That could be a costly proposition , one that Mr. Codinha said he was trying to persuade clients to build into their transaction costs .", "Michael D . Allison , the chairman of International Business Research , a corporate security and investigations firm in Princeton , N.J. , said such research on identifying investors and transaction partners could add $ 20,000 to $ 50,000 to the cost of a real estate deal .", "`` For an investor in Minneapolis , it 's pretty easy to figure out what you need to know , `` Mr. Allison said .", "`` Some guy coming at you from Lebanon , the resources available to you to unearth who the guy is and where the money is coming from is difficult , from the language issues to the paucity of information . ''", "COMMERCIAL REAL ESTATE ."], "summary": ["Influx of investment capital from both Arab and Israeli investors , in roughly equal proportion , makes Middle East second-largest source of foreign investment in United States commercial real estate this year after Europe , where changes in German pension fund laws contributed to surge in investment in American properties .", "Within last year , seven apartment complexes in San Antonio and six more throughout Texas were purchased by Alon USA , based in Dallas , subsidiary of Alon Israel Oil Company .", "Oil companies are major sources of investment in American commercial real estate .", "Photo of San Antonio apartment complex .", "Table shows leading sources of foreign investment in US commercial real estate ."], "publication": "nyt50", "label": [0, 1], "tag": ["Business"]} -{"id": "1453139", "text": ["Tom Daschle , the Senate Democratic leader , has told associates that he is likely to run for president in 2004 and will create a presidential exploratory committee sometime this month , Democrats close to Mr. Daschle said yesterday .", "The move , after weeks of deliberations by Mr. Daschle , would sharply alter the dynamics of the increasingly crowded competition among Democrats to challenge President Bush .", "Mr. Daschle is to return to his home in South Dakota this weekend to review his decision one more time with friends and advisers , his associates said .", "But they said that over the last few days Mr. Daschle has expressed an increasingly strong desire to challenge Mr. Bush and has concluded that he needs to move quickly to catch up with Democratic rivals who have been lining up political supporters and financial backers for months .", "Mr. Daschle would announce a decision sometime over the next few weeks and immediately begin traveling to primary states , his associates said .", "The deliberation by Mr. 
Daschle , who turned 55 early last month and was elected to the Senate in 1986 , has been closely watched by many Democrats who described it as the last major outstanding question about the Democratic presidential field .", "Mr. Daschle , as a face of the Democratic opposition , would presumably draw the support of experienced Democratic Party strategists and many of his fellow Democrats in the Senate who have worked closely with him over the years .", "Mr. Daschle 's advisers said that should he run for president , he would be unlikely to seek re-election as a senator , though they said he had not reached a decision on that .", "His term expires at the end of 2004 .", "They said he would probably stay on for the time being as the Senate minority leader , though he was likely to step down as the demands of a presidential campaign intensified .", "The prospective entry of Mr. Daschle would round out a field of Democratic candidates notable for being grounded inside Washington .", "Indeed , inside the halls of Congress .", "Mr. Daschle would be the fourth Democratic senator running for president in 2004 .", "Another expected candidate is Representative Richard A . Gephardt of Missouri , the former House minority leader .", "The only major Democratic candidate who would seem positioned to run the kind of successful outsider campaign run by Bill Clinton , who was governor of Arkansas , and George Bush , who was governor of Texas , is Gov . Howard Dean of Vermont .", "In addition , the Rev . Al Sharpton of New York is also running for the nomination .", "With the possible exception of Senator Joseph I . Lieberman of Connecticut , who ran for vice president with Al Gore in 2000 , Mr. Daschle is probably the best known of the Democratic candidates , party officials said yesterday .", "That is not entirely a good thing .", "It was under his watch last year that Democrats lost control of the Senate , turning Mr. Daschle from a majority leader into a minority leader .", "The other major Democratic candidates are Senator John Kerry of Massachusetts and Senator John Edwards of North Carolina .", "Two other senators -- Joseph R . Biden Jr . of Delaware and Mr. Lieberman 's Connecticut colleague , Christopher J . Dodd -- have also said they might run ."], "summary": ["Tom Daschle , Senate Democratic leader , tells associates that he is likely to run for president in 2004 and will create presidential exploratory committee ."], "publication": "nyt50", "label": [0], "tag": ["U.S."]} -{"id": "1453141", "text": ["Merry Christmas .", "Get lost .", "That , essentially , was the message Robert Pagein received from his employers -- make that former employers -- at Verizon Communications .", "Mr. Pagein , who is married and has a year-old son , was a field technician who had worked for the phone company for four years .", "One of the lures of the job was its stability .", "The pay was n't great , but it was steady .", "If you were disciplined you could pay your bills , take a vacation every year or so , and put a little aside .", "That 's the way it works in theory .", "In reality , Mr. Pagein was one of 2,400 Verizon workers in New York who were shown the door just a few days before Christmas .", "Those workers formed the bulk of a pre-holiday wave of terminations that claimed the jobs of 3,500 Verizon employees in the Northeast and mid-Atlantic states .", "Mr. 
Pagein will not be destitute .", "His wife is working and he has a college degree .", "But the cold-blooded way in which he and his fellow workers were lopped off the employment rolls by Verizon , and the phenomenal gap that exists between the compensation available to the company 's ordinary workers and the fabulous , multimillion-dollar packages taken home by executives at the top of the Verizon pyramid , has shaken his faith in a system he believed in .", "`` I 'm 36 years old and grew up in Lefrak City -LSB- Queens -RSB- , `` he said .", "`` As a working-class guy I kind of accepted long ago that I was n't going to make a fortune or anything like that .", "What I figured was that if I worked hard , if I became a cop or I joined the phone company or something like that , at least I would have a regular working-class or middle-class life .", "At least you 'd make your 50 grand or 55 grand a year .", "The government would take out your taxes , but you 'd have something left over . ``", "Mr. Pagein 's comforting belief in a system that looks out for the ordinary worker evaporated with the arrival of his layoff notice .", "As a not-so-merry Christmas and then a not-so-happy New Year approached , he found himself thinking more and more about the big bucks -- the tens of millions of dollars -- being pocketed by top Verizon executives like Ivan Seidenberg and Larry Babbio .", "It 's one thing to acknowledge that there are inequities in the system , he said .", "But it 's `` really tough '' to accept that you can be thrown out of work by executives who take extraordinary sums out of a company whether their business decisions are wise or not .", "`` We were laid off , effective immediately , '' he said .", "`` ` Merry Christmas , thanks for working at ground zero and breathing the dust .", ". '", "They told us we were heroes and used the pictures of us at ground zero to sell themselves .", "Now we 're out . ``", "Last spring Verizon reported a first-quarter loss of $ 500 million .", "The company attributed the loss to a tough economy and a $ 2.5 billion write-down for bad investments .", "By the third quarter it was reporting earnings of $ 4.4 billion .", "But officials said the layoffs , the first in the history of the New York telephone company , were inevitable because the economy is still in trouble and competition is increasing .", "Company officials said the total compensation in 2001 for Mr. Seidenberg , Verizon 's chief executive , was $ 13.4 million , and for Mr. Babbio , $ 24 million .", "Figures released by the Communication Workers of America , which represents the laid-off employees , showed that from 1997 through 2001 , Mr. Seidenberg collected more than $ 56 million in salary , bonuses and stock options , and that Mr. Babbio , the company 's vice chairman , collected more than $ 78 million .", "Those numbers were on Mr. Pagein 's mind as he and his family spent Christmas at his grandmother 's home in Flushing .", "`` I kind of Scrooged on the presents , '' he said , `` Everybody knew .", "It was , like , ` Well , do n't expect Robert to bring anything because , you know , he just got laid off . '", "`` He added : '' It 's tough to take .", "These guys took their outrageous , outrageous bonuses and we 're out on the street .", "I guess you do n't notice the inequities so much when you 're working because then , at least , you 've got something . ``", "Mr. 
Pagein said he would go on unemployment for a while and use that time to look for a different career .", "`` Part of the pain I 'm feeling right now has to do with some of the others who were fired . ``", "he said .", "`` Some of them were the only income earners in their family .", "They seem shell shocked . ``", "E-mail : bobherb@nytimes.com Maureen Dowd and Thomas L . Friedman are on vacation ."], "summary": ["Bob Herbert Op-Ed column scores ` cold-blooded way ' in which Verizon terminated jobs of 2,400 workers in New York just a few days before Christmas .", "Notes phenomenal gap that exists between compensation available to company 's ordinary workers and fabulous , multimillion-dollar packages taken by executives at top of Verizon pyramid ."], "publication": "nyt50", "label": [12, 8], "tag": ["Opinion"]} -{"id": "1453142", "text": ["At age 7 , Lennwood Pergerson decided that New York City was the place to be .", "`` On Wednesdays , my mother and I would ride the wicker-seat trains into New York , '' he recalled .", "`` We 'd go shopping and eat at the Port Authority when it was fabulous .", "I could n't get over it . ``", "More than 40 years later , Mr. Pergerson , 50 , is still in New York .", "He still wants to live here , to thrive here .", "Only now he spends his Wednesdays looking for a job and a place to live at the Brooklyn Bureau of Community Service , one of the seven local charities supported by The New York Times Neediest Cases Fund .", "At 16 , while he was still in high school , Mr. Pergerson left the genteel Princeton , N.J. , suburb where he grew up and set out for the bright lights of the big city as the frontman for a bluesy rock ` n' roll band .", "With his husky radio-ready basso , seasoned by years of hard living and hard partying , it is easy to imagine him belting out his signature song , `` Dark End of the Street . ''", "The blues come easily to Mr. Pergerson .", "With his stories of friends laid low by Vietnam , drug addiction and AIDS , Mr. Pergerson knows the blues all too well .", "But with the highs and horrors of New York night life behind him , Mr. Pergerson , would rather sing the gospel of faith and redemption .", "`` I 'm lucky , `` he said .", "`` I lived a tragedy , but now I can see light . ''", "Back in the 1970 's , when disco was the rhythm of the night , Mr. Pergerson , was , to use his favorite word , fabulous .", "`` I was a bad boy , and a fun guy that did it to the max , '' he said .", "`` But I was prosperous because I had to pay for all those outrageous clothes . ''", "He toured as a roadie with the disco singer Gloria Gaynor for about a year .", "And Mr. Pergerson often found himself in the center of all the decadent action .", "`` In those years , we just did n't believe there could be anything more horrible than Vietnam , `` he recalled .", "`` So the bar and after-hours scene was all about rampant drugs and sex , all in the name of the beat . ''", "Until that beat started to falter .", "In the late 1970 's and early 80 's , Mr. Pergerson said , he noticed that some people were starting to get sick .", "The randomness of it all reminded him of the draft lotteries that were held on Saturdays back in his small suburb , and it seemed , as it did then , that it was only a matter of time before his number came up .", "But while many in his circle of friends died of AIDS , Mr. 
Pergerson never got ill .", "He was a disc jockey into the late 80 's .", "After that , during the economic boom of the 90 's , he took up bartending , serving up a good time .", "`` If someone was feeling down , I could make them feel all right , '' he recalled .", "In the summer of 2000 , he was hired as a bartender and server at Pizzeria Uno at the South Street Seaport , just east of the World Trade Center .", "He outpaced his younger colleagues , and in short order , he was doing all the big parties .", "But as with with the Vietnam War and the emergence of AIDS , Sept . 11 changed everything .", "All of a sudden , nobody was in the mood to party , certainly not within view of the devastation at ground zero .", "Like so many other New Yorkers , Mr. Pergerson lost his job .", "At first , he remained optimistic .", "He did not worry because he had never had a problem finding steady work .", "`` I 'm the comeback kid , `` he said .", "`` I thought to myself , ' I 'll be getting a job .", "I do n't need any assistance . '", "`` But in a weak economy , even comeback kids can find themselves looking for help , and for work , without finding either .", "He was out of work for six months when he turned to the Brooklyn Bureau 's Community Response Center last March .", "There , he met an employment specialist , Sharon Greenberg .", "`` Mr. Pergerson was always very upbeat and followed up on every lead , '' she said .", "He was also trying to get assistance from FEMA .", "He described that process as akin to `` digging a ditch in Bosnia and going through the long way just to get them to answer their phones . ''", "Diana Naftal , a social worker at the Brooklyn Bureau , began helping him in his effort to get FEMA aid in mid-May .", "`` Once I met Lennwood , I had an immediate bond with him , '' Ms. Naftal said .", "`` I was impressed with his creativity and his drive .", "He kept his expenses low and came to us for only basic needs . ``", "After months of wrangling with FEMA representatives , Mr. Pergerson received rental assistance in June .", "In addition , over the past year , the Brooklyn Bureau has tapped Neediest Cases to provide him $ 1,110 for emergency help with rent , utilities , food and transportation .", "Mr. Pergerson is looking for a bartending job or work in the community development sector .", "And he is looking for an apartment , because the Brooklyn building he lives in is being sold .", "But in the meantime , he says , he has been fortunate to find a supportive team of advocates .", "`` I remember my mother handing money to a stranger in the Port Authority when I was a little boy , '' he said .", "`` People have been good to me , too , and have shown me that hope and help are right here in front of all of us . ''", "HOW TO HELP Checks payable to The New York Times Neediest Cases Fund should be sent to 4 Chase Metrotech Center , 7th Floor East , Lockbox 5193 , Brooklyn , N.Y. 11245 , or any of these organizations : BROOKLYN BUREAU OF COMMUNITY SERVICE 285 Schermerhorn Street , Brooklyn , N.Y. 11217 .", "CATHOLIC CHARITIES OF THE ARCHDIOCESE OF NEW YORK 1011 First Avenue , New York , N.Y. 10022 .", "CATHOLIC CHARITIES , DIOCESE OF BROOKLYN AND QUEENS 191 Joralemon Street , Brooklyn , N.Y. 11201 .", "CHILDREN 'S AID SOCIETY 105 East 22d Street , New York , N.Y. 10010 .", "COMMUNITY SERVICE SOCIETY OF NEW YORK 105 East 22d Street , New York , N.Y. 10010 .", "FEDERATION OF PROTESTANT WELFARE AGENCIES 281 Park Avenue South , New York , N.Y. 
10010 .", "UJA-FEDERATION OF NEW YORK Church Street Station P.O. Box 4100 New York , N.Y. 10261-4100 Donations may be made with a credit card by phone at -LRB- 212 -RRB- 556-5851 -LRB- ext . 7 -RRB- or online , courtesy of CharityWave.com, an Internet donations service , at www.nytimesneediest.charitywave.com.", "For instructions on how to donate stock to the fund , call -LRB- 212 -RRB- 556-1137 or fax -LRB- 212 -RRB- 556-4450 .", "No agents or solicitors are authorized to seek contributions for The New York Times Neediest Cases Fund .", "The Times pays the funds expenses , so all contributions go directly to the charities , which use them to provide services and cash assistance to the poor .", "Contributions to the fund are deductible on federal , state and city income taxes to the extent permitted by law .", "To delay may mean to forget .", "Previously recorded : $ 5,912,644.29 Recorded yesterday : $ 537,979.38 Total : $ 6,450,623.67 Last year to date : $ 5,660,415.27 Correction : January 2 , 2003 , Thursday A chart yesterday with the article about The New York Times Neediest Cases Fund reversed the figures for the amounts collected so far this year and by this time last year .", "Correct updated figures appear today , on Page B7 ."], "summary": ["Lennwood Pergerson , who lost his job after Sept 11 , receives emergency rental assistance from Brooklyn Bureau of Community Service , which is supported by New York Times Neediest Cases Fund .", "Photo ."], "publication": "nyt50", "label": [6], "tag": ["New York and Region"]} -{"id": "1453144", "text": ["When Herodotus Damianos bought a failed winery in this hamlet in 1994 and named it Duck Walk , it was meant as a tribute to the days when a duck in the Hamptons was more likely to be waddling on a farm than bathing in truffle oil at a local bistro .", "Nine years later , Duck Walk Vineyards produces 25,000 cases of wine a year , turning grapes planted on more than 100 acres of vineyards into chardonnay , merlot , port and dessert wine .", "Most bottles are sold in New York , and some have labels featuring flocks of white Peking ducks .", "But if a California winery has its way , Dr. Damianos 's tribute to the days when Long Island duckling was a renowned culinary treat , like Maine lobster or Maryland crabs , could go the way of most East End duck farms themselves .", "The California winery , called Duckhorn Wine Company , believes it owns the right to market wine under the duck name and label .", "`` In the wine business today , the word ' duck ' and duck designs are associated in the minds of the consuming public with Duckhorn Vineyards , `` a lawyer for the California winery 's co-founder , Daniel Duckhorn , wrote to Duck Walk in August 2000 .", "The letter laid the groundwork for what has become a bicoastal legal spar over the right to sell wine using animals commonly associated with stamps and bathtub toys .", "`` Duckhorn is concerned about Duck Walk 's use of the Duck Walk mark and marks comprised of duck designs and the customer confusion it may cause with respect to Duckhorn 's trademarks and trade dress , `` the letter continued , '' and believes it is in the best interests of both companies to avoid any such confusion . 
``", "In 2001 , Duck Walk filed a pre-emptive complaint in United States District Court in Central Islip , asking a judge to declare it had every right to use the Duck Walk name and pictures of its web-footed mascot .", "Duckhorn , based in Napa Valley , responded with a lawsuit claiming Duck Walk had intentionally and maliciously violated trademark law .", "The case is still simmering in the Long Island court , and lawyers for both sides are marshalling evidence to bolster their cases .", "This fall , things got so heated that a judge forbade both parties to speak with reporters .", "But before that order was in place , Dr. Damianos , a retired internist who also owns Long Island 's largest vineyard , Pindar , on the North Fork , told Wine Spectator magazine that he thought customers would have no problem distinguishing between the two brands .", "`` It 's not confusing , `` he said .", "`` Our duck is cute .", "Theirs is ugly . ``", "Duck Walk 's array of labels includes a painting of a flock of Peking ducks , a Great Gatsby-esque couple in a convertible with a tiny silver duck affixed to the front , and a watercolor picture of daffodils , with no duck in sight .", "Duckhorn 's array of labels includes a mallard sitting contently on a pond and , for its Paraduxx -LRB- `` pair of ducks '' -RRB- brand , two birds flying in harmony , their wings outstretched .", "Duckhorn Wine Company is larger , producing about 56,000 cases of wine a year , according to court filings .", "Its wines are more expensive , ranging from $ 22 to $ 90 .", "Duck Walk 's wines range from about $ 8 to $ 25 .", "This is not the first time Duckhorn Vineyards has gone after a wine with `` duck '' in its name .", "When Cecchetti Sebastiani Cellar released a Smoking Duck brand of wine in 1999 , it also heard from Mr. Duckhorn .", "He persuaded Don Sebastiani , chairman of the board , to rename the brand Smoking Loon .", "Mr. Sebastiani also agreed to replace the label , which had featured two ducks on a pond , one savoring a cigar , with a more abstract , mosaic-like design .", "And Duck Pond Cellars , a winery in Dundee , Ore . , reached a confidential out-of-court settlement with Duckhorn , Duck Pond 's marketing director said .", "In fact , according to a transcript of an unsuccessful settlement conference in Central Islip last summer , Susan E . Hollander , a lawyer for Duckhorn , said , `` We have stopped every other winemaker in the market , or limited them in some way , that uses the word ' duck . '", "`` But here on Long Island 's East End , where the peaceful seashores and sprawling potato fields of years past have been supplanted in the popular imagination by images of seaside mansions and suburban sprawl , the duck is a symbol of pride .", "And the notion that a California company has dibs on its feathery , beady-eyed likeness has sparked outrage .", "`` I just do n't see where this guy comes off , thinking he can eliminate the duck from the Long Island pedigree , `` said Michael Hollander , president of the Long Island Convention and Visitors Bureau .", "`` It 's amazing that people can think that with all the duck farms we had , that ducks would n't be a part of our history , or a part of our belief , or a part of our culture . ``", "To press that case , Mr. 
Hollander has instructed his staff members to gather evidence tracing the link between Long Island and its ducks -- from the 1870 's , when Peking ducks were brought here from China , to the 1950 's , when dozens of duck farms flourished , to the 1970 's , when soaring land prices and mounting concerns about waterways polluted by duck droppings conspired to close most farms .", "Only a handful of duck farms remain here today .", "But the duck lives on , through a 20-foot concrete statue in Flanders that is on the National Register of Historic Places , the Long Island Ducks minor league baseball team and John Duck Jr . ` s Restaurant in Southampton , which serves Long Island duckling with apple raisin stuffing , Duck Walk wine and fish caught aboard a boat called the Quack Quack .", "Trademark disputes are not new to the wine business .", "In one famous case , Kendall-Jackson Winery sued E . & J . Gallo Winery for copying the design of its Turning Leaf label .", "It lost .", "But just what is it about ducks and wine .", "Certainly nothing having to do with aroma , say those who remember what it was actually like to drive through the Hamptons during the duck days .", "`` When I was a kid in the car with my parents , '' recalled John B . Westerhoff Jr . , an owner of John Duck Jr . ` s Restaurant , '' we would hold our breath . `` ."], "summary": ["California 's Duckhorn Wine Company is litigating against Long Island 's Duck Walk Vineyards over use of word ` duck , ' claiming it owns right to market wine under duck name and label .", "In 2001 , Duck Walk filed pre-emptive complaint in federal court in Central Islip , NY , asking judge to declare it had right to use name and pictures of duck mascot .", "Duckhorn cross-sued , claiming trademark violation .", "In fall 2002 , gag order was imposed on both sides .", "Details of suit noted .", "Company profiles .", "Photos ."], "publication": "nyt50", "label": [8, 4, 10], "tag": ["New York and Region"]} -{"id": "1453145", "text": ["Some people may be upset that retail sales failed to meet expectations during the holiday season .", "Not Bill Talen .", "For the last four years Mr. Talen , also known as Reverend Billy , has been performing from the theaters of Bleecker Street to the Starbucks on Astor Place , exhorting people to resist temptation -- the temptation to shop -- and to smite the demon of consumerism .", "With the zeal of a street-corner preacher and the schmaltz of a street-corner Santa , Reverend Billy , 52 , will tell anyone willing to listen that people are walking willingly into the hellfires of consumption .", "Shoppers have little regard for how or where or by whom the products they buy are made , he believes .", "They have almost no resistance to the media messages that encourage them , around the clock , to want things and buy them .", "He sees a population lost in consumption , the meaning of individual existence vanished in a fog of wanting , buying and owning too many things .", "`` Consumerism is a dull way of life , '' he says .", "`` We 're all sinners .", "We 're all shoppers .", "Let 's do what we can . 
``", "It 's an act , a kind of performance art , almost a form of religion .", "He named it the Church of Stop Shopping .", "As Reverend Billy , he wears a televangelist 's pompadour and a priest 's collar , and is often accompanied by his gospel choir when he strides into stores he considers objectionable or shows up at protests like the annual post-Thanksgiving Buy Nothing Day event on Fifth Avenue in Manhattan .", "The choir , which is made up of volunteers , includes people like David Glover and his daughter , Zena , from Brooklyn .", "There is also Beka Economopoulos , who once sang at the White House , and Meredith Manna , who came in courtesy of one of the keyboard players .", "When they erupt in song , it is hard to ignore : `` Stop shopping ! Stop shopping ! We will never shop again ! '' Other performers preach the same gospel , with their own twists .", "Ange Taggart , who lives in Nottingham , England , turns up in places like Troy , N.Y. , to go into a store , buy a lot of things , and then return them .", "She recently filled a cart with Martha Stewart products at Kmart , then put them on the conveyor in a certain order , so that when she got her receipt , she said , the first letters on the itemized list spelled `` Martha Stewart 's hell . ``", "There is also Andrew Lynn , who created Whirl-Mart last year .", "He gets a group of people together , everyone with a shopping cart , and they stroll the aisles of Wal-Mart or Kmart , putting nothing in the carts .", "When store managers tell him to take his protest elsewhere , he tells them : `` This is n't a protest .", "We 're performing a consumption-awareness ritual . ``", "There may be something to it , too .", "Psychologists at the University of Rochester and at Knox College in Illinois have published studies concluding that people focused on `` extrinsic '' goals like money are more depressed than others and report more behavioral problems and physical discomfort .", "Some economists have also addressed the phenomenon of rich people who feel poor .", "Juliet B . Schor of Harvard University , the author of `` The Overspent American '' -LRB- Basic Books , 1998 -RRB- , says people are frustrated because they compare their lives with what they see on television .", "Robert H . Frank of Cornell reached a similar conclusion in `` Luxury Fever : Why Money Fails to Satisfy in an Era of Excess '' -LRB- The Free Press , 1999 -RRB- .", "It 's not that Reverend Billy thinks no one should ever buy anything .", "On a recent afternoon , he himself was seen purchasing a ream of printer paper and a bottle of wine .", "It is the futility of shopping he is trying to address -- the futility of leaning too heavily on the material at the expense of the spiritual and emotional .", "That mission has given focus to his art , his politics and even his religion .", "Raised by what he calls `` strict Dutch Calvinists '' in Rochester , Minn . , he made his way to New York in the early 1990 's .", "He had his epiphany in 1999 , when protesters disrupted the World Trade Organization meetings in Seattle .", "He discovered the potential of drama to send a political message .", "He discussed the revelation with a friend , Sidney Lanier , an Episcopal minister and cousin of Tennessee Williams who had used theater to evoke social reform themes in the 1960 's .", "Mr. Talen soon realized that after years of producing Spalding Gray and others , he suddenly had an act of his own .", "Mr. Lanier said he suggested a man of the cloth as a vehicle for Mr. 
Talen 's message .", "`` I encouraged him , '' he said .", "`` I said , you have a kind of Calvinist preacher in you that wants to come out . ''", "Mr. Talen , even before he developed the character , said he admired the cadence and the poetry of good fire and brimstone .", "Child labor , environmental damage and evidence of union busting by big retail chains , all to deliver low prices to consumers , provided plenty of material for any pulpit .", "`` I sense right now that our lives are getting absurd , '' he said .", "On a recent evangelical side trip , Mr. Talen ventured into the Kmart on Astor Place , where speakers blared Elvis and Tom Petty Christmas carols .", "His own face blank , he began to look for smiley-faces , which he considers one of the most nefarious of marketing tools .", "He found them on signs , on children 's pajamas , on stickers .", "Few of the shoppers , however , were smiling , he noticed .", "And that is part of the problem .", "`` The smile has been so thoroughly appropriated by transnational capital , '' he said .", "`` They discovered that smiling makes money . ''", "When he left Kmart , he walked down Lafayette Street , bellowing now and then in character about how creeping consumerism threatens the fabric of society , in the form of chain stores , sweatshops and more .", "But to the public , it mostly just means more stuff to buy at a good price .", "Indeed , it is no surprise that Reverend Billy has not had much of an impact .", "Even this year , considered to be a particularly disappointing Christmas shopping season , Americans are still expected to spend almost $ 1 trillion at stores , restaurants and auto dealers in the last three months of 2002 , up perhaps 3 to 4 percent from the year before .", "`` They do n't care ! `` Reverend Billy shouted to no one in particular on a dark stretch of Lafayette Street , as people carrying shopping bags from J . Crew , Macy 's and the Gap poured into a nearby subway entrance .", "`` They do care , '' a bearded man beside a scaffolding replied .", "`` They just have a bad attitude . ''", "`` Hallelujah ! '' Reverend Billy said .", "He says that a lot .", "The Reverend Billy made his first formal appearance at the Disney store in Times Square , circa 1998 .", "He was driven away in a police car , his wrists still cuffed to a large statue of Mickey Mouse .", "The store has since closed .", "He has found other targets .", "In general , he selects large global companies that he feels are inappropriately seizing control .", "In 1999 , he zeroed in on Starbucks .", "He was pleased to discover later that he had become the subject of a company memo .", "`` Reverend Billy sits quietly at a table with devotees and then begins to chat up the customers , '' the memo , dated April 24 , 2000 , reads .", "`` He works the crowd with an affirming theme but gradually turns on Starbucks .", "Toward the end , he 's shouting . ``", "And it adds : `` According to a store manager , he may stand on your tables . ''", "Audrey Lincuff , a Starbucks spokeswoman , confirmed the authenticity of the memo -- and disputed the accuracy of Reverend Billy 's message , at least as it pertains to Starbucks .", "`` We consider ourselves to be locally relevant where we do business , '' she said , `` and work very hard to weave ourselves into the fabric of the community by associating and working with nonprofit groups and other community groups . 
''", "The company 's goal , she added , is to `` connect with our customers not only on a business level but on things that are important to them in their lives . ''", "Reverend Billy says he tries to remain relatively low key .", "`` I 'm against a lot of political people who have become fundamentalists themselves , `` he said .", "He does n't like the anti-fur people who ridicule pedestrians in fur coats or hats , for example .", "He is a latte drinker , though he does n't order it at Starbucks .", "He wants to help awaken desensitized shoppers , he says , because `` they are underestimating the complexity and beauty of life . ''", "And besides , `` they are definitely underestimating the impact of shopping . '' ."], "summary": ["Bill Talen , also known as Reverend Billy , is street preacher who exhorts people to resist temptation -- temptation to shop -- and to smite demon of consumerism .", "He declares consumerism is ` dull way of life ' .", "Photo of Talen preaching outside Plaza Hotel in Manhattan on Buy Nothing Day ."], "publication": "nyt50", "label": [2, 7, 1], "tag": ["Business"]} -{"id": "1453146", "text": ["The Commerce Department ruled today that encircling dolphins with nets a mile wide to catch tuna does not significantly harm them , clearing the way for Mexico and other countries to market their tuna in the United States as dolphin-safe .", "The decision drew an immediate protest from wildlife and environmental advocates , who said the ruling was at odds with the department 's own scientific findings and appeared to be little more than a political gift to Mexico .", "They vowed to take the administration to court .", "Tuna fishermen in Mexico , Colombia and Venezuela have fought for years to put a `` dolphin safe '' label on their exports to the United States , even though their use of nets to encircle schools of dolphins to catch tuna , which often swim just below the dolphins , has been abandoned by American tuna producers .", "American consumers are familiar with the dolphin-safe label on cans of tuna from United States companies like StarKist and Bumble Bee .", "Mexican and foreign exporters have been allowed to sell their tuna on American shelves but have largely stayed away , saying they need the dolphin-safe label to compete .", "William T . Hogarth , the assistant administrator for fisheries of the Commerce Department 's National Marine Fisheries Service , said that the number of dolphin deaths had declined drastically in recent decades and that safeguards would ensure that the dolphins are not endangered in the quest for tuna .", "`` You have to look at the big picture , '' Mr. Hogarth said in an interview .", "`` I looked at all the scientific data .", "I feel very comfortable with this decision . ``", "Mr. 
Hogarth estimated that about 1,600 dolphins are killed each year by tuna fishermen , down from about 350,000 two decades ago .", "Dolphin and yellowfin tuna tend to run together in the eastern Pacific , with the dolphins swimming near the surface to breathe .", "Under the often criticized practice , fishermen in large vessels send helicopters aloft in search of dolphin schools , then deploy speedboats to encircle them and the tuna schools beneath with floating nets often more than a mile in length .", "While the tuna are harvested , the dolphins are supposed to escape over the floating net .", "The Commerce Department 's decision today said that fishermen using the practice can designate their product as dolphin-safe if no dolphins were injured or killed when the tuna were caught .", "Under an arrangement first reached by the Clinton administration , observers from the fishermen 's countries will be posted on the vessels .", "`` Dolphin-safe means that dolphins can be encircled or chased , but no dolphins can be killed or seriously injured in the set in which the tuna are harvested , '' Mr. Hogarth said .", "Wildlife advocates voiced outrage at the decision , which they said would place huge new strains on two varieties -- the eastern spinner and the offshore spotted dolphin -- whose numbers have declined by as much as 70 percent in recent years .", "`` This fishing method would allow harming and even killing thousands of dolphins each year in tuna nets , '' said Kitty Block , special counsel to the Humane Society of the United States .", "`` For the first time in over a decade , dolphin-deadly tuna will be sold in the United States -- and what makes this so unconscionable is that this tuna will be misleadingly labeled dolphin-safe . ''", "The wildlife advocates said the decision was at odds with a new study by the department 's own scientists .", "In a report that was leaked to the news media earlier this month , scientists at the Southwest Fisheries Science Center in La Jolla , Calif . , found that depleted dolphin populations were not rebounding in the Pacific waters patrolled by Mexican , Colombian and Venezuelan tuna boats , and they cited the net-fishing procedure as a cause .", "`` Despite considerable scientific effort by fishery scientists , there is little evidence of recovery , and concerns remain that the practice of chasing and encircling dolphins somehow is adversely affecting the ability of those depleted stocks to recover , '' the study said .", "Asked about the study , Mr. Hogarth said it was not conclusive .", "But David Phillips , director of the Earth Island Institute 's International Marine Mammal Project , said the Bush administration appeared eager to placate Mexico , which has had many requests to Washington pushed aside by other matters in recent years .", "`` The State Department is saying , ' We promised the Mexicans we 're letting this tuna in , ' `` Mr. 
Phillips said .", "He said his group would file an injunction against the administration within days .", "The group has twice prevailed in lawsuits seeking to block a redefinition of dolphin - safe .", "Editors ' Note : January 6 , 2003 , Monday An article on Wednesday reported a Commerce Department ruling that the nets used in catching tuna did not significantly harm dolphins -- a decision that cleared the way for Mexico and other countries to market tuna in the United States as dolphin-safe .", "Environmental advocates were quoted as saying the ruling was at odds with the department 's own scientific findings and appeared little more than a political gift to Mexico .", "The seafood industry 's response was omitted when the article was edited to its assigned space .", "Industry representatives said environmentally responsible boat owners had reduced dolphin mortality .", "`` They should be rewarded for that conservation effort by being given access to the United States market , '' said Justin LeBlanc , vice president for government relations at the National Fisheries Institute ."], "summary": ["Commerce Dept rules that encircling dolphins with nets a mile wide to catch tuna does not significantly harm them , clearing way for Mexico and other countries to market their tuna in United States as dolphin-safe .", "Decision draws immediate protest from wildlife and environmental advocates ."], "publication": "nyt50", "label": [0], "tag": ["Business"]} -{"id": "1453148", "text": ["Efforts by water officials in Southern California failed today to reach a deal on water usage from the Colorado River before a midnight deadline .", "As a result , the Bush administration said it would cut flows from the river to the state 's cities and farms beginning in January , making it the first time the federal government has imposed such a penalty .", "Even as the board of one water agency , the Imperial Irrigation District , voted here to approve a revamped proposal , other water officials said they had given up on making the deadline .", "The officials said that differences among them remained too great and that the Imperial proposal was unacceptable .", "The deadline was part of an agreement reached two years ago among seven Western states , including California , that was meant to end fighting over water supplies from the Colorado River .", "Under that agreement , the Imperial Irrigation District was to transfer 200,000 acre-feet of water it has been receiving each year from its farms to the San Diego County Water Authority .", "Gale A . Norton , the secretary of the interior , said the transfer was considered crucial to the bigger agreement because it would have signaled the willingness of farmers in Southern California to share their water with the state 's fast-growing cities .", "Currently , agricultural districts get most of the water that comes from the Colorado River , an imbalance that most water experts agree must change to address the state 's chronic water shortages .", "The president of the largest urban water district , the Metropolitan Water District of Southern California , said last-minute demands included in the Imperial district 's proposal had essentially derailed the negotiations .", "Invoking the memory of hurried decisions made during the recent energy crisis , the metropolitan district 's president , Ronald R . 
Gastelum , said water officials from other agencies would not be rushed .", "`` The last time an 11th-hour proposal was hastily approved , circumstances led to an energy crisis , '' Mr. Gastelum wrote in a statement .", "`` This will not be the case with water . ''", "Mr. Gastelum estimated that financial demands included in the Imperial proposal , which included $ 200 million in state financing for environmental and other needs , would cost every household in the state about $ 30 .", "The agency also had problems with provisions to allow Imperial to back out of the agreement in the first year if certain demands were not met .", "`` It also allows Imperial to essentially kill any agreement reached after the federal deadline and prolongs the uncertainty of Colorado River supplies for urban Southern California through 2003 , '' Mr. Gastelum said .", "`` Despite this setback , we hope Secretary Norton will continue to assist the parties in reaching a long-term solution . ''", "Early this morning , Ms. Norton 's top water official , Bennett W . Raley , also indicated the talks were at a stalemate when he returned to Washington from Los Angeles .", "Mr. Raley , an assistant secretary of the interior , had flown to California on Monday to join a last-minute flurry of negotiations , when they seemed to be making progress .", "In a conference telephone call with reporters from Washington , Mr. Raley said the Bush administration would make good on its threat to cut surplus water flows from the Colorado River to Southern California .", "In past years , the state has drawn as much as 800,000 acre-feet of extra water , enough to provide for about 1.6 million households .", "The biggest loser will be the Metropolitan Water District , which has relied on the Colorado River for more than half of its water , but the Imperial Irrigation District also stands to loose about seven percent of its allotment .", "Mr. Gastelum said the Metropolitan Water District , which serves 17 million people in six counties , had enough water on hand to meet demands across Southern California for the next two years and perhaps longer .", "`` In the new year , the public will see a heightened focus on voluntary outdoor water conservation that will make even more water available for storage and use in dry years , '' he said .", "At its meeting here in El Centro , the board of the Imperial Irrigation District voted 3 to 2 to approve the revamped agreement that had been drafted over the past several days by its lawyers .", "On Dec . 9 , also by a 3-to-2 vote , the board had rejected an earlier version drafted in October with the cooperation of the other water agencies involved in the talks .", "After the Dec . 9 vote , the board was criticized by other water agencies across the West for essentially derailing the larger seven-state agreement , and board member said they felt tremendous pressure from state and federal officials to change their minds .", "One of the members , Bruce Kuhn , did change his vote today , but he did so knowing the revamped agreement was unacceptable to the other agencies .", "`` If we approve this deal , we ca n't be accused of not approving this deal , `` Mr. Kuhn said before the vote today .", "`` And if they kill it , so be it .", "At least it wo n't be us who did it . 
`` ."], "summary": ["Water officials in Southern California fail to reach deal on water usage from Colorado River before midnight deadline .", "As result , Bush administration says it will cut flows from river to state 's cities and farms beginning in January , making it first time federal government has imposed such a penalty ."], "publication": "nyt50", "label": [1, 0], "tag": ["U.S."]} -{"id": "1453149", "text": ["Sitting in front of a table laden with funeral programs , where the silenced faces form a tragic tableau , the mothers of murder victims do not cry in their support-group circle .", "They argue , they yell , they preach .", "`` 9/11 last year .", "We have it going on every day , `` said Paula Drew-Williams , the facilitator of the weekly free-for-all at the offices of Save Our Sons and Daughters .", "`` Look at how many babies we do n't have .", "We had 9/11 before 9/11 .", "Where 's the hoop-de-la .", "`` Clementine Barfield , who founded the group , known as So Sad , after her son was killed in 1986 , interjected , '' There is no hoop-de-la if we do n't make it . ``", "A newcomer to the group , Sharon Nowell Neal , said she was handing out a paper she had written about the violence to everyone she could think of .", "`` When we 've had enough , then it will stop , `` said Ms. Neal , 46 , whose 9-month-old granddaughter , Tatijuana , and 3-year-old son , Nathan , were both killed , 15 years apart .", "Ms. Drew-Williams said everyone in the circle had had enough , but Ms. Neal spat back , `` I 'm doing something with my enough . ``", "The women of So Sad have had more than enough this year , as they saw the number of children 16 and under killed in Detroit jump to 25 from 19 in 2001 .", "The 31 percent increase gives this city a child homicide rate of 9 per 100,000 , higher than Chicago , Los Angeles , Boston , San Diego or Miami .", "Statistics for New York were not available this week .", "Though violent crime over all has declined by about 4.5 percent in Detroit this year , and total homicides are down to 391 from 395 , fatal shootings of children increased to 17 from 11 .", "And Detroit is not alone : New Orleans had 16 children killed this year , double last year 's 8 , giving it a child homicide rate of 13 per 100,000 .", "In Washington , 12 children were killed this year , a rate of 11 per 100,000 .", "`` It 's astounding , `` said Shikha Hamilton , president of the Detroit chapter of the Million Mom March , an antigun group that recently pressed the City Council to pass a law banning weapons in public buildings and persuaded The Detroit Free Press to stop accepting classified advertisements selling weapons .", "`` My heart is broken every time . ''", "The year of nightmares began here on Jan . 13 , when Jameise Scaife died three days after he was delivered by emergency Caesarean section after his pregnant mother jumped from a building set afire by an arsonist .", "The last child killing was on Nov . 
30 , when 16-year-old Mario Smith was fatally shot on his way home from work at Skateland .", "In between , one 3-year-old was strangled by his mother and another was shot in the head by hers .", "Teenagers were killed by their friends , toddlers were beaten to death by their guardians , and an infant died after a car crash caused by a criminal suspect fleeing the police .", "Children were killed in carjackings and robberies , in gang disputes and at parties .", "Two little girls were gunned down in their homes by AK-47s in separate shootings .", "Six of the victims were 2 or younger , while 15 were under 12 .", "A 26th child , 10-year-old Charmaine Wright , died of complications from injuries sustained when she was violently shaken as a baby , although prosecutors have not officially ruled the death a homicide .", "`` It just seemed like it would never stop -- it got to the point where I did n't even want to look at the news anymore , `` said Harold Pollard , whose 7-year-old granddaughter , Ajanee , was killed on Feb . 25 in a drive-by shooting that also injured her mother and three siblings .", "`` Their wounds are healing , you know , but they still go through missing their sister . ''", "The Detroit Police Department and the Wayne County prosecutor 's office responded to the spate of killings with an intense investigative effort known as Project Destiny , named for 3-year-old Destinee Thomas , who was killed on March 23 .", "Inspector Craig Schwartz , chief of the city 's homicide division , said that all but two of this year 's fatal shootings of children had been solved , compared with only 52 percent of homicides over all , and that as of Dec . 22 nonfatal shootings of children were down to 63 from 80 over the same period in 2001 .", "He also pointed out that two of the teenage victims had been shot while committing crimes , and that a third had been involved in a shootout .", "`` That 's not quite a victim in the pure sense of the word , `` Inspector Schwartz said .", "`` As far as a preventive measure , these things are very hard to prevent .", "Many of these things involve social issues that are n't the responsibility of the police department .", "It becomes the responsibility of the entire community . ``", "At So Sad , Ms. Barfield is bemused by the renewed attention on the crisis she has been living for more than a decade .", "In 1986 , she said , 365 children were shot in Detroit , 43 of them fatally .", "This year 's number pales compared with that .", "Still , newcomers like Ms. Neal join the Monday night circle .", "`` You can lose a spouse , get another spouse .", "You can lose a sister and get another friend , `` Ms. Neal said plaintively .", "`` When you lose a child , I do n't care if you have 13 of them , that is a void that will never be filled . ``", "Ms. Drew-Williams softened and said , `` I wish I had never had any kids . ''", "`` Same here , same here , '' said Shirley Adams , whose daughter was killed in 1999 .", "`` It was just too much having that one murdered , '' Ms. Drew-Williams said .", "Ms. Neal said : `` People ask me now , ' How many children do you have .", "` I say I am the mother of three , but two are still living . ''", "They argued over capital punishment and shared coping strategies , like Ms. 
Drew-Williams ` s annual party , complete with sheet cake , for her son , who was killed on his birthday .", "Vickie Rose told about a dream she had recently in which her dead son called on the telephone , asking what Santa Claus had brought , what Mama was cooking for Christmas .", "As they talked , the women turned to point at the pile of funeral programs , from which their sons and daughters stared back blankly .", "A toddler in a jumper with a teddy bear .", "A baby in a bathtub .", "Sisters in braids .", "A young man in a military uniform , another in a football jersey , a third dressed for a prom .", "Then they stood up , closed the circle , and joined hands .", "`` We have lost so many kids this year , '' Rose Hunter , whose niece was murdered in 2001 , said to start the closing prayer .", "`` As we end this 2002 , we pray that next year , 2003 , we will not have a kid slaughtered , we will not have a kid hit by a car , we will not have a kid shot . '' ."], "summary": ["Mothers of murder victims in Detroit have support group , Save Our Sons and Daughters , So Sad , which tries to call attention to victims .", "Number of children 16 and under killed in Detroit jumps to 25 from 19 in 2001 , and city 's child homicide rate is 9 per 100,000 , higher than Chicago , Los Angeles , Boston , San Diego or Miami .", "Photo ."], "publication": "nyt50", "label": [12, 11], "tag": ["U.S."]} -{"id": "1453150", "text": ["North Korea strongly suggested today that it would withdraw from a treaty that prohibits it from making nuclear weapons , the latest in a series of fast-paced moves to remove its nuclear program from international controls .", "`` North Korea is not currently able to meet its commitments under the Treaty on the Nonproliferation of Nuclear Weapons -- this is the fault of the United States , '' Pak Ui Chun , North Korea 's ambassador to Russia , said at a news conference in Moscow today .", "On Monday , the North Korean Foreign Ministry accused the United States of `` ditching '' a special 1994 agreement that had kept North Korea bound to the treaty .", "If North Korea abandons the treaty , which would take 90 days from a formal declaration , it would be under no obligation to allow inspections by the International Atomic Energy Agency .", "Last week , the North said that it would expel the inspectors , and today they were made to leave the country .", "Without the inspectors , the outside world can now rely only on satellite photos and statements by North Korea to guess at what work is being done at the Yongbyon nuclear complex , 55 miles north of Pyongyang .", "`` We were the eyes of the world , '' Melissa Fleming , a spokeswoman for the agency , said after the two inspectors , a Chinese woman and a Lebanese man , arrived in Beijing .", "`` Now we virtually have no possibility to monitor North Korea 's nuclear activities nor to provide any assurances to the international community that they are not producing a nuclear weapon . ``", "Shortly after South Koreans elected the liberal candidate in the Dec . 
19 presidential election , the North Koreans disabled cameras and broke seals that for almost a decade had restricted their access to facilities capable of producing weapons-grade plutonium .", "Nuclear technicians quickly started work on reopening two facilities : a small reactor and a fuel reprocessing plant .", "Outside experts estimate that the plant is capable of producing enough plutonium for five bombs by late spring .", "`` I do n't know what is worse : pulling out of the NPT or starting to reprocess , `` Victor D . Cha , a Korea expert at Georgetown University , said today .", "`` They may have chosen a totally new strategy : to acquire some form of nuclear deterrent as quickly as possibly , and then try negotiation from a position of strength . ''", "In Moscow , the North Korean ambassador charged today that the Bush administration , which has labeled North Korea , along with Iran and Iraq , as part of an `` axis of evil , '' had `` threatened us with a pre-emptive nuclear strike . ''", "`` In these circumstances , we also can not fulfill the nonproliferation treaty , the basic clause of which is the obligation of nuclear states not to use the nuclear weapon against states which do not possess it , '' he said .", "The North Korean ambassador recalled that in 1993 the North announced plans to pull out of the treaty .", "But , the next year , the signing of an agreement with the United States prompted the country to suspend its official withdrawal .", "Under the agreement , the United States and South Korea agreed to build two light-water power reactors in North Korea .", "It is harder to make a nuclear bomb with this technology .", "This year , however , the Bush administration confronted the North Koreans with evidence that they had secretly continued work on their nuclear weapons program despite that agreement .", "Tensions have escalated since , and the United States has met resistance from some Asian allies as it tries to contain the North .", "Last weekend , Bush administration officials floated the idea of `` tailored containment '' against North Korea , or a ring of economic sanctions by its neighbors .", "But today , South Korea 's President-elect , Roh Moo Hyun , told reporters that he opposed the policy .", "`` I am skeptical whether so-called ' tailored containment ' reportedly being considered by the United States is an effective means to control or impose a surrender on North Korea , `` said Mr. Roh , who takes office Feb . 25 .", "`` I doubt if the policy would work in controlling North Korea .", "`` Success or failure of a U.S. policy toward North Korea is n't too big a deal to the American people , but it is a life-or-death matter for South Koreans , `` Mr. Roh said at his transition committee office .", "`` Therefore , any U.S. move should fully consider South Korea 's opinion . 
``", "On Monday , South Korea 's president , Kim Dae Jung , also expressed opposition , noting that four decades of economic sanctions have failed to bring down the Communist government in Cuba .", "As a measure of South Korea 's commitment to economic engagement with North Korea , Kim Yoon Kyu , president of Hyundai Asan Company , left here Monday for a five-day visit to North Korea , where he will discuss a groundbreaking date for a huge South Korean-financed industrial park at Kaesong and an inauguration date for overland bus service across the demilitarized zone to the Mount Kumgang tourism resort , owned by Hyundai .", "On Monday , the Bush administration seemed to back away from the sanctions idea .", "`` I do n't think anybody has suggested at this point imposing sanctions , `` the State Department spokesman , Philip Reeker , said at a news briefing .", "`` The secretary has not asked any nation to take economic action against this desperately poor country , North Korea . ''", "With nervousness gradually spreading to markets here , South Korea 's stock market fell 4.5 percent on Monday , its largest single-day drop in a decade .", "Tonight , in what could reflect Mr. Roh 's calming influence , turnout for an anti-American rally was far below the one million figure originally cited by organizers .", "Although the rally overlapped with New Year 's Eve festivities that traditionally bring at least 100,000 people into downtown Seoul , only about 20,000 are believed to have attended the rally .", "South Korea 's protest movement was fueled by the court martial acquittal in late November of two American soldiers of charges arising from a traffic accident that killed two teenage girls .", "President Bush has apologized several times for the incident , most recently in a telephone conversation with Mr. Kim on Dec . 13 , when he conveyed his `` deep , personal sadness and regret '' over the deaths .", "South Korean protests have prompted talk in the United States -- in Congress and on newspaper op-ed pages -- that the alliance should be reviewed , and if South Korea , a democracy , does not want the 37,000 American troops stationed here , it may be time to start withdrawals .", "During the fall presidential campaign , Mr. Roh said he wanted the American troops to stay here , distancing himself from statements he made a decade ago when he wanted the Americans to go home .", "But on Monday , Mr. Roh brought up the possibility of American troop withdrawals during a meeting with South Korea 's top military command .", "`` I hear such talks coming out again these days , '' he told the generals .", "`` I wanted to ask whether you have a long-term plan on how the South Korean military could make up for a possible reduction . 
''", "THREATS AND RESPONSES : WEAPONS ."], "summary": ["North Korea strongly suggests that it will withdraw from treaty that prohibits it from making nuclear weapons , latest in series of fast-paced moves to remove its nuclear program from international controls .", "North Korean ambassador , in Moscow , charges that Bush administration has ` threatened us with a pre-emptive nuclear strike ' .", "South Korea opposes pressuring North .", "Anti-American demonstration is held in Seoul .", "Photo ."], "publication": "nyt50", "label": [0, 13], "tag": ["World", "Washington"]} -{"id": "1453151", "text": ["Roberto Colaninno , the Italian entrepreneur who engineered the hostile takeover of Telecom Italia four years ago , has proposed to Fiat 's creditor banks an ambitious $ 8 billion plan to rescue the Fiat group , people involved in developing the plan said today .", "While the proposal is still only in its broad outlines , it calls for a group of investors led by Mr. Colaninno to put $ 2.5 billion to $ 3 billion into Fiat , with the company raising an additional $ 4 billion through divestitures .", "The plan also envisions a swap with General Motors , which owns 20 percent of the Fiat auto division , under which G . M . would be released from a requirement that it acquire the 80 percent it does not own , in exchange for G.M. ` s joining a capital increase at Fiat Auto .", "If the Colaninno group is successful -- and that is far from certain at this very early stage -- it would effectively mean that the Agnellis , for more than a century the uncrowned industrial monarchs of Italy , would at best have to share control of the Fiat conglomerate with other Italian investors , most notably Mr. Colaninno .", "The proposal , which was first reported on Monday on the Web site of the Italian daily La Repubblica , has been outlined to Fiat 's major creditor banks , which concluded a $ 3 billion convertible loan arrangement with Fiat in May , and to the government of Prime Minister Silvio Berlusconi , the people involved in developing the plan said .", "The government was involved in recent talks with Fiat and its labor unions to soften the impact of layoffs of more than 8,000 workers as Fiat 's auto unit tries an overhaul .", "Fiat , the largest private employer in Italy , has been struggling to return its auto division to profitability .", "Fiat expects automobiles to generate a $ 1.23 billion operating loss for 2002 .", "On Monday , Mr. Berlusconi alluded to the plan at a year-end news conference , telling reporters that `` there are groups of entrepreneurs who have shown interest . ''", "Suggesting that General Motors has sought a way out of its obligation to Fiat Auto , Mr. Berlusconi said , `` With the decline in General Motors ' interest , the government hopes for an interest from Italian companies . ``", "In March 2000 , Fiat swapped 20 percent of Fiat Auto for a 6 percent stake in G . M . , and Fiat has the option to require General Motors to acquire the remaining 80 percent , starting in 2004 .", "The people involved with the plan said Mr. Colaninno and his advisers had not yet approached General Motors .", "It is also unclear what the reaction of the Agnelli family , Fiat 's biggest shareholder , would be .", "Fiat , and Mr. Colaninno , former chief executive of Olivetti , have declined to comment on reports of the plan .", "But one Fiat executive said , `` Ultimately , it is all feasible . ''", "He added , `` There is one clear point : Colaninno has the money . 
''", "Last year , Pirelli , the Italian tire and cable company , teamed up with an investment firm controlled by the Benetton clothing family to buy 23 percent of Olivetti for $ 6 billion , giving the consortium effective control of Telecom Italia , where Olivetti owned 54 percent .", "The change in ownership forced out Mr. Colaninno , who had become chief executive of both Olivetti and Telecom Italia after leading an investor group that paid $ 30 billion to gain control of Telecom Italia in May 1999 .", "The size of Mr. Colaninno 's capital gain was never disclosed , but it was considerable .", "A rescue plan would come at a time when Fiat is particularly vulnerable .", "Its shares have sunk to their lowest level in nearly two decades because of its financial troubles .", "Last week , Moody 's Investors service cut the rating on $ 15 billion of Fiat debt to junk status .", "The banks , thanks to their $ 3 billion financing package , `` own Fiat , de facto , '' one person involved with the plan said , adding that `` they are tantamount to being the biggest shareholders . ''", "The four largest creditor banks are IntesaBci , Unicredito Italiano , Capitalia and Sanpaolo IMI .", "`` The banks have a problem , '' this person added .", "`` We say it is better for everybody if Fiat puts money into the business . ''", "Fiat 's chairman , Paolo Fresco , attended a board meeting in Milan today of the investment bank Mediobanca , a meeting that was also attended by Mr. Colaninno , also a director of Mediobanca .", "But the people involved in the plan said other matters , not Fiat , were on the agenda .", "Earlier this month , Mr. Fresco won a temporary lease on life when the Fiat board thwarted an effort to unseat him .", "But increasing financial troubles at Fiat and its auto unit are maintaining pressure for change .", "Under the proposal , Mr. Colaninno and a group of fellow investors would take the banks ' liabilities into a new company .", "Additional money , roughly $ 4 billion , would be raised by Fiat in a sale of assets .", "Mr. Fresco has already said he plans to sell other assets , but those sales have been going more slowly than expected .", "Earlier this month , Fiat raised more than $ 1.6 billion by selling its stake in General Motors and several lesser assets .", "The people said that General Motors would be offered a release from the purchase requirement in exchange for a participation in the capital increase for Fiat Auto .", "In all , the fresh capital for Fiat would total roughly $ 8 billion .", "It is unclear how G . M . would react to such a proposal .", "In October , G . M . ` s chief financial officer , John M . Devine , an outspoken critic of the requirement , called a put option , , told analysts on a conference call , '' If there 's a change in control of Fiat S.p.A. , then the put is automatically eliminated . 
`` ."], "summary": ["Roberto Colaninno , Italian entrepreneur , proposes to creditor banks ambitious $ 8 billion plan to rescue Fiat group .", "Proposal , in its broad outlines , calls for groups of investors led by Colannino to put $ 2.5 billion to $ 3 billion into Fiat , with company raising additional $ 4 billion through divestitures .", "Colaninno photo ."], "publication": "nyt50", "label": [1], "tag": ["Business"]} -{"id": "1453152", "text": ["IN the movies , school principals are generally portrayed in two distinct categories .", "There is the baseball-bat-toting disciplinarian , evoked by Morgan Freeman -LRB- playing the real-life New Jersey high school principal Joe Clark -RRB- in the 1989 movie `` Lean on Me . ''", "And there is the buffoon forever running a step behind his students , epitomized by the fictional Ed Rooney -LRB- played by the character actor Jeffrey Jones -RRB- as he pursues the chronic truant Ferris Bueller .", "But the job has always been far less colorful than Hollywood would lead us to believe .", "And it has never been harder on the people in it , a circumstance that helps explain a national shortage that has left some states struggling to find a permanent principal for one of every five schools .", "In December , the New York City schools chancellor , Joel I . Klein , followed the lead of educators in Boston , Providence and elsewhere by announcing a corporate-style principal training and incentive program .", "Those New York City principals who agree to work in failing schools for three years could earn $ 75,000 in bonus pay , while a farm team of rookie principals is to be developed partly by having them shadow veterans in a leadership academy .", "Anyone considering such work would do well to consult a recent graduate of a similar program , someone like Teri Schrader , who is in her second year as principal of the Francis W . Parker Charter Essential School in Devens , Mass . , northwest of Boston .", "Depending on the time of day , Ms. Schrader can be rallying her 350 middle - and high-school students before do-or-die standardized tests , combing Massachusetts ' liability codes to assess Parker 's potential exposure in a given case , or buttonholing politicians in an effort to extend the life of her experimental seven-year-old public school , which has an annual budget of nearly $ 2 million .", "`` I 'm constantly likening it to being a theater director , `` said Ms. Schrader , 42 , who as a drama teacher previously directed perhaps 60 productions at schools in Hartford and suburban Massachusetts .", "`` You 're utterly accountable , but you 're not the one on stage .", "It 's really like directing theater surrounded by a ring of fire . ``", "In the summer of 2000 , after nearly two decades teaching drama and art , most recently at Parker , Ms. Schrader decided to enroll in what her family refers to as `` principal college . ''", "She chose the Greater Boston Principal Residency Network , a part-time one-year training program based partly at Northeastern University .", "The program follows a medical school `` residency '' model by placing 10 aspiring principals each year in a mentoring relationship with a veteran principal .", "Working under those veterans , the rookies gain hands -on experience and are eventually expected to graduate to their own principalships .", "During her apprenticeship , Ms. 
Schrader , shadowing the principal at Parker , Greg Sinner , embarked on a daunting project : deciding how much the school 's teachers and other employees , 62 staff members in all , would be paid .", "As a charter school exempt from union rules , Parker had no compensation policy during its first five years , and that meant that some junior members of the staff were paid more than veterans .", "The arguments were bitter , as some veterans argued for a traditional `` step system '' in which salaries would rise in automatic increments .", "Others wanted raises tied exclusively to teachers ' performance .", "Ms. Schrader was inclined to agree with the latter , until her mentor , Mr. Sinner , who had spent 30 years as a principal in Illinois , talked her down .", "He explained that far more seasoned administrators than she had failed in other districts to define how a teacher 's performance would be judged -- no one at Parker , for example , wanted to link teacher pay to students ' test scores -- and that a fair system linking wages to the subtle work of instruction would most likely be elusive .", "`` He was the oldest person in the room , '' Ms. Schrader said of Mr. Sinner , then in his 60 's .", "`` He was really good at asking big questions . ''", "In the end , a committee led by Ms. Schrader and counseled by Mr. Sinner forged a compromise : it created three experience-based categories of teacher salaries , and , within each level , a range of increases to be awarded by the principal , partly on the basis of observation in the classroom .", "`` The conversations where I have been able to inform these teachers of their adjustments , '' Ms. Schrader wrote in a paper to her colleagues in the principal training program , `` have been among the most joyous and unforgettable of my career . ''", "At the end of that school year , when Mr. Sinner retired , the job of principal was Ms. Schrader 's .", "In the 18 months since , she has had few school days that have lasted less than 12 hours .", "`` I think outsiders function on a movie model of what a school principal is , '' she said .", "`` I do n't think people have a clue how much you love your kids and how hard the work is . ``", "LESSONS E-mail : jacques@nytimes.com ."], "summary": ["Jacques Steinberg Lessons column describes job of school principal , position that is part cheerleader , part theater director , very hard work , and , lately , difficult to fill in numerous school districts .", "Drawing ."], "publication": "nyt50", "label": [26, 9], "tag": ["Education", "New York and Region"]} -{"id": "1453153", "text": ["Hundreds of Pennsylvania doctors who were threatening to stop work or cut back on services because of the high cost of medical malpractice insurance appeared last night to have decided to stay at their posts after state officials promised to try to reduce , at least temporarily , their expenses .", "`` We 've averted a major crisis , `` said Andrew Wigglesworth , the president of the Delaware Valley Health Care Council , an association of 150 hospitals and health care organizations in Pennsylvania , New Jersey and Delaware .", "Twenty-two hospitals in and around Philadelphia had signaled their intention to stop many optional or elective procedures and to limit emergency room operations , Mr. 
Wigglesworth said .", "Many doctors in Philadelphia as well as in Scranton , in northeastern Pennsylvania , and in Chambersburg and other central Pennsylvania towns were also threatening to reduce their practices or quit medicine entirely .", "Other doctors said they were considering leaving the state , as many already have .", "But many changed their minds after Gov . - elect Ed Rendell promised to urge the Legislature to take steps that would reduce insurance costs for those in the riskiest fields , including obstetrics and neurosurgery , by up to 40 percent , and significantly reduce coverage costs for all doctors .", "Nearly two weeks ago , doctors at the Abington Memorial Hospital in Abington , an affluent suburb on the northern edge of Philadelphia , announced that they were shutting down their trauma center because doctors either could not find professional insurance or were unwilling to pay premiums that were running as high as $ 150,000 a year .", "Another trauma center at a hospital in Scranton had already closed because of insurance problems .", "Soon doctors across the state were warning that they would curtail services .", "They set a deadline of Jan . 1 , when the insurance for about 60 percent of the state 's roughly 35,000 doctors comes up for renewal .", "The spreading protests caught the attention of Mr. Rendell , who met with doctors at Abington and began working on a response .", "`` I think he heard a lot more than he was aware of , '' said Dr. Arthur M . Frankel , a general surgeon who attended the meeting .", "On Monday afternoon , Governor-elect Rendell , joined by Gov . Mark Schweiker , announced his plan .", "It would provide free , for the coming year , half the $ 1 million in coverage required to maintain a medical license in Pennsylvania for doctors in obstetrics , neurosurgery , orthopedic surgery and general surgery .", "All four categories require among the highest insurance premiums .", "Doctors currently buy this coverage from a state fund after buying the first $ 500,000 , usually at somewhat higher prices , from commercial insurers .", "Others doctors would get coverage from the fund at half of what they usually pay .", "Mr. Rendell said he would also ask the Legislature to approve a grant of $ 18 million to $ 22 million to the state 's trauma centers to help them pay for insurance and other expenses .", "He also proposed requiring anyone bringing a malpractice lawsuit to obtain certification from another doctor that the lawsuit was not frivolous .", "To pay for the shortfall to the state insurance fund , Mr. Rendell proposed taking the money from health insurers in the state .", "At the same time , he urged the health insurers to follow the lead of Independence Blue Shield in raising the amount they pay doctors for their work .", "The doctors have complained that as insurance costs have been rising , payments for their work from the federal government and private insurers has been declining .", "James Mead , the chief executive of Capital Blue Cross , another health insurer , told The Associated Press that he was `` disappointed and troubled '' by Mr. Rendell 's plan to take money from his industry to ease the burden on doctors .", "Dr. Andrew Star , the managing physician of the Orthopedic Specialty Center in Willow Grove , Pa . 
, said the average cost of coverage for the 12 surgeons in his group had jumped from $ 85,000 last year to $ 150,000 this year , including the payment to the state fund .", "Rather than pay the higher price , he said , they had begun cutting back on their work , and some in the group were considering retirement .", "Others were looking for jobs in states where insurance costs less .", "Dr. Star 's group was the first to pull out of the Abington Trauma Center At 6:30 a.m. yesterday , he and the other doctors met to discuss Mr. Rendell 's proposal .", "They could not come to a conclusion , but Dr. Star said he was optimistic .", "`` We want to go back to work , '' he said .", "`` We 're talking to our business advisers .", "We hope we can work it out . `` ."], "summary": ["Hundreds of Pennsylvania doctors who were threatening to stop work or cut back on services because of high cost of medical malpractice insurance appear to have decided to stay at their posts after state officials promise to try to reduce , at least temporarily , their expenses ."], "publication": "nyt50", "label": [0], "tag": ["Health", "U.S."]} -{"id": "1453154", "text": ["Forty years ago , a team of French archaeologists decided that the best way to save the Baphuon temple was to destroy it .", "They began to take apart the fragile temple block by block , keeping meticulous records of their work , planning to put it back together again as a more stable structure .", "Then came war .", "As the Communist Khmer Rouge approached , the restorers fled the Angkor temple complex in 1972 .", "In the chaos that followed , all their written records were destroyed .", "When they returned in 1995 , all they found was 300,000 heavy stone blocks strewn among the trees -- the biggest jigsaw puzzle in the world .", "It is a puzzle without a key , but it does have a solution .", "Block by block , layer by layer , the Baphuon temple is rising again as one of the towering monuments of Angkor .", "When it was built in the 11th century , the multi-tiered sandstone pyramid was the most impressive building of its day -- `` a truly astonishing spectacle , '' according to a 13th-century Chinese traveler , Zhou Daguan .", "Like the other Angkor temples , Baphuon was consumed by the jungle after the great empire fell 500 years ago , and it was only in the last century that French archaeologists began tinkering with it .", "But the Baphuon , clumsily built on sand with a poor drainage system , was teetering and collapsing in chunks , too unstable to repair like its neighbors , Bayon , Angkor Wat and others .", "The solution : anastylosis , the sort of disassembly ambitious mechanics sometimes do with car engines .", "Work began in the 1960 's .", "Half the temple was in pieces when it was abandoned , scattered across 25 acres of land like shredded documents .", "`` So we have a puzzle , but we are missing the map of the puzzle , '' said Pascal Roy\u00e8re , an architect who heads a team of 200 working for the \u00c9cole Fran\u00e7aise d' Extr\u00eame-Orient , a cultural organization with financing from the French government .", "Philippe Peycam , executive director of the Center for Khmer Studies here , said : `` It 's really crazy , this temple , so complex and baroque .", "It 's a nightmare to restore . 
``", "The French team was confronted with a variety of challenges that included the reconstruction of a reclining Buddha that was added in the 16th century and the reinforcement of the structure with a concrete core that was begun in the 1960 's and is now considered outmoded .", "But the most fascinating challenge came in the puzzle pieces themselves .", "Worn by centuries of sun , monsoon and jungle growth , the stones of Baphuon were chipped and roughened , each slightly different from all the others .", "Without mortar to cushion the construction , each block must be returned to nestle precisely among those beside , above and below it .", "`` One place for one block , one block for one place , '' Mr. Roy\u00e8re said .", "`` That 's the rule . ``", "Like any jigsaw puzzle , there is no forcing a piece into a place that is almost right , but not quite .", "`` You 'll laugh , but if you are off by ten millimeters here , 20 meters farther along , everything is wrong , `` Mr. Roy\u00e8re said .", "`` It happens regularly , but when it happens you know right away .", "That 's the difficulty and also the insurance against mistakes .", "The monument corrects itself . ``", "Apart from the temple 's own dynamic , the restorers had three things to guide them .", "Jacques Dumarcay , the French architect who had worked on the Baphuon project in the 1960 's , had since retired but was able to offer some institutional memory .", "The second guide was a cache in Paris of almost 1,000 photographs the French had taken of the temple over the years .", "Their chief value was to show which sections had already collapsed before the temple was dismantled , saving the workers from fruitless searches for missing stones .", "Third was the remaining half of Baphuon , which was to be dismantled after the first half was rebuilt .", "By studying this second half , Mr. Roy\u00e8re 's team created stylized drawings of the carved profiles of the blocks in each row of each tier of the temple .", "Early on , an attempt was made to computerize these shapes and create a reconstruction model .", "But given the eroded shapes of the stones , the computer 's generalized solutions were of little use .", "`` So we looked for a more simple solution , which was the man-made solution , '' he said .", "In other words , memorization .", "There are about 500 different shapes , Mr. Roy\u00e8re said , but by now nobody needs to refer to the drawings .", "Each team knows just what shapes it is looking for .", "`` We have people who walk around all day , '' he said .", "About 70 percent of the blocks have now been identified , and Mr. Roy\u00e8re said he was confident that none were missing .", "At times , as with any puzzle , some small sections are fitted together on their own , and the woods are dotted with what look like mini-temples awaiting their moment to be put in place .", "`` This is not a high-tech project , '' Mr. Roy\u00e8re said .", "`` It 's just a question of paying attention to what you do , and do n't sleep . ``", "Siem Reap Journal ."], "summary": ["Team of French archaeologists work at piece-by-piece reconstruction of ancient Baphuon temple in Siem Reap , Cambodia .", "Experts were restoring temple in 1972 , but fled Angkor temple complex when Communist Khmer Rouge approached ."], "publication": "nyt50", "label": [3, 45, 0], "tag": ["Science", "World", "Front Page"]} -{"id": "1453156", "text": ["As labor dynasties go , few can rival the Hogans of Chicago .", "William T . Hogan Jr . 
took the helm of Local 714 of the Teamsters in 1990 , after his father had run the local for half a century .", "Now his son Robert heads the local .", "William Hogan said his life was so steeped in the union that he carried Teamster picket signs as an 8-year-old and wrote school papers on James R . Hoffa , the onetime Teamster leader .", "`` My whole life has been the Teamsters , '' he said .", "But a union oversight board expelled him from the Teamsters on charges that he negotiated a sweetheart deal with contractors .", "And in a move that Mr. Hogan likened to deportation to Siberia , all members of the union have been barred from having contact with him .", "The only times he can even see relatives who are Teamsters are at occasional family events , like Thanksgiving dinner .", "Even then , relatives are prohibited from discussing union business with him .", "Any Teamster who violates the sanctions runs the risk of suspension or expulsion .", "The oversight board -- a panel the union created to help settle a federal lawsuit intended to end the Teamsters ' history of corruption -- has expelled or forced out about 300 Teamsters and has banned members from associating with them .", "Of all those expelled , Mr. Hogan is putting up the biggest fight against the restrictions , asking a federal judge to overrule the board and mounting a public relations battle .", "`` I have some very good friends that I can not call on the phone , that I can not be with , '' Mr. Hogan said .", "`` I have to keep reminding myself I 'm an American citizen .", "I did n't realize that anyone could give away my rights like that . ``", "The oversight board that expelled Mr. Hogan last May ruled unanimously that he had betrayed the union 's interests by trying to cut a sweetheart deal in Las Vegas with a temporary services company that employed one of his brothers .", "The deal , which union officials blocked before it was signed , would have given convention hall jobs to nonunion workers at 60 percent of the union pay rate .", "The board said Mr. Hogan had insinuated himself into Las Vegas , even though it was far from his Chicago base , largely to help his brother 's company .", "The oversight board , known as the Independent Review Board , is the most intensive watchdog in the labor world .", "It was created as part of a consent decree that the Teamsters signed in 1989 to settle a federal antiracketeering lawsuit , which asserted that the Mafia controlled the union , which has 1.4 million members .", "The oversight efforts have largely purged the union of criminal elements , but the consent decree 's rules still apply , including the provision that says Teamsters can not associate with any expelled member , except for family-related contacts .", "`` The consent decree is quite clear , with that big catchall phrase saying no Teamster can associate with any person enjoined from participating in union affairs , '' said Patrick J . Szymanski , the union 's general counsel .", "Edwin H . Stier , director of the Teamsters ' internal anticorruption efforts , defended the ban on fraternization , saying it was needed to reform a culture of a union long dominated by organized crime .", "But , while not directly addressing Mr. Hogan 's case , Mr. 
Stier said the ban was especially important for Teamsters expelled for being involved in organized crime , but he said the ban might eventually be eased for others .", "`` To keep organized crime from penetrating into the union , a flat prohibition against associating with those people is important , '' Mr. Stier said .", "`` That ban should be established permanently .", "`` As for people who were thrown out for non-organized-crime activity , that 's more debatable .", "That ban should be kept at least short-term because it 's important for reforming the culture of a union that was long dominated by racketeers . ``", "Whether some Teamsters clandestinely socialize with Mr. Hogan is unclear , but Mr. Hogan , 60 , said his son Robert fears trouble if they play golf together .", "Old Teamster friends say they cross the street when they see him .", "Teamster buddies say they are too nervous to invite him to weddings .", "And he quit Local 714 's softball team because team members said they could be punished for playing with him .", "The effective banishment has enraged Mr. Hogan 's many friends .", "`` I 've known Bill Hogan for more than 20 years , `` said Pete LaRocco , a Local 714 shop steward .", "`` And if I see him at Woodfield Mall or a gas station and I approach him or anything like that , I worry I can lose my job .", "I do n't see how anything like that can happen in America . ``", "Mr. Hogan has asked the judge who oversees the consent decree , Loretta A . Preska of Federal District Court in Manhattan , to overturn his expulsion and to ease the restrictions on associating with union members .", "Asserting that the expulsion was misguided , Mr. Hogan insisted that his negotiating efforts in Las Vegas were not a sweetheart deal , but rather innocent speech protected by the First Amendment .", "He also argued that the ban on socializing with Teamsters violated his First Amendment right to freedom of association .", "But Charles Carberry , the chief prosecutor for the Independent Review Board , said the restrictions did not violate the Constitution .", "`` The First Amendment is not involved because this is not government action , '' Mr. Carberry said , citing several judicial decisions .", "`` It comes into play only when there is government restriction of speech .", "Here , the Independent Review Board is standing in the shoes of the union so it 's a restriction by the union , not by the government . ``", "A white-haired , gregarious man , Mr. Hogan can easily be mistaken for a corporate executive in his work for Chicago civic committees , but when he is in internal union battles , his venom and foul language show there is some street fighter in him .", "Years ago , Mr. Hogan uttered his most-remembered line , `` I am living proof that nepotism works . ''", "In an interview in his lawyer 's office , he said : `` Yes , I once said that .", "Nepotism can work , and at times it does n't work .", "And in this case it did work .", "My reputation speaks for itself .", "My dad retired , I took over the local and it continued to grow .", "It was n't because I was Hogan 's kid .", "It was because I had some ability . ``", "Mr. Hogan 's arguments get support from a man he long detested , Ron Carey , the Teamsters ' former president , who was expelled for breaching his fiduciary duties by failing to stop aides from diverting union money to aid his re-election campaign .", "Mr. 
Carey 's lawyer , Mark Hulkower , said the courts should narrow the restrictions on socializing .", "`` The associational ban against Ron Carey is absolutely ridiculous , '' Mr. Hulkower said .", "`` The ban clearly was designed to prevent union members from associating with members of organized crime , but when it applies to officials like Ron Carey it makes no sense whatsoever . ''", "In 1996 , Mr. Hogan was to be James P . Hoffa 's running mate when he challenged Mr. Carey , then the Teamster president .", "But Mr. Hogan withdrew from the race when investigators from the Independent Review Board moved to place Local 714 , which represents movie truck drivers and workers at Chicago 's convention center , into trusteeship , saying the local was rife with nepotism and favoritism .", "Investigators found that 28 Hogan family members or relatives had ties to the local .", "Mr. Hogan also stepped down from the local 's helm and the presidency of the joint council of all Chicago Teamster locals .", "When the two-year trusteeship of Local 714 ended in 1998 , his son Robert was elected the local 's principal officer .", "Robert appointed his father as the local 's organizing director .", "Recently several friends organized a fund-raiser to help Mr. Hogan pay his legal bills , but Teamsters were warned not to attend .", "`` It hurts , '' said Robert Hogan .", "`` Here 's my father , fighting for his life .", "And there is a fund-raiser , and I ca n't be with him . `` ."], "summary": ["William T Hogan Jr , longtime Teamster official , is fighting ouster from union by oversight board that accused him of negotiating sweetheart deal with contractors .", "Is angry that all members of union , even relatives , have been barred from having contact .", "Photos ."], "publication": "nyt50", "label": [6, 5], "tag": ["Front Page", "U.S."]} -{"id": "1453161", "text": ["In the days before 9/11 , New Yorkers were regularly treated to fierce legal battles between Mayor Rudolph W . Giuliani and groups that claimed that he was suppressing their First Amendment rights .", "The range of such controversies was considerable , and involved groups like the Ku Klux Klan , the Brooklyn Museum of Art and New York magazine .", "Now , the Bloomberg administration is preparing to defend one of the last remaining cases from that era , a trial stemming from Mr. Giuliani 's decision in 1998 to fire a police officer and two firefighters who wore blackface during a Labor Day parade in Queens .", "All three men have sued , saying the city violated their rights to free speech .", "To prepare for that trial , which is scheduled to start next week in Federal District Court in Manhattan , Mr. Giuliani has given a 137-page deposition in which he defends the city 's decision to fire the men .", "In it , Mr. Giuliani , who is expected to be called as a witness in court , comes across as most New Yorkers recall him before the terrorist attacks brought out a gentler side : candid , blunt , outspoken and combative .", "`` My opinion as mayor of the city , '' he declares , `` was that police officers and firefighters engaging in this kind of conduct disgrace the uniform , and make it impossible for them in the future to function in a fair and impartial way . 
''", "At another juncture , the former mayor seems to almost take over the deposition , reading at length from a document to support his position until one of the firefighters ' lawyers asks him to stop editorializing .", "`` I 'm testifying , `` the mayor retorts , continuing to read from the document , a transcript shows .", "A deposition is a traditional part of a civil suit in which lawyers are allowed , before trial , to question witnesses under oath .", "A copy of Mr. Giuliani 's deposition was obtained from the New York Civil Liberties Union , which represents the officer who was fired , Joseph Locurto .", "The civil liberties union contends in its suit that Mr. Giuliani directed the officer 's firing in violation of his rights .", "`` You ca n't fire someone simply in retaliation for the content of their speech , `` Christopher Dunn , associate legal director of the group , said yesterday .", "`` It 's our contention that that 's exactly why they fired him . ``", "Mr. Dunn indicated in pretrial hearings that he might challenge the former mayor 's credibility on his reasons for firing the officer .", "`` We dispute his contention that he was concerned about the racial sensitivity of a police officer , '' he said yesterday , `` in light of his long history of supporting racially insensitive actions by the Police Department . ''", "Mr. Dunn acknowledged that Mr. Giuliani 's image had changed since the attacks , but he cited his `` legacy of hostility to the First Amendment . ''", "The city maintains that the firings of the men were permissible because their conduct would prove disruptive .", "A lawyer for the city , Jonathan Pines , said through a spokesman , `` We feel very confident in the strength of this case , and expect to prevail in court . ''", "A representative of Mr. Giuliani , Daniel S . Connolly , said he would not comment before trial .", "The 1998 Labor Day float , titled `` Black to the Future : 2098 , '' was part of the annual parade sponsored by the Broad Channel Volunteer Fire Department and Ambulance Corps .", "One participant , a firefighter , re-enacted the killing of James Byrd Jr . , a black man who was dragged to his death behind a pickup truck in Texas the previous June .", "Mayor Giuliani condemned the incident and said of Officer Locurto at the time , `` The only way this guy gets back on the police force is if the Supreme Court of the United States tells us to put him back . ''", "The officer , then an off-duty 30-year-old patrolman , admitted participating in the float , and apologized publicly for what he called `` a big mistake . ''", "The two firefighters -- Jonathan Walters and Robert Steiner -- denied any racist intent .", "In the deposition , which was taken last April , Mr. Giuliani said he believed that officers or firefighters who participated in such a `` vicious display of racism '' would become a serious liability to the city .", "He also said that despite his assertions that the men should be fired , had new evidence turned up , the police and fire commissioners `` would have been perfectly free to disagree and not fire them . ''", "At one point , Mr. Giuliani was asked whether controversy over the police response to that summer 's so-called Million Youth March in Harlem , influenced his response to Officer Locurto 's act .", "`` None , absolutely none , '' Mr. 
Giuliani said ."], "summary": ["Former New York City Mayor Rudolph Giuliani is expected to be called as witness in suit stemming from firings of firefighters Jonathan Walters and Robert Steiner and police officer Joseph Locurto , who wore blackface on float in Labor Day parade in Queens ."], "publication": "nyt50", "label": [2, 24], "tag": ["New York and Region"]} -{"id": "1453162", "text": ["The rapper known as 50 Cent and four other men were arrested on weapons charges in Midtown early yesterday after police officers found two loaded guns in the car they were in , the authorities said .", "The rapper , whose real name is Curtis Jackson , was a passenger in a Jeep Grand Cherokee that officers pulled over about 2 a.m. after they saw it parked in a no-standing zone at 34th Street and 11th Avenue near the Copacabana night club , the police said .", "Inside the car , which had heavily tinted windows , officers found a . 25 caliber handgun on the floor , the police said .", "A search also turned up a . 45 caliber handgun under the front passenger seat , they said .", "Three of the men in the car were wearing bulletproof vests , and one man was sitting on a fourth vest .", "Mr. Jackson , who was not wearing a bulletproof vest , was in the rear passenger side , the police said .", "The five men were each charged with two counts of criminal possession of a weapon , a police official said .", "Mr. Jackson 's lawyer , Charles Pringle , did not immediately respond to phone messages .", "The police briefly considered Mr. Jackson a potential target for violence last year after the Oct . 30 murder of Jam Master Jay , the popular D.J. for the pioneering rap group Run-DMC .", "One initial theory in that case was that the murder stemmed from a grudge against Mr. Jackson , who was a prot\u00e9g\u00e9 of Jam Master Jay and had used his music to criticize gangster rap .", "The police said that Mr. Jackson spent about eight months in prison in 1994 and 1995 after being arrested twice on drug charges ."], "summary": ["Police arrest rapper Curtis Jackson , known as 50 Cent , and four other men in Midtown Manhattan after finding two loaded guns in car ."], "publication": "nyt50", "label": [0], "tag": ["New York and Region"]} -{"id": "1453163", "text": ["Responding to intelligence information from federal authorities about a possible terrorist attack on New York Harbor , the United States Coast Guard closed the Port of New York to pleasure craft yesterday and , along with the New York Police Department , increased harbor patrols .", "Several senior law enforcement officials in New York played down the significance of the threat , which came amid heightened security for the New Year 's Eve celebration in Times Square .", "One official said the action was taken out of an `` abundance of caution , '' and another described the intelligence information as `` nonspecific , uncorroborated and of unknown reliability . ''", "Mayor Michael R . Bloomberg , speaking at a news conference in Times Square yesterday afternoon to discuss the New Year 's Eve festivities there , said in response to a question that the actions were simply prudent in an age of increased risk .", "`` I can tell you that there are no credible threats that we know of to our security here , '' he said .", "`` But New York and the N.Y.P.D. is in the business of prevention , and we take all the appropriate precautions that we should when you have large numbers of people get together . ''", "Police Commissioner Raymond W . Kelly , who appeared with Mr. 
Bloomberg in Times Square , said that the Coast Guard had restricted pleasure craft from coming into the harbor at 3:30 p.m. , and Coast Guard officials said the ban would continue until 8 a.m. today .", "Mr. Kelly said that along with the Coast Guard , the Police Department 's Harbor Unit would increase its patrols .", "An advisory from the New York State Office of Public Security on the threat said that the United States Department of Homeland Security had advised law enforcement authorities in New York of possible attacks at unspecified times on Dec . 31 in the vicinity of the harbor .", "The advisory said that the source that provided the information indicated that eight potential diversionary attacks would precede the harbor attack .", "`` These diversionary attacks are alleged to occur at cities located throughout New York State , '' said the advisory , which noted that the credibility of the threats was undetermined .", "The advisory came as the police and federal authorities in New York City were searching for 19 men who entered the country illegally a week ago , amid concern that , because most are from Pakistan and Persian Gulf states , they might possibly be part of a terrorist plot .", "But one senior law enforcement official said the authorities had no information to connect the men to the harbor threat .", "The men came from the Pakistani cities of Lahore and Karachi through London with forged European Union passports and crossed the border from Canada into New York State , officials said .", "The authorities learned about them during an investigation into a passport-fraud ring that operated in New York , Canada and Pakistan .", "On Sunday , the F.B.I. released photographs of five of the men , and names and dates of birth for them that they said might be fake .", "Members of the F.B.I. 
- N.Y.P.D.", "Joint Terrorist Task Force and Immigration and Naturalization Service agents conducted raids in Brooklyn and Queens and elsewhere on Monday , and questioned at least six men , who were later released , officials said .", "Senator Hillary Rodham Clinton yesterday cited the reports of the men crossing the border , saying they underscored her concern that there should be a specific office in the new Homeland Security Department devoted to the northern border .", "`` It is clear that we have just not paid the kind of attention or put enough resources into this , '' she said , adding that she would reintroduce a bill that would create a northern border security office in the new department ."], "summary": ["US Coast Guard closes Port of New York to pleasure craft and increases harbor patrols in response to intelligence from federal authorities about possible attack on New York Harbor .", "Law enforcement officials play down significance of threat , describing intelligence as nonspecific and uncorroborated ."], "publication": "nyt50", "label": [0, 1], "tag": ["New York and Region"]} -{"id": "1453164", "text": ["The troops begin massing on the periphery shortly before midnight .", "They arrive under police escort , armed with backpack blowers and mechanical brooms .", "As the carousers head homeward , the battalion moves in -- shock troops engaged in the surgical roundup of an estimated 25 tons of New Year 's Eve trash .", "In New York City , where the extraordinary is always morphing into ordinary , even the annual Times Square confetti removal operation has become mundane -- the rough equivalent , in apartment dwellers ' terms , of vacuuming post-party potato chips off the living room rug or swabbing up spilled Champagne .", "`` This is not a big deal , '' said John J . Doherty , who came out of retirement last year to become commissioner of the Department of Sanitation , where he had worked for 38 years .", "`` I mean , we 've been doing this for years and years and years .", "So it 's pretty routine . ``", "Sure , the tonnage waxes and wanes .", "Way back when , Times Square would be littered with broken glass , back when no one much cared what anyone carried in .", "But since the city began limiting what revelers could bring with them , the quantity of New Year 's Eve trash has dropped , to 25 tons one year ago , from 50 in 2000 .", "Nearly all of it is paper .", "This year , the city added another security precaution : litter baskets were removed , since they are potential hiding places for explosives .", "-LRB- The Police Department also enlisted Sanitation Department welders to seal manhole covers , Mr. Doherty said , declining to speculate on why . -RRB-", "The plan of attack : send in a small crew to `` preclean '' the area on the afternoon of New Year 's Eve .", "The main 60-member crew follows , with police escort , around 11 p.m.", "As soon as the crowds disperse , around 1 a.m. , the workers fan out with street sweepers , blowers , shovels and brooms .", "The hours are grim , but the pay is not bad .", "Crew members earn time and a half , Mr. Doherty said .", "`` Some of them do it because they like the money , '' he said .", "`` Some of the young guys do it because they like to be there .", "And some do it because they have to . ``", "By 6 a.m. 
, the bulk of the cleanup is done , the trash carted away in collection trucks .", "Later , as confetti drifts off building ledges and into the streets , day-shift workers sweep it up .", "The entire operation , including 78 workers over three shifts , costs the city $ 22,000 .", "`` Where will I be .", "`` Mr. Doherty said when asked yesterday .", "`` I 'll probably be asleep , actually . `` ."], "summary": ["Job of cleaning up Times Square after New Year 's Eve celebration , which has become fairly routine , described ."], "publication": "nyt50", "label": [9], "tag": ["New York and Region"]} -{"id": "1453165", "text": ["Worried that their party has been outgunned in the political propaganda wars by conservative radio and television personalities , influential Democrats are scouring the nation for a liberal answer to Rush Limbaugh and the many others on the deep bench of Republican friends .", "For years , Democrats have groused about their inability to balance what they see as the increasing influence over the electorate by advocates of Republican policies .", "But they say their concerns have taken on a new urgency because of the rise to the top of the cable news ratings by the Fox News Channel , considered by many to have a conservative slant , and the loss of the Senate to the Republicans in November .", "Some Democrats say the election outcome enhanced the influence of Fox News and personalities like Mr. Limbaugh .", "The efforts among influential Democrats , particularly liberals , range from a grass-roots talent search for progressive radio hosts to the creation of research organizations to provide a Democratic spin for the news media , to nascent discussions by wealthy supporters about starting a cable network with a liberal bent .", "People working on these projects acknowledged they were venturing into territory where liberals have failed and failed again , most notably with the short-lived radio programs of Mario M . Cuomo and Jim Hightower , not to mention Phil Donahue 's struggling liberal talk show on MSNBC .", "However , they said , the recent Republican gains have perhaps set the backdrop for the emergence of an angry liberal who could claim the same outsider status that worked so well for Mr. Limbaugh in the early 1990 's .", "The hurried efforts by Democrats to find more powerful media voices come after years of carping but little action .", "`` If you start from the premise that the message was right , which we do , then the problem was that it was n't getting out to the people , `` said one official of the Democratic Party who spoke on condition that his name not be used .", "With that sentiment , there is a sense within the leadership ranks that the party erred in not building a media support system after the 2000 presidential election , when it lost the media coordination of the Clinton White House .", "`` Across the board , we need to muscle up , '' said John Podesta , the former White House chief of staff for Bill Clinton and now a law professor at Georgetown University .", "`` That means from the Congressional operations to the party committees to the think-tank world to , most significantly , beefing up our capacity to communicate with the public in all forms of media , not just through obscure Internet Web sites but on television and radio . ''", "For his part , Mr. Podesta is discussing with the Internet entrepreneur Steven T . 
Kirsch and others the creation of a liberal version of the Heritage Foundation , the conservative research group that , along with others of its kind , is credited with helping start the modern conservative movement .", "The foundation is part of a circuit of influential conservative groups that are credited with helping to hone a singular message , bolstered each Wednesday at back-to-back meetings held by Grover Norquist , the head of Americans for Tax Reform , and the conservative activist Paul Weyrich .", "Those meetings are monitored and at times attended by some conservative commentators , columnists and Internet writers .", "Democrats have long claimed that the circuit has corralled conservative thinkers , and more important , conservative media , into a disciplined message of the week that gets repeated attention from Web sites like the Drudge Report , Mr. Limbaugh 's radio show , Fox News 's prime-time talk shows and the editorial pages of The Washington Times and The Wall Street Journal .", "Mr. Kirsch , chief executive of the Propel Internet service and a Democratic fund-raiser , said the foundation he and Mr. Podesta envision would do the same for liberals .", "`` During the last 10 years the opposition has become more organized and the liberals have n't adapted to counter it , `` Mr. Kirsch said .", "`` We will have components that will include messaging , message delivery and coordination of progressive groups so progressives will speak with more of a unified voice . ''", "Should the organizers succeed at starting a foundation , it would not have anywhere near the number of prominent , outright partisan media voices that its conservative counterparts do .", "Democrats can point to a scant few .", "Their most prominent television advocates , James Carville and Paul Begala on `` Crossfire '' and Bill Press on CNN 's `` Buchanan and Press , '' square off each day against conservative counterparts .", "Mr. Donahue stands alone on MSNBC , but his program has struggled some against the far more watched Bill O'Reilly on Fox and Connie Chung on CNN .", "Conservatives have Mr. Limbaugh , Sean Hannity , Michael Reagan and Neal Boortz , who collectively draw an audience of at least 30 million people per week with a strictly conservative message .", "They are led , of course , by Mr. Limbaugh , with an estimated audience of up to 20 million people a week , and Mr. Hannity , with nearly 10 million .", "Democrats , most recently Al Gore , have also complained that the Fox News Channel , overseen by the former Republican strategist Roger E . Ailes , slants its coverage against Democrats , a charge Mr. Ailes denies .", "Its average nightly audience of about 1.3 million people is the largest in cable news .", "In one of the more ambitious of the ideas circulating , a group of wealthy Democratic supporters is toying with the idea of starting a liberal cable network .", "That endeavor would cost in the hundreds of millions and require the backing of a media company with enough leverage to force it onto the major cable systems .", "Democratic officials said that they had discussed a similar idea with Haim Saban , a media mogul and party supporter , a couple of years ago , as Fox News began its ascent , but that he ultimately decided against it , in large part because of the odds against success .", "Mr. 
Saban had no comment , but an associate played down the seriousness of the discussions .", "Still , Rob Glaser , the founder and chief executive of RealNetworks , the Internet video service , said he believed there was room to create a progressive version of Fox News .", "`` There is a hole in the market right now , '' Mr. Glaser said .", "`` From my personal standpoint , holes in the market are opportunities . ''", "Democrats said a far more readily achievable goal would be to foster national liberal radio personalities .", "The task has fallen to a newly formed group , Democracy Radio Inc .", "It is overseen by a former Democratic Congressional staffer , Tom Athens , with help from , among others , Paul W . Fiddick , the Clinton administration assistant secretary for agriculture and a co-founder of the Heritage Media Corporation .", "`` We 're going to go out and identify talent and help them to create programming and actually connect them with local stations , `` Mr. Athens said .", "`` We want to plant a thousand seeds and see how many flowers actually arise . ''", "But if history is any guide , the soil may not be fertile .", "Liberal radio programs have not worked very well in the past .", "Liberals and conservatives said they believed this was in part because the most prominent liberal hosts have tended to present policy issues in all of their dry complexity while refraining from baring fangs against conservative opponents .", "`` Most liberal talk shows are so , you know , milquetoast , who would want to listen to them .", "`` said Harry Thomason , the Hollywood producer who is close to Bill Clinton .", "`` Conservatives are all fire and brimstone . ''", "Mr. Athens said his group would encourage its hosts to be more brazen and entertaining .", "`` Progressives have this problem : They sound too erudite , it 's like eggheads talking at you , `` Mr. Athens said .", "`` We believe that progressive talk radio can be every bit as successful as conservative talk radio if people present and format a show that people like . ''", "Conservatives are skeptical that all of this planning will do the Democrats much good .", "`` It 's not a matter of packaging or meetings , it 's a matter of ideas , `` Mr. Hannity said .", "`` The public is n't interested in the kind of liberalism that the Democratic party has come to represent . ``", "Robert Novak , the syndicated columnist and part of the conservative team on CNN 's `` Crossfire , '' said the Democrats were making too much about the efficacy of the conservative research organizations .", "Mr. Novak said he sent a staff member to Mr. Norquist 's meetings .", "But , he said , while the information shared at the meetings is `` helpful , it 's hardly a decisive factor `` in what he writes in his column or says on television .", "Correction : January 4 , 2003 , Saturday An article on Wednesday about efforts by influential Democrats to counter conservative voices in the news media misidentified the network that broadcasts `` Buchanan and Press , '' with the conservative commentator Patrick J . 
Buchanan and Bill Press , a prominent advocate of Democratic positions .", "It is MSNBC , not CNN ."], "summary": ["Influential Democrats , worried that their party has been outgunned in political propaganda wars by conservative radio and television personalities , are scouring nation for a liberal answer to Rush Limbaugh and the many others on deep bench of Republican friends .", "For years , Democrats have groused about their inability to balance what they see as increasing influence over electorate by advocates of Republican policies .", "But they say their concerns have taken on new urgency because of rise to top of cable news ratings by Fox News Channel , considered by many to have conservative slant , and loss of Senate to Republicans in November .", "Some Democrats say election outcome enhanced influence of Fox News and personalities like Limbaugh .", "Photo ."], "publication": "nyt50", "label": [0, 2, 1, 3], "tag": ["U.S.", "Washington"]} -{"id": "1453168", "text": ["Like so many others who have gone on to distinguished athletic careers from tiny Belle Glade , Fla . , Brad Banks grew up running in the area 's thick muck , eating sugar cane and chasing rabbits .", "`` You 've got to be on your toes to catch those rabbits , `` said Banks , the Iowa quarterback , who has become one of the most celebrated players in college football in a single storybook season .", "Banks visited his hometown on the northern edge of the Everglades shortly before Iowa assembled here for its game against Southern California in the Orange Bowl on Thursday night .", "He reminisced about his childhood , about the way the burning sugar cane fields would flush out so many rabbits .", "He could catch 30 on a good day , taking some home to eat , selling others for $ 3.50 each .", "`` It was fun to remember how it used to be for me , '' Banks said .", "`` It 's been a long ride , but it 's paying off right now . ``", "Three Belle Glade natives are in the N.F.L. : Fred Taylor of Jacksonville , James Jackson of Cleveland and Johnny Rutledge of Arizona .", "Banks intends to soon make it four .", "He is coming off a regular season during which he led the country in passing efficiency .", "He passed for 2,369 yards , with 25 touchdowns and only 4 interceptions -LRB- 2 of them off deflections -RRB- .", "Tack on his five rushing touchdowns , and that gave him a team record for combined touchdowns .", "`` He 's led our football team and he 's kept a very even , very stable demeanor through the good and the bad , `` Iowa Coach Kirk Ferentz said .", "`` I think that 's something the players really respect in him . ``", "Banks came into the season as a major question mark after playing sparingly in 2001 .", "And in the Hawkeyes ' third game , when Iowa held a 24-7 halftime lead against Iowa State , he lost two fumbles as Iowa fell to its sole defeat , 36-31 .", "`` That was like a wake-up call , '' Banks said .", "`` It got me to focus a little more . ''", "With concerns high , Banks , a senior , quickly turned the doubts into one long highlight reel .", "Two of his best games came at Penn State , where he passed for 261 yards and 4 touchdowns , and at Michigan , where he passed for 222 yards and 3 touchdowns .", "`` Penn State is not an easy venue , plus the game was on national television , '' Ferentz said .", "`` We were n't quite sure how he 'd handle adversity , but he just took it and ran .", "From then on , everything 's been pretty good . 
``", "Banks 's late heroics in a 31-28 victory over Purdue cemented his stature .", "He took the Hawkeyes on an eight-play , 87-yard scoring drive that ended with 1 minute 7 seconds remaining in the game .", "He began the drive with a 44-yard run on a quarterback draw , and he finished it with a 7-yard fourth-down touchdown pass to tight end Dallas Clark .", "`` He did n't do anything or say anything different on that drive , `` Clark said .", "`` I think that 's what made it so bizarre and so special .", "With all the pressure on the line , we 're behind , the season 's on the ropes and everybody 's looking at him , he was just as mild-mannered as he was when he started the game . ``", "Clark shook his head in wonder , still impressed by the memory .", "`` That just shows what kind of special quarterback he is , '' he said .", "When offensive tackle Robert Gallery described Banks as `` not a yell-and-scream guy , '' that was an understatement .", "He often talks in one - or two-sentence clips with a soft , self-effacing style .", "`` He never gets flustered in the huddle or screams at anybody , '' Gallery said .", "`` I 'd call him the strong , silent type .", "I just know he 's got everybody believing in him . ``", "Banks said he simply treated games like practice .", "`` Whatever you do on the practice field , you should be able to bring it over to the game , '' he said .", "Iowa went 11-1 for the season , including 8-0 in the Big Ten to claim part of the conference championship with Ohio State .", "And the individual honors came flooding to Banks .", "The Associated Press named him the player of the year .", "Banks also finished second in the Heisman Trophy voting to Southern Cal 's Carson Palmer , his quarterbacking counterpart in Thursday night 's game .", "Seeing him now , it is difficult to believe that he took such a circuitous route to stardom .", "He began his college career at the Central Florida but encountered academic trouble .", "That led him to Hinds Junior College in Mississippi , where there were plenty of occasions when he wondered if he had much of a future in the game .", "In particular , Banks recalled missing part of a season with a bruised quadriceps .", "`` I did a lot of praying and stayed hungry , '' he said .", "He said he felt as if the door to success `` was closed a lot of times . ''", "`` But I kept looking for that crack , because I knew I could explode through it , '' Banks said .", "Arriving at Iowa last year , he found himself playing behind the senior Kyle McCann .", "He stayed there for much of the season .", "To Ferentz , Banks stands now as a great example of waiting for one 's turn , then taking advantage when the opportunity comes .", "`` It still was a good year for me , '' Banks said of last season .", "`` It prepared me for this year . 
''", "COLLEGE FOOTBALL ."], "summary": ["Iowa University quarterback Brad Banks , who was Heisman Trophy candidate , took circuitous route to success , attending two colleges before Iowa and growing up in bucolic Belle Glade , Fla .", "Photos ."], "publication": "nyt50", "label": [0, 42], "tag": ["Sports"]} -{"id": "1453169", "text": ["To begin today 's final news conference before the big game , Susan Hamilton , the Gator Bowl chairwoman , gushed , `` Has this been a great week or what .", "`` Then she expressed hope that the predicted heavy rain would not wash out the festivities before Wednesday 's meeting between Notre Dame and North Carolina State .", "But her happy and hopeful words did not dispel an uneasy feeling in the room , particularly around the Fighting Irish delegation .", "Kevin White , the Notre Dame athletic director , set his jaw , shook his head and refused to discuss the most contentious of several issues : the continuing investigation into the arrest and detention of a Notre Dame player .", "Chad DeBolt , a senior safety , was held overnight in a local jail after he was arrested and charged with trespassing , the police said , when he refused to leave the Ocean Club in Jacksonville Beach early Friday morning .", "At some point , DeBolt sustained substantial facial injuries , which were visible in a photograph taken at the Duval County Jail .", "So far , there are more questions than answers about the incident .", "How did those injuries occur .", "When did they take place .", "`` We 've just moved on , that 's all I can say , `` White said .", "Is DeBolt , 22 , still with the team .", "Will he play against the Wolfpack .", "Does he have a lawyer representing him in this case .", "What is his medical condition .", "The mug shot showed swollen eyes and cuts around the mouth and nose .", "White repeated his statement that Notre Dame has `` moved on '' and said he would have no further comment .", "DeBolt has not been available for comment .", "Police officials were just as limited in their revelations .", "Greg Fields , public affairs officer for the Jacksonville Sheriff 's office , said an investigation is in progress to discover what took place between the time of DeBolt 's arrest and his release from the jail about 11 hours later .", "`` They wo n't update it , `` Fields said when asked if there were developments to announce today .", "`` They will come to a conclusion and release everything . ''", "That was just one of three recent incidents that have jolted the Fighting Irish 10-2 -RRB- as they prepare to face the Wolfpack -LRB- 10-3 -RRB- .", "The first was the announcement last week that Notre Dame would play without the seniors Jordan Black and Brennan Curtin , their first-string offensive tackles .", "They were left home because of what was termed `` a university matter . ''", "Another problem is the expected absence of Courtney Watson , a starting linebacker , who has not practiced this week because of a sprained knee and is doubtful for the game .", "These issues follow a relatively good season under Tyrone Willingham , the first-year coach , that began with an 8-0 streak and ended with a 2-2 thud .", "The final game was a 44-13 loss Nov . 30 at Southern California in which Notre Dame 's defense gave up a team-record 610 yards .", "Today , Gerome Sapp , a senior strong safety , said : `` Nobody 's perfect .", "We had a bad game .", "We 'll admit to that . ``", "That game was a good one for U.S.C. 
quarterback Carson Palmer , who threw for 425 yards and 4 touchdowns .", "Palmer and others said it was the pivotal factor in his winning the Heisman Trophy two weeks later .", "North Carolina State quarterback Philip Rivers , a junior , has completed 239 of 381 passes this season for 3,125 yards and 18 touchdowns .", "He threw only 10 interceptions , but he will be working against a secondary -- led by cornerbacks Shane Walton and Vontez Duff -- that intercepted 21 passes for a Notre Dame team that scored 105 points off turnovers .", "Rivers 's primary target is Jerricho Cotchery , who caught 57 passes for 1,065 yards and 6 touchdowns .", "This will be Notre Dame 's first bowl appearance since a 41-9 loss to Oregon State in the Fiesta Bowl at the end of the 2000 season .", "The Irish have lost their last five bowl games .", "Their last postseason victory was over Texas A & M , 24-21 , in the Cotton Bowl at the end of the 1993 season .", "This will be the first time Notre Dame and N.C.", "State have played .", "The Wolfpack started the season 9-0 , then lost three games before closing with a victory over Florida State .", "The North Carolina State running attack is led by the freshman tailback T . A . McLendon , who rushed for 1,083 yards and 16 touchdowns .", "The Wolfpack has won three of its last four bowl games .", "In today 's news conference , as is often the case when Notre Dame plays , there were humorous remarks about religious affiliation .", "Coach Chuck Amato of N.C.", "State , referring to the forecast for heavy rain , said : `` The priests and the nuns will have to take care of that rain .", "The Baptists I brought with me ca n't handle that . ``", "But Willingham disagreed in his reply .", "`` I am from North Carolina , '' Willingham said , `` and I do know the impact of Southern Baptists . ''", "On a more serious subject , Willingham was asked whether his players still have a bad taste in their mouths after the defeat at U.S.C.", "`` If it 's still there , we 'd like to get it out , `` Willingham said .", "`` But , sometimes , in four weeks , you can lose your taste buds . ''", "COLLEGE FOOTBALL ."], "summary": ["Notre Dame University prepares to face North Carolina State University in Gator Bowl and provides no answers about arrest of safety Chad DeBolt , who was charged with trespassing at nightclub .", "Photo ."], "publication": "nyt50", "label": [4, 35], "tag": ["Sports"]} -{"id": "1453170", "text": ["His shot has started to fall .", "His defense has clamped down on two huge challenges in the post .", "But as well as center Michael Doleac has been playing lately , he was not going to play along when Shandon Anderson fired an alley-oop pass to him during the Knicks ' victory over San Antonio on Monday night .", "Doleac did not even raise his arms to try to dunk the ball that banged off the backboard .", "Even his sudden success in the rotation has not made Doleac believe he is ready for such a play .", "`` I told Shandon that if I had caught that and made it , that would 've been my first alley-oop ever -- high school , college , pro , ever in a game , `` Doleac said .", "`` I 've done it in practice but never in a game . ``", "Although Coach Don Chaney insisted he had seen Doleac and Anderson connect on the play in practice while toiling for the second unit , some Knicks joked with Anderson , saying , `` Know your personnel . 
''", "Before the last three games , Doleac had struggled to justify any playing time , failing to show more than a hint of the shooting touch that inspired the Knicks to sign him as a free agent in August .", "He missed the first seven games of the season with a hamstring injury that affected him through most of the preseason .", "In his first 17 games , he averaged 3.1 points -LRB- shooting 36.5 percent from the field -RRB- and 2.3 rebounds and played little defense .", "But in Houston last Friday , when Kurt Thomas got into early foul trouble , Doleac stepped forward and helped the Knicks to victory with 15 points and 5 rebounds , shooting 6 of 9 and playing unexpectedly strong defense in 28 minutes against the Rockets ' 7-foot-6 Yao Ming .", "Against San Antonio , Doleac helped counter the combination of Tim Duncan and David Robinson , scoring 16 points , shooting 8 of 11 and grabbing a season-high 8 rebounds in 33 minutes .", "Doleac will be the first big man off the bench Wednesday night against Toronto because Clarence Weatherspoon was suspended for one game and fined $ 20,000 for fighting with San Antonio 's Kevin Willis .", "`` He 's playing with a lot of confidence , `` Chaney said of Doleac .", "`` Not just shooting , but with other things .", "Naturally he struggled earlier , but his confidence level is there .", "And I think guys are more comfortable with him now , too .", "If he 's open , they 're not reluctant to give him the ball . ``", "Perhaps they should be , at least when it comes to the lob above the rim .", "But on firm ground about 15 feet from the basket , Doleac has suddenly regained his touch and the faith of his teammates .", "`` I think the biggest thing is the way he 's shooting the ball , `` Latrell Sprewell said .", "`` When you 're able to space the floor like that with a big guy and have him knock down perimeter shots , it stretches the defense . ``", "Why has the ball suddenly begun to drop .", "`` I have no idea , '' Doleac said .", "What he does know is that it has been a hard adjustment to a new city and a new team while having little to show for his efforts .", "Doleac , who entered the league as the 12th overall draft pick after helping Utah reach the N.C.A.A. championship game as a senior in 1998 , had four inconsistent seasons with Orlando and Cleveland before joining the Knicks .", "`` It 's always tough to lose games and not play well , `` Doleac said .", "`` I mean , you want to come out there and do well and contribute to the team and try and help the team win games .", "When you 're not playing up to what you think you 're capable and losing games , it 's not much fun .", "Obviously , if I knew what it was I would have done it a long time ago . ``", "What may have turned his offense around has been the unexpected performance on defense .", "At 6-11 and 262 pounds , he is able to provide the Knicks with as close as their roster gets to a true center 's size and style .", "As much as he contributed on offense on Monday , he was just as good on defense .", "`` I usually pass up the first shot I see and try to get a rebound first , or get a stop first , '' Doleac said .", "`` A lot of times that will happen : my shot dictates my game .", "So if I miss my shot , I 'm not doing anything to help the team -- so , you know , sit down .", "I just need to get in the flow .", "If I miss , it 's ` Come on , Mike . '", "But if you 're defending and rebounding , who cares .", "I 'm doing something to help . 
``", "Chaney recalled an incident a couple of weeks ago when Doleac said , `` I 'm thinking too much . ``", "`` That comes with a guy who 's not familiar with what we 're doing , `` Chaney said .", "`` The last couple of games , especially the Houston game , he did a great job of using his strength . ''", "Chaney added , `` If you have size like that , it goes to waste if you do n't use it . ``", "BASKETBALL ."], "summary": ["New York Knicks center Michael Doleac gets 15 points and 5 rebounds in win against San Antonio Spurs , finally showing signs of improvement since his signing as free agent months ago .", "Photo ."], "publication": "nyt50", "label": [11, 2], "tag": ["Sports"]} -{"id": "1453172", "text": ["Ohio State tailback Maurice Clarett today disputed the contention of university officials that he failed to file the proper paperwork to allow him to return to his hometown to attend the funeral of a boyhood friend .", "Clarett said he filled out the paperwork Thursday , before the Buckeyes departed for their national title game matchup with Miami here in the Fiesta Bowl on Friday night .", "The National Collegiate Athletic Association requires the form for an athlete to qualify for emergency money , as Clarett would have needed for a plane ticket to his home in Youngstown , Ohio .", "`` They ca n't lie about that , `` Clarett , a freshman , said defiantly before a throng of reporters at Sun Devil Stadium on media day .", "`` I wo n't sit here and let them lie about that . ``", "On Monday , Clarett said university officials first ignored him , then gave him `` the runaround '' when he asked to fly home to attend services for Juaquin A . Bell , who was found fatally shot on Dec . 21 .", "His funeral was Monday .", "Ohio State 's athletic director , Andy Geiger , said that the university would have allowed Clarett to go home but that he had not filled out the proper form .", "In a statement released tonight , Geiger said Clarett had not submitted the form , Free Application for Federal Student Aid , as of Monday .", "He said the form , given to all Ohio State student athletes during orientation , requires information on family income from the previous tax year and must be filed annually .", "`` Maurice may have begun the process , '' Geiger said , `` but at the time we had to make the decision , there was no indication of a Fafsa on file for Maurice .", "We were therefore compelled to follow the N.C.A.A. rules as they apply to the situation . ``", "At a news conference Monday , Clarett , who set freshman records at the university and helped power the Buckeyes to a 13-0 record , said Ohio State officials cared more about a football game than a loss of life .", "Geiger disagreed .", "`` We care deeply about all of our student athletes , and we did everything possible to assist Maurice Clarett in this time of his personal grief , '' Geiger said .", "`` Unfortunately , given the circumstances , we had no choice other than to react in the manner in which we did . 
''", "Clarett said today that he had asked Coach Jim Tressel about going home and that Tressel had referred the matter to the athletic department 's compliance section , which reviews any financial aid or benefits given to student-athletes .", "Clarett , 19 , said his mother , Michelle , filled out the form last week and submitted it to the university .", "She declined to comment today .", "Clarett did not hide being angry about missing the funeral .", "He said he was most upset that Heather Lyke Catalano , Ohio State 's associate athletic director for compliance , did not contact him after he filed his paperwork .", "`` The compliance lady said she 'd get back to me in my room , `` Clarett said .", "`` She never called back in my room or on my cellphone .", "That 's the real reason I was mad , because she did n't call me back . ``", "Catalano said she could not comment .", "COLLEGE FOOTBALL ."], "summary": ["Ohio State University tailback Maurice Clarett denies failing to file paperwork required to qualify for emergency money to fly home for friend 's funeral , as university contends ."], "publication": "nyt50", "label": [0, 2], "tag": ["Sports"]} -{"id": "1453173", "text": ["You had one eye in the mirror as you watched yourself gavotte And all the girls dreamed that they 'd be your partner They 'd be your partner , and . .", "You 're so vain Vanity Tuna has been on the prowl again , a swinger on the football scene itching to wheel his imaginary six-shooter , wink and say , `` I 've still got it . ``", "This time , Jerry Jones gave him his number .", "But will Bill Parcells leave Dallas on hold to indulge in his own adversity cravings .", "Was Dave Campo just another coach left to fear the dark-alley footsteps of the N.F.L. ` s salvage genius .", "As they say in Tampa Bay , it 's never over until the house closing .", "No one needs this twisted intrigue , but this time every year , the coaching purge commences , along with the hunt for Tuna , turning the league into a waiting room for the paranoid .", "Who 's next to go .", "Who knows .", "For some , it 's best to pull the rip cord and escape this maddening line of work for a job field one notch -LRB- and only one notch -RRB- more forgiving .", "College life looks good on Pete Carroll .", "Standing on a practice field on Saturday beneath Windex-blue skies in Davie , Fla . , Carroll was wearing khaki pants and a U.S.C. hoodie -LRB- kid code for hooded sweatshirt -RRB- , looking more like a frat pledge than a 51-year-old coach during the countdown to the Orange Bowl tomorrow against Iowa .", "`` He 's like a 20-year-old , `` said Southern Cal quarterback Carson Palmer , he of recent Heisman Trophy glory .", "`` He is intense , really into it . ''", "He 's all but jumping on couch cushions .", "In his early days in New England , Carroll 's boyish exuberance was embraced after the exit of His Royal Tuna-ness , but soon the mocking of Opie Taylor began .", "By the time Carroll 's winning average dipped to . 500 in Year 3 of living with Parcells 's ghost , his good nature had become a symbol of softness .", "It was a rewind of the critique from his cameo as Jets coach in 1994 , and a repeat of what every man cursed as `` a player 's coach `` hears in the end .", "`` If people would go back and monitor what has been said each year , '' Carroll noted , `` it 's almost canned . 
``", "Win and you 're a player 's coach .", "Lose and you 've lost your team .", "The same flip-flop in logic confronted Herman Edwards when the Jets were searching for crawl space at 2-5 .", "Was his team adrift .", "No , as it turns out .", "While Edwards should n't dilute his quality image by banging the us-against-the-cynics drum -- after all , did he really expect a warm glass of milk at 1-4 .", "-- he is among a growing number of coaches who resist publicly scolding or blaming players for his team 's predicament .", "`` Times are changing , '' Carroll said .", "`` Players want to know why , and in the old days , they did n't even think to ask why .", "This is a good thing .", "This is the evolution of a sport . ``", "Player communication is in .", "Humiliation is out .", "Where does that leave Parcells .", "Winners are in demand , no matter how combative .", "Too often , nice guys have a smaller margin for error .", "In his case , Carroll is better off for his dismissal .", "Now , he can hug his players and guide them in life , slap hands with the fans and listen to his rock music , all without ridicule .", "`` For the most part , I think there is an innocence in the following for college football that does n't exist in the N.F.L. , and that is refreshing , `` Carroll said .", "`` There 's more tolerance . ``", "He discovered this upon his arrival at Southern Cal .", "Whether he was a retread or a faith healer , no one was quite sure when the Trojans started last season 1-4 , only to win five of their last seven games .", "These days , there is little question : he has assembled a revival tent at Southern Cal .", "Given control of the program -- a situation he never had in the N.F.L. -- Carroll took one year to restore Trojan lore during a 10-2 season that has unearthed the university 's legends .", "Look , there 's O . J . Simpson .", "On Saturday , he turned up for the first time since the glove did n't fit to watch the Trojans go through drills .", "Posing for pictures with players , shaking hands with the curious , Simpson proved even a pariah can find sanctuary .", "What would you expect in the peace , love and happiness world created by the coach who grooves .", "Maybe Carroll is na\u00efve , but he finds something romantic about college football .", "`` The N.F.L. changed when guys started being paid so much money that the normal fan could n't relate , `` Carroll said .", "`` College guys are n't getting that .", "They 're going to school .", "It 's like in the old days when guys worked in the factories and played football on Sunday . ``", "The charm quotient in the modern N.F.L. depends on the finish .", "One final weekend swayed the fate of nearly a dozen coaches .", "If Tom Coughlin 's Jaguars had n't collapsed one last time on Sunday , would he still have a job .", "If Butch Davis 's Browns had run dry on miracles , would he be employed .", "`` There 's a seriousness that is out of balance in the N.F.L. , `` Carroll said .", "`` It 's a wonderful game .", "It 's too bad that the seriousness has to go so far . ``", "The business of the N.F.L. creates a thin line between pink slip and contract extension , between a coach faced with Tuna rumors and one escaping into the playoffs .", "Still , many are willing to loiter around the N.F.L. 
, hanging out in TV studios or as team assistants , waiting for their next chance at misery .", "Carroll has chosen joy .", "`` I would really like to grow old doing this , '' he said .", "`` I 'm just going to keep having fun , enjoying my work and listening to my songs . ``", "To be sure , Carroll is more Rolling Stones than Carly Simon .", "Sports of The Times ."], "summary": ["Selena Roberts Sports of The Times column discusses more forgiving and less serious environment surrounding college coaching , where former NFL coach Pete Carroll has found welcome refuge and success with Southern California University .", "Photos ."], "publication": "nyt50", "label": [65, 61, 19, 10, 38, 26], "tag": ["Sports"]} -{"id": "1453174", "text": ["CURTIS MARTIN may be the best running back in New York history .", "He may also be one of the most underrated athletes ever to play in a city that encourages and even creates celebrities .", "Martin does not make the gossip columns .", "He does his running on stairwells and empty practice fields .", "`` Curtis is under the radar , '' his coach , Herman Edwards , said as the Jets prepared to meet the Colts in the playoffs on Saturday .", "Part of the problem is that Martin does not have any exotic mannerisms to inflame the multitudes .", "He does not have a patented dance , nor has he produced an autograph-signing pen from his sock .", "He neither taunts nor struts .", "`` Some guys break off a run of 60 yards and they 're a hero , `` Edwards said .", "`` But Curtis gets you 15 yards and people go , ' Hmpf . '", "He 's like that old pair of shoes that you wear all the time , because they 're so comfortable . ``", "Edwards compared his star back to the Colts ' star receiver , Marvin Harrison , who set a league record with 143 receptions this season .", "`` They do n't have the dat-dat-dat ! `` Edwards said , mimicking the staccato theme music of televised sports shows .", "`` Harrison plays in Indianapolis , '' Edwards noted , invoking stereotypical Midwest blandness .", "`` But Curtis plays in New York , '' Edwards said , as if to wonder why the entire region is not obsessed with him .", "Martin is the 14th-leading rusher ever , with eight straight 1,000-yard seasons , five of them with the Jets .", "Why has New York not gone ga-ga over Martin .", "In the late 50 's , before the Jerseyfication of New York football , Frank Gifford and Kyle Rote were the toast of P . J . 
Clarke 's and other Midtown oases .", "Gifford remains the epitome of the New York running back : the fair-haired import from the University of Southern California , still fifth in Giants history with 3,609 yards and 34 touchdowns rushing , and second among receivers with 367 catches for 5,434 yards and 43 touchdowns .", "Not only that , but Gifford also scored a touchdown on a runback and kicked 2 field goals and 10 extra points .", "And to this day , Gifford says modestly that he might have been a decent defensive back if Rote had not been hurt and had to switch from rusher to receiver .", "Since the Rote-Gifford days , the Jets have produced Matt Snell and Emerson Boozer of the Super Bowl III champions , along with Freeman McNeil , and in the swamplands the Giants have had little Joe Morris and Rodney Hampton and now the engaging Tiki Barber .", "Meanwhile , Knicks fans loved or hated Patrick Ewing but never ignored him .", "Lawrence Taylor transcended the Hudson River barrier .", "Joe Namath , Reggie Jackson , Keith Hernandez , Derek Jeter , Mike Piazza and Mark Messier have all been princes of the city .", "Plus , New York has a way of hyping very good players into cult status : John Starks , Lenny Dykstra , Paul O'Neill , to name three .", "Curtis Martin , however , who actually does venture into the city for restaurants and jazz , does not attract crowds , partly because of his modest body language .", "Another part of Martin 's relative obscurity comes from where he practices and plays .", "The Islanders won four Stanley Cups a few yards from the Jets ' bunker , yet charismatic or quirky stars like Denis Potvin , Mike Bossy and Billy Smith remained essentially anonymous in the city .", "The Devils won two Stanley Cups but are anonymous east of Hoboken .", "The Nets were finalists last season but still can not sell out their arena , partly because of the absence of mass transportation .", "And even though the Jets are trying to piggyback a new West Side stadium with a potential 2012 Summer Games boondoggle on the backs of the taxpayers , the Jets are essentially strangers in the big city .", "Martin does his best to stay unnoticed .", "When he reached 10,000 yards for his career this season , he let the Jets know that he did not want any celebration during the game .", "And he does not indulge himself after touchdowns , either .", "He and Harrison both hand the ball to the official and jog back to the sideline .", "`` Most players in the league want to create more excitement , '' Edwards said .", "`` That 's the way it is in our society . 
``", "Instead , Martin remains the private man who trained himself in a rural retreat in Pennsylvania before his senior season at Pittsburgh , who runs the stairs to his 35th-floor apartment in Florida , who gives away some of his salary to the needy , who has never forgotten the misery of his childhood in Pittsburgh , who says he avoids a social life because he will have time when his career is over .", "Curtis Martin does not need the dat-dat-dat .", "Sports of The Times ."], "summary": ["George Vecsey Sports of The Times column discusses New York Jets running back Curtis Martin , who seems to fly under New York media radar and receives very little public attention , despite fact that he is 14th-leading rusher ever , playing in market that loves to create civic celebrities .", "Photo ."], "publication": "nyt50", "label": [15, 40, 0, 39, 36], "tag": ["Sports"]} -{"id": "1453175", "text": ["Nine Northeastern states filed a legal challenge today in federal court here to new air-pollution rules for power plants and other industries , just hours after the Bush administration published those rules .", "The states ' attorneys general said the rules , which were tentatively announced last month , constituted the most serious effort at rolling back the landmark Clean Air Act since it was enacted more than 30 years ago .", "They said they wanted to make a strong , swift objection and filed their legal petition for review after seeing the new rules on a government Web site this morning .", "The rules , published today in the Federal Register , concern a program known as New Source Review .", "The changes would allow thousands of aging coal-fired power plants and other industrial sites to upgrade without having to install costly antipollution devices .", "Eliot L . Spitzer , the New York attorney general and an organizer of the suit , said , `` The Bush administration has taken an action that will bring more acid rain , more smog , more asthma and more respiratory disease to millions of Americans . ''", "The Environmental Protection Agency , which published the rules , defended them and said the administration followed proper procedures in issuing them administratively rather than seeking legislation .", "`` We reaffirm that we strongly believe that these rules will be positive for the environment , '' Joe Martyak , a spokesman for the agency , said .", "`` We feel strongly that at the end of the day , what we 've done is the right thing as well as a valid action .", "To say this is gutting the Clean Air Act is absolutely incorrect .", "It is strengthening these provisions . 
``", "The utility industry criticized the suit .", "The Electric Reliability Coordinating Council issued a statement saying that `` the Northeast attorneys general reflect a minority opinion , '' shaped more by economic concerns than by environmental problems .", "In addition to the published rules , the administration issued a new proposal that would expand an exemption allowing power plants and other industrial facilities to escape pollution controls .", "The suit filed by the states did not address that proposal .", "Their one-page petition , filed in the United States Court of Appeals for the District of Columbia Circuit , did not state the grounds for the challenge .", "But legal advisers in the states said they would argue that the rules violated the Clean Air Act and that they could not be made without the consent of Congress .", "The states ' legal action escalates a struggle between utilities and clean-air advocates that has been waged since the Clean Air Act was signed into law by President Richard M . Nixon .", "Industries have complained most recently to the Bush administration that the current rules , begun under President Bill Clinton , were choking off new investments in power-generating plants and discouraging energy-saving efficiencies .", "Because the administration had made its intentions clear when it announced the rules last month , the attorneys general had time to prepare their coordinated legal response .", "Expecting that the administration would publish the rules during the holidays , when few people would be paying attention , the attorneys general signed their documents in advance and delivered them to Joseph Curran Jr . , the attorney general of Maryland , whose aides filed them in court here today .", "`` We could have waited , '' said Peter Lehner , chief of the environmental protection bureau in Mr. Spitzer 's New York office .", "`` But this is the first time there has been such a major retreat on clean air , and the states believed it was very important to respond aggressively and quickly . ''", "Mr. Lehner said he expected other states to join the suit in the next month .", "The nine that filed today were Connecticut , Maine , Maryland , Massachusetts , New Hampshire , New Jersey , New York , Rhode Island and Vermont .", "The filing puts Christie Whitman , the administrator of the Environmental Protection Agency , in an awkward position .", "As the governor of New Jersey , Mrs. Whitman had joined other states in seeking relief from Midwestern power plants whose smokestack pollution drifted eastward .", "Now , as head of the agency promulgating the new rules , she is in the position of defending those rules .", "Both Mr. Spitzer and Richard Blumenthal , attorney general of Connecticut , have said that the rules proposed today would undermine their hand in prosecuting and settling the dozens of enforcement cases that as governor Mrs. Whitman had joined in bringing .", "`` They feel like they 've been sold out by her now that she 's gone to Washington , `` said John Walke , a clean air expert for the Natural Resources Defense Council .", "In those enforcement cases , the states , the E.P.A. and environmental groups asked judges to order companies to spend hundreds of millions of dollars on pollution controls , based on the New Source Review program that the states says the environmental agency is now proposing to weaken .", "Mrs. 
Whitman said in a recent interview that the new rules did nothing to compromise the enforcement suits .", "`` Those are very important to me , '' she said .", "`` Those cases are still going forward . ''", "Industry groups , which had pushed for the rules that were published today , hailed them and attacked the attorneys general .", "The National Association of Manufacturers said the rules would `` help further clean air and boost energy security '' and `` provide business planners with greater certainty as they work to increase production and limit air pollution in a cost-effective manner . '' ."], "summary": ["Nine Northeastern states file legal challenge to new air-pollution rules for power plants and other industries just hours after Bush administration publishes rules .", "Say rules constitute most serious effort at rolling back landmark Clean Air Act since it was enacted more than 30 years ago .", "Changes would allow thousands of aging coal-fired power plants and other industrial sites to upgrade without having to install costly anti-pollution devices ."], "publication": "nyt50", "label": [4, 1, 0], "tag": ["Front Page", "U.S."]} -{"id": "1453176", "text": ["Jose Hernandez had the best year of his 10-year career last season and can not find a team that will give him more than one year and $ 2 million .", "The Mets , who need to fill a hole at third base , are among four teams that have been talking to Hernandez 's agent .", "But like everyone else , they are trying to secure a player 's services at a bargain-basement rate .", "Hernandez 's experience as a free agent has not been unique .", "In recent weeks , agents have told of their clients ' receiving multiple offers , but the offers have been exactly the same or similar .", "And all have been well below any market in recent years .", "`` Years ago we called this collusion .", "Now we call it coincidence , `` one agent said facetiously yesterday .", "But he added : `` In this environment they have reason to say they have to show restraint .", "We 've been hearing the same story from everyone . ``", "Alan Nero , Hernandez 's agent , said all of the offers he had received were for one year in the range of $ 1.5 million to $ 2 million .", "The last time he was a free agent , in 1999 , Hernandez signed a contract with Milwaukee that paid him $ 10 million for three years , plus the $ 25,000 bonus he earned for making the 2002 National League All-Star team .", "`` What has happened , '' Nero said , `` is this market , from the players ' point of view , is very depressed .", "Players have to decide whether they want to take massive paycuts or wait their turn .", "In Jose 's case , he feels he 's coming off a career year , and he does n't feel he should have to take a paycut . 
``", "The Mets , San Francisco , Colorado and Cincinnati are the teams that have pursued Hernandez , 33 .", "Steve Phillips , the Mets ' general manager , will not discuss negotiations with a particular player , but it is clear he wants to put together the left side of his infield as cheaply as possible .", "He signed Rey Sanchez -- for a $ 1.3 million salary and the chance to earn $ 700,000 in bonuses based on playing time -- to keep shortstop warm for Jos\u00e9 Reyes .", "He would like to get Hernandez for a similar total .", "The Mets had Bill Mueller on their list of prospective third basemen , but he lost interest in them when Phillips would offer no more than one year .", "A report circulated among general managers and agents yesterday that Mueller had agreed to a two-year , $ 4.5 million contract with Boston , but Mueller 's agent , Dave Meier , said they had no deal .", "`` We 're talking to them , but we 're not there yet , `` he said .", "Dan O'Dowd , the Rockies ' general manager , acknowledged that he had been talking with Nero about Hernandez .", "Asked about the low , similar offers Hernandez received , O'Dowd said : `` I can only speak for ourselves .", "We just do n't have a whole lot of money to work with . ``", "Referring to circumstances clubs and free agents find themselves in , he added , `` We 're getting into the beginning of January and people do n't have jobs , and we have limited resources . ``", "Agents believe the combination of the number of players still seeking jobs and the reduced amount of money available for payroll is the precise situation the clubs have strived to create .", "With spring training about six weeks away , clubs feel , players will become uncomfortable with their unemployed status and accept the lower salaries the clubs want to pay them .", "Ned Colletti , the Giants ' assistant general manager , declined to discuss any offer the Giants might have made to Hernandez .", "`` We 're looking for someone who has some versatility , and Hernandez has as much versatility as anyone out there , `` Colletti said , but added : '' We have n't really had a conversation since before Christmas .", "It 's been kind of quiet . ``", "As the day progressed yesterday , Nero spoke with O'Dowd and learned that the interest in Hernandez had been reduced by one team .", "`` It appears they 're going in another direction , `` Nero said of the Rockies .", "Jim Bowden , the Cincinnati general manager , did not return a call to discuss the Reds ' degree of interest in Hernandez .", "Phillips said Monday that patience was called for , and that he would not panic in his quest for a third baseman .", "The interested clubs might think their slow pace could spur Hernandez to grab one of the low offers , but Nero said he did not expect his client to panic either .", "`` At this point he 's not motivated to accept any of the offers that are out there , `` Nero said .", "`` We 've decided we will be very patient .", "We wo n't be rushing into something that is not quite acceptable .", "Jose Hernandez has worked a long , long time to be where he is , and we 'd like him to be treated appropriately .", "We 're going to continue to wait . 
``", "BASEBALL ."], "summary": ["New York Mets pursue third baseman Jose Hernandez , along with three other teams , but he says he will wait for offer that is for more than one year ."], "publication": "nyt50", "label": [19], "tag": ["Sports"]} -{"id": "1453177", "text": ["There is a tradition among inmates serving long stretches in the nation 's houses of correction .", "The new year is rung in with a little Dick Clark and a little jailhouse juice .", "The homebrew is lovingly known as hooch in Sing Sing and called pruno in San Quentin , but the process by which it is made is the same .", "It is a fortified wine concocted from a hodgepodge of ingredients including raisins , prunes -LRB- as in pruno -RRB- , milk or anything containing sugar that can be purloined from the mess hall and fermented into alcohol .", "But the New Year 's tradition was broken this evening at the California State Prison Los Angeles County located high in the Mojave Desert about a 90-minute drive from Los Angeles .", "The warden decided two months ago that fresh fruit should be banned from all lunch boxes that are delivered daily to the cells of its 4,000 inmates .", "Without fruit , the thinking goes , there can be no wine .", "It is the first prison in the California state prison to ban fruit from the cellblocks .", "`` A good deal of the violence that goes on in these walls is alcohol related , '' said Lt . Ron Nipper , a spokesman for the prison .", "`` So this is law enforcement 101 .", "Cut your hair , behave yourself , keep your cell clean and no pruno . ``", "Prison officials acknowledge that it is difficult to curtail pruno production entirely .", "Sugar and water are all that is needed .", "Sugar is broken down into ethyl alcohol in the presence of yeast , which floats about naturally in the air .", "`` We do the best we can , '' Lieutenant Nipper said .", "`` But you ca n't trust these guys .", "Inmates working in the kitchen hide loose yeast in their shoes .", "Everything has a price in here . ``", "Humane and law-abiding citizens need not worry about an outbreak of scurvy among the incarcerated in Los Angeles County .", "State guidelines require that prisoners receive 15 servings of fresh fruit each week .", "Prisoners receive their fruit allotment at breakfast and dinner , which are served in the mess hall .", "Box lunch is served in the cellblocks .", "`` There is pruno in every prison , '' admits Margot Bach , a spokeswoman for the California Department of Correction .", "`` We see an upswing around the holidays like Christmas and New Year 's .", "Inmates are human beings after all .", "They want to ring in the new year like everybody else .", "It 's a pretty busy day for us . ``", "Indeed .", "This morning , a half-dozen correction officers swept through the beds of Cell Block A , ostensibly in search of pruno .", "Though the blend is sealed in plastic bags , it is so rancid it can be detected with a simple sniff of the nostrils .", "`` It 's a never ending battle of contraband , `` said Sgt . J . 
Ortiz .", "It is estimated by prison officials that more than two million cocktails are confiscated each year in California 's 33 maximum security penal facilities , which house 160,000 people .", "According to correction officers , amateur alcohol is more prevalent in California prisons than any other form of contraband .", "Possession of alcohol is considered a misdemeanor behind bars , while possession of hard substances such as methamphetamines , opiates and marijuana are felonies .", "This is a deterrent to the man with two strikes , who if caught with a marijuana cigarette , could find himself serving a life term .", "Amenities that have been banned in California prisons over the past decade include weights , indoor smoking , conjugal visits for those serving life sentences and now pruno .", "Keep this up , inmates say , and people will stop wanting to come .", "`` Everybody wants to celebrate the New Year , '' said Paul Magnen , a soft-spoken and heavily tattooed man currently serving 50 years to life for the possession of narcotics , his third strike .", "His previous two felonies , he said , were in connection with an armed robbery committed in 1980 .", "`` God knows I could use a drink , '' he said glumly as he sat at a card table in Cell Block A .", "`` I did what I did , but it does n't justify 50 years .", "There 's not much to celebrate in here really .", "One day is just like the next .", "Nothing changes inside .", "Except the day they take me out in a box . ``", "The most famous recipe for jailhouse pruno comes from Jarvis Masters , a death row inmate at San Quentin who won a PEN award for his 1992 poem `` Recipe for Pruno . ''", "Take orange peels , fruit cocktail and water and heat it for 15 minutes in your sink with hot water .", "Keep mixture warm with towels for fermentation .", "Leave hidden and undisturbed for two days .", "Add sugar cubes and six teaspoons of ketchup .", "Heat for 30 minutes .", "Wrap and leave undisturbed for three more days .", "Reheat daily for 15 minutes for three more days .", "Skim and serve .", "The poem ends , `` May God have mercy on your soul . '' ."], "summary": ["Inmates in prisons across country continue their efforts to make jailhouse alcoholic brew .", "Homebrew is lovingly known as hooch in Sing Sing and called pruno in San Quentin , but process by which it is made is same : it is fortified wine concocted from hodgepodge of ingredients , anything containing sugar that can be purloined from mess hall and fermented into alcohol .", "Correction officers say amateur alcohol is more prevalent in California prisons than any other form of contraband .", "Photos ."], "publication": "nyt50", "label": [3, 2, 32], "tag": ["U.S."]} -{"id": "1453178", "text": ["OUT of bed , you bourgeois slugs , for it 's New Year 's Day , and the Poetry Project 's Marathon Reading at St . Mark 's Church begins just about the time you are planning a late lunch .", "Certainly you remember it , from the days when you hung out in the East Village and did not own a suit .", "A fine counterculture event -LRB- oh , sorry , in those days , that would have been your culture -RRB- , featuring , over the years , poets and performers like Allen Ginsberg , Patti Smith , Gregory Corso , William S . Burroughs , John Cage and Lou Reed .", "In the 1970 's , the reading began at 7 at night and ran until 3 or 4 in the morning .", "Today , while artists like Ms. 
Smith , Eric Bogosian , Philip Glass and Taylor Meade are scheduled to perform , the reading is scheduled to begin at 2 p.m. and run till around midnight .", "A bow to our more somber mood , says Ed Friedman , the artistic director of the Poetry Project and himself a poet .", "Mr. Friedman , it does not seem irrelevant to note , is 52 .", "And he has gone gray .", "`` Everyone is working , '' he sighs .", "`` And for a lot of people that are maybe over 35 -- well , put it this way : Getting people to come out and read at 3 a.m. is n't what it once was . ``", "Can you be saying , Mr. Friedman , that Bohemia is n't what it used to be .", "`` It 's harder , `` he says .", "`` It 's harder than it used to be in the 1970 's .", "Poets could pretty much support themselves with a part-time job .", "And there was a lot more time to write .", "Poets and pretty much everybody have to work much harder now , to assume the work of people who are fired .", "It used to be you had time at your desk that you were n't doing anything , you could write a poem , call a friend , use the copy machine . ``", "A nurturing spot for creative types , St . Mark 's Church , and if you doubt it , try to find a quiet spot to interview Mr. Friedman .", "The sanctuary of the church has an exhibit of ceramic face masks , about to come down , and if you ask that the installation be delayed for 10 minutes while a photographer shoots , the ceramics artist , feeling particularly disenfranchised , sulks .", "`` Yeah , I 'm just the artist , `` he says .", "Discussion on the second-floor balcony is accompanied by the sounds of a piano tuner , and the ringing of the church bells , a few feet away .", "It is a credit to Mr. Friedman , an easygoing , amusing man , that in this environment he can discuss Art .", "Particularly as his poems are not the `` How do I love thee '' sort .", "They are free form , often written as text , and may include references to the process of writing , so that if the poet coughs , the poem includes the words , `` big cough , '' or `` My throat is dry and full of mucus . ''", "BLONGGGG ! -LRB- Short pause . -RRB-", "BLONGGGG , BLONGGGG , BLONGGGG ! -LRB- That was a reference to the piano tuner , concentrating on the lower end of the keyboard .", "He certainly is dedicated .", "Does n't he ever go to the bathroom .", "Moreover , if those bells peal one more time the reporter will leap over the balcony . -RRB-", "`` I am isolated bourgeoisie / who would gladly write for the masses / if only I knew them as friends and co-workers , '' Mr. Friedman begins one poem .", "`` We 'd talk about families and then make plans / to renovate the world that 's crushed us for generations . ``", "A bit of poetic license there , perhaps , for the world , as Mr. Friedman tells it , did not , in the early days , crush him .", "He grew up in Los Angeles , his father a doctor , his mother a bacteriologist turned sculptor .", "There was a year and a half of pre-med at the University of California at San Diego , before graduating with a degree in poetry and literature .", "His parents were not enchanted with his decision to be a poet , but it was the 60 's , Mr. Friedman says with a laugh , when there existed this strange idea that you could be what you wanted .", "He came to New York in the fall of 1971 and was involved in poetry readings at St . 
Mark 's Church , at 131 East 10th Street , by January 1972 .", "Now he earns $ 40,000 a year at the Poetry Project .", "He is married to a painter who works in textile design , he has a 3-year-old son .", "Things have not always been good .", "More from his autobiographical poem , `` Isolated Bourgeoisie '' : `` The day is cold .", "The sky is blue and cloud-strewn , / my wife is recovering from alcoholism , and / we 're going to Washington to protest even one more / cent being allocated for stupid B-1 bombers instead of schools . / Would you like to come .", "I know there 's room on the / yellow bus we 're borrowing from the / neighborhood vehicle cooperative . ``", "BLONGGG ! -LRB- The piano tuner . -RRB-", "CLANG , CLANG , CLANG ! -LRB- The reverberating church bells .", "Ears hurt .", "Must get out .", "How to ask the rude question .", "-RRB- MR . FRIEDMAN , your nine volumes of poetry have been published in the smallest of the small press .", "It would appear that you are not so successful .", "`` Look at Allen Ginsberg , '' Mr. Friedman says .", "`` Who published him .", "City Lights , who did Gregory Corso , Lawrence Ferlinghetti .", "It has n't been the major houses or major university press that has spawned influential work over the last hundred years . ``", "But there was a point when Ginsberg was published by larger houses .", "`` In the last 20 years of his life , '' Mr. Friedman says .", "If that 's correct , should n't Mr. Friedman , by this time in his life , be getting published in the larger houses now , too .", "`` Gee , I hope so , '' Mr. Friedman says .", "Then grins .", "-LRB- Nice guy .", "Sweet . -RRB-", "PUBLIC LIVES ."], "summary": ["Public Lives profile of Ed Friedman , poet and artistic director of Poetry Project , which conducts annual marathon poetry reading on New Year 's Day at St Mark 's Church in East Village .", "Photo ."], "publication": "nyt50", "label": [0, 5], "tag": ["New York and Region"]} -{"id": "1453179", "text": ["As pinup calendars go , it has many of the standard features : models in black leather perched on beefy motorcycles .", "But the men and women on display here are n't exactly firefighters , or the Girls of `` Baywatch , '' or any other known species of cheesecake or beefcake .", "They 're librarians .", "With eyeglasses here , gray hairs there , and in several portraits , a purple tote bag full of books tucked into a corner .", "The 2003 calendar , titled `` Easy Readers @ Your Library , '' introduces the Librarians of Ocean County , who came up with the idea as an eye-grabbing way to help raise the library 's $ 1.6 million share of a $ 12.9 million expansion of the library system 's main building here .", "For $ 10 -LRB- $ 15 with shipping and handling -RRB- , readers -- and nonreaders , for that matter -- can enjoy 12 months of their favorite local information specialists .", "But there was another motive behind the move .", "`` We wanted to show people we 've changed , `` said Nancy Dowd , the head of public relations for the library system , who snapped the photos with her Olympus C-3000 , a digital camera .", "`` People 's ideas of librarians is conservative , and this just blew it out of the water . ``", "Or as Miranda Sulikowski , 34 , who is Miss November , put it : `` Anyone who sees it , they 're going to say , ` wait a minute , they 're not old fuddy-duds . '", "We 're on top of things . 
``", "On top of a red Harley-Davidson XL 1200 , in a black leather bustier , is the county 's library director , Elaine McConnell -LRB- Miss December -RRB- , who normally prefers Talbots ' fashions and turtlenecks .", "Miss May is Heather Andolsen , a senior librarian , who sports a choker collar and tattoo .", "Fourteen women and two men grace the pages -- some doubling up in the pictures -- and are often outfitted in do-rags , tassled gloves and sunglasses .", "Reaction has been overwhelming .", "`` Let us change the librarians ' image ! `` said a fax from one reference librarian in Elgin , Ill . , who heard about the calendar via a television broadcast .", "He wanted three signed calendars .", "Requests have come from as far away as Australia .", "On the other hand , Ms. McConnell said , a patron vexed by other recent changes was not amused .", "`` We knew your taste was in question when the coffee cart opened '' in what had been a books-only realm , he wrote in an anonymous e-mail message , `` and this confirmed it . ''", "The idea for the calendar arose when the library tried to promote a raffle for a Harley motorcycle during the fall .", "Various workers from the county 's 20 branches posed in promotional posters .", "Later , the library director and Ms. Dowd got the idea to turn the posters into a calendar .", "A thousand copies have been printed so far , and Ms. McConnell estimates about $ 3,000 worth have been sold .", "There has not been much discussion of a possible 2004 Librarians of Ocean County calendar , but Ms. Sulikowski offered this possibility : `` Hard hats , tool belts and shorts . ''", "Another librarian chimed in : `` I 'll do shorts .", "That might get more sales . ``", "Or as Becky Schoonmaker , who appears on the March page , put it , referring to her plans to wear skimpy cutoffs , `` Daisy Dukes , woo hoo ! '' ."], "summary": ["Librarians in Ocean County , NJ , pose for calendar to dispel myths about their profession 's conservatism and to help raise money to expand library .", "Photo ."], "publication": "nyt50", "label": [4], "tag": ["New York and Region"]} -{"id": "1453180", "text": ["Two counties in the Hudson Valley will begin enforcing new smoking restrictions in restaurants and workplaces on Wednesday .", "The smoking bans enacted by Dutchess and Orange Counties are not as strict as the ones recently approved by New York City and Nassau County on Long Island , but they are causing a local furor all the same .", "The new smoking law in New York City is to go into effect March 30 , and Nassau 's is to start March 1 .", "Groups representing restaurants and bars in Orange and Dutchess Counties have already filed federal lawsuits against their Legislatures .", "The suit filed by the Dutchess / Putnam County Restaurant and Tavern Association on Monday seeks to have the new law overturned and declared unconstitutional .", "`` It is overly vague and overly broad , '' said the lawyer representing the group , Kevin T . Mulhearn .", "`` Its enforcement applications do n't make sense . ``", "The president of the association , Michael J . Leonard , who owns a restaurant in Wappingers Falls in Dutchess County , said he installed an exhaust system two years ago in his bar area , which is separate from the restaurant .", "Under the new county law , smoking will be prohibited throughout his establishment , Greenbaum & Gilhooley 's .", "`` It will be devastating to my business and to my smoking customers , '' he said .", "`` It 's totally unjust . 
``", "The Dutchess law bans smoking in restaurants , both their bar areas and dining rooms , as well as in bingo halls , bowling alleys and work places .", "Bars or taverns that make 60 percent or more of their gross income from selling alcohol are exempt .", "Mr. Mulhearn said the 60 percent threshold would be difficult to pin down , noting that the amount of gross income from alcohol sales fluctuated from week to week and season to season .", "The bill was the third anti-smoking proposal considered by the Dutchess County Legislature in the last few years , and squeaked by with a vote of 18 to 17 .", "The county executive , William Steinhaus , publicly criticized the bill but did not sign or veto it , and allowed the bill to become law .", "Bradford H . Kendall , chairman of the Dutchess County Legislature , said that lawmakers had `` acted within the bounds of good reason '' and that the county was likely to be judicious in enforcing the law at first , while businesses are deciding whether it applies to them .", "In Orange County , the smoking restrictions allow smoking in the bar areas of restaurants if the bars have ventilation systems , but it outlaws smoking sections in restaurant dining rooms .", "Until now , diners could smoke in designated areas if four feet of space or a six-foot partition separated smoking and nonsmoking sections .", "The restrictions that take effect on New Year 's Day are in a sense warmups for the bans planned for New York City and Nassau .", "The law Mayor Michael R . Bloomberg signed on Monday bans smoking in almost all bars and restaurants .", "A 1995 city law had forbidden smoking in all restaurants with more than 35 seats , but it exempted stand-alone bars and the bar areas of restaurants .", "The new law allows only a few exceptions : cigar bars , bars with no employees except the owners , nonprofit clubs with no employees , and some bars with enclosed smoking rooms .", "Nassau 's ban covers all bars , restaurants , bowling alleys and bingo halls .", "The only exceptions will be for businesses that derive 90 percent of their revenue from tobacco sales and for workplaces in private homes .", "Westchester and Suffolk Counties are considering their own bans ."], "summary": ["Smoking bans in restaurants and workplaces go into effect in Dutchess and Orange Counties , NY .", "Although not as strict as New York City 's ban , they are causing local furor .", "Groups representing restaurants and bars have filed federal suits to have laws overturned ."], "publication": "nyt50", "label": [1, 3], "tag": ["New York and Region"]} -{"id": "1453181", "text": ["Clonaid , the company that says it has produced the first human clone , previously made astonishing claims that were not substantiated .", "And the journalist whom Clonaid has appointed to authenticate its latest claim was once an intermediary between a couple who wanted cloning services and a scientist who wanted to provide them , the scientist says .", "Clonaid was founded in 1997 by the leader of a religious sect that believes space travelers populated earth through cloning and that humanity 's mission is to clone .", "When he formed the company , the leader , who calls himself Ra\u00ebl , had an express purpose in mind , Clonaid 's vice president , Thomas Kaenzig , said in an interview this week .", "`` It was a project to create controversy , '' Mr. Kaenzig said .", "`` That was his mission , to wake people up . 
''", "Though the company advertised a cloning service , it was hardly ready to provide it .", "For three years , Clonaid `` was just a post office box in the Bahamas , '' Mr. Kaenzig said .", "`` There was no research going on . ''", "But by the spring of 2001 , Clonaid 's research director , Dr. Brigitte Boisselier , who is a chemist , a Ra\u00eblian bishop and now the company 's chief executive , had begun telling of a secret Clonaid laboratory in the United States .", "`` She was very coy about it , '' said an official at the Food and Drug Administration , whose approval would have been required for any human-cloning work in the United States .", "`` She said , ' I have a lab , but I wo n't tell you where it is . '", "`` But the F.D.A. ' s office of criminal investigation soon found it , in a rented room at an abandoned high school in Nitro , W.Va.", "The environment there was hardly ideal for research , said the official , who would speak only on the condition of anonymity .", "Insects flew through the open windows , possibly from a nearby barn .", "`` There was no place where sterile conditions could be had , '' the official said , and the researcher there was a graduate student who seemed woefully unprepared .", "`` The lab notebooks were reviewed by our staff scientists , '' the F.D.A. official said .", "`` They were inadequate '' to document scientific research .", "The work under way was not even with human cells .", "The graduate student had obtained cow ovaries from a slaughterhouse and was trying to extract eggs from them .", "`` The notebooks had a sketchy page and a half : ' We went to the slaughterhouse and got some ovaries , ' '' the official reported .", "But the equipment in the lab was state of the art , the official said .", "It had been bought by a grieving father whose 10-month-old son had died of congenital heart disease and who wanted to clone him .", "The father , Mark Hunt , a lawyer and former West Virginia state legislator , had obtained the equipment from a fertility lab that was going out of business .", "Accounts of how much he paid vary , but Dr. Michael A . Guillen , the journalist appointed by Clonaid in the current case , said on an ABC News television program a year ago that Mr. Hunt had spent $ 200,000 .", "After its inspection of the Clonaid lab , the F.D.A. official said , the agency reached an agreement with Mr. Hunt that he would not proceed any further in trying to have his dead son cloned in this country without F.D.A. permission .", "Mr. Hunt , who did not return repeated telephone calls seeking comment , later sold the laboratory equipment in Nitro and shuttered the lab , the F.D.A. says .", "He also publicly broke off from the Ra\u00eblians , saying they were too avidly seeking publicity .", "The company then moved its operations out of the country , Mr. Kaenzig said .", "He added that the company had begun by learning to create cow embryo clones and that by the fall of 2001 , it had created its first cloned human embryo .", "Many learned of Mr. Hunt and his travails from Dr. Guillen , whose doctorate , from Cornell , is in theoretical physics , mathematics and astronomy .", "On Sept . 7 , 2001 , when he was a science editor for ABC News , Dr. Guillen interviewed Mr. Hunt and his wife , Tracy , on `` 20/20 Downtown '' and showed a video of their baby , Andrew , who had died in 1999 .", "Dr. Guillen did not describe the lab 's inadequacies on that program , but he did say that Dr. 
Boisselier was being investigated for fraud and reported that she had moved her cloning efforts out of the country .", "-LRB- Citing confidentiality concerns , federal law enforcement authorities would not confirm or deny anything Dr. Guillen said about the investigation . -RRB-", "Only seven months earlier , Dr. Guillen had reported that Clonaid was on the brink of success .", "`` I met with Dr. Boisselier , who is the scientific director , and she told me that in two weeks they 're expecting to conceive the first human clone , implant it in a surrogate mother and hoping for a pregnancy in March , `` he reported on '' 20/20 `` on Feb . 16 , 2001 .", "`` Ready or not , the technology is on its way . ''", "Soon another scientist who was interested in cloning met the Hunts .", "In an interview yesterday , that scientist , Dr. Panos Zavos , founder and director of the Andrology Institute of America , in Lexington , Ky . , said Dr. Guillen had told him that he could send the Hunts to talk to him , but that in return Dr. Guillen wanted exclusive rights to their story .", "Dr. Zavos , who says his work on human cloning is taking place outside the country , ended up seeing the Hunts , but Dr. Guillen was unable to negotiate an exclusive agreement with him because he had already made an agreement with a documentary filmmaker , Peter Williams .", "Dr. Guillen did not return repeated calls yesterday to his office and to his agent 's office .", "Dr. Zavos said he had not cloned yet and had not taken any money from Mr. Hunt .", "He said he wanted to get the technique to work first with cells taken from living people before trying it with stored frozen cells from the dead .", "`` If this technology develops in the best scenario possible , '' Dr. Zavos said , `` if we take fresh tissue and it works , then it is something we can make available to him . ''", "--- --- --- --- --- --- -- Court Petition Seeks a Guardian By The New York Times FORT LAUDERDALE , Fla . , Dec . 31 -- A Florida lawyer asked a court here today to appoint a legal guardian for the baby girl whom Clonaid claims to have produced .", "The petitioner , Bernard F . 
Siegel , who practices in Coral Gables , has served on the board of a children 's rights organization .", "But he said he was acting solely as a private citizen , asserting that if the baby exists , she is being exploited by Clonaid and may have birth defects ."], "summary": ["Clonaid , company that says it has produced first human clone , previously made astonishing claims that were not substantiated .", "Dr Michael A Guillen , journalist whom Clonaid has appointed to authenticate its latest claim , was once an intermediary between couple who wanted cloning services and scientist who wanted to provide them .", "Clonaid was founded in 1997 by leader of religious sect that believes that space travelers populated earth through cloning and that humanity 's mission is to clone ."], "publication": "nyt50", "label": [2, 1, 0], "tag": ["Health", "U.S."]} -{"id": "1453182", "text": ["A host of new state laws take effect today , but New Yorkers may not notice many of them until income tax time in 2004 .", "That is because many are tax cuts and credits , from a decrease in the so-called marriage penalty to an increase in the percentage of college expenses that can be deducted .", "And under one of the highest-profile measures passed in Albany last year , starting today almost all health insurers in the state will be required to cover prescription contraceptives , osteoporosis screening and , beginning at age 40 , mammograms .", "City dwellers , on the other hand , have a little time to prepare for their most hotly debated new law , a sweeping ban on indoor smoking .", "Patrons will not actually have to stop puffing away in nearly all bars and clubs until the end of March , for example , although a provision that requires employers to change their written smoking policies in time to comply with the new regulations has already gone into effect .", "One city law that takes effect today amends the national electrical code that the city adopted in 2001 to conform to the city 's specific needs .", "But there are several other laws enacted in 2002 that will go into effect at various times over the next two months or so .", "One , known as the living-wage law , requires that certain health - and day-care workers employed by companies with city contracts must receive at least $ 8.10 an hour with health coverage , or $ 9.60 an hour without coverage .", "Another , the so-called predatory lending law , will bar the city from doing business with companies that issue loans to buyers who can not afford the terms or that buy such loans on the resale market .", "Under another new law , people applying for a permit for a rifle or shotgun will be barred from obtaining one if they have been convicted of domestic violence , of assault within the last 10 years , or of any three misdemeanors .", "People with certain outstanding orders of protection will also be barred .", "Another measure forbids emergency shelters for victims of domestic violence to turn people away solely because they lack official documentation like a police report or an order of protection .", "It also expands the definition of domestic violence victims to potentially include not just married people but also common-law and dating couples with access to each other 's residence .", "Sidewalk newspaper dispensers will be subject to a host of new specifications , including size , appearance and placement .", "A new law will expand the Veterans ' Advisory Board , which makes recommendations to the director of the Office of Veterans ' Affairs , to nine 
members from five , with five to be appointed by the mayor and four by the City Council speaker .", "It also requires that they be veterans and that there be at least one representative from each borough .", "Starting this year , the city will be required to use gender-neutral language in its documents .", "Other new state laws include an increase in the standard deduction for married couples to $ 14,600 from $ 14,200 .", "That brings spouses closer to the $ 15,000 deduction -LRB- $ 7,500 each -RRB- for unmarried couples .", "The state 's Earned Income Tax Credit will rise to 30 percent from 27.5 percent of a filer 's federal allowance , and the college tuition tax deduction will be increased to a maximum of $ 7,500 .", "Another measure cuts the gross receipts tax on natural gas and electricity , which will mean savings for industrial and residential users .", "By July 1 , cigarettes sold in New York must meet new fire safety standards .", "Other statewide measures will tighten security on debit card transactions , decrease to 7 from 10 acres the size of farmland parcels eligible for real property tax assessment and authorize some counties to give property tax breaks to volunteer firefighters and ambulance drivers ."], "summary": ["Roundup of new laws in New York State that will take effect on Jan 1 .", "Many will probably not be noticed until income tax time in 2004 because they are tax cuts and credits ."], "publication": "nyt50", "label": [0, 1], "tag": ["New York and Region"]} -{"id": "1453185", "text": ["President Bush said today that he had directed the F.B.I. to issue the appeal for the public 's help in tracking down five men who the authorities believe may have illegally slipped into the United States last week .", "Mr. Bush , speaking to reporters near his ranch in Crawford , Tex . , said that the electronic communication issued by the F.B.I. on Sunday to thousands of local police agencies was an effort to investigate aggressively any potential threat of a terrorist attack .", "`` I have authorized the Federal Bureau of Investigation , the F.B.I. , to issue an all-points bulletin for five individuals who we believe have been smuggled into the country , '' Mr. Bush said .", "`` We need to know why they have been smuggled into the country , what they 're doing in the country . ``", "Later tonight , government officials said that the bureau was expanding its search .", "The officials told The Associated Press that the government had identified several more men it feared might have used fake passports to get into the country around Christmas Eve .", "The F.B.I. and Homeland Security officials were considering making the names and photos of about a half dozen more men public as early as Wednesday , the officials said .", "The search for the men , based on vague information about their possible connection to terrorism , reflected the heightened level of concern about a New Year 's terrorist attack even as the authorities acknowledged that they had received no credible threats in recent days .", "As a result , the officials said they had not elevated the threat alert level from its current Code Yellow status , which warns of an increased possibility of attack in the United States without any specific threat .", "Mr. 
Bush 's comments demonstrated the precautionary nature of the all-points bulletin .", "`` We do n't have any idea of what their intentions may be , but we are mindful that there are still some out there who would try to harm America and harm Americans , `` he said .", "Concern about terrorism related to the New Year 's holiday has grown since the millennium celebrations in 2000 .", "Security officials have planned measures to deter terrorism , but so far no specific threats related to any of the events have been received , officials said .", "The F.B.I. has identified the original five men , cautioning that their names and ages might be false , as Abid Noraiz Ali , 25 .", "Iftikhar Khozmai Ali , 21 .", "Mustafa Khan Owasi , 33 .", "Adil Pervez , 19 .", "And Akbar Jamal , 28 .", "Their nationalities are unknown .", "THREATS AND RESPONSES : THE INVESTIGATION ."], "summary": ["Pres Bush directs FBI to issue appeal for public 's help in tracking down five men who authorities believe may have illegally slipped into United States ."], "publication": "nyt50", "label": [0], "tag": ["U.S."]} -{"id": "1453188", "text": ["When Tao Wucheng , a delegate to the national Legislature this year , was asked to investigate allegations of fraud and corruption at a private clinic in this provincial capital , he viewed it mostly as a headache .", "A much praised Communist Party official , he was living in a distant corner of the province , doing work on poverty relief .", "Instead , the task proved life threatening .", "On Sept . 29 , he staggered out of the Wuhan Medical Center of Tonji Hospital -- his face swollen and his bowels oozing blood from beatings that he says were ordered by the clinic 's owner and carried out by its security staff .", "He spent the next month in a hospital .", "When he contacted the local police to inquire about the assault that almost cost him his life , he learned that the investigation had been closed .", "He is now suing the police to take action .", "The entrepreneur who runs the clinic , Hang Yongming , did not answer repeated calls or questions faxed to him about the case .", "While it is extraordinary that an official with national status could be treated this way -- Mr. 
Tao believes he is the first -- tales of businessmen and journalists being beaten have become routine fodder for the Chinese press .", "Indeed , vigilante justice has become a serious problem for China 's leaders .", "As China 's central government pledges to quell corruption and build the rule of law , those efforts are commonly stymied by a lack of cooperation at the local level .", "Local law enforcement officials often have more loyalty to local interests than to professionalism or national authorities .", "The police , prosecutors and judges all serve at the pleasure of local officials , who are often friends , and there is little possibility of disciplining them from above .", "Likewise , central authorities -- even those from the National People 's Congress -- have little leverage to enforce the decisions they make .", "Only if top leaders mandate action , as they did in the crackdown on the banned spiritual movement called Falun Gong , can they be assured of cooperation .", "`` People have this image of the Chinese government and the Communist Party as a monolith and very powerful , '' said Kenneth Lieberthal , a political scientist at the University of Michigan .", "`` But what power do bodies like the National People 's Congress really have down the hierarchy .", "`` The answer is zilch .", "They can carry out investigations , but they ca n't compel people to act . ``", "In Mr. Tao 's case , demands from both national and provincial officials for a thorough investigation elicited only cursory responses in Wuhan .", "Mr. Tao 's beating has so far gone unreported in the state press , even though it happened in a major city of five million people and to a man with national stature .", "Mr. Tao , a former police officer who later became a businessman , gave up a comfortable life in Beijing and moved to his hometown in rural Hubei province in 1999 to work helping the poor .", "News articles praising his spirit of sacrifice have been featured in official media from The People 's Daily to China Central Television .", "Several years back , his record of good work got him appointed to China 's Legislature , the National People 's Congress .", "With a reputation for fairness and honesty , political leaders in Beijing saw him as ideal to investigate the fraud allegations that pitted the Tonji Hospital against Mr. Hang , a local businessman with good political ties who opened the plush private clinic on the hospital grounds .", "Mr. Tao , who also works as a part-time reporter for a government magazine , made an appointment to see Mr. Hang on Sept . 28 , identifying himself both as a People 's Congress delegate and a writer .", "He interviewed both sides scrupulously , he recalled in an interview in December after he ended his hospital stay .", "His investigation disclosed that the two sides signed a contract in 1995 and built a luxurious clinic , with a marble lobby , dotted by huge porcelain vases and potted palms .", "But the relationship quickly turned sour .", "The hospital said Mr. Hang failed to provide outside funds as he was legally obliged to do , and instead used the hospital 's own land as collateral for loans .", "It said Mr. Hang exaggerated the amount of land the hospital owned in order to borrow more .", "Once the hospital was opened , there were many complaints of accounting problems and suspect business practices , Mr. Tao learned .", "Mr. 
Hang and his associates took costly trips that were charged to the hospital , the investigation found .", "The price of new equipment was grossly exaggerated , as was the cost of medical tests .", "No one could explain where the extra money went .", "Doctors complained that they were forced to recruit patients who did not need treatment , under threat of having their salary docked .", "`` The scheme was like a shell game , '' Mr. Tao concluded after interviewing all parties .", "`` By the end of the first four-hour interview I knew enough to know he was a swindler , '' said Mr. Tao of Mr. Hang , who he said saw him to the door and offered him a payoff and a meal .", "He refused .", "When Mr. Hang offered to provide him with more materials the following day , however , Mr. Tao said he readily agreed , trying to be fair .", "That next day , as he waited to be received , seven or eight men burst into the room and beat him , he said .", "`` You 're up to no good -- Do you know where you are .", "`` he recalled Mr. Hang saying as he looked on .", "For eight hours Mr. Tao was a captive , kicked and hit , subjected to torture , denied food and drink or access to a phone .", "He said most of his assailants appeared to be civilians but at least one had a police pass and a police uniform .", "`` I was scared because they looked like members of a crime gang , '' said Mr. Tao .", "At one point , when he was given permission to use the bathroom , Mr. Tao tried to escape through a ground floor window -- only to be caught and beaten some more .", "By the time he was released , he said , `` my whole body was in pain . ''", "His hand was so badly crushed that he still can not straighten one finger .", "The head punches left him partly deaf for a time .", "His intestines were bruised and he defecated blood .", "The emotional trauma lingers on .", "From his hospital bed , Mr. Tao reported his beating to National People 's Congress officials , as well as to the local police , and both initiated inquiries .", "Mr. Hang told local investigators that Mr. Tao had sustained his injuries by jumping through a window , which was on the ground floor .", "The inquiries were mysteriously dropped .", "Although two security guards were detained , no further action was taken .", "`` Hang thought he could use money to maintain his connections and ignore the law , '' said Mr. Tao .", "`` And it worked . ''", "A woman who answered the phone in Mr. Hang 's office and identified herself only as Ms. Xian said that he was not in Wuhan and that she had not been able to contact him .", "In October , a police official from Wuhan called Mr. Tao by telephone to apologize but refused to identify himself , noting that one policeman had been detained for 10 days and another had been relieved of his duties .", "`` This is a serious crime and they were treating it as if it 's an administrative misdemeanor , `` said Mr. Tao .", "He has lodged a suit against the Wuhan police for `` administrative inaction , '' charging that they were negligent .", "He is also preparing a $ 100,000 civil suit against Mr. Hang to cover his medical bill and mental suffering .", "The party secretary of Hubei Province and other high officials `` have taken the case very seriously , but the officials below have deceived them , '' Mr. Tao said .", "`` But I 'm confident the law will bring about justice . 
`` ."], "summary": ["Vigilante justice has become serious problem for China 's leaders .", "As China 's central government pledges to quell corruption and build rule of law , those efforts are commonly stymied by lack of cooperation at local level .", "Local law enforcement officials often have more loyalty to local interests than to professionalism or national authorities .", "Police , prosecutors and judges all serve at pleasure of local officials , who are often friends , and there is little possibility of disciplining them from above .", "Tao Wucheng , delegate to national Legislature , was beaten by thugs when trying to investigate allegations of fraud and corruption at private clinic .", "When he contacted local police to inquire about assault , he learned that investigation had been closed .", "Photos ."], "publication": "nyt50", "label": [10, 12, 11, 5, 9, 0], "tag": ["World"]} -{"id": "1453197", "text": ["President Bush drew a sharp distinction today between the nuclear standoff with North Korea and his confrontation with Iraq , saying he was certain that weapons projects in North Korea could be stopped `` peacefully , through diplomacy . ''", "He said that Saddam Hussein , on the other hand , `` has n't heard the message `` that he must disarm , or face military action .", "Answering questions on his way into the only coffee shop in this one-stoplight town near his ranch , Mr. Bush issued no demands that North Korea halt the nuclear programs it has threatened to restart , and he did not mentioned the ouster today of the international inspectors who have monitored activity at the country 's primary nuclear site .", "`` I believe this is not a military showdown , this is a diplomatic showdown , '' the president said , on his way to get a cheeseburger and to chat with his neighbors here .", "But the president 's tone and his warnings changed noticeably when he turned to Iraq .", "He cited Mr. Hussein 's effort to build a nuclear weapon in the early 1990 's and said that as of now `` we do n't know whether or not he has a nuclear weapon . ``", "Assessing the nuclear capability of both North Korea and Iraq has been among the most difficult tasks facing Western intelligence agencies .", "The Central Intelligence Agency and Britain 's intelligence service have publicly estimated it would take Iraq five years to develop such a weapon -- or a single year if Mr. Hussein was provided with fissile material .", "North Korea already has two weapons , according to C.I.A. estimates , and could build five or six more in the next six months if it reprocessed its large stockpile of spent nuclear fuel into weapons-grade plutonium .", "Adding to the pressure , North Korea took another step today toward removing its nuclear program from international controls by strongly suggesting it would withdraw from the Nuclear Nonproliferation Treaty .", "-LSB- Page A9 . -RSB-", "The signals Mr. Bush sent with his comments were particularly significant because the administration has come under increasing criticism , from Democrats and some Republicans , for playing down the significance of North Korea 's actions while plowing forward in the confrontation with Iraq .", "In The New York Times today , former Secretary of State Warren Christopher wrote that unless Mr. Bush had classified evidence of greater Iraqi military capability than was known to the public , `` the threats from North Korea and from international terrorism are more imminent than those posed by Iraq . ''", "Mr. 
Bush took issue with that view today .", "Asked whether the United States could afford the $ 50 billion to $ 60 billion it would cost to wage war with Iraq , an estimate his budget director offered on Monday , he said , `` an attack from Saddam Hussein or a surrogate of Saddam Hussein would cripple our economy . ''", "He added , `` A Saddam Hussein with weapons of mass destruction is a threat to the security of the American people . ''", "In contrast , he said nothing about his view of the threat posed by Kim Jong Il , the North Korean leader .", "During his presidential campaign , Mr. Bush often cited the possibility of an attack by North Korea as a reason that the United States needed a missile defense system .", "North Korea already has a significant arsenal of missiles that could reach South Korea , Japan and 100,000 American troops stationed in Asia .", "Mr. Hussein is believed to possess only Scud missiles with far more limited range .", "Nevertheless , Mr. Bush talked at some length today about his worry that Iraq could find a way to attack the United States , either directly or indirectly .", "As he spoke , a crowd of Crawford residents and curious tourists gathered around the entrance of the coffee shop .", "In his comments , Mr. Bush also addressed for the first time the F.B.I. alert issued two days ago , asking Americans to keep a lookout for five people , all of Arab descent , it is searching for in the United States .", "Mr. Bush said he had authorized the F.B.I. to put out an all-points bulletin .", "He did not refer to the men as terrorism suspects , but said , `` We need to know why they have been smuggled into the country . ''", "Mr. Bush 's comments today about North Korea and Iraq seemed to suggest that he has concluded that Mr. Kim can be persuaded to reverse course under threat of economic pressure , a method that Mr. Bush says has failed with Iraq .", "He twice noted that in a meeting at his ranch this fall with President Jiang Zemin of China , the two leaders promised to work in concert to deal with the North Korean government .", "`` Right here in Crawford , we had a dialogue where we both committed ourselves to working in a way to convince Kim Jong Il that it 's not in his country 's interests to arm up with nuclear weapons , `` Mr. Bush said , standing in front of the coffee shop in a light windbreaker , after a morning of working around his ranch .", "`` And I believe that can be resolved peacefully . ''", "China has denounced North Korea 's actions , but it has stopped short of saying it will join in any economic sanctions against the country -- a critical omission , because China is one of the North 's most important trading partners .", "In discussing Iraq , the president told reporters , `` I hope we 're not headed to war . ``", "But he quickly added : `` We 've got a military presence there to remind Saddam Hussein , however , that when I say we will lead a coalition of the willing to disarm him if he chooses not to disarm , I mean it .", "And we will continue to work to resolve the situation on the Korean Peninsula in a peaceful way . ``", "In private , some of Mr. 
Bush 's aides offer a more explicit explanation of the difference in the administration 's approach to the two countries .", "They argue that the North 's existing nuclear capability , and its ability to wreak enormous damage on Seoul with its conventional weapons , has led them to conclude that the United States has no viable military options , at least without risking the rekindling of the Korean War .", "Mr. Hussein , they contend , is the more dangerous of the two men , seeking regional domination rather than just survival .", "They say he must be confronted before he obtains the kinds of weapons of mass destruction that Mr. Kim already possesses .", "One of Mr. Bush 's senior national security officials argued over the weekend , however , that the United States was not putting North Korea on the back burner while it dealt with Iraq , and did not need to do so .", "`` We can handle both , '' the official said .", "Mr. Christopher 's article today suggested that no president , even in a White House as disciplined as this one , could manage that feat .", "`` Anyone who has worked at the highest levels of our government , '' he wrote , `` knows how difficult it is to engage the attention of the White House on anything other than the issue of the day . ''", "THREATS AND RESPONSES : NUCLEAR STANDOFF ."], "summary": ["Pres Bush makes sharp distinction between nuclear standoff with North Korea and his confrontation with Iraq , saying he is certain that weapons projects in North Korea can be stopped ` peacefully , through diplomacy , ' news conf , Crawford , Texas .", "Asserts that Saddam Hussein , on other hand , ` has n't heard the message ' that he must disarm , or face military action .", "Photo ."], "publication": "nyt50", "label": [0, 1], "tag": ["World", "Front Page", "Washington"]} -{"id": "1453198", "text": ["Gov . James E . McGreevey 's chief counsel resigned today , the third member of his inner circle to leave in recent weeks as Mr. McGreevey seeks to revitalize his administration after a bumpy first year .", "The counsel , Paul Levinsohn , who had been criticized in the past month for representing a company that sought to put a billboard on state land in South Jersey , said the controversy had nothing to do with his decision to step down .", "But his departure is the latest sign that Mr. McGreevey is trying to retool an administration hampered by sagging polls numbers and embarrassing missteps .", "Since the governor took office last January , ending eight years of Republican rule in Trenton , he has had a measure of success , managing to close a $ 6 billion gap in the state 's $ 23.4 billion budget without increasing the sales or income tax .", "He assembled a plan to revamp the state 's troubled Department of Motor Vehicles , tightened environmental regulations and began to reorganize the state university system .", "But the budget crisis has given him little room to woo legislators by approving their spending projects , and Mr. McGreevey has suffered several self-inflicted wounds .", "His nominee to head the state 's Office of Homeland Security resigned after questions about his thin r\u00e9sum\u00e9 , and his State Police superintendent stepped down after being reprimanded twice by the state attorney general .", "Even Mr. McGreevey 's choice for state poet laureate , Amiri Baraka , caused a public relations disaster by writing a poem suggesting that Israel had advance knowledge of the attack on the World Trade Center .", "Mr. 
McGreevey was also criticized for using the state helicopter for personal business 14 times and spending more than $ 50,000 of the taxpayers ' money to entertain staff members and relatives on a trip to Ireland that was billed as a trade mission .", "Mr. McGreevey has apologized for the trips , and the state Democratic Party has reimbursed the state for the helicopter rides and the trip to Ireland , but with a recent poll showing Mr. McGreevey 's approval rating at 37 percent , many political analysts say a shake-up in his executive staff was inevitable .", "`` There 's a real disjuncture in Jim McGreevey 's performance , and I think the public senses that , `` said David Rebovich , a political science professor at Rider University in Lawrenceville , N.J.", "`` In all the polls , his personal likability is high .", "But he has n't been able to translate that into approval by the public because of all the mistakes and a failure in communicating to the press and the public .", "That 's why we 're seeing this shakedown , the recognition that he , McGreevey , had to show his stronger suit . ``", "With another budget crisis awaiting him in the new year , and legislative elections in November that could give control of the evenly divided State Senate to Republicans , Mr. McGreevey has sought to recast an administration dominated by aides who worked for his campaign or for his administration as mayor of Woodbridge .", "Mr. Levinsohn had worked on Mr. McGreevey 's campaign , and Gary Taffette , the governor 's chief of staff before he resigned last month , had worked for Mr. McGreevey in Woodbridge .", "The governor 's new chief of staff , James P . Fox , is a seasoned political tactician who served as an aide to former Gov . Jim Florio , and to Senators Frank R . Lautenberg and Robert G . Torricelli , before working as Mr. McGreevey 's commissioner of transportation .", "Mr. Fox said Mr. McGreevey was confident that his second year in office would be less tumultuous than his first .", "`` The governor walked into his job inheriting a deficit of $ 6 billion , an E-ZPass system that was a mess , debt and mismanagement .", "And he spent the first year cleaning up those problems , `` Mr. Fox said .", "`` In the upcoming year he 's looking forward to outlining his vision , including initiatives regarding education , controlling sprawl , the university system and a common-sense approach to government . ``", "Mr. McGreevey , who has been vacationing in Florida , issued a brief statement today saying he had regretfully accepted Mr. Levinsohn 's resignation , but did not comment further .", "Mentioned as a likely successor is Paul Josephson , the governor 's liaison to the New Jersey Turnpike Authority and other authorities .", "Mr. Levinsohn , 34 , said he was leaving the job to re-enter the private sector .", "Mr. Levinsohn and Mr. 
Taffett had been criticized for representing a company that was allowed to place a billboard in Washington Township without getting approval from a local zoning board .", "State and federal officials found no evidence of wrongdoing , but local officials complained about the arrangement , creating more embarrassing publicity for the administration .", "Administration officials insist that the contretemps played no role in either man 's departure .", "Another top McGreevey aide , Jo Astrid Glading , recently announced that she would leave as director of policy and communications to take a job in the attorney general 's office .", "The governor 's spokesman , Paul Aronsohn , also resigned , and today was his last day on the job .", "While some legislators are gleeful about the turbulence in the administration , even Republican strategists say Mr. McGreevey has the opportunity and the tools to reinvent his administration .", "New Jersey has the most powerful governor 's office in the nation .", "Democrats control the State Assembly and share control of the Senate .", "And Mr. McGreevey can refocus the agenda in mid-January , when he delivers the State of the State address .", "`` The administration has n't put its message out very well , but the State of the State lets him lay out his vision without any real rebuttal , `` said Carl Golden , who worked for two Republican governors , Christie Whitman and Thomas H . Kean .", "`` He can put the bad news of 2002 behind him . '' ."], "summary": ["Paul Levinsohn , New Jersey Gov James E McGreevey 's chief counsel , resigns .", "Is third member of McGreevey 's inner circle to quit in recent weeks .", "Resignation is seen as latest sign that McGreevey is trying to retool administration hampered by low poll numbers and embarrassing missteps ."], "publication": "nyt50", "label": [0, 2], "tag": ["New York and Region"]} -{"id": "1453199", "text": ["The State Department has accused two leading American aerospace companies of 123 violations of export laws in connection with the transfer of satellite and rocket data to China during the 1990 's .", "The Boeing Company and Hughes Electronics Corporation , a unit of General Motors , were notified of the accusations last week .", "The letter outlining the accusations was made public earlier this week by the Office of Defense Trade Controls , the State Department unit that regulates defense-related trade .", "The letter provides new details of how American companies competed for Chinese business by offering to transfer aerospace data in connection with launchings of their satellites .", "The information included responses to inquiries by the Chinese and others about failures of the rockets carrying those satellites .", "The United States stopped permitting the use of American satellites for Chinese aerospace ventures in 1999 .", "At the time , the Clinton administration had concerns over China 's aid to missile programs in North Korea and Pakistan .", "Since the technology used to launch missiles is similar to that used for civilian rockets and satellites , there are tight curbs on exports of aerospace and satellite equipment and services .", "The State Department alleges that the companies violated arms export laws and regulations when they failed to obtain State Department approval before transferring information to Chinese-related entities , some private and some governmental .", "It included data on rocket failures , guidance systems , telemetry and aerodynamics .", "The activities at issue relate to work by 
Hughes during the 1990 's .", "Boeing was cited because it acquired Hughes Space and Communications , a piece of Hughes , in 2000 .", "The companies have denied wrongdoing .", "Company spokesman were not available to comment yesterday .", "But the letter indicates that lawyers for the companies assert that the activities in question were lawful because the information that was transferred did not fit the definition of a licensable `` defense service '' or was constitutionally protected as `` speech . ''", "The companies face fines of up to $ 500,000 for each count -- over $ 60 million -- as well as losing the ability to obtain future approval for exports , a major part of their business .", "In the past , bans on exporting have been of limited scope and duration .", "The filing is unusual , officials said , since companies typically negotiate a settlement for lesser amounts with the government .", "One year ago , Loral Space and Communications agreed to pay $ 20 million , a record fine in a case involving one of several satellite issues in the case against Boeing and Hughes .", "If there is no settlement , the case would go before an administrative law judge .", "The letter , the civil equivalent of an indictment , was signed by William J . Lowell , director of the trade controls unit , who is leaving his job .", "Friends of Mr. Lowell , who asked not to be identified , said his resignation was related to concerns that his office is being reorganized to make it easier for American defense companies to export sensitive technology .", "State Department officials have said that the reorganization was meant to improve the defense trade unit 's performance .", "The current case is the last of a series of investigations involving American aerospace companies and entities controlled or associated with the government of China .", "In addition to the Loral case , the Lockheed Martin Corporation agreed to pay $ 13 million in fines in 2000 in connection with allegations that it provided technical aid and space-related information to a Hong Kong-based company with ties to Beijing .", "The latest State Department letter mentions three separate cases in which company officials helped Chinese-related entities determine what went wrong on failed launchings .", "Most of the counts are against Boeing , though the activities in question took place at the Hughes Space and Communications unit .", "Mr. Lowell 's letter also accuses Boeing with failing to properly disclose the 1996 hiring of the son of a top Chinese general , whom , the letter charged , the company was trying to cultivate .", "The Chinese general , Shen Rongjun , was described in a 1995 Hughes memorandum as `` the most important Chinese space official , '' according to Mr. Lowell .", "The general 's son , according to Hughes internal documents from the mid-1990 ` s cited in Mr. Lowell 's letter , provided inside information on bidding efforts by Lockheed Martin , a Hughes competitor .", "The inside information included price data and Lockheed 's negotiating strategy to `` sweeten their bid with technology transfers on launch vehicles , '' according to a 1995 Hughes memorandum cited by Mr. Lowell .", "The general 's son was described as a `` translator '' by the company in 1996 when it sought federal approval to hire him .", "But Mr. Lowell said he also functioned as an `` intermediary with his father . 
''", "Lawyers for the companies told the State Department this month that there was no need to disclose more information about the general 's son and that the 1996 disclosure was proper .", "The State Department , in 1998 , initially approved the deal involving the general 's son .", "The contract was awarded to Hughes from a company with Chinese partners that wanted to build a mobile communications system with a Hughes satellite .", "The State Department killed the deal in 1999 after the role of the general 's son was disclosed ."], "summary": ["State Dept accuses two leading American aerospace companies -- Boeing Co and Hughes Electronics Corp -- of 123 violations of export laws in connection with transfer of satellite and rocket data to China during 1990 's .", "Provides new details of how American companies competed for Chinese business by offering to transfer aerospace data in connection with launchings of their satellites ."], "publication": "nyt50", "label": [0, 3], "tag": ["World"]} -{"id": "1453200", "text": ["The Army is sending thousands more soldiers from the Third Infantry Division in Georgia to Kuwait in the largest single ground deployment to the Persian Gulf since the war there in 1991 , military officials said today .", "One of the division 's three combat brigades , about 4,000 soldiers , has been training in the Kuwaiti desert since September , but the unit 's headquarters at Fort Stewart , Ga . , received an order from the Army within the last day directing the rest of the more than 15,000 combat troops to join the soldiers in Kuwait , officials said .", "`` They all have deployment orders , '' Capt . James Brownlee , a division spokesman , said today .", "The Pentagon has been steadily building up forces in the Persian Gulf for months , but this deployment is the first time a full division , which includes foot soldiers , armor , aviation and artillery units , has been sent to the region as part of that escalation .", "The deployment is the latest visible signal that the Bush administration is moving toward military action to force Iraq to disarm .", "The signal is all the more sharp because the Third Infantry Division specializes in desert warfare , and its brigades have been rotating through desert-training exercises in Kuwait and in Southern California for months .", "In addition to the Army 's order , the Navy today directed the Abraham Lincoln aircraft carrier battle group to remain at sea for perhaps three more months and be prepared to steam to the Persian Gulf on short notice , officials said .", "The Lincoln and its seven-ship flotilla recently completed a six-month tour in the gulf region , and last week left Australia on the way home to Everett , Wash . , when the Navy ordered it to stay in the western Pacific in preparation for a possible war with Iraq , officials said .", "Officials would not discuss the precise timing of the Third Infantry Division 's movements , but they said troops would leave in the coming days from Fort Stewart , Fort Benning and Hunter Army Airfield , all in Georgia .", "Much of the division 's equipment , including many of its 4,300 vehicles , is in Kuwait , but Captain Brownlee said other equipment would be shipped from Savannah .", "The Third Infantry Division became a likely candidate to be sent to the Persian Gulf after Defense Secretary Donald H . Rumsfeld signed an order last Tuesday to activate an Army division for duty there , officials said .", "Mr. 
Rumsfeld 's directive also set in motion the first wave of about 50,000 reinforcements that will be dispatched to the gulf region in the next month , roughly doubling the American forces there .", "At the same time , Mr. Rumsfeld directed the Navy to keep two aircraft carriers and two Marine amphibious assault groups ready to be sent to the gulf on 96-hour notice .", "There is now one aircraft carrier in the gulf and another in the Mediterranean Sea , with the Lincoln in reserve as well as the George Washington , in Norfolk , Va .", "Last week , the Navy activated one of its two 1,000-bed hospital ships , the Comfort , to be sent for possible duty in the Persian Gulf .", "The Comfort will leave in the next few days for the Indian Ocean base at Diego Garcia , Navy officials said .", "The Air Force has ordered several units to prepare for gulf duty , a directive that could more than double the 100 combat aircraft now in the region .", "The units include the First Fighter Wing , an F-15C fighter unit based at Langley Air Force Base , Va .", "The Fourth Fighter Wing , an F-15E unit based at Seymour Johnson Air Force Base , N.C.", "The 28th Bomb Wing , a B-1B unit at Ellsworth Air Force Base , S.D. AC-130 gunships from Hurlburt Field , Fla .", "E-8C Joint Stars ground surveillance aircraft from Robins Air Force Base , Ga .", "And Predator reconnaissance aircraft from Nellis Air Force Base , Nev .", "Logistics specialists like port handlers and crane operators are also arriving in the region , officials said .", "The military 's classified war plan for Iraq calls for as many as 250,000 American troops , about half of the forces that massed for the Persian Gulf war in 1991 .", "But any American-led invasion force would be much smaller than that , with a sizable number of troops held in reserve , defense officials said .", "The Third Infantry division is the first of perhaps three or four Army and Marine divisions -- equipped with hundreds of M1 Abrams tanks , Bradley fighting vehicles and AH-64 Apache attack helicopters -- that could be sent to the region .", "The 101st Airborne Division at Fort Campbell , Ky . 
, equipped with Apache attack helicopters and Blackhawk troop transports , is likely to be deployed .", "Any ground campaign is also likely to include elements of the 17,000-member First Marine Expeditionary Force , based at Camp Pendleton , Calif .", "The current buildup is palpable in Kuwait .", "At Camp Doha , the Army has converted a Kuwaiti port packed with warehouses into major military storage .", "Hundreds of M1 Abrams battle tanks , Bradley fighting vehicles and fuel and cargo trucks are parked there .", "Some have been unloaded recently from ships normally afloat near Diego Garcia in the Indian Ocean .", "And while military officials say they could start an attack against Iraq now if they had to , most planners are looking at mid-February as the optimal time for any offensive President Bush might order .", "The Army deployments came as allied warplanes bombed Iraqi air defense radars and communications facilities late Monday in one of the largest strikes against Iraqi targets in recent days in response to Iraqi violations of the no-flight zone in southern Iraq .", "Thirteen allied planes , including Air Force F-16 ` s , carrier-based FA-18 ' s and British Tornado GR-4 ' s dropped 16 precision-guided bombs on Iraqi air defense sites , including a Spoon Rest early-warning radar , in Basra , Al Kut and An Nasiriyah , a military official said .", "A few hours after the attack planes pounded their targets , an Air Force Predator surveillance aircraft , flying from its base in Kuwait , fired a Hellfire missile at a Spoon Rest radar , the official said .", "Military authorities said today that they were still assessing damage from the strikes .", "The allied airstrikes , coupled with the highly publicized troop deployments , appear to have had scant effect so far on President Saddam Hussein 's decisions on Iraqi troop movements .", "Most Iraqi forces have dug in to defensive positions around the country , but about 200 troops from the three Iraqi Republican Guard divisions stationed around Baghdad have moved south and west of the capital in recent days , a defense official said today .", "The troops , which are from some of the better trained and equipped Iraqi units , did not deploy with their heavy equipment , the official said , leaving American analysts somewhat puzzled by the activity .", "`` We 're watching them , but we 're not reading a whole lot into it , `` the official said .", "`` We expect they 'll be back in their barracks in the next day or two . ``", "THREATS AND RESPONSES : THE MILITARY ."], "summary": ["US Army acts to send thousands more soldiers from Third Infantry Division in Georgia to Kuwait in largest single ground deployment to Persian Gulf since war there in 1991 .", "One of division 's three combat brigades , about 4,000 soldiers , has been training in Kuwaiti desert since September , but unit 's headquarters at Fort Stewart , Ga , receives order within last day directing rest of more than 15,000 combat troops to join soldiers in Kuwait .", "Photo ."], "publication": "nyt50", "label": [1, 0], "tag": ["World", "Front Page", "Washington"]} -{"id": "1453226", "text": ["On one hand , DEAN BLAIS , North Dakota 's coach , wishes the United States team success in the world junior championships .", "On the other , he sure would like three of his best players back from that team in time for a key series against Colorado College this weekend .", "`` We could lose these two games this weekend , '' Blais said , `` and it could cost us the league championship . 
''", "Top-ranked North Dakota has lost the most players to the national junior team , which is competing in the 10-nation world championships in Nova Scotia .", "Four other Division I teams have two players each on the national roster .", "But the Fighting Sioux -LRB- 18-1-1 -RRB- are without their leading scorer , the freshman ZACH PARISE , and two defensemen , MATT JONES and MATT GREENE , for as long as the United States remains in the tournament .", "`` It 's one thing to miss Zach Parise , but the two defensemen have been rock solid for us , `` Blais said .", "`` We 've been averaging only 18 shots against us a game , and those guys hold the red line like it 's the blue line . ``", "Blais , who coached the United States junior team in 1993 and was an assistant coach two other years , holds international competition in high regard .", "`` I know how demoralizing it is when college coaches wo n't let their players go for selfish reasons , `` he said .", "North Dakota is 8-1-1 in the Western Collegiate Hockey Association , second behind third-ranked Colorado College -LRB- 16-2-2 , 9-1-2 -RRB- .", "The Sioux won the inaugural Subway Holiday Tournament on home ice in Grand Forks last weekend .", "They edged Bemidji State -LRB- 5-6-6 -RRB- , 4-3 , in the final , after a 5-2 victory over Brown -LRB- 5-6-1 -RRB- in the first round .", "The sophomore BRANDON BOCHENSKI scored six goals in the two tournament games and was named most valuable player .", "His 25 goals are tops in Division I .", "He is third in scoring with 36 points , behind Parise -LRB- 38 -RRB- and Colorado College 's PETER SEJNA -LRB- 41 -RRB- .", "`` We would have loved to have Parise in the holiday tournament , '' Blais said .", "`` His absence probably hurts your attendance and chances of winning .", "But then , to have him display his talents to all the N.H.L. scouts and represent the country is as patriotic as can be . ``", "Another coach concerned about the effect of the world juniors on his team is TIM WHITEHEAD of second-ranked Maine -LRB- 15-1-2 -RRB- .", "The Black Bears are without forward GREG MOORE and goaltender JAMES HOWARD .", "`` The only potential negative is those guys could get burned out , '' Whitehead said .", "`` I 've seen it before . ``", "Blais , though , sees another positive .", "`` For me , losing those players to the world junior team , its a compliment to our program , '' he said .", "`` You look at the roster and see three players from U.N.D. on it -- it says something about our development and recruitment . ''", "HOCKEY EAST Eight Hockey East teams reached the championship games in holiday tournaments over the weekend , with four coming home with victories .", "The Everblades College Classic in Estero , Fla . , featured an all-Hockey East final , with Maine -LRB- 5-0-1 Hockey East -RRB- beating Massachusetts -LRB- 10-7-1 , 4-5-0 -RRB- by 8-3 .", "To reach the final , the Black Bears beat fifth-ranked Cornell -LRB- 10-3-0 -RRB- , scoring three straight third-period goals in a 3-2 victory .", "Twelfth-ranked Boston University -LRB- 11-6-2 , 5-4 -RRB- won the Great Lakes Invitational with a 4-3 victory over No . 
10 Michigan -LRB- 12-5-1 -RRB- .", "Goalie SEAN FIELDS , the tournament 's most valuable player , had 37 saves .", "The Terriers also scored three straight third-period goals to pull ahead .", "Merrimack -LRB- 7-6-3 , 4-3-1 -RRB- defeated Wayne State -LRB- 6-11-0 -RRB- in the final of the Rensselaer / HSBC Holiday Tournament , 4-1 .", "JOE EXTER , Merrimack 's senior goalie , was named the M.V.P. , stopping 58 of 60 shots in two games .", "Northeastern -LRB- 6-8-2 , 1-6-1 -RRB- won the National Capitol Tournament in Ottawa with a 6-3 victory over York University of Toronto .", "C.C.H.A.", "Ninth-ranked Ohio State -LRB- 13-4-2 , 9-2-1 Central Collegiate Hockey Association -RRB- will play a nonconference game Friday against Princeton at Mellon Arena in Pittsburgh , an unusual site for a college game .", "The Pittsburgh Penguins extended the invitation to play on their home ice .", "Pittsburgh has no colleges playing Division I hockey .", "Mercyhurst , in Erie , Pa . , which plays in the Metro Atlantic Athletic Association , is the closest .", "Ohio State has four players on its roster from the Pittsburgh area .", "The game was originally scheduled for 8 p.m. , but was moved to 5 p.m. because the Buckeyes ' football team will be playing for the national championship in Arizona that evening .", "MARK SCHEERER COLLEGE HOCKEY ."], "summary": ["North Dakota University has lost players Zach Parise , Matt Jones and Matt Greene to US junior hockey team and world junior championships ."], "publication": "nyt50", "label": [3], "tag": ["Sports"]} -{"id": "1453277", "text": ["`` I was 28 , and I did n't speak English , `` Saori Kawano said about her arrival from Yokohama , Japan , 20 years ago .", "But she invested $ 2,000 in 30 cartons of Japanese porcelain , sold them from her home and in 1983 opened Korin , a Japanese restaurant supply company in TriBeCa .", "Some 4,000 restaurants , including Nobu , shop at Korin , which has 5,000 items in stock , from 60-cent chopsticks to $ 22,000 sushi robots .", "But Korin also sells to the public .", "New wares include a linen noren , or curtain , in the picture 's background , which shades from beige to pink and has abstract white cherry blossoms .", "It is 3 by 5 feet -LRB- $ 220 -RRB- .", "A silk obi , 12 by 70 inches -LRB- $ 139 -RRB- , can be used as a table runner .", "The green glass bowls , right , used at Nobu , are $ 10 -LRB- three inches -RRB- and $ 22 -LRB- six inches -RRB- .", "And those wax replicas of tuna , shrimp , fluke and mackerel sushi .", "You can buy them and more for $ 12 each .", "Korin is at 57 Warren Street , -LRB- 212 -RRB- 587-7021 or www.korin.com.", "CURRENTS : KITCHENWARE ."], "summary": ["Korin , Manhattan company that supplies chopsticks and other tableware to Japanese restaurants , sells to public as well .", "Photo ."], "publication": "nyt50", "label": [3, 8], "tag": ["Home and Garden", "Style"]} -{"id": "1453281", "text": ["When Freyer Architects , a Manhattan firm , renovated a fourth-floor apartment in an Upper West Side brownstone and added a penthouse on the roof , the firm was asked for `` sunlight , a garden and ecological friendliness , '' Warren Freyer said .", "He and William Machan , one of his architects , designed floor-to-ceiling windows at the rear of both floors , above right , and added a skylight for even more dazzling light .", "The lower floor , 870 square feet , has a living room , above , and kitchen .", "The 390-square-foot penthouse has a bedroom and a bath , plus a roof garden .", "Recessed grilles in 
the cherrywood floors provide heat and eliminate radiators .", "Red tiles on the roof deck are made of recycled rubber tires , $ 11.55 for a half-inch-thick tile , 24 inches square , at Environmental Molding Concepts in San Bernardino , Calif . -LRB- 909 -RRB- 383-7049 .", "The budget for the apartment renovation was $ 300,000 .", "Freyer Architects , 37 East 18th Street , can be reached at -LRB- 212 -RRB- 598-0900 .", "CURRENTS : ARCHITECTURE ."], "summary": ["Freyer Architects renovates fourth-floor apartment in Manhattan brownstone and adds penthouse on roof .", "Photos ."], "publication": "nyt50", "label": [0], "tag": ["Home and Garden", "Style"]} -{"id": "1453282", "text": ["A tiny restaurant called Mr. Sushi on Houston Street was in trouble , and its owners , Edmund Liu and Winston Kulok , decided that design could save it .", "So they hired David Ling , a Manhattan architect , to transform their 900-square-foot space into a Chinese restaurant called ED .", "The budget was a minuscule $ 40,000 .", "`` We made a luminous cave , backlit in red , '' Mr. Ling said .", "`` And it culminates with a waterfall that looks like liquid mercury . ''", "The waterfall cascading over a mirror behind a sushi bar , above , is the focal point for the restaurant .", "He draped white translucent fabric , designed for rear-screen projection , on black steel gas pipes to create a taut rectangular tent .", "The floor , left , is a hand-sanded acrylic mirror .", "`` I liked the contrast between industrial materials and the handmade finish , '' he said .", "`` It makes for a murky , sandy , luminous surface , '' he said .", "-LRB- Quarter-inch-thick acrylic mirror sells for $ 6 a square foot at Industrial Plastic Supply , 309 Canal Street , 212-226-2010 . -RRB-", "On this stretch of Houston , east of Macdougal Street , the sidewalk runs two feet above the entrances to the storefronts .", "The sidewalk at Mr. Sushi 's came right up to the building , making the entrance door only 8 feet high .", "Mr. Ling excavated the sidewalk back 30 inches , to its original footprint , giving the storefront a full 10-foot height , above , and offering a pink glow that illuminates the street .", "ED is at 142 West Houston Street .", "David Ling can be reached at -LRB- 212 -RRB- 982-7089 .", "225 East 21st Street , New York City .", "CURRENTS : RESTAURANT ."], "summary": ["Tiny restaurant on Houston Street call Mr Sushi is transformed into luminous Chinese restaurant called ED on budget of only $ 40,000 .", "Photos ."], "publication": "nyt50", "label": [0, 2], "tag": ["Home and Garden", "Style"]} -{"id": "1453283", "text": ["Last January , the artist Ruben Toledo returned to Cuba with his wife , Isabel .", "It was the first time he had been back since his family left the island in 1967 .", "One result of the trip is Cuban Black , a collection of seven tile murals for Ceramica Bardelli , an Italian ceramics maker .", "`` Cuba is such a bleached country , '' Mr. Toledo said .", "`` Nothing has been painted in 40 years . ''", "He called the country `` colorful in a black-and-white way . 
''", "One Cuban Black -LRB- right -RRB- , made of four tiles , each eight inches square , and selling for $ 203 , shows a dog .", "Another , 18 tiles and $ 375 , depicts a cat on a chair .", "Two other designs -LRB- 18 tiles , $ 443 .", "24 tiles , $ 562 -RRB- show plants .", "They are at available at Hastings Tile and Il Bagno Collection at 230 Park Avenue South -LRB- 19th Street -RRB- .", "-LRB- 800 -RRB- 351-0038 .", "CURRENTS : CERAMICS ."], "summary": ["Ruben Toledo designs series of ceramic tile murals inspired by trip to Cuba .", "Photo ."], "publication": "nyt50", "label": [0], "tag": ["Home and Garden", "Style"]} -{"id": "1453284", "text": ["The race to win the Greater New York Bridge Association 's Player of the Year title appeared to be a runaway but turned into a cliffhanger .", "When the Edgar Kaplan Regional began a week ago , Glenn Milgrim seemed to have an unassailable lead .", "But John Fout , in second place after an equally stellar year , was chasing hard and gaining ground .", "He was due to take the title if he won the final event , the Board-a-Match Teams , and was leading going into the last session .", "But that session dropped him into second place , and the Player of the Year title went to Milgrim .", "Last year 's winner , Chris Willenken , finished third .", "He won the Team Player of the Year title , with Fout second and Milgrim third .", "Milgrim 's favorite deal of the year , on which he sat South , is shown in the diagram .", "He was playing with Willenken , and they arrived in six hearts from the South side of the table .", "That was the result of the three-spade bid , a transfer to hearts by agreement , and the four-diamond bid , a retransfer .", "Six hearts from the normal North position would have been hopeless after the marked club lead .", "As it was , three intermediate cards in the North hand proved crucial : the eight-seven of hearts and the club eight .", "Milgrim won the opening diamond lead with dummy 's ace and spotted a rare safety play , little-known even in expert circles .", "He led the heart 10 , intending to run it , guarding against the possibility of a singleton nine in the West hand .", "That was exactly the position , and East covered with the jack .", "South won with the king and led his remaining heart to the ace .", "The heart seven was led , and East took his queen and shifted to a club .", "South won with the ace , crossed to the spade queen , and played the heart eight to remove East 's last trump .", "The rest needed good technique , and Milgrim demonstrated it .", "He led to his diamond king and ruffed a diamond , to reach this position : Diagram The last trump was led from dummy , and East and South discarded diamonds .", "West could not guard both black suits , and the slam was made .", "This column referred erroneously on Monday to South 's prospects in the deal shown .", "He did not have the potential to take all 13 tricks .", "With the cards West held , he was certain to take a trump trick .", "BRIDGE ."], "summary": ["Glenn Milgrim wins Greater New York Bridge Assn 's Player of the Year title .", "Diagram ."], "publication": "nyt50", "label": [0], "tag": ["Arts"]} -{"id": "1453285", "text": ["Qin Shihuang 's fearsome exercise of power 2,200 years ago has been compared to the actions of Napoleon and Stalin , and his bloody legacy remains a raw wound in today 's China .", "The Qin emperor was a military adventurer who unified the country for the first time by subsuming six warring states and began to build the Great Wall 
.", "Ruthless , he imposed absolute order by executing those suspected of disloyalty .", "Modern artists approach the subject with caution , in part because Mao Zedong saw the founding emperor as an inspiration and the Communist Party still views the ancient leader as a pointed allegory .", "So when Zhang Yimou , China 's best-known and arguably most talented director , chose the Qin court as the setting for his big-budget martial arts epic `` Hero , '' expectations were high .", "The director of `` Raise the Red Lantern '' and `` To Live , '' Mr. Zhang has often explored the emotional whiplash inflicted on common people by China 's tumultuous history .", "He has also infuriated the Beijing government and found himself blacklisted , while delighting many critics .", "But `` Hero , '' despite its complicated subject , has delighted Beijing 's mandarins , who are submitting it as China 's nominee for best foreign film at the Academy Awards .", "And it has infuriated some Chinese critics , who have panned Mr. Zhang 's plot for promoting a philosophy of servitude .", "`` ` Hero ' does not have the courage to present the massacres Qin Shihuang ordered in the name of peace under heaven , `` said Tou Jiangming , writing in The Sat-China Weekly .", "`` The history so often questioned by modern thinkers is ignored by Zhang Yimou . ''", "Or as a critic using the pen name Bu Tong put it in The Beijing Youth Daily : `` Zhang Yimou 's movie has a deep servility inside .", "He tried to understand what the world looks like from the ruler 's standpoint . ``", "This is a little like Fellini suddenly promoting Victorian values .", "Most of Mr. Zhang 's earthy films view the world through the powerless , people stuck in anonymous villages who rely only on inner dignity and intense passions to guide them through a world that takes them for granted .", "`` Hero '' is something new .", "Mr. Zhang , 51 , set out to prove that he could make a Hollywood-style blockbuster that appealed to both Chinese and foreign audiences , while retaining his artistic touch .", "He may have succeeded .", "But he did something else new as well , whether because he needed government support to produce a film of unprecedented cost and scale for China or because he wanted the police to do more to help him fight rampant piracy : he made a movie that those in China 's propaganda apparatus are thrilled to promote .", "After its premiere in mid-December in the Great Hall of the People , the deputy director of the state film bureau , Zhang Pimu -LRB- who is no relation to the director -RRB- , called it `` artistic , entertaining and thoughtful . ''", "The $ 31 million production has an all-star cast of Jet Li , Tony Leung , Maggie Cheung and Zhang Ziyi .", "It has aerial martial arts choreography like that in `` Crouching Tiger , Hidden Dragon , '' the runaway success directed by Ang Lee .", "Miramax backed `` Hero '' and will release it in the United States early in 2003 .", "Like Mr. Zhang 's early films , `` Hero '' is lyrical .", "From the lakes of Jiuzhaigou to the forests of Inner Mongolia , Mr. Zhang mixes spectacular natural scenery with his own cinematic vision , producing a colorful slide show of fine art .", "The moment of truth in the story , written by Mr. Zhang and two others , comes when Jet Li , playing a nameless assassin , makes a gravity-defying assault on the king of Qin .", "-LRB- The king has not finished subsuming all rival states and creating the Qin empire . 
-RRB-", "The assassin decides , with a split second to spare , that his highest calling is to abandon his personal quest and let the king unify China .", "The written Chinese characters `` Tian Xia , '' all under heaven , are the movie 's coda .", "The king of Qin appears as a misunderstood leader who dispatches his black-armored cavalry to slaughter his neighbors but suffers quiet agony at the pain he must inflict for the common good .", "Mr. Zhang 's king even sheds a tear for his converted assassin when , with a flick of his wrist , the king orders his execution .", "The historical Emperor Qin left little evidence of his compassion .", "He replaced feudalism with a merciless monarchy .", "He killed Confucian scholars and burned their books .", "The emperor 's ruthlessness left him few admirers until Mao .", "`` Please do n't slander Emperor Qin Shihuang , sir , `` Mao wrote in a 1973 poem .", "The Communist leader praised the emperor for suppressing Confucian orthodoxy , which Mao despised for its intricate morals .", "Today , Qin 's rule is not a forbidden subject .", "But it remains sensitive , particularly after Chen Kaige , Zhang Yimou 's peer , covered the same historical ground as `` Hero '' in his 1998 film , `` The Emperor and the Assassin . ''", "Mr. Chen portrayed the emperor as a Shakespearean tyrant whose brutality covers inner shame .", "The opening scene is of Qin soldiers exterminating a family .", "To disguise his bastard birth , the emperor does away with his father .", "Though the censors allowed it , Mr. Chen was roundly criticized for neglecting the emperor 's full record -- his unification of the nation and the building of the Great Wall .", "Mr. Zhang has offered varying explanations as to why he took a more sympathetic view .", "In interviews surrounding the national release of the film , Mr. Zhang initially disavowed any ideology .", "`` The only test of a film 's success , especially a martial arts film , is whether it can keep the audience 's attention for 90 minutes , not its metaphysics , `` he said .", "But he also explained that he aimed to break the mold of martial arts movies .", "Too often , he said , they center on the hero avenging a master 's death .", "He wanted his hero to have transcending values .", "`` I wanted to write about people with warm blood , '' Mr. Zhang said .", "`` People who have faith and ideals . ''", "So what are these ideals .", "Mr. Zhang quoted a well-known phrase attributed to a Song Dynasty official named Fan Zhongyan : `` One should be the first to worry for the future of the state and the last to claim his share of happiness . ''", "Mr. Zhang has not commented on the movie 's metaphor for modern politics .", "But Tony Leung , the Hong Kong actor who plays a peace-loving warrior in the film , made the connection .", "In an interview with B International , a Hong Kong-based magazine , Mr. Leung said he applauded the message of `` peace and human kindness '' in `` Hero , '' then reflected on the Beijing government 's suppression of a democracy movement 13 years ago .", "`` During the June 4 incident , I did n't join any demonstrations , because what the Chinese government did was right -- to maintain stability , which was good for everybody , `` he was quoted as saying .", "Mr. Leung later said that his comments had been taken out of context and that he was speaking from the perspective of his character in the film .", "`` My interest is in making movies , not politics , '' he said .", "Mr. 
Zhang has never been a dissident .", "But until recently he seemed to enjoy flirting with the limits of China 's artistic tolerance .", "`` Red Sorghum , '' `` Raise the Red Lantern '' and `` Ju Dou '' were all set in the pre-Communist era .", "They were all banned domestically after they were made , though all have since been released .", "Censors objected , most likely , because they portrayed China as violent , backward and capricious and suggested that the condition was not merely a byproduct of its pre-Communist politics .", "In `` To Live , '' Mr. Zhang extended the theme to Maoist China .", "The 1994 epic , which won the Cannes Palme d' Or award , is the tale of a couple tumbling through successive historical calamities of China 's civil war , the Great Leap Forward and the Cultural Revolution .", "It has never been legally shown here .", "But over the past eight years , as China 's economy became more prosperous , Mr. Zhang 's films became less provocative .", "In `` Not One Less , '' which he directed in 1998 , a young village schoolteacher goes to great lengths to retrieve a student who ventures into the big city to find work .", "The teacher 's success depends on a soft-hearted official who runs a television station and takes a Rockwellian shine to the peasant girl .", "Recently Mr. Zhang has also accepted some official duties .", "He directed movies to promote China 's bid to be host of the 2008 Olympics in Beijing and its entry to stage the 2010 World Exposition in Shanghai .", "He said in a recent newspaper interview that he no longer cares what the critics say because he gets attacked no matter what he does .", "`` Only one film I 've done in my life has not been attacked , `` he said of his promotional movie for the Olympics competition .", "`` And that 's only because Beijing won . 
``", "ARTS ABROAD ."], "summary": ["Latest Zhang Yimou film Hero , which is set in court of Emperor Qin Shihuang , wins praise of Chinese leaders , who have been infuriated by Zhang 's earlier films , and condemnation from some critics , who accuse film of promoting philosophy of servitude .", "Depicts Emperor Qin as far more compassionate character than historical evidence would justify , and his ruthlessness had few admirers until Mao Zedong .", "Photo ."], "publication": "nyt50", "label": [8, 34, 31, 15, 50, 23, 59], "tag": ["Movies", "Arts"]} -{"id": "1453287", "text": ["From an early age , it seems , Albrecht D\u00fcrer harbored no doubts about his artistic talent .", "A remarkable self-portrait , drawn when he was 13 , could almost be interpreted as a proclamation of his genius .", "Within a few years , this German artist had made his name in Nuremberg , his home city .", "And although only 23 when he first visited Italy in 1494 , he quickly understood that he was in the same league as the Renaissance masters .", "Yet if today D\u00fcrer does not stand alongside , say , Raphael , Leonardo and Michelangelo except in Germany , this may be his own doing .", "Obsessed with winning fame and fortune , he achieved these objectives by turning his energies toward printmaking .", "His prints , marked with his famous A D monogram and sold across Europe , brought him extraordinary wealth and prestige .", "But then as now , to the public , painters outrank printmakers .", "So it is that 32 years have passed since the last D\u00fcrer exhibition was held at the British Museum , which has one of three major D\u00fcrer graphic collections .", "-LRB- The others are at the Albertina in Vienna and the Kupferstichkabinett in Berlin . -RRB-", "Now , to coincide with its 250th anniversary this year , the British Museum is making amends with `` Albrecht D\u00fcrer and His Legacy : The Graphic Work of a Renaissance Artist , '' which runs through March 23 .", "This exhibition presents a wide range of D\u00fcrer 's prints , drawings and watercolors , with one small oil added as a reminder that he was also an exceptional painter .", "Many of the works come from the British Museum 's collection and were part of Sir Hans Sloane 's donation to the museum when it was founded in 1753 .", "But there are also important loans , including his teenage `` Self-Portrait '' and his majestic `` Study for the Praying Hands of an Apostle , '' both on display in Britain for the first time .", "More than simply introducing D\u00fcrer to a new generation of museum visitors , though , the exhibition also trumpets his importance in the history of Western art by examining , in the words of Neil MacGregor , the British Museum 's new director , `` his astonishing afterlife , the absorption and adaptation of his work through the centuries . 
''", "Put differently , both contemporaries and successors were not only inspired by D\u00fcrer , but they also copied him without blushing .", "D\u00fcrer is often described as the first truly commercial artist .", "He knew the value of his work , and in art history 's first recorded plagiarism dispute , he sued an Italian artist for copying his prints , even down to the A D monogram .", "D\u00fcrer also hired an agent to sell his prints outside Germanic territories .", "And in what would later become something of a tradition among leading artists , his wife , Agnes Frey , acted as his hard-nosed manager .", "The British Museum is showing many works inspired by or copied from D\u00fcrer .", "Some , including numerous portraits of a stern-looking D\u00fcrer , were made after his death in 1528 .", "Others reflect D\u00fcrer renaissances , one in the 17th century , when his engraving style influenced Rembrandt and others , and again in the early 19th century , when German writers like Goethe and painters like Caspar David Friedrich raised D\u00fcrer to mythical status as a Romantic icon .", "D\u00fcrer 's graphic work stands out for its variety and originality .", "One innovation was self-portraiture : at least 12 drawn or painted self-portraits survive .", "He even used his own image to represent Christ in some prints , notably his `` Sudarium of St . Veronica . ''", "But he was also something of an outdoors type , leaving his studio for painting and sketching forays .", "For instance , `` The Courtyard of Innsbruck Castle , '' painted immediately after his first journey across Europe to Italy , suggests he had absorbed an interest in architecture from Renaissance masters .", "His approach to nature was untypically realistic .", "In his early 20 's he painted a series of stunning watercolor landscapes , including `` Study of a Rock Face , '' which is accurate enough to illustrate a modern geology textbook .", "His drawings and paintings of animals were equally detailed .", "Among the treasures in this show are two early watercolors of the muzzle of a bull , in which every hair has been painted individually .", "Two later drawings , `` Resting Dog '' and `` Head of a Walrus , '' confirm that he remained intrigued by animals throughout his life .", "His most famous illustration from nature is `` Rhinoceros , '' from 1515 .", "Because he had never seen such an animal , D\u00fcrer based this woodcut on a description and sketch sent from Portugal , where the king had been given a rhinoceros from India .", "Remarkably , D\u00fcrer created a good likeness , capturing the platelike folds of that animal 's thick skin and its menacing appearance .", "In the process , he bequeathed an image that would be used for centuries in coats of arms , books and prints .", "But D\u00fcrer was also very much a man of his age in that , while he evidently enjoyed being a celebrity , he was also profoundly religious and open to the myths , magic , superstition and neo-paganism prevalent in Germany during the late 15th and early 16th centuries .", "He was not even 30 , for instance , when he embarked on his landmark `` Apocalypse '' woodcut series , inspired by the Book of Revelations , which eventually resulted in 15 prints .", "When the 16th-century Florentine painter and writer Giorgio Vasari referred to D\u00fcrer 's `` extravagant imagination , '' he had `` Apocalypse '' in mind .", "The most copied image , `` The Four Horsemen of the Apocalypse , '' is in this show , with its three powerful riders representing 
Conquest , War and Famine , and the fourth , a skeletal white-haired figure on a sickly horse , representing Death .", "As much as through individual prints like `` Rhinoceros , '' D\u00fcrer spread his fame through wide distribution of major narrative series on religious subjects , notably `` Life of the Virgin , '' `` Large Passion , '' `` Small Passion '' and `` Engraved Passion , '' each of which dealt with familiar passages from the New Testament with a power and freshness that , once again , would inspire many copies .", "It is in a 1514 engraving called `` Melancholia , '' however , that many art historians have sought out the inner D\u00fcrer .", "This show 's catalog reprints a 1972 essay by G\u00fcnter Grass devoted to this print , while another German writer , Peter-Klaus Schuster , calls it `` the picture of pictures '' in his two-volume study of the work .", "Packed with symbols , the print shows a winged figure leaning her chin on her left hand and looking out at the world with despair .", "D\u00fcrer 's only engraving to match the philosophical depth of `` Melancholia '' is `` Knight , Death and the Devil , '' also in this show .", "But the public D\u00fcrer was more straightforward : hardworking and obsessed with money .", "His paintings are masterly , yet in his view they did not represent good use of his time .", "`` I can not execute this work without loss for the mere 130 florins we agreed upon , '' he once told a patron who had commissioned a painting .", "Your panel has almost 100 faces .", "I am missing out on too much other work . ``", "If his workshop in Nuremberg functioned as a cottage industry , with D\u00fcrer regarded as a prosperous artisan , his two long voyages to Italy gave him a taste for a world where artists were treated with respect .", "`` Here I am treated like a gentleman , '' he wrote during his second visit to Venice , `` whereas at home I am treated as a parasite . ''", "Well , perhaps then .", "Today , in Germany , but also by many experts beyond , D\u00fcrer is viewed quite simply as the greatest printmaker of all time ."], "summary": ["British Museum mounts exhibition of Albecht Durer prints , drawings and watercolors 32 years after its last Durer exhibition .", "Devotes space to Durer 's influence on succeeding generations of artists .", "Photo ."], "publication": "nyt50", "label": [11, 8], "tag": ["Arts"]} -{"id": "1453288", "text": ["It is a remarkable display of civility that this city offers with the famous Johann Strauss concerts by the Vienna Philharmonic on New Year 's Eve and New Year 's morning .", "All the more remarkable given what comes between .", "To a midnight mob scene outside St . Stephen 's Cathedral worthy of Times Square , add the firepower of Chinatown at its new year : firecrackers and cherry bombs detonating virtually in your pocket , even full-scale fireworks , launched , it seems , by the guy next to you .", "Life and limb thankfully aside , the din proved dismaying for a musical ear , because the big bell that rings in the cathedral tower to greet the new year could simply not be heard .", "Well , solace came quickly , with the concert New Year 's morning at the Musikverein , conducted by Nikolaus Harnoncourt and telecast live throughout much of the world , later in the day in the United States .", "Mr. 
Harnoncourt , the reigning Austrian maestro , made his reputation as an early-music specialist but has carried his questing spirit into a wide range of repertory .", "Although the New Year 's concerts are formulaic affairs , steeped in tradition , Mr. Harnoncourt , working with the Strauss specialist Franz Mailer , managed to inject seven works new to the series as well as two additional composers , Brahms and Weber .", "-LRB- Three , if you count Berlioz , who orchestrated Weber 's `` Invitation to the Dance . '' -RRB-", "Brahms was a friend of Strauss 's and no stranger to this hall , and his Hungarian Dances Nos . 5 and 6 fitted comfortably into the mix in lusty yet subtly inflected interpretations .", "The Strauss novelties -- in addition to one by Johann 's brother Josef and one by their father , Johann I -- were the `` Niko-Polka '' -LRB- ending delightfully with a quiet passage for piccolo and harp that seemed to have strayed from a different piece , squelched by a big bang -RRB- and the `` Kr\u00f6nungs-Lieder . ''", "The orchestra played superbly in a way that only it can in this repertory , with polish , spirit , humor and the inimitable lilt that comes from rushing the second beat in a waltz .", "The magic lies in when , and just how much .", "The only mishap in the smoothly oiled affair came with `` Invitation to the Dance , '' which ends with a rousing climax .", "-LRB- Applause ! -RRB- No : actually it ends with a return to the quiet opening , with a cello solo , played beautifully here by Franz Bartolomey .", "But premature applause is almost a given , and everyone took it in good humor .", "While tradition may be made to be broken , these concerts are also steeped in commerce .", "-LRB- Thus the inordinate appeal of the formula : it works . -RRB-", "In addition to the television broadcasts , expected to reach some 50 million viewers , the recordings are perennial best sellers -LRB- not least , the one from Mr. Harnoncourt 's previous appearance , in 2001 -RRB- .", "This year , Deutsche Grammophon promises to have finished CD 's in European stores by Jan . 7 .", "American release is scheduled for Jan . 28 .", "So there was also the expected .", "Invariably , the first strains of the second encore , `` The Blue Danube , '' are interrupted by applause : in this case it was part of the well-oiledness .", "And invariably the conductor turns around to wish everyone a happy new year on behalf of the orchestra .", "Here Mr. Harnoncourt seemed to be reaching for something more , commenting earnestly on music 's ability to reach all and wishing the audience a `` blessed and peaceful '' new year .", "Equally predictably , the third and final encore was Johann Strauss I 's `` Radetzky March , '' with everyone clapping along heartily .", "Then it was off with the television cameras and down with civility as audience members descended on the flowers that festooned the hall -LRB- an impossibly beautiful one to begin with -RRB- , absconding with armfuls of white roses and carnations .", "Mr. 
Harnoncourt and the orchestra are to bring music of the Strauss brothers as part of their tour of seven American cities in February and March .", "If only they could bring the same heady atmosphere .", "MUSIC REVIEW ."], "summary": ["James R Oestreich reviews Vienna Philharmonic 's New Year 's morning concert at Musikverein .", "Concert is conducted by Nikolaus Harnoncourt .", "Photo ."], "publication": "nyt50", "label": [4], "tag": ["Arts"]} -{"id": "1453294", "text": ["ON a recent Sunday morning , Catherine Crier , the cool and cerebral Court TV anchor , was in her kitchen here mixing pancake batter and looking for ingredients .", "`` Cynthia , '' she called upstairs to her sister .", "`` Do we have any nuts .", "`` `` Right-hand cabinet , second shelf , '' Cynthia Crier said without missing a beat .", "There 's a reason that Cynthia Crier , who lives in Manhattan , knows what her sister 's cabinets in Westchester hold .", "And it 's not because she 's a gourmand : she is the architect who doubled the size of her sister 's 3,000-square - foot house .", "And in the two years it took , she did everything from supervising the contractor to stocking the shelves .", "Some people might say that letting your sister take over your house is folly .", "The pitfalls are obvious : childhood feuds , ancient jealousies , the threat of long-term repercussions if something goes awry .", "`` They were very similar growing up , '' said their mother , Ann Crier .", "`` Both are very competitive and goal-oriented . ''", "But for Catherine and Cynthia Crier , the renovation built not only a home but a better relationship .", "Catherine Crier left Dallas for college at 16 , when Cynthia was 12 .", "And by the time she returned , Cynthia was off to Cornell to get a degree in architecture .", "Growing up , they had shared little but competitive horseback riding .", "-LRB- Catherine keeps four horses in a barn she built on her 30 acres here . -RRB-", "`` It could be intimidating to have a sister who is both brilliant and beautiful , '' said Cynthia Crier , 44 .", "`` But it also motivated me .", "I also feel lucky that we chose different fields .", "Catherine was so political -- I was more artistic .", "I knew from the time I was a little girl that I wanted to be an architect .", "It would be a lot tougher if we were competing in the same career . ``", "Catherine 's career , in typical overachiever fashion , included a stint as a state district court judge -LRB- she was the youngest person ever elected a judge in Texas -RRB- , then high-profile positions at CNN , ABC News , Fox News and Court TV .", "Even when she moved to Westchester in 1993 , only an hour from Cynthia , their relationship was cordial rather than close .", "Who knew that talk of drainage pipes and mahogany dining room tables would provide glue for their relationship .", "In fact , Catherine Crier , 48 , initially paid another architect $ 10,000 to design an 800-square-foot guesthouse .", "`` It was a disaster , '' Cynthia said , with obvious satisfaction .", "As her sister put it : `` I 'm sure he 's a fine architect for someone else .", "I know it must have been very hard for him , because I could n't articulate exactly what I wanted . ``", "When she said she liked the idea of a cozy cottage , he gave her something she described as `` too Laura Ashley . ''", "When she elaborated and said she wanted stone and wood , to reflect the look of her property , he drew what she called `` a Swiss chalet . 
''", "Then , in August 2000 , Cynthia visited from Greenwich Village .", "She had been the chief architect for the New York Fire Department from 1993 to 2000 and had previously worked for Kohn Pedersen Fox in New York .", "She sat down to sketch a guesthouse .", "She drew a house complete with beamed arches and rustic clapboards and , with Catherine 's blessing , went on to draw blueprints for a renovation of the main residence , built in 1930 .", "The design opened up the boxy rooms and made the living room into a grand space , 22 by 43 feet and two stories high .", "`` The Texas girl in me requires open space , '' Catherine said .", "Nonetheless , she had some initial reservations about working with her sister on the guesthouse .", "`` I 'm a lawyer and aware of the risks involved in working with a relative , `` she said .", "Even after that project was completed satisfactorily , it was a big step to move on to the main house .", "So , in a lawyerly way , she worked with her sister to lay down ground rules .", "The first step was a standard architectural contract , with a section on interior design that specified the percentage Cynthia would be paid on the furnishings and finish work .", "Second , Catherine knew she had to get out of the way so her sister could work .", "It was decided she would move into the new guesthouse for a year while the main residence was being torn apart and put together again .", "And they agreed that the total cost for the guesthouse , the expansion of the main house , furnishings and an art studio would not exceed $ 1.5 million .", "Then came the ground rules for their friendship .", "`` We 're both very headstrong , passionate people , `` said Catherine , who was recently divorced .", "`` But we understood that to make it work , we would have to respect our jobs and our roles .", "I 'm the owner , and have veto power over interior design , but I would defer when it came to architectural issues . ``", "Well , not always .", "Joseph Riccardi , the general contractor on the project , recalls one face-off .", "`` There was one time where Cynthia had designed a staircase in the living room that Catherine decided she did n't want , `` he said .", "The staircase would have let Catherine go directly from her bedroom to the kitchen , without going through a more public spot , the foyer .", "`` Cynthia argued for it , '' he said , `` but finally gave in , explaining , ' This is what the boss wants . '", "`` The plaster curlicue sconces in the master bedroom set the stage for another clash .", "`` I found them too ornate , '' Cynthia said .", "`` But Catherine insisted on having them , so I custom-made a fireplace frame to complement the design for a seamless effect . ''", "But Catherine deferred to her sister when she insisted on the luxury of copper rain gutters -- `` They last a lifetime , '' Cynthia said -- and gave up the brick she wanted outside so its $ 8,000 cost could be used for radiant heat for the kitchen floors .", "`` Since Cathy wanted open space , the ceilings are very high , '' Cynthia said .", "`` And heat rises , as do the heating bills .", "With heated floors , your expenses are reduced . 
``", "Both sisters laughed when asked if the renovation caused any screaming brawls .", "`` No , I ca n't think of anything we argued about , `` Cynthia said .", "Catherine said : `` Some people may want a Mario Buatta-styled house , and in a way , that 's easier to interpret .", "I do n't want to feel that I 'm walking into some stranger 's house that 's been made for Architectural Digest .", "I wanted to return to a place that is an extension of me .", "And Cynthia just got it . ``", "What saved them was experience .", "Catherine had renovated houses in Dallas , Wyoming and Atlanta , but had not been totally satisfied with the results .", "`` I could never articulate exactly what I wanted because I needed someone to interpret what I needed to reflect my life , '' she said .", "As for her sister , she said : `` I saw how detailed she was in other projects , and I already had great respect for her work . ''", "And Cynthia had already handled the design of 300,000 square feet of complex office space at MTV , where she had to assess the needs of everyone from Sumner Redstone to office managers .", "She was happy to be working for one client .", "It helped that Cynthia anticipated every interior detail , including the exact position of the lights for her sister 's varied paintings , which include one by Andr\u00e9 Bourri\u00e9 and one with a Southwestern theme by Inger Jirby .", "And as they worked together , they rediscovered each other 's exacting natures .", "`` Our father still has electric bills from the 1950 's , `` Cynthia said .", "`` We get it from him . ''", "Yet Catherine 's perfectionism was channeled into her broadcast and legal work , more than into her home .", "`` Cynthia realized how schizoid I am , and how I 'm always on the go and not organized at home , `` she said .", "`` She made sure the house was designed to compensate for my failings . ''", "In addition to organizing and alphabetizing her sister 's 3,000 books , and knowing where the nuts were stashed in her kitchen , Cynthia had separate drawers made for her sister 's silver jewelry and gold jewelry and for each sweater color , and she added shoe bins to the walk-in closets , compartmentalized into anchor clothes and casual clothes .", "`` She even organized my linen closet , '' Catherine said .", "`` Well , '' said her sister , `` even after I designed the firehouses , I made the beds upstairs for the boys to make sure they were perfect . ''", "To Mr. Riccardi , the contractor , `` Cynthia was the person who took care of all aspects of this project , way beyond what architects normally do , or even interior decorators . ''", "That meant she had to come up with a way to display her sister 's collection of Asian and African artifacts -- clay water pitchers , urns , furniture and Tibetan money that looks like colored steel toothpicks .", "And she added the separate art studio .", "`` I saw what she did and screamed : ' This is amazing ! 
This is what I want , ' '' Catherine said .", "As with many working women , she already had her professional life , in jurisprudence , under control .", "What she needed help with was her domestic life .", "She was recently looking over copper-tufted swatches of upholstery fabric with Rebecca Boles , a Texas designer who met the sisters when they were girls on the Dallas equestrian circuit .", "Although there has already been a housewarming party , some details still have to be attended to : a geometric bedspread for the guest room , pillows for the Kravet living-room couches , custom-made rugs from Switzerland and China in olive green .", "The sisters put together a scrapbook of more than 2,000 pictures to document every step of the construction and sent it to their mother and their father , Bill .", "Their parents and their sister , Carolyn , 50 , a homemaker in Dallas , traveled to Westchester over the holidays to see the collaboration up close .", "It was the first Christmas the Crier family had spent together in 16 years , and they did it with a fresh snowfall and competitive games of pool , bridge and Scrabble .", "What impressed their mother is that Cynthia , who prefers `` small rooms that are more intimate , '' made the house so expansive , to suit her sister .", "`` They now have the friendship that they could have had in earlier years , '' Ann Crier said .", "Catherine Crier bought a house .", "Cynthia Crier made it her home .", "HOUSE PROUD ."], "summary": ["Cynthia Crier , Manhattan architect and sister of Catherine Crier , Court TV anchor , doubles size of her sister 's house in Katonah , NY , doing everything from supervising contractor to stocking shelves .", "Renovation builds better relationship between sisters as well as better home .", "Photos ."], "publication": "nyt50", "label": [6, 5, 11, 97], "tag": ["Home and Garden", "Style"]} -{"id": "1453296", "text": ["The year that just ended will be remembered as a year when the failures of America 's corporate governance and accounting procedures became widely apparent .", "But a full reckoning of the Enron-WorldCom era must also take into consideration the ways in which the business press failed , too .", "The late 1990 's witnessed an explosion of business media .", "CNBC became the most profitable cable channel in America .", "New magazines and Web sites sprang up : Business 2.0 , Red Herring , The Street.com and the publication I worked for , The Industry Standard .", "All purported to untangle the mysteries of the burgeoning Internet economy .", "Yet for all that increased attention , it 's difficult to say that the enlarged business media played a decisive role in exposing the shortcomings of American corporate practices .", "Indeed , too often the new magazines and Web sites acted as incurious cheerleaders , championing executives and innovative companies without questioning their books .", "Do a search , for example , of the word `` Enron '' in the databases of those publications prior to 2000 and you 'll find little but praise for its market innovations .", "The mainstream media , too , did its share of hyping the technology boom , but no one did as much evangelizing as the so-called new economy publications .", "They preached about how technology created new paradigms .", "But they were frequently slow to note when technology did n't work , or markets did n't exist , and they relied far too much on a handful of self-interested bankers for information .", "The billions that poured into Internet companies in the late 
1990 's usually came through the hands of venture capital firms or large Wall Street brokerage houses , each of whom had a vested interest in the company 's success , or at least its rapid growth .", "But they were often among the only people who had studied the industry closely enough to have an informed opinion , and so they were the ones we called .", "With the benefit of hindsight , it 's now clear that I and others were wrong to rely so heavily on sources who had so much at stake .", "I had begun to get an inkling of this in early 1999 : while writing about the merger of two large Internet music retailers , I sought a comment from one of the bankers following the stock of the newly formed entity .", "Yes , he acknowledged , the merger had taken too long to complete , and in the meantime Amazon.com had taken the lead in online compact disc sales .", "But , he insisted , the management team was solid and the company was on track .", "I was impressed , and put his endorsement in my story .", "In a matter of weeks , I noticed , the bank had dumped all its stock in the merged retailer .", "I resolved never again to rely on analysts , but I confess that I did n't bring this epiphany to the attention of my colleagues or my readers .", "Another part of the problem was that our own businesses were too far inside the beast we were covering .", "The Industry Standard , which began publishing in 1998 , had the same start-up mentality as many of the companies it covered .", "Inevitably , some of their worldview rubbed off on us .", "At exotic conferences in Aspen and Barcelona , our management mingled with the leaders of high-flying tech firms , some of whom were simultaneously advertising in the magazine , sponsoring a section of our Web site , speaking at magazine-sponsored conferences they had helped pay for , selling us software and giving colorful quotes to our reporters .", "This was not a formula for sustained independence .", "As we grew -- The Industry Standard sold more advertising pages in the year 2000 than any magazine in America -- we inherited some of the dot-com hubris as well .", "We spent millions on television advertisements , without being able to track whether they actually brought in subscriptions .", "The magazine 's very success was sometimes a distraction that blurred the difference between us and our sources .", "Our competitors , too , acted brashly .", "Both Red Herring and Business 2.0 had so many pages of advertising that in order to publish a respectable amount of editorial content in certain issues they simply reproduced articles they had already published .", "-LRB- One editor went so far as to defend the practice , arguing that his magazine had acquired lots of new readers since the articles had first been published . -RRB-", "The storm of information surrounding the Internet boom was blinding .", "So many words and press releases were swirling around that it was impossible to know if anything anyone said made a difference .", "This depressing suspicion was made real for me one morning , when I appeared on CNNfn , the cable network 's financial channel , to discuss the state of the market for initial public offerings .", "I had specifically told the producer that while I could discuss some Internet offerings , I was not an expert on the market for I.P.O. ` s .", "No matter : when I turned up on the screen , the words `` I.P.O. 
analyst '' showed up beneath my head .", "As if that was n't bad enough , the anchor then asked me a question about a company called Bamboo.com that was scheduled to issue stock later that week .", "Unfortunately , I had never heard of Bamboo.com, so I found myself improvising .", "Here 's what I said , warts and all : `` Bamboo.com is a specialized technology and Internet company that does certain kinds of currency exchanges .", "Internet currency exchanges , I should say , come up with a specialized currency just for the Internet and it 's one that people are looking at .", "I 'm not convinced that 's an absolute winner . ``", "It 's not just the grammar that was off .", "Bamboo.com was an online real-estate company that specialized in a technology that allowed for 360-degree images on the computer screen .", "Could I have said no comment .", "Sure , I suppose , and I was furious with myself .", "Equally disturbing , though , was the fact that it just did n't matter .", "The stock doubled in value on its first day of trading , and no one from the company ever bothered to contact me to correct my error .", "The froth of stock trading during that period obscured the facts and , it seemed , even the need for facts .", "It would be wrong to blame the news media alone for the business debacle .", "If a company sets out to mislead regulators and investors , and finds a prestigious accounting firm willing to sign off on baked books , it 's extremely difficult for an outside reporter to uncover the truth , especially on deadline .", "And there were plenty of occasions when The Industry Standard and others did diligently expose practices of tech and financial companies that seemed less than above-board .", "We wrote about questionable sales tactics at America Online and tried to curb enthusiasm for Priceline 's discount grocery service , which has since failed .", "But on balance I think even the best new economy journalists could not shout down the hype coming from the bankers and public relations machines .", "-LRB- And when the banking and advertising money stopped flowing , journalism was not enough to keep us alive . -RRB-", "I 'd like to believe that those of us who witnessed the tech bubble will be smart enough to prick the next bubble that comes along before too many investors get duped .", "Encouragingly , some improvements have been made .", "CNBC now usually identifies whether a banker it is interviewing owns stock in or does business with the companies being discussed on the air .", "But in more skeptical moments , I fear that the rise of any boom sector in the American markets will bring with it an attending press that is at least compliant , if not out-and-out boosterish .", "Editors and reporters need to be able to resist the notion that any single development in technology or business creates a new economy that defies traditional laws of business .", "That 's not a problem that the Securities and Exchange Commission and Congress can solve .", "James Ledbetter , business editor of Time Europe , is author of the forthcoming `` Starving to Death on $ 200 Million : The Short , Absurd Life of The Industry Standard . 
'' ."], "summary": ["Op-Ed article by James Ledbetter says full reckoning of America 's corporate governance and accounting procedures scandals must also take into consideration ways in which business press failed , too .", "Says late 1990 's witnessed explosion of business media , yet it is difficult to say that enlarged business media played decisive role in exposing shortcomings of American corporate practices .", "Says too often new magazines and Web sites acted as incurious cheerleaders of innovative companies , without questioning the books .", "Says mainstream media also hyped technology boom .", "Fears that rise of any boom sector in American markets will bring with it attending press that is at least compliant , if not out-and-out boosterish .", "Drawing ."], "publication": "nyt50", "label": [58, 7, 6, 1, 2], "tag": ["Opinion"]} -{"id": "1453300", "text": ["TOO many people are resigned to living with cruddy kitchens that have them cursing through the holidays .", "The temptation is to defer any improvements until Santa delivers $ 100,000 wrapped in a ribbon .", "But you do n't have to wait for a dream budget .", "You just have to be smart and realistic , and avoid blowing it all on a new Sub-Zero .", "Focus instead on fixes that will make the kitchen a better place to cook and commune .", "The following three kitchens were transformed with $ 5,000 , $ 10,000 and $ 20,000 , and they offer a few rules of thumb for minimizing costs .", "Whatever you do , do not blindly ape an elaborate spread from a magazine , or you 'll end up with one of those pizza and champagne depositories like those shown on MTV 's `` Cribs . ''", "Your kitchen , like the three shown here , should encourage you to pull out the pans and get busy .", "The first thing I intend to make in mine is a cenone , a traditional Italian meal to ensure prosperity for the new year .", "If you follow my lead , a little grappa will help instill courage for a renovation .", "Discount Difference : $ 5,000 A burst pipe was the best thing that ever happened to the kitchen in our home in Sagaponack .", "The blue Formica counter peeled and the cabinets delaminated .", "We had no choice but to start over .", "In most kitchens , the biggest budget-buster is the cabinetry .", "For an average kitchen , custom millwork can easily run $ 30,000 .", "To cut costs , we went to the Home Depot and picked out flat rail , one of the simplest cabinet styles .", "We ordered the cabinets unpainted , for a 10 percent savings , and used the color of our choice .", "I chose White Caf\u00e9 from Schreuder , a Dutch company that makes enamels reminiscent of glossy French storefronts : steely blues , misty whites and foamy greens -LRB- www.finepaints.com -RRB- .", "But the real savings was in confining conventional cabinets to the area under the counter .", "We replaced the `` uppers , '' the expensive cabinets that usually hang over a counter , with open shelving .", "To avoid that millwork altogether , Victoria Hagan , a designer , suggests converting a nearby closet to a pantry .", "You pay to install a couple of shelves and the result is a wealth of storage .", "As an enthusiastic but often disorganized cook , I also treated myself to hefty window-sash pulls for the drawers and doors that I incessantly open and close .", "To make those deep low cabinets more accessible , I also sprang for pull-out drawers rather than simple shelves .", "I even threw in a super-deep Elkay stainless-steel sink -LRB- the better to hide those dirty pots -RRB- .", "Cost 
so far : $ 2,110.26 , including tax .", "Ikea cabinetry is a designer secret so obvious as to be embarrassing .", "Sometimes a professional just has to suck it up and admit that the obvious trumps the obscure .", "Modern without looking cheap , the cabinet with frosted glass panel doors is a favorite , but I prefer to dress up cabinet fronts with special doors .", "Paul Grassfield , a contractor in Brooklyn -LRB- 718-782-8408 -RRB- , often mounts panels of plain cherry or walnut for use as doors .", "The panels can be oiled and waxed , a chore the untutored amateur can tackle with $ 100 and a Saturday afternoon .", "More adventurous alternatives include doors made of recycled sunflower seeds , paper and agricultural fiber -LRB- available from Phenix Biocomposites , www.phenixbiocomposites.com -RRB- or polycarbonates that look like clear plastic versions of corrugated cardboard -LRB- priced from $ 1.66 a square foot at www.sundancesupply.com -RRB- .", "All these products have a richness and a depth that look like big bucks .", "Drab metal cabinets can be revitalized by spraying them with automotive paint that comes in a wide range of colors -LRB- from Heavenly Bodyworks in Chelsea , 212-691-1092 -RRB- .", "For the countertops , I used white Formica -LRB- $ 600 including installation -RRB- .", "For just $ 200 more , I could have inserted a slab of honed white Carrara marble for rolling pastry or a hefty chunk of butcher block for a built-in cutting board .", "Another dress-up option is to edge the plain Jane counters in a metal trim that adds detail and coordinates with the silver hardware .", "Appliance costs can also get out of hand .", "We kept the old ones , except for the oven , which was destroyed in the flood .", "I replaced it with the sturdiest basic model I could find , a 30-inch-wide Hardwick SF9616 for $ 369.95.", "It has a gas pilot that allows it to keep working if summer storms cut off power .", "We also perked up the rusty refrigerator by replacing the front panel for $ 50 .", "To better see the new stuff , we updated some lighting , adding a few inexpensive recessed fixtures and mushroom-shaped surface-mounted ones , all from the Home Depot .", "ROOM TO IMPROVE ."], "summary": ["Three kitchens are redesigned on shoestring budgets : $ 5,000 , $ 10,000 and $ 20,000 .", "Offer a few lessons on minimizing costs .", "Photos ."], "publication": "nyt50", "label": [5], "tag": ["Home and Garden", "Style"]} -{"id": "1453302", "text": ["FOR Leo Hughes , 23 , of South Weymouth , Mass . , music is a passion .", "Having suffered a brain injury at 16 months that left him physically and mentally impaired , he is confined to a wheelchair and needs care 24 hours a day .", "While home , Mr. Hughes sometimes uses a drumstick to `` conduct '' songs that are playing on the radio .", "Although these performances are in pantomime , he knows quite a bit about playing music : while at the Massachusetts Hospital School in nearby Canton , he was in a musical group with other disabled students called the Headbangers .", "The group is the brainchild of Jon Adams , an assistant technology specialist at the school , a public institution serving 120 students ages 6 to 22 with significant physical disabilities .", "About 80 students live at the school during the week .", "Mr. Adams said he teaches students how to `` use technology to access their world , usually through computers . 
''", "He often helps them use computers to communicate , move their wheelchairs , open doors and the like .", "They do that with switches -- large buttons hooked up to computers that can be activated with the tap of the foot , the wiggle of a finger , a breath of air , the blink of an eye or a tap of the head .", "Mr. Adams , who holds degrees in music education and music composition from the Berklee College of Music and Goddard College , said it was only natural for him to seek a way to use the technology to enhance the students ' music therapy sessions .", "About 10 years ago he wrote a software program , Switch Ensemble , that enables a computer to play particular notes or rhythms when activated by a switch .", "Shortly thereafter the Headbangers were born .", "-LRB- The name was supplied by one of the students . -RRB-", "Each member of the ensemble is assigned a different `` instrument '' and learns to follow the conductor through weekly practice .", "The program can also be integrated with an IntelliKeys computer keyboard , which can be adapted for multiple uses and is widely used by disabled students .", "The group plays many recognizable tunes , like `` Santa Baby , '' rendered at a recent holiday performance .", "Although the music is activated with a simple rap on a switch , the tunes do not play automatically as a child 's toy might .", "Just as with a regular orchestra , the students ' actions make or break the music .", "The program , which Mr. Adams recently upgraded , simplified and renamed Super Switch Ensemble , requires that students learn about music to be successful performers , said Suzanne B . Hanser , the chairwoman of the music therapy department at Berklee , in Boston .", "`` They have to recognize the melody , '' she said .", "`` They have to match the rhythm of that melody and to make that auditory motor match so they know precisely when they need to move to create the sound that will work in the music .", "`` This is earthshaking for individuals who have been locked inside themselves , literally unable to communicate . ''", "Dr. Hanser is using Mr. Adams 's software to train music therapists .", "Traditional music therapy for the disabled involved having a child imitate humming or use an instrument adapted to the child 's disabilities to create a single set of sounds .", "Generally this does not involve performance of a work .", "`` Musical instruments just do n't work -- they do n't have the physical abilities , `` said Eve Montague , a music therapist who coordinates the creative arts program at the hospital school , referring to the limitations of traditional music therapy for the severely disabled .", "`` At some point you have to say there has to be another way . ''", "Ms. Montague said that for many students in the Headbangers , it is their first experience of working in a group .", "Because of their disabilities , group interaction had previously seemed unimaginable .", "Mr. Adams has adapted his technology for use in dramatics , at the school radio station and in the school band , where students play percussion instruments supplemented by music created by students working with switches .", "Recently students performed a dramatic version of `` Robin Hood , '' using the switch technology to `` recite '' their lines .", "The Headbangers have performed at the school , at a conference in Rhode Island and at the headquarters of the Big Dig transportation construction project in Boston .", "Mr. 
Hughes , who has graduated from the school and is now living in a small group home during the week , took part in those performances .", "When he left school last year , he was awarded a $ 2,000 scholarship that was used to buy a computer on which he plays music .", "`` Music is his primary focus in life right now , '' said his mother , Denise .", "`` You do n't have to have all your physical whatever to enjoy music .", "It transcends that .", "It makes him feel like a whole person . `` ."], "summary": ["Headbangers is musical group for disabled founded by Jon Adams at Massachusetts Hospital School .", "Adams has helped students use computers to communicate and move their wheelchairs and now enhances music therapy sessions using IntelliKeys computer keyboard , which can be adapted for multiple uses .", "Students must recognize melody and match rhythm to make auditory motor match sounds that will work in music .", "Photo ."], "publication": "nyt50", "label": [14, 20, 7], "tag": ["Technology"]} -{"id": "1453303", "text": ["WHEN Michael Scantlen purchased a Sirius satellite radio system for his car , he had to buy not only the equipment but extra gasoline as well .", "`` The first week I got Sirius , I used up an extra half-tank of gas because I did n't want to stop listening to the programming , `` said Mr. Scantlen , 47 , an electrical engineer in Agawam , Mass .", "`` I have n't listened to regular radio since I bought it . ``", "Comments like Mr. Scantlen 's must come as relief to Sirius Satellite Radio and XM Satellite Radio , the two companies in the business of supplying an alternative to conventional AM and FM broadcast radio .", "Both have spent billions of dollars on satellites , transmission equipment , studios and programmers -LRB- and seen their stock prices plummet in the process -RRB- to create all-digital radio networks .", "Think satellite or cable television without pictures , and you will understand the digital satellite radio concept .", "Both Sirius and XM offer 100 channels of static-free radio reception in the car or at home , scores of unique and proprietary channels coast to coast and excellent sound , no matter whether you are driving through Manhattan or the Arizona desert .", "Like satellite or cable television , satellite radio requires you to sign up as a subscriber -LRB- usually through a car audio dealer -RRB- and pay a monthly fee : $ 12.95 for Sirius , $ 9.99 for XM -LRB- Sirius offers discounts for long-term subscriptions -RRB- .", "You also need to buy new equipment .", "Replacement receivers are available for cars -LRB- they also receive AM and FM broadcasts and come with the typical options like CD and tape players -RRB- , as are adapter kits that work with existing audio systems , feeding the signal through the cassette player or over an unused FM frequency .", "Starting with the 2003 model year , many auto manufacturers are including satellite radios with certain cars .", "For the home , receivers are available that connect to stereo systems , usually through an auxiliary input .", "Sirius and XM use somewhat different satellite technology .", "Three Sirius satellites orbit the earth in a figure-eight pattern , with two of the three always over the United States .", "To ensure uninterrupted programming , all three transmit the same signal , but with a four-second delay between any two satellites .", "This allows a memory buffer in the receiver to smooth over any loss-of-signal problems .", "XM 's network consists of two geostationary satellites
hovering over the United States -- one over the East Coast , the other over the West -- that also employ a delay-and-buffer system .", "A small roof - or window-mounted car antenna picks up the signal .", "Since the radio signals travel by line of sight , both companies have also created a network of ground-based repeater stations to ensure that the signals can be picked up in the shadow of a mountain , in the steel canyons of New York or in other areas where the transmissions might be blocked .", "The similarity of the two services ' programming outweighs their differences .", "Both have created extensive digital studios for live broadcasts and original performances .", "Both offer at least 60 channels of music plus 30 or more channels of news , talk , variety and sports .", "An uncensored comedy channel .", "Children 's programming .", "Radio dramas .", "And news from the BBC , CNBC , CNN , C-Span and Fox News , among others .", "Fans of National Public Radio 's signature news magazine programs , `` Morning Edition '' and `` All Things Considered , '' wo n't find them on either service , although Sirius does offer NPR talk and variety shows .", "`` Our news programs are staples of public radio , and it 's important to keep them exclusive to our stations , `` said an NPR spokeswoman .", "Sirius hopes that will change .", "`` Stay tuned , '' said Jay Clark , the company 's vice president for programming .", "`` We 're having discussions with NPR about that . ``", "One difference between the services is in their policies on commercials .", "All of Sirius 's 60 music channels are commercial free .", "XM runs ads on half its 70 music channels but pledges that it will never program more than six minutes per hour of commercial spots , which is one-third the amount found on standard commercial broadcast radio .", "Both companies emphasize the abundance of offerings to encourage listeners to surf the dial .", "Beatles fans listening to the group 's songs on one channel might be directed to another channel to hear an interview with Paul McCartney .", "People who like one type of music may be advised that a similar group is playing on another channel .", "In that way , the companies hope to build loyalty to the service , not just one channel .", "Both companies offer a wide range of specialty music genres .", "Jazz and blues fans have a choice of seven channels on XM , and eight on Sirius .", "XM has separate channels playing the hits of each decade from the 1940 's to the 1990 's , while Sirius offers four similarly themed channels .", "XM and Sirius both classify 11 of their channels as rock-oriented , and both break down the genre into channels playing classic , heavy , album , alternative , soft and mainstream rock .", "Listening to the offerings is the best way to decide which service is most appealing .", "Customers can sample both services free at the companies ' Web sites -- siriusradio.com and xmradio.com.", "XM offers a three-hour loop of each music channel , and Sirius simultaneously provides each channel 's content in its entirety .", "To date , most subscribers have arranged to receive service by buying a replacement car radio or adapter .", "But both services are counting on licensing agreements they have forged with car manufacturers to push sales to their break-even point .", "BMW , Ford and DaimlerChrysler are offering integrated Sirius-compatible radios as a dealer-installed option on certain 2003 models .", "GM is offering XM-compatible radios on 25 of its models , including all 
Cadillacs .", "XM service will also be available as an option at many Toyota dealerships and to purchasers of Honda 's Accord , Pilot and Acura MDX models .", "Nissan plans to offer Sirius and XM to customers on select 2003 models , and Volkswagen / Audi says it plans to offer both but has not specified when .", "Sirius and XM have agreed eventually to market a radio that can receive either service , but both companies say that it will not be available any time soon .", "Meanwhile , integrated dealer-installed radios for either service typically cost $ 325 , and after-market add-on units can be purchased for $ 200 or more , including installation .", "To ease the burden for new-car buyers , manufacturers will often offer to fold the cost of the radio and a year 's service into the lease or financed purchase price .", "Is digital satellite radio worth the price .", "Some early adopters , frustrated with the limitations of regular commercial AM and FM radio , say it definitely is .", "William Dreskin , a rabbi in Greenburgh , N.Y. , keeps his children content on car trips with the youth-oriented channels on the XM radio he bought when he leased a new car .", "`` I 've set six channel presets on children 's programming for my kids , six for me and six for my wife .", "I like to listen to jazz , but with a regular jazz radio station , I never knew what I 'd hear and if I 'd lose the signal when I was driving to Queens or Long Island to serve my congregants . ``", "Brian Stafford , who owns a machine tool factory in Little Rock , Ark . , and travels 200 to 300 miles a week on business , said : `` Since subscribing to Sirius , I ca n't remember the last time I 've listened to regular radio .", "The variety 's unbelievable and I can hear the programming wherever I go .", "I have n't even bothered to reprogram my radio for the AM and FM channels I used to listen to . ``", "BASICS ."], "summary": ["Satellite radio , currently supplied by Sirius and XM , sees its fortunes rise after spending billions of dollars on equipment , studios and programmers .", "Customers must subscribe to service , pay monthly fee and buy equipment to receive broadcasts .", "Networks offer similar programming with major difference being policies on commercials .", "Sirius music channels are commercial free , but only half of XM channels are .", "Both have abundance of offerings to meet almost every taste .", "Chart compares services , fees and offerings .", "Photo ."], "publication": "nyt50", "label": [4, 32, 31, 34, 23, 12], "tag": ["Technology"]} -{"id": "1453307", "text": ["ROY COOPER pawed through hundreds of tapestries , searching for a fetching combination of colors and print .", "Nothing caught his eye until a tiny , toothless woman wearing a felt fedora and dozens of gold necklaces presented a black and tan rug with geometric patterns .", "`` Runners in dark shades and earth tones , that 's all anyone wants , `` Mr. Cooper , a 52-year-old Kentucky native , said in Spanish with a thick Southern drawl .", "`` The gringos wo n't pay a dime these days for the brightly colored rugs , so forget the reds and pinks . ``", "Mr. Cooper , who has lived in Quito with his Ecuadorean wife , Eulalia , for 23 years , bargained the price from $ 12 to $ 10 , purchased the Navajo-style floor runner and lit a cigarette to celebrate : The rug will likely fetch $ 30 or more at auction on eBay , where he sells tapestries , baskets and religious relics at substantial markups .", "Mr.
Cooper , who devotes 15 hours a week to buying , listing and shipping eBay items , clears roughly $ 1,300 a month from his online business , and up to $ 2,500 each November and December .", "By contrast , the World Bank estimates that the average Ecuadorean earns $ 1,460 a year .", "Mr. Cooper 's business puts him in an elite group in Ecuador , not only by virtue of his income but also because of the tools with which he makes it .", "Of Ecuador 's 13 million people , only 2.7 percent have been online , according to the government-owned communications company , Conatel .", "Internet entrepreneurs flourish in Ecuador 's largest cities , but many are educated businessmen with ties to the United States .", "Thousands of households in Quito -LRB- the capital -RRB- and Guayaquil -LRB- the largest city -RRB- have Internet access , but few rural communities have telephone lines .", "The discrepancies make experts pessimistic .", "They worry that the rapid pace of change in the technology industry will cause third-world nations like Ecuador to slip further behind Europe and North America .", "`` In the late 1990 's , everyone jumped up in arms over the digital divide , but it has proven almost impossible to bridge , `` said Peter Hakim , president of the Inter-American Dialogue , a Washington-based policy-analysis center , and an expert on Latin America .", "`` It is a region of inequality .", "Why would access to technology be any different than access to education , health care , employment or financial aid .", "`` In 2000 , fewer than 1 percent of Ecuadoreans had sent e-mail or surfed the Web at home , school or work or in cybercafes .", "In August 2001 , in an effort to expand access , the government created the National Connectivity Commission .", "Public `` telecentros '' have sprung up to provide free Internet access -- under the auspices of nonprofit groups for which the commission helps find donors -- and home connections , previously timed by the minute , are now available for a flat rate of about $ 25 a month .", "`` I know that in five years , most people in Ecuador still wo n't be able to buy a computer , `` said Jos\u00e9 Pileggi , president of Conatel , which oversees the connectivity project .", "`` But my hope is that they will at least know that they have access to computers . ''", "-LSB- Help may also come from the United Nations , which on Dec . 9 began an Internet initiative in Esmeraldas , one of Ecuador 's most impoverished provinces .", "The project will be run by workers from the United Nations Volunteers and financed by the Ecuadorean government , the World Bank and Japan 's International Cooperation Agency , offering `` one-stop offices '' where fishermen , artisans and other small-business owners can use the Web to find new markets . -RSB-", "Ecuador 's government is also trying to create a hospitable environment for online businesses .", "In April , the legislature approved a bill giving electronic documents the same legal status as paper documents and making digital theft a crime .", "Still , Ecuador 's politics , economy and geography have proven formidable barriers to Internet access .", "The most ominous threats are political instability and corruption .", "The connectivity program was the brainchild of President Gustavo Noboa 's administration .", "But the program 's fate belongs to President-elect Lucio Guti\u00e9rrez , who takes office Jan . 15 .", "Mr. 
Guti\u00e9rrez , a military coup leader who will become Ecuador 's sixth president in six years , did not mention technology or Internet access in his campaign .", "Even if the program survives , some dismiss it as a potential hotbed of bribery .", "Ricardo Garcia Fuentes , owner of Limon y Caf\u00e9 , a cybercafe in the Galapagos Islands , said he and several partners paid about $ 50,000 to set up satellite Internet access in Puerto Ayora , 600 miles from the mainland .", "Mr. Fuentes said government officials , primarily from Conatel , the phone company , demanded an additional $ 300,000 in `` startup fees , '' which he and his partners painstakingly gathered or borrowed from investors , including some in the United States .", "He said it did not matter whether the fees were required tariffs or flagrant bribes .", "He had to pay them to open his business .", "`` We are out here on the vanguard of technology , '' Mr. Fuentes said in his bar , a tourist haunt with tiki lights and salsa tunes .", "`` But the government makes it too costly .", "They try to give us a solution but it creates an even bigger headache .", "There is already too much bureaucracy in this country . ``", "What fees the $ 300,000 represented and where the money wound up is unclear .", "Several Internet service providers here reported having paid officials similar fees .", "But the connectivity commission says it neither receives nor solicits payments from prospective Internet service providers , and Mr. Pileggi , the president of Conatel , insisted that no one had to bribe a government official to become a provider .", "In any case , some say corruption may be eroding the commission 's fundamental source of revenue -- a tax on phone companies ' revenues -- because many officials believe executives understate revenue to minimize corporate taxes .", "Whatever the integrity of the system , the cost of access is daunting .", "Mario Ort\u00edz , a Conatel executive in charge of infrastructure , estimated that providing universal Internet access -- including lines to vast tracts of the Amazon jungle -- would be at least $ 1.9 billion .", "That is more than one-third of Ecuador 's $ 5.1 billion annual budget .", "Mr. Ort\u00edz is considering the costs and benefits of connections other than land lines , including spread spectrum , a method developed by the United States military that spreads a narrow-band signal over a broader portion of the radio frequency band .", "But because of security concerns , he does not want spread spectrum in urban areas .", "In the meantime , Mr. Cooper , in his way , bridges the divide .", "In addition to selling items on eBay , he takes custom orders when clients want a statue of a specific saint or a rug of particular dimensions .", "He sees himself as a New Economy intermediary , a man who connects the haves and have-nots of cyberspace .", "`` The Indians are very happy to sell me tapestries for $ 10 , even when they know I will turn around and sell them for three times as much , '' Mr. Cooper said as he packed alpaca rugs , wooden statues and glass-framed Amazon insect collections into his sport-utility vehicle .", "`` These people do n't have computers and have never heard of e-commerce . 
``", "So they are glad to have the $ 10 , he said .", "For now , at least ."], "summary": ["Only 2.7 percent of Ecuadorians have been online .", "Experts are concerned that rapid pace in technology industry will cause third-world nations like Ecuador to slip further behind Europe and North America .", "Government is trying to create hospitable environment for online businesses although many see it as potential hotbed for bribery and instances of exorbitant fees have already been reported .", "United Nations will begin program in one of country 's poorest provinces to give Internet access to small business owners to help expand markets .", "Photo ."], "publication": "nyt50", "label": [12, 23, 30, 8], "tag": ["Technology"]} -{"id": "1453310", "text": ["AS the author of a book about piano technology and the co-curator of an exhibition at the Smithsonian Institution celebrating the 300th anniversary of the instrument 's invention , Edwin M . Good has examined and played just about every kind of piano there is .", "They all share at least one trait , he said .", "`` All pianos are always out of tune , '' said Dr. Good , a professor emeritus of religious studies at Stanford University .", "`` A piano is by definition not in tune . ''", "That sweeping statement is true : the most common piano tuning , based on what is known as the equal-tempered scale , deliberately alters the pitch of some notes to improve the instrument 's overall harmonics .", "But if all pianos are indeed out of tune , some are more out of tune than others .", "With temperature and humidity changes , it does not take long for the 88 tones of an acoustic piano to get out of whack .", "`` A very large part of the piano is wood , and wood expands and contracts with changes in humidity , '' Dr. Good said .", "While concert pianists can have their instruments tuned shortly before a performance , most pianists just put up with the problem .", "At best , they might have their pianos tuned once or twice a year .", "Don A . Gilmore , an amateur piano player and professional engineer from Kansas City , Mo . , however , has developed an electronic system that he says could allow pianists to tune their own instruments at the touch of a button .", "Most other instruments can be tuned as needed by the person playing them .", "A violinist , for example , tunes the violin before playing and can even compensate for tuning problems while playing by slightly repositioning the fingers on the strings .", "With a piano , however , Mr. Gilmore said , `` you 're at the mercy of the instrument when you play it . ``", "When he is not playing Chopin , Mr. Gilmore , 38 , spends his time designing customized industrial equipment and factory systems .", "Early in his career , while developing a machine that used servomotors , which can be commanded to start and stop very precisely , Mr. Gilmore began considering ways that they might be used in a self-tuning piano .", "The 88 tones of a piano are created by about 250 strings .", "The lower notes use single strings , while the middle and higher notes use two or three strings each , which helps increase their volume .", "But multiple strings make tuning more difficult .", "With a high note , for example , `` if any one of the three strings in a note is off a slight amount , it 's obvious , `` Mr. 
Gilmore said .", "While briefly unemployed about a decade ago , he developed and later patented his first tuning device .", "It could mechanically tune three strings simultaneously based on electronic analysis of their sound .", "That system had some major problems , he said .", "For one thing , because it used microphones to pick up the tones from the strings , the device could not distinguish very well between individual strings .", "It was certainly a long way from a self-tuning piano .", "Yet Mr. Gilmore resumed his efforts after he was informally contacted by QRS Music Technologies , a company that makes paper rolls for old-fashioned player pianos and systems that convert conventional pianos into electronic self-playing instruments .", "QRS also owns the piano maker Story & Clark .", "Mr. Gilmore said his `` epiphany '' came when he tried using separate magnetic pickups , like those found on electric guitars , for each of the strings .", "Unlike microphones , the pickups are not affected by adjacent strings or extraneous noise .", "The pickups , combined with a microprocessor , took care of figuring out how much tuning the piano required .", "But he still needed to devise some way to do the tuning .", "`` I knew anything mechanical was not going to be reliable , '' he said .", "Initially he considered making piano strings from Flexinol , a nickel-titanium shape memory wire , which flexes in specific ways in response to temperature changes .", "But the final solution was simpler : Mr. Gilmore decided to use heat provided by electricity to expand or contract conventional piano strings and alter their tuning .", "Piano strings are relatively poor conductors of electricity , and their resistance will quickly generate heat when a current is passed through them .", "Increasing the current will raise a string 's temperature and cause it to expand .", "The expansion decreases the tension of the string , lowering the pitch .", "Reducing the current makes the string cooler and causes it to contract , increasing tension and raising pitch .", "Working with technicians at QRS , which has licensed the technology , Mr. Gilmore is developing a prototype of the self-tuning piano .", "He anticipates that once the pianos are in production , their strings will be heated to 95 degrees before being tuned at the factory .", "That reference tuning would then be stored in the piano 's electronic memory .", "Once at its final destination , the piano will always have to warm up before play .", "To retune the piano , users will press a button and all of its notes will sound .", "The computer will compare the results to the reference tuning and raise or lower individual string temperatures as needed .", "Mr. Gilmore expects the process to take about 20 seconds .", "The self-tuning piano may still need manual retuning if it goes badly out of tune because of , say , a move from an extremely humid to an extremely dry place .", "Users would also be able to store the work of their own tuners as the reference if they prefer .", "Thomas A . Dolan , the president and chief executive of QRS , declined to predict when the first self-tuning piano would come to market or to estimate its price .", "He said the first product based on some of Mr. Gilmore 's technology would probably be a portable tuning aid that will still require manual adjustments of the strings .", "Mr.
Dolan said that when Story & Clark makes its first self-tuning pianos , they will be grand pianos rather than uprights .", "`` You do n't want the tuning system to cost twice the value of the piano , `` he said .", "Because the pianos are likely to be expensive , he said , he believes that at first they will be purchased mainly by schools and professional musicians .", "Dr. Good , for one , is unlikely to be interested .", "`` Maybe it works , '' he said .", "`` But if it works , I 'm not sure I want it .", "Why bother with all this expense when you can just get the piano tuned every three months .", "`` WHAT 'S NEXT ."], "summary": ["Don A Gilmore , amateur piano player and professional engineer , develops electronic system that he says could allow pianists to tune their own instruments at touch of button .", "Device , which uses heat provided by electricity to expand or contract conventional piano strings and alter their tuning , will be installed in prototype self-tuning piano made by QRS Music Technologies .", "Drawing ."], "publication": "nyt50", "label": [10, 33], "tag": ["Technology"]} -{"id": "1453311", "text": ["A RESEARCHER at AT&T Labs is proposing to stop at least some spam before it starts by using e-mail addresses that expire or come with other restrictions attached in code .", "`` It came to me one day that spam works because there 's no easy way to differentiate between what 's real e-mail and what is n't , `` said John Ioannidis , a member of the research department at AT&T Labs in Florham Park , N.J.", "Dr. Ioannidis suggests adopting something he calls `` single-purpose addresses '' rather than continuing to refine software filters that try to sort the good from the bad .", "Such addresses would not replace permanent e-mail addresses , which , under Dr. Ioannidis 's plan , users would continue to give to those they trust and need to maintain contact with , like relatives or employers .", "Instead , single-purpose addresses would be used when the senders have no continuing relationship with the other parties and fear that their e-mail addresses might be sold or given to spammers .", "Online purchasing or newsgroup postings are obvious examples .", "Dr. Ioannidis will present a paper about his approach in February at a meeting of experts in computer network security .", "Under the system , users would generate single-purpose addresses with special software .", "The process could be relatively simple .", "Using an on-screen menu , the user would first select how long the address would exist .", "Currently , the shortest period with Dr. 
Ioannidis 's technology is one day .", "A user could also choose to have the address work only when sent from a specific domain -LRB- the part that follows the @ symbol -RRB- .", "This would prevent an unexpired address from being used by spammers .", "After those settings are made , the address software would generate a code containing the date and domain restrictions and the user 's permanent e-mail address .", "That code , in turn , would be converted into a string of 26 characters that appear to be a jumble of numbers and letters .", "Together with the user 's domain , the string would form the single-purpose address , which could be cut and pasted into forms like those used by online stores .", "When , say , the store sends a reply indicating that a user 's desired item is out of stock , software on the customer 's mail server would decode the special address and then , assuming it remains valid , forward the mail to the permanent address .", "Dr. Ioannidis acknowledges that even with his system , spammers could still get access to permanent e-mail addresses .", "A trusted relative , he said , may give someone 's full e-mail address to an online greeting card service , which could then sell it to spammers .", "But Dr. Ioannidis hopes that if his system is widely adopted , it will pollute spam mailing lists with so many invalid addresses that the lists will become increasingly useless .", "The process could take decades , however , he said .", "`` The idea is to raise the bar to make it difficult to spam my address , '' Dr. Ioannidis said .", "John Mozena , a co-founder and vice president of an anti-spam group , the Coalition Against Unsolicited Commercial E-mail , said that Dr. Ioannidis 's technology would not likely change his organization 's view that legislation remains the most effective form of anti-spam protection .", "`` This technology might protect some individual users from a certain amount of spam , '' Mr. Mozena said .", "`` But it 's adding insult to injury to also have us spend time , money and effort on tools to keep spam out of our mailboxes . ``", "Mr. Mozena also said he found it unlikely that spammers would simply give up if e-mail lists became filled with worthless addresses .", "`` The quality of those lists are already so miserable that it would n't really matter , `` he said ."], "summary": ["John Ioannidis , researcher at AT&T Labs , develops idea for combatting spam .", "Suggests adopting what he calls single-purpose addresses to be used when senders have no continuing relationship with other parties and fear that their e-mail addresses might be sold or given to spammers .", "Other similar ideas discussed ."], "publication": "nyt50", "label": [4], "tag": ["Technology"]} -{"id": "1453313", "text": ["The Oklahoma state government has begun offering an e-mail service that alerts subscribers whenever terror threat levels change at the state or national level .", "Those who sign up for the service at YourOklahoma.com can receive the updates through any device with e-mail capability .", "Jeff McCartney , general manager of YourOklahoma.com, said the service had been well received in the state , which was terrorized by a bombing seven years ago .", "`` We 've had to deal with very serious issues , `` Mr. McCartney said .", "`` My personal opinion is that others would look to Oklahoma for leadership . 
''", "As far as state officials know , they are the first to set up an e-mail notification service that deals exclusively with terror threats .", "The White House Web site -LRB- www.whitehouse . gov -RRB- allows people to sign up for updates on homeland security issues but has no specific notification for changes in threat levels .", "Whenever threat levels change , subscribers to Oklahoma 's service receive a brief notification that informs them of the change and directs them to the state 's Web site for details .", "`` Just from our experience , we 've learned that communication is very critical , `` Mr. McCartney said , adding that he expects demand for the service to grow .", "Rebecca Fairley Raney NEWS WATCH : SAFETY ."], "summary": ["Oklahoma state government begins offering e-mail service that alerts subscribers whenever terror threat levels change at state or national level .", "Photo ."], "publication": "nyt50", "label": [0], "tag": ["Technology"]} -{"id": "1453314", "text": ["THE dollar ended last year in a precipitous plunge , falling 5 percent against the euro and 3 percent against the yen in December , and it looks to be headed lower in 2003 .", "The question is how much .", "The dollar had been slipping against the euro and the yen since early last year , weakened by sluggish economic growth at home , a falling stock market and wary foreign investors .", "Lately , the threat of war with Iraq , rising oil prices and other geopolitical uncertainties have been dragging it lower .", "Still , the off-the-cliff acceleration in December caught many currency analysts by surprise and upset forecasts for 2003 .", "The dollar 's fall was so swift that it is already lower now against the euro and the yen than some forecasters had predicted for the end of this year .", "Before last month 's drop , forecasters predicting a 15 percent decline in the dollar this year might have seemed on the lunatic fringe , said Lara Rhame , senior foreign exchange economist at Brown Brothers Harriman .", "`` All of a sudden , that does n't look out of the range of possibilities , `` she said .", "Ms. Rhame is expecting a dollar decline , but maybe not as steep as that .", "Nor is she saying that the dollar is weak .", "It will just not be as strong as it has been .", "On a broad , trade-weighted basis , the average dollar value last year is still the third strongest it has been since the currency began to be freely traded in the early 1970 's .", "`` Until you get another major industrial economy on track with strong productivity growth and high expectations of investment returns , there is not going to be a big flow of foreign money out of the United States , '' Ms. Rhame said .", "So , she said , `` the dollar can skate through at a relatively strong value . 
''", "No matter how far the dollar falls , if that happens , that would be the dollar 's first back-to-back annual declines since 1994-95 .", "But a weaker dollar is not necessarily bad .", "It could help a broader economic recovery by making American exports cheaper and more competitive abroad .", "American manufacturers that compete against foreign companies -- notably automakers -- would also benefit because the dollar price of imports would rise .", "A weaker dollar would also help Americans investing overseas by reducing losses or increasing gains when foreign holdings are translated back into dollars .", "Big stock markets abroad were in the red last year , but the weaker dollar trimmed a 43.9 percent decline in Germany 's DAX index to 33.9 percent and an 18.6 percent decline in the Nikkei index of 225 Japanese stocks to 9.8 percent .", "Foreign bond returns were also enhanced , with the total return from a portfolio of European bonds rising to 28 percent when translated into dollars as a result of a euro gain of just over 9 percent .", "The big unknown for the dollar in 2003 is President Bush 's dollar policy .", "The administration has backed a strong dollar , but the desire to ensure that the economy rebounds nicely ahead of the 2004 presidential election may encourage it to shift course and try to nudge the dollar lower .", "John W . Snow , the Treasury secretary-designate , is likely to be asked repeatedly about his views on the dollar , and each word will be weighed carefully as currency traders look for any sign of support for a weaker dollar .", "If they see one , they will pounce .", "The risk in trying to talk down the value of the dollar is that it could fall much further and faster than Bush policy makers would like .", "An orderly dollar decline , like the one that occurred last year , is easy for the economy and the financial markets to handle .", "But a rapid fall could further disrupt the economy .", "The dollar fell 15.2 percent last year against the euro , a 12-nation currency , ending 2002 at 1.0492 and giving that currency its first positive year since it was introduced into limited use in January 1999 .", "The dollar also dropped against the currencies of two other big trading partners , falling 9.8 percent against the Japanese yen , to 118.79 , and 1.3 percent against the Canadian dollar , to 1.5718 -LRB- the Canadian dollar settled at 63.62 cents -RRB- .", "Its biggest declines came against the South African rand , off 28.4 percent , and the Norwegian krone , down 22.6 percent .", "But the dollar also had some gains , including a rise of 13.2 percent against the Mexican peso , to a rate of 10.37.", "It surged against the currencies of three economically troubled Latin American nations : Brazil , Venezuela and Argentina .", "This mixed performance means that the broadest measure of the dollar 's overall value , a trade-weighted dollar adjusted for inflation , slipped just 1.5 percent in 2002 , based on preliminary Federal Reserve data .", "While recent forecasts of the dollar 's value at year-end may have been overtaken by the December decline , the predictions for the dollar-euro rate included in the December reading by Consensus Economics of London show that there is no unanimity on the dollar 's direction .", "As of Dec . 
9 , the forecasts ranged from a dollar decline of 12.1 percent to a rise of 9.9 percent against the euro .", "Against the yen , the forecasts ranged from a dollar fall of 12.4 percent to a rally of 13.5 percent .", "The yen forecast is much more difficult because the economic fundamentals militate for a weaker yen and stronger dollar .", "But Finance Minister Masajuro Shiokawa of Japan and other government leaders made clear in December that they wanted a weak yen to help exporters and the ailing economy .", "The Brazilian real could benefit from the dollar 's weakness and rally this year if Brazil 's new president , Luiz In\u00e1cio Lula da Silva , appears to be dealing successfully with the country 's economic and debt problems .", "Mexico 's peso is likely to slip a bit further , based on the consensus forecast .", "The most interesting offbeat dollar play last year was the Norwegian krone , said Laurie A . Cameron , global currency strategist at the J . P . Morgan Private Bank .", "With the forecast of a 4 percent rise in the value of the krone against the dollar and 6.5 percent from investing in short-term government securities , this year could bring a return of more than 10 percent .", "In addition to being affected by sluggish growth here and a lackluster stock market , the dollar could also be dragged lower by concerns about the size of the United States ' current-account deficit , which rose to $ 127 billion in November , nearly a record .", "It takes an enormous flow of capital from abroad to cover the deficit , which includes red ink in trade , and that inflow may be slowing , which could weaken the dollar .", "Yet the dollar could stage a surprise rally .", "The geopolitical drag on its value could be lessened if a war with Iraq was averted or if it was successful and over quickly .", "While the forecasts of economic growth for the United States are not robust , those for Europe and Japan are still weaker .", "If the American economy and stock markets move ahead faster than expected , many investors could be drawn back to the United States , pushing up the dollar 's value .", "A narrowing of the gap in interest rates between Europe and the United States would also make the dollar more attractive .", "The European Central Bank cut its benchmark interest rate by half a percentage point , to 2.75 percent , in December .", "That is more than double the 1.25 percent benchmark rate set by the Federal Reserve .", "But that gap could narrow substantially if the European bank cuts rates again and if the Fed begins to raise its benchmark interest rate as the economy finally picks up steam in the second half of 2003 .", "Money & Investing ."], "summary": ["US dollar fell against currencies of many American trading partners in 2002 , including European Union , Canada and Japan , and lackluster US economy could cause it to fall further in 2003 .", "Dollar fell 5 percent against euro and 3 percent against yen in December .", "Weaker dollar could help broader economic recovery by making American exports cheaper and more competitive abroad .", "Chart .", "Drawing ."], "publication": "nyt50", "label": [16, 0, 27], "tag": ["Business"]} -{"id": "1453315", "text": ["THE dominant image of the stock market tumble that began nearly three years ago has been a bursting bubble .", "And while it was true that the worth of many companies was inflated beyond all reason , it turns out that deflating those valuations did not immediately reveal all the damage .", "Instead , the decline in stocks took its 
time in damaging the economy , much as a domino near the end of a line can topple long after the first ones fall .", "The first pain was felt by millions of investors , who lost billions but proved surprisingly resilient .", "The fact that consumers kept spending helped keep the 2001 recession mild .", "But it soon became clear that sales at companies that had prospered most during the boom , notably telecommunications concerns , were going to remain depressed .", "Forecasts of an early revival were consistently wrong , and now some companies have put off their hopes for growth until 2004 .", "Only in 2002 did it become clear just how far this string went -- from the federal government to city halls .", "Governments had been dependent on the revenue-producing effects of the bubble -- particularly income taxes on profits from cashed-in stock options .", "Now state and local governments are being forced to reduce spending sharply and raise taxes , moves that will slow the economy .", "What 's left to tumble .", "Consumer behavior is the area that many economists are watching .", "Reports of disappointing Christmas spending could be an indication that high debt levels are finally slowing purchases .", "But housing remains strong , and thanks to low interest rates , most consumers have ample income to pay interest charges on the money they owe .", "Productivity numbers have remained strong , providing a source of reassurance that the economy remains healthy .", "But it is possible that the good news could end in 2003 .", "`` If the productivity numbers reflected the wild and excessive investments , '' said Robert J . Barbera , the chief economist of ITG / Hoenig , a Wall Street firm , `` productivity might begin to fade '' since bubble-related capital spending has collapsed .", "He said some research showed a two-year delay in the impact of investments on productivity numbers .", "And then , of course , there are signs of a weakening dollar , high oil prices and the prospect of war with Iraq , with a possibly messy aftermath , to give investors pause .", "But to some bulls , the real issue is not so much whether the economic outlook is bright , but whether share prices fell far enough to more than compensate for the remaining risks .", "There were signs within the market , including widespread bearishness and a big increase in volatility , that a turning point was reached when stocks bottomed on Oct . 9 .", "There are also historical precedents .", "It 's been seven decades since the last time the market went down in four consecutive years -- as would happen if 2003 turned out to be a bad year .", "And it has been more than six decades since the third year of a presidential election cycle brought lower share prices .", "On the other hand , big bear markets are usually followed by bull markets with leadership that is different from the previous bull market .", "But the rebound since Oct . 
9 has been led by technology and telecommunications stocks -- which also led previous rallies in the bear market that began in the spring of 2000 .", "Those earlier rallies all ended with declines that took the market to new lows .", "It is hard to remember a time when valuation levels were harder to determine , simply because earnings numbers are now in doubt .", "Part of that relates to scandals that have led to restatements , and part to a growing sense that the willingness to use numbers that leave out bad news -- so-called pro forma earnings -- created too optimistic a picture .", "But some analysts , notably Steve Leuthold of the Leuthold Group , a research firm based in Minneapolis , calculate that valuations had fallen to reasonable levels at the October lows .", "Others point out that many companies whose share prices plunged have reported lower profits , and that their stocks still trade at higher multiples of earnings than in pre-bubble years .", "That is particularly true of large technology stocks .", "For bulls , a tempting comparison is to the mid-1970 ` s , when the stock market fell roughly as far as it did during the bear market that ended -- knock on wood -- in October .", "From top to bottom in 1973-74 , the Standard & Poor 's 500-stock index fell 48 percent .", "This time , the decline was 49 percent .", "The economic problems of that era were different from today -- rising inflation and economic stagnation were the principal fears -- but the pessimism as the market hit bottom was similar .", "The pessimists then were right to forecast that the economy would do worse than it had in previous decades .", "But the stock market still took off in 1975 , with the S . & P . 500 gaining 32 percent , although prices did not climb to record levels for several years .", "The market in 2002 ended with declines in all the principal gauges for a third consecutive year , something that had not happened since 1939-41 .", "Wall Street forecasters are virtually united in saying the string will stop there , although a year ago they were forecasting that the year just ended would have increases .", "But on a historical basis , at any rate , they have more going for them now than they had a year ago .", "First , the last time that the big indexes had four consecutive declines was in 1929-32 .", "And while this economy has not been especially vibrant , no one is talking about a depression either .", "Second , there is the presidential election cycle .", "For whatever reason -- and cynics have asserted that presidents planning to seek re-election are eager to make things look good in the year when potential rivals are deciding whether to run -- the third year of the election cycle is often the best one .", "The last time either the Dow Jones industrial average or the S . & P . 500 fell in the third year of a presidential cycle was in 1939 , when World War II began in Europe .", "-LRB- In 1987 , there was a crash , but both indexes ended the year with small gains . 
-RRB-", "To some extent at least , 2002 was the year that Americans ' faith in the stock market was finally shaken .", "It was the first year since 1988 that Americans took more money out of stock mutual funds than they put in , a fact that is all the more impressive given the automatic investments that come from 401 -LRB- k -RRB- plans and that are counted in the numbers .", "But the Investment Company Institute , a trade group , reports that net withdrawals through November amounted to just 0.7 percent of assets , an indication that investors did not come close to panicking .", "To many , investors ' continuing faith in stocks makes perfect sense .", "With interest rates low , savings accounts and even bonds look less attractive than they did during previous market swoons .", "And even after the recent fall , the Dow has risen at a compound annual rate of 10.9 percent over the last 20 years , excluding dividends .", "That is better than any 20-year period in the Dow 's history before the late 1990 's .", "-LRB- In 1929 , the peak came with the Dow 's having risen at less than a 9 percent annual rate over the previous two decades . -RRB-", "It may not make investors who entered the market near its peak feel any better , but the recent falls have wiped out only a small part of the gains of the great bull market that began in 1982 .", "-LRB- Numerologists take note : In 1982 , the Dow bottomed at 776.92.", "Last year , on Oct . 9 , the S . & P . hit its year 's low at 776.76. -RRB-", "The declines were widespread in 2002 -- more so than during the previous years of the bear market .", "Both the Dow and the S . & P . had worse years in 2002 than they experienced in either 2000 or 2001 .", "The S . & P . 500 's decline of 23.4 percent was the steepest for any full year since the fall of 29.7 percent in 1974 , and every sector in the index posted declines .", "Just 131 of the 500 stocks rose -- the lowest number for any year since the S . & P . began tracking that statistic in 1980 .", "The Dow , which was blessed by its relatively low level of technology stocks , fell just 16.8 percent , but that was still the worst since 1977 .", "And of the 30 Dow stocks , only 3 -- Eastman Kodak , Procter & Gamble and 3M -- showed gains for the year .", "Volatility also rose to levels rarely seen before .", "About one trading day in two had the S . & P . 500 and the Dow rise or fall more than 1 percent , and on average they had moves of at least 2 percent once a week .", "For the S . & P . , volatility was the highest since 1938 .", "For the Dow , it was greater than in any year since 1933 .", "Volatility in the Nasdaq composite was even greater , with 67 percent of sessions showing moves of at least 1 percent .", "But that was down from a peak of 75 percent set in 2000 .", "Volatility often peaks around the time markets change directions , and that may have been true in 2002 .", "The S . & P . 500 showed moves of at least one percentage point in 15 consecutive sessions ended Oct . 11 , a period that included the market 's bottom on Oct . 9 .", "Since World War II , the longest such string had been nine days , ended Oct . 21 , 1974 .", "That volatility came just after the end of the 1973-74 bear market .", "The recovery since Oct . 
9 indicates that the market is forecasting no new recession , even if the economy is not expected to return to the days of heady growth that investors once took for granted .", "`` I am increasingly impressed at the remarkable resilience of this economy , '' Alan Greenspan , the Federal Reserve chairman , told the Economic Club of New York last month .", "Investors hope that he will continue to feel that way ."], "summary": ["Article on whether stock market will rebound in 2003 after three years of decline .", "Says to some bulls , real issue is not so much whether economic outlook is bright , but whether share prices fell far enough to more than compensate for remaining risks .", "Holds it has been seven decades since last time market went down for four consecutive years , as would happen if 2003 turned out to be bad year .", "Adds it has been more than six decades since third year of presidential election cycle brought lower share prices .", "Says big bear markets are usually followed by bull markets with leadership that is different from previous bull market .", "Graphs .", "Charts .", "Drawing ."], "publication": "nyt50", "label": [19, 22, 24, 23], "tag": ["Business"]} -{"id": "1453316", "text": ["THE bankruptcies of companies like Global Crossing and Adelphia Communications helped make 2002 a tough year for junk bond investors .", "A record 14.89 percent of junk bonds outstanding were in default as of the 12 months ended Nov . 29 , according to Credit Suisse First Boston , which includes in its tally companies like WorldCom , which once had sound credit ratings .", "But with hope of a stronger economy , some investors suggest that the new year may be a good time to make some cautious bets in this volatile market .", "Junk bonds -- high-yield instruments that are rated below investment grade -- are often issued by young or fast-growing companies , which tend to borrow heavily .", "In the late 1990 's , the biggest issuers of junk bonds were rapidly expanding cable and telecommunications companies , many of which ultimately defaulted on their debts .", "When the economy is weak , defaults of these securities tend to rise sharply .", "The default record set in 2002 was a little more than six percentage points higher than the 8.8 percent default rate in 1991 , at the depths of the last recession .", "The rising defaults contributed to an unusually wide gap between the yields on junk bonds and the yields on bonds issued by the federal government , which are considered to be the safest form of debt .", "Despite these defaults , the junk bond market managed to earn a cumulative total return of 3.02 percent , through Dec . 
26 , according to an index kept by Credit Suisse .", "That compares with a total return of 12.54 percent for the year on 10-year Treasuries , one of the most conservative investments , and a decline of 23.4 percent for the Standard & Poor 's 500-stock index .", "The return on a bond takes into account the interest rate paid to an investor as well as movements in price .", "When the price of a bond falls , its yield rises .", "Junk bonds typically carry higher interest rates than Treasury or investment-grade corporate debt , and their prices are more volatile than those of other bonds , particularly when the economy is bad .", "Like stocks , junk bonds do poorly when the economy is sour .", "Since the latest economic downturn began , the gap between yields on junk bonds and on government debt has widened , as it had during other times the economy was weak .", "But by the middle of October , the difference , or spread , between junk bonds and yields on Treasuries with comparable maturities reached 11.16 percentage points , an even bigger gap than in the recession of the early 90 's , when it hit 10.96 percentage points , said Sam DeRosa-Farag , co-director of leveraged finance research at Credit Suisse .", "`` This took us all by surprise , '' Mr. DeRosa-Farag said .", "The spread between junk bonds and Treasuries never grew that wide in the previous two decades , he added .", "The outlook for junk bonds improved after the Federal Reserve cut interest rates by one-half of a percentage point in November .", "The spread between high-yield bonds and Treasuries has since narrowed to 9.38 percentage points .", "`` The market is back from the dead , '' said David C . Hinman , a high-yield portfolio manager for the Pacific Investment Management Company , a large bond fund specialist .", "Some investors say they are getting more confident about investing in junk bonds now than they were in recent months because they think that defaults are finally on the brink of declining .", "`` We do think the worst is over , '' said Diane Vazza , managing director of global fixed-income research at Standard & Poor 's .", "`` Entering 2003 , we expect the global default rate to remain elevated but to decelerate slowly . ''", "Defaults tend to peak two to three years after bonds are issued .", "At the height of the bull market , from 1997 to 1999 , $ 315 billion of junk bonds were issued .", "At the time of issuance , S . & P . gave 28 percent of them junk ratings of B - or lower .", "A good number of those companies have already run into trouble , Ms. Vazza said .", "In 1991 , a year after defaults peaked , junk bonds reported their best total return ever , rising nearly 44 percent , according to Credit Suisse .", "`` People should n't expect that kind of return `` in 2003 , said Mr. Hinman of Pacific Investment Management , noting that there had been extraordinary reasons for the market 's decline in 1990 , like the bankruptcy of Drexel Burnham Lambert , then the leading underwriter of junk bonds .", "But he is optimistic that this will be a good year for junk bonds .", "Still , some investors , citing worry that the economy is shaky , say they remain wary of junk bonds .", "`` We are n't sure where we are in the economic cycle , `` said John W . 
Ueleke , a financial planner with Legacy Wealth Management in Memphis .", "He has close to 4 percent of his clients ' assets in high-yield bonds but is reluctant to increase that percentage just yet .", "Another uncertainty is the expectation that interest rates will rise , making it more expensive for some companies to refinance their debts and hurting companies in industries that benefited from the lower rates , like suppliers to the auto industry and home builders .", "Steven A . Tananbaum , chief investment officer at GoldenTree Asset Management , a $ 3.8 billion investor in junk bonds and related securities , said he was avoiding these sectors .", "Instead , he is favoring publishing companies and some European cable operators , which , he says , have been punished too much in recent months .", "`` To figure out the outlook for high-yield bonds , you have to have a general opinion on the economy and rates , '' Mr. Tananbaum said .", "But , more important for companies that issue junk bonds , he said , a stronger economy will mean higher profits , and fewer borrowers will default on their debt .", "`` A stronger economy will make people put money into high-yield bonds instead of Treasuries , '' said Kingman D . Penniman , president of KDP Investment Advisors , a high-yield bond research firm based in Montpelier , Vt .", "`` As soon as we get a clearer picture on defaults and the economy , we should finally have a good year . ''", "Money & Investing ."], "summary": ["Optimism persists for junk bonds in 2003 despite record defaults in 2002 .", "Credit Suisse First Boston says record 14.89 percent of junk bonds outstanding were in default as of 12 months ended Nov 29 .", "Graph ."], "publication": "nyt50", "label": [1], "tag": ["Business"]} -{"id": "1453318", "text": ["ARE the good times over .", "The answer is yes .", "That 's the question -- and the answer -- for bond investors going into this year after the fixed-income market outperformed stocks in 2002 , for the third consecutive year .", "That is a three-year run not seen since 1939 to 1941 , making it a first for most investors .", "So savor the returns .", "Over the three-year span , the total return from the fixed-income market , including Treasuries and corporates , was 33.6 percent , in contrast to a negative return , including dividends , of 37.5 percent in the Standard & Poor 's 500-stock index .", "Bonds have outperformed stocks so much over the last three years that if investors had put their money into Treasury bonds as far back as March 1994 and ignored stocks , they would be even with investors who put all their money into stocks .", "The overall return for both markets was about 120 percent .", "In the third year of this run for bonds , high-quality fixed-income securities , especially high-rated corporates and Treasury bonds and notes , did the best .", "But those who had a taste for risk did well in emerging-markets bonds , despite declines in bonds from Argentina and Brazil .", "As for this year , Mary J . Miller , assistant director of the fixed-income division at T . Rowe Price Associates , the mutual fund company , said , `` It does n't have to be a disastrous year for fixed income . ``", "But to avoid disaster , she said , investors will have to take on increased risk by moving into high-yield junk bonds and lower-rated investment-grade bonds because those sectors will be hurt less if interest rates rise , as forecast .", "The Treasury market , which would suffer the most as rates climb , is a no-no .", "Jack V . 
Malvey , chief global fixed-income strategist at Lehman Brothers , agreed , saying , `` Lower-quality debt will be the star in 2003 . ''", "He forecasts a return of only 4 to 5 percent in the broad fixed-income market .", "That means declines in bond prices would eat into the interest investors were paid on their fixed-income securities .", "Bonds could even have a negative year , he warned , if the economy is a little stronger than expected and the Federal Reserve raises its benchmark short-term interest rate sooner than the second half of the year and more than the expected half a percentage point .", "If this forecast sounds like last year 's , it should , because this is basically what analysts were saying at the end of 2001 .", "`` We were joking internally that we could practically take the 2002 outlook and change the numbers a little and put it out again , '' Mr. Malvey said .", "He said 2002 turned out to be different from what was expected because analysts had been too optimistic about the strength of the recovery , had not expected the threat of a war with Iraq and had yet to realize how Enron and the corporate scandals that followed would undermine investors ' confidence .", "Although the economy is not expected to be robust this year , growth of 2.5 to 3 percent should preclude worries of a double-dip recession , which may make investors willing to take on more risk .", "And many forecasters are expecting growth to be steady enough by the second half of the year for Fed policy makers to push their benchmark rate , now at 1.25 percent , a little higher .", "All that spells modestly higher interest rates , with some analysts forecasting that the yield on the Treasury 's 10-year note , which ended 2002 at 3.82 percent , will be up to 4.5 percent by the end of this year .", "Shorter-term rates are expected to rise more if the Fed nudges its benchmark rate up .", "The Treasury 's two-year note , which ended 2002 at 1.60 percent , could rise to 3 percent .", "Another negative for bonds could be the stock market .", "In recent years , the two markets have moved in opposite directions , with Treasury prices falling when stock prices rose , and vice versa .", "If that relationship continues , even a modest stock rebound this year will spell trouble for bonds .", "But forecasters were fooled last year and could be again .", "One unknown is whether there will be war with Iraq .", "Stocks could fall and the bond market rise if a war approaches .", "But if the history of the Persian Gulf war is replayed , an early sign of success could send stocks higher and bonds lower .", "`` Every economist I know reserves the right to redraw the outlook when the events occur , '' said Alan D . Levenson , chief economist at T . Rowe Price .", "Another unknown is the federal budget deficit .", "The cost of any war and the cost of rebuilding Iraq in the aftermath could add a lot to the deficit .", "And if the Bush administration wins tax cuts to stimulate the economy , that could make the deficit balloon further .", "So far , analysts and investors seem unworried about the deficit , which grew to $ 159 billion in the 2002 fiscal year , which ended Sept . 
30 , in contrast with a surplus of $ 127 billion the year before .", "Some deficit forecasts for this fiscal year are already over $ 250 billion .", "The belief is that these deficits will begin to shrink as the economy grows .", "But if investors begin to doubt that , more upward pressure on interest rates could follow .", "Over all in 2002 , the fixed-income market -- including Treasuries , agencies , investment-grade corporate bonds and mortgages -- had a total return of 10.4 percent , after returns of 8.3 percent in 2001 and 11.7 percent in 2000 , according to Merrill Lynch bond indexes .", "The S . & P . 500 fell 23.4 percent , 13 percent and 10.1 percent in the last three years .", "Interest rates fell to lows not seen in more than four decades as the expected economic recovery stumbled and investors worried in the summer about the possibility of a double dip into a second recession .", "Corporate accounting scandals and corruption at companies like WorldCom and Tyco International made investors wary of risk .", "That contributed , with the economic worries and the growing possibility of a war with Iraq , to a temporary seizing up of the corporate bond market in July that made trading nearly impossible .", "Finally , Federal Reserve policy makers were worried enough about the outlook to cut their benchmark short-term interest rate in November by half a percentage point , to 1.25 percent , a 41-year low .", "In this environment , the Treasury market became a haven for many investors .", "The yield on the Treasury 's 10-year note fell as low as 3.57 percent , a 44-year low for securities with maturities of 10 years .", "The two-year note 's year-end yield of 1.60 percent was a 58-year low .", "The interest rate on the average 30-year mortgage , which was dragged lower by the decline in Treasury yields , fell to 5.93 percent , the lowest since 1965 , according to Freddie Mac , the mortgage lender .", "For the year , the total return from Treasury notes and bonds was 11.6 percent , according to Merrill Lynch , up from 6.7 percent in 2001 but down from the 13.4 percent return in 2000 .", "But Treasuries were outdone by higher quality corporate bonds , which investors bought for their relative safety and higher yields .", "Investment-grade bonds rated double A -LRB- few companies are rated triple A -RRB- had a total return of 12.3 percent , well above the return of 7.1 percent for triple-B-rated companies .", "Single A corporate bonds returned 12.6 percent .", "Despite a strong recovery in November and December when the total return from high-yield bonds was 8.2 percent -LRB- annualized , that would be 59 percent -RRB- , junk bonds still had a down year .", "Yields were above 12 percent , but the sector 's total return was a minus 1.9 percent .", "Emerging-markets bonds had a return of 14.2 percent , although the return was a negative 5.6 percent for Argentine bonds and a negative 3.3 percent for Brazilian bonds , according to J . P . Morgan bond indexes .", "Two of the best-performing emerging-markets bonds were those from Russia , with a return of 35.9 percent , and Turkey , with a return of 20.7 percent .", "News Analysis Correction : February 28 , 2003 , Friday An article in the special Outlook section of Business Day on Jan . 2 misstated the history of the yield of the two-year Treasury note .", "At the end of 2002 , the yield was 1.60 percent , the lowest in 44 years , not 58 .", "-LRB- Questions about the data in a recent e-mail message from a reader uncovered the error . 
-RRB- ."], "summary": ["Analysis of prospects for fixed-income market , including Treasury and corporate bonds .", "Holds fixed-income bonds are not expected to outperform stocks in 2003 as they have for three consecutive years .", "Notes total return of such investment instruments was 33.6 percent , in contrast to negative return , including dividends , of 37.5 percent in Standard & Poor 's 500-stock index .", "Graphs ."], "publication": "nyt50", "label": [5, 53], "tag": ["Business"]} -{"id": "1453319", "text": ["In his yearly report on the federal judiciary , Chief Justice William H . Rehnquist called today for higher judicial pay , more judgeships and speedier filling of existing vacancies .", "The themes were familiar , as the chief justice acknowledged in his 17th year-end report .", "`` I am struck by the number of issues that seem regularly to crop up , or perhaps they never go away , '' he said .", "Judicial pay , in particular , is close to the heart of many members of the branch of government that Chief Justice Rehnquist heads .", "Many federal judges have found it galling that the promise of regular and uncontested cost-of-living increases they received under a 1989 federal law , which barred most types of outside income for judges , has not been fulfilled .", "Congress ended its session this year without giving judges their expected cost-of-living increase or , for that matter , approving a budget for the federal courts .", "The chief justice asked Congress to approve budget requests for a modernization and security upgrade project at the Supreme Court and to address a backlog in the federal judiciary 's continuing courthouse construction plan .", "A $ 5 billion building program began in 1985 to address decades of neglect of the federal courts ' physical needs .", "The biggest federal civilian construction program since the 1930 's , it has run into some Congressional resistance .", "Judicial pay drew the most impassioned commentary in the report .", "It was `` the most pressing issue today '' for the courts , he said , adding that `` inadequate compensation seriously compromises the judicial independence fostered by life tenure . ''", "Financial considerations are driving judges off the bench and deterring highly experienced lawyers from becoming federal judges , he said .", "Federal judicial salaries range from $ 150,000 for district judges to the chief justice 's own $ 192,600 .", "These salaries are no longer competitive with the earnings of partners at major law firms , or with those of professors at major law schools , as Justice Stephen G . Breyer testified last summer when he appeared with the chief justice before a privately financed group studying problems of government service .", "The chief justice said today that he hoped the group , known as the Volcker Commission for its chairman , Paul A . Volcker , former chairman of the Federal Reserve , would find a solution .", "`` It is obvious that the current approach to judicial and other high-level salaries does not work , '' he said .", "`` We can not continue to use an arrangement for setting pay that simply ignores the need to raise pay until judicial and other high-level government salaries are so skewed that a large -LRB- and politically unpopular -RRB- increase is necessary . 
''", "The chief justice noted that `` there will always be a differential between government and private sector pay for excellent lawyers , '' but added : `` But the judiciary , in particular , will be compromised if there is too wide a gap .", "At the present time there is not just a gap , there is a chasm . ``", "On other topics , the chief justice said the federal system needed 10 new judgeships for the courts of appeals , where no positions have been created since 1990 .", "In the Second Circuit , which covers New York , Connecticut and Vermont , he noted that no positions had been created since 1984 , while the circuit 's workload has risen by almost 70 percent .", "In his brief discussion of vacancies on the federal bench , the chief justice appeared to avoid apportioning blame for widely noted difficulties in the confirmation process .", "With a nod to the departing Democratic leadership of the Senate Judiciary Committee , he said , `` We appreciate the fact that the Senate confirmed 100 judges during the 107th Congress . ''", "That was a considerably faster pace than the Republicans set when they controlled the committee and were processing the Clinton administration 's nominations .", "Congress adjourned this year leaving 60 vacancies on the federal courts .", "For 29 of these , the Bush administration had not submitted nominations .", "`` Judicial vacancies must be filled in a timely manner with well-qualified candidates , '' Chief Justice Rehnquist said .", "`` It is of no concern to the judiciary which political party is in power in the White House or the Senate .", "We simply ask that the president nominate qualified candidates with reasonable promptness and that the Senate act within a reasonable time to confirm or reject them . `` ."], "summary": ["Chief Justice William Rehnquist , in yearly report on federal judiciary , calls for higher pay , more judgeships and speedier filling of vacancies ."], "publication": "nyt50", "label": [0], "tag": ["U.S."]} -{"id": "1453321", "text": ["IT would be hard to conjure up a worse year for Wall Street and its analysts than 2002 .", "Stock analysts and strategists were pilloried for their gauzy optimism and accused by Eliot Spitzer , the New York attorney general , of being beholden to the investment banking side of their firms .", "At the same time , the collapsing equity markets made it especially hard to sort out the winners from the losers .", "Consequently , those who survived 2002 look at this year with a cool , skeptical , as well as careful eye .", "The role of analysts has changed .", "As a result of the settlement in principle between brokerage firms and federal and state regulators , analysts can not earn their keep by doing investment banking work .", "Now it is all about stock picking , through rigorous company-specific analysis , which in theory should be good for investors .", "Two years into his easing cycle , the magic man of years past , Alan Greenspan , the Federal Reserve chairman , has lost his power to move markets with a well-timed interest rate cut .", "On the contrary , many experts now say , the market 's next bull run will be presaged by evidence that the Fed is moving toward a rate-tightening bias .", "Put simply : higher rates signal an economy on the mend , which means better earnings for companies across the board .", "Despite the market 's glum mood , investment opportunities still exist , a group of analysts say .", "Many companies in crucial sectors like retail , cable , brokerages and technology have 
been revamping for more than a year .", "Layoffs , store closings and other measures of cost-cutting have given a number of large companies spruced-up balance sheets .", "They will be well placed to take advantage of sector recoveries when they come along .", "From I.B.M. to Comcast , to Merrill Lynch to Gap , the names are familiar , and in all cases , they are trading well below their highs .", "These analysts , Dana Telsey of Bear , Stearns , Guy Moszkowski of Salomon Smith Barney , Laura C . Conigliaro of Goldman , Sachs , Richard Bilotti of Morgan Stanley and Richard Bernstein of Merrill Lynch , do not expect the companies to approach their past highs -- by most accounts , 2003 will be a difficult year for the markets as a whole .", "Most agree , however , that these companies will outperform the broader market .", "DANA TELSEY is the senior retail analyst at Bear , Stearns .", "Most retailers are `` overstored . ''", "A lot of companies have reached a stage of maturation where it is not possible to achieve the same level of top-line sales growth they had in the past .", "Many retailers are working to make efficiencies on expenses , distribution costs and systems to enhance returns .", "We think 2003 will show a continuation of this sort of refinement .", "Over all , retailers are faced with an environment where pricing flexibility is limited and where the consumer knows that the longer he waits , the cheaper the merchandise will be .", "To be successful , retailers must maintain positive gross margins through consistent markups and lower sourcing costs .", "The consumer is holding up pretty well .", "Keep in mind , we have tax cuts , low interest rates and low inflation .", "All of which bodes well for consumer spending .", "One company that will continue to do well is Coach .", "They do something that is special .", "By testing their products at least six months ahead of time , they are able to offer the consumer just what they like .", "Their balance sheet is also very clean , and their store base is not saturated .", "Gap is a different story -- if not a speculative one .", "There is momentum building , because their top-line growth is beginning to improve , albeit off a very depressed base .", "In addition , they have a new chief executive -- Paul Pressler , formerly of Walt Disney -- and an appealing new marketing campaign .", "The company is attracting new customers into all divisions -- Gap , Banana Republic and Old Navy .", "All of this is breathing new life into the company .", "Plus , in retailing there are not a lot of companies with market capitalizations larger than $ 10 billion .", "So Gap stands out at $ 13 billion .", "-LSB- Ms. Telsey does not own stock in Coach or Gap , nor does Bear , Stearns do investment banking business with them . 
-RSB-", "GUY MOSZKOWSKI is a brokerage analyst at Salomon Smith Barney .", "Business conditions remain difficult for brokerage stocks .", "Do n't forget -- the operating environment is the stock market .", "A better equity market translates into more trading volume on the retail investor side , better underwriting volume and a better mergers-and-acquisitions environment .", "Weakness in such areas has driven a decline in return on equity for the big firms to 10 percent or so from historic highs of 30 percent or better .", "When the market moves up and sustains itself , it will give investors confidence in the sector .", "But we have not seen that yet .", "Of the big firms , Merrill Lynch had the most fat to cut .", "Give them credit -- they realized in early 2000 when things were still good that they had become bloated and have laid off close to 20,000 employees in the past two years .", "Unlike other firms , they realized that we were not going to return to the late 90 's business levels for a long time .", "They have managed their costs so well that people will be surprised by the extent of their earnings power once the markets recover .", "Goldman Sachs has maintained market share in some key banking activities -- like equity underwriting and mergers and acquisitions -- during this difficult period .", "It does a few things extremely well , while others are spread more thinly .", "If there is any improvement in these areas , the stock has a lot of upside .", "With regard to the settlement in principle between brokerage firms and federal and state regulators , it clearly gets the regulatory issues behind the firms .", "Of course , the potential for civil litigation still exists , in terms of arbitration of retail client issues , if not class action .", "Given the relative size of fines levied -LSB- $ 1.4 billion -RSB- , along with e-mail evidence already released by regulators , it would appear that Merrill Lynch has the most exposure here , with Morgan Stanley , Lehman Brothers , Bear Stearns and Goldman Sachs considerably less so .", "-LSB- Mr. Moszkowski owns no shares in the firms he cites .", "Salomon has banking relationships with all of them . -RSB-", "LAURA C . CONIGLIARO is a technology analyst and strategist at Goldman , Sachs .", "For the technology sector , spending on information technology has been most affected by macroeconomic variables .", "As we have been seeing for two years now , the I.T. spending environment is weak .", "That is the feedback we get from end users in sectors like manufacturing , financial services and telecommunications .", "We are hearing that their spending patterns will be similar to the end of 2002 .", "Unfortunately , technology spending for 2002 was down , as chief information officers cut back on their budgets .", "Typically , if profitability is improving , so will capital and I.T. spending .", "But many chief information officers we speak to are saying it 's not just profitability .", "Cost cutting will not be enough -- they need to see a sustained pickup in revenue , and they have yet to see that .", "When we look at stocks , I.B.M. 
differentiates itself in a number of ways .", "First , the company never participated in the bubble .", "So its numbers never got out of whack .", "Unlike others , it was never overwhelmingly focused on the two big end markets that drove the bubble -- communications and financial services .", "In a normal spending environment , financial services and communications might represent 20 percent apiece of overall tech spending .", "During the bubble , those two sectors combined for 60 percent of end-market spending .", "That points to another key point about I.B.M. : it is very broad-based when it comes to its end market .", "The company is gaining market share .", "It is improving its product line and its execution .", "We have it rated a sector outperform , which is our highest rating .", "I.B.M. is a core holding , indeed it is both a defensive and an offensive stock .", "It is trading now at roughly 19 times earnings for 2003 -- in line with the Standard & Poor 's 500-stock index .", "Hewlett-Packard is another stock we have a sector outperform rating on .", "It is a different story : it 's a bit of a contrarian pick with an attractive risk-and-reward ratio .", "Hewlett-Packard has a lot of ground to gain on the cost-cutting front .", "They have a clear strength in printers , and are improving in PC 's and enterprise systems .", "Relative to other tech stocks , it is not one of the more expensive -- trading at 14 times 2003 earnings .", "You would expect a discount to I.B.M. , but this one is a little steep .", "-LSB- Ms. Conigliaro owns no stock in the companies mentioned .", "Goldman , Sachs has banking ties with Hewlett-Packard and I.B.M. -RSB- RICHARD BILOTTI is a cable and entertainment analyst at Morgan Stanley .", "What we expect to see in 2003 are cable companies that will have a higher strategic value than content companies like Viacom and Walt Disney .", "This has not been true since the late 70 's during the heyday of the build-out of cable in the United States .", "There are three main themes behind this evolution .", "First , after years of competition with satellite broadcasters , we are entering a period of pricing stability for cable companies .", "They are also selling value-added services to their customers and finding that it makes more sense to mine existing customer bases than to chase after new ones .", "All the major distribution companies -- Comcast , Cox Communications and EchoStar Communications -- are undervalued by 40 percent .", "Secondly , we have never seen a cable company the size of Comcast , with 22 million subscribers , provide size and scale without precedent .", "If it feels that its supplier of content is too expensive , it could conceivably launch its own replacement channel .", "Say , for example , the company does not want to carry a sports package because one of the channels is too expensive .", "They can go to the supplier of content -- the N.F.L. , for example -- and say , `` You know what , I 'll buy that football package directly from you . 
``", "The cost of buying the programming rights could be less expensive when spread over the Comcast base of subscribers than the wholesale fees charged by the existing networks .", "What do the content companies do .", "Well , they will have to go out and buy their own distribution channels .", "And finally , for the first time since I 've been an analyst , the rate of return on reinvested capital for cable companies is higher than that of content companies : 20 percent for distribution companies , after taxes , compared with 3 to 6 percent for content companies .", "Unlike the content companies , cable and satellite companies are investing all their discretionary cash flow back into their core businesses .", "For the content companies , it is a different story .", "Because of the fragmentation of the television audience and the sheer number of channels , content companies are finding it hard to capture advertising .", "So instead of investing in their core business , they are looking to make acquisitions of television stations and cable channels .", "But that gives you a return of just 6 or 7 percent .", "Historically , robust advertising growth has camouflaged these weak returns .", "Look at Comcast , which completed its merger with AT&T Broadband in November .", "It will have discretionary cash flow -- cash flow after interest expenses , taxes and maintenance capital expenditures -- of $ 1.50 to $ 1.75 a share .", "They will choose to put that money back into their business and earn 15 to 20 percent returns .", "Comcast trades at 15 times discretionary cash flow , well below the 25 to 30 times that is common for content companies .", "-LSB- Mr. Bilotti does not own stock in the companies he cites , but Morgan Stanley does have banking relationships with them . -RSB-", "RICHARD BERNSTEIN is chief United States strategist at Merrill Lynch .", "We recently lowered our equity allocation .", "Previously , we were recommending 50 percent stock , 30 percent bonds and 20 cash .", "Now we are at 45 percent , 35 and 20 .", "We think the equity market is very speculative .", "Equities remain the asset class of choice , and that does not bode well for their future performance .", "You hear people all the time say , `` Where can you get higher returns than in equities .", "`` That was the correct thing to say in 1982 when everyone wanted to look at money market funds .", "Or 1994 , when everyone was in bonds .", "Call it the paradox of investing .", "If everyone thinks equities are the asset class of choice , the odds are they are not .", "We have the S . & P . 
at 29 times trailing earnings .", "My point is that the only sure thing we have these days are announced earnings .", "According to our research , forecasting earnings growth is the least predictable and most volatile it has been in 60 years .", "So why would we value the market on somebody 's forecast if this is true .", "People are assuming that earnings growth is a given .", "My argument is this : Wait a minute , future earnings are the least predictable in our lifetimes .", "With regard to the Fed , its repeated easing shows that the economy is not making the transition from the early phase to the middle phase of the economic cycle .", "Continued easing is keeping the economy on life support .", "The Fed is basically admitting that things are not working .", "Our belief is that the next bull market begins only when the Fed starts to tighten .", "That will tell you that we have moved beyond this deflationary environment .", "The View From Wall Street ."], "summary": ["Stock analysts Dana Telsey of Bear Stearns , Guy Moszkowski of Salomon Smith Barney , Laura C Conigliaro of Goldman Sachs , Richard Bilotti of Morgan Stanley and Richard Bernstein of Merrill Lynch comment on prospects for stock market in 2003 and offer opinions for specific companies , including IBM , Comcast , Hewlett-Packard , Merill Lynch , Gap and Coach .", "Photos ."], "publication": "nyt50", "label": [15], "tag": ["Business"]} diff --git a/reproduction/Summarization/Baseline/test/testdata/val.jsonl b/reproduction/Summarization/Baseline/test/testdata/val.jsonl deleted file mode 100644 index af306124..00000000 --- a/reproduction/Summarization/Baseline/test/testdata/val.jsonl +++ /dev/null @@ -1,100 +0,0 @@ -{"id": "1788718", "text": ["AT the end of the last season of `` The Wire , '' another battle in the drug war came to an unceremonious close .", "As an experiment the police in the show 's grim Baltimore neighborhood had decided to try drug legalization within a circumscribed area , which locals started calling Hamsterdam .", "But within weeks , political blowback forced the experiment 's cruel end .", "Hamsterdam 's seedy row houses were torn down , leaving an equally inhospitable pile of rubble .", "Amid the destruction Juan Donovan Bell saw an opportunity .", "One half of Darkroom Productions , a local Baltimore hip-hop production team , he had been avidly following `` The Wire '' since its first season .", "`` These communities they depict , I live there , '' he recently said over the telephone from his West Baltimore studio .", "He said the show had done a good job of depicting the city 's drug gangs , police officers and politicos , but it had all but ignored the city 's music .", "So he began work on a mixtape album to showcase local rappers .", "`` I knew the mixtape would blow up if I called it ' Hamsterdam , ' '' Mr. Bell said .", "`` I was like , ' If you look at the show for entertainment , do n't forget about us . '", "`` He shot the cover photograph around the corner from where the Hamsterdam episodes were filmed , as the original location was unavailable : '' When they tore the houses down , that was real . 
``", "`` Hamsterdam '' became one of the more acclaimed hip-hop records to come from Baltimore last year , and one of the first to receive attention outside the city .", "It caught the ear of David Simon , the creator and an executive producer of the series .", "`` I put it in my car 's CD player and drove around with it for three days straight , `` he said recently in a phone interview .", "`` I 'd been so frustrated about not being able to be authentic in the past .", "The music they 're listening to , it should be hip-hop , and it should be the hip-hop they 're listening to in Baltimore . ``", "When the show 's fourth season begins tonight , Baltimore 's rap scene -- by no definition a national powerhouse -- will have its biggest showcase to date .", "Darkroom contributes several songs featuring several unsigned rappers , most notably Tyree Colion , Mullyman and Diablo .", "`` The amount of people in Baltimore in the last five years who 've received record contracts , `` Mr. Bell said , '' you can count on one hand , with fingers left . ``", "No national rap star has emerged from Baltimore , despite all this grass-roots activity , largely because a distinctive local black sound -- Baltimore club , or house , a thrusting , occasionally lewd form of dance music -- already existed .", "-LRB- Last season `` The Wire '' used a few songs from DJ Technics , a local club-music figure .", "The context was `` quite tasteless , the way it was supposed to be , '' DJ Technics said jokingly .", "He contributes more club tracks this season . -RRB-", "`` The Wire '' has already invigorated the city 's musicians .", "`` Even though it 's fictional , the show has influenced rappers in Baltimore , `` said Blake Leyh , the show 's music supervisor .", "`` And by using this music , there 's a sense in which these different worlds are feeding back on each other now . ``", "Mr. Simon added : `` I think the show gave Baltimore a certain pride .", "It was coming out of their ghetto .", "Forget West Philly , forget East New York .", "When it comes to drug trafficking , we 're the first string .", "There 's perverse pride in that . ``", "In one scene this season two members of the show 's primary drug crew , trying to figure out whether the new corner boys are from a rival New York set , ask about a popular Baltimore song by Young Leek .", "The guy they are interrogating replies , in an unprintable fashion , that he has never heard of it , and he is thanked for his candor with a bullet in the head .", "UNLIKE most television shows , on which pop music is used to provide broad emotional prompts , `` The Wire '' uses songs only as source music , as it would be heard by the characters themselves .", "`` We 're adding to the credibility of the moment , `` Mr. Simon said .", "`` We 're not trying to cue people as to what to think .", "The perfect song that comments on the action , that 's never on the jukebox when the moment actually happens . ``", "And so the uses of Baltimore hip-hop this season helps firm `` The Wire 's `` grip on naturalist storytelling .", "`` The attempt , '' Mr. Leyh said , `` is to make everything as real as possible .", "Our concern is verisimilitude .", "The cumulative effect of all of these choices adds up to something very powerful . 
``", "Inspired by the attention now being paid to their city and their work , the Darkroom producers are at work on a second volume of `` Hamsterdam , '' as well as a documentary about the city 's rap scene .", "In a dark pun on Baltimore 's nickname of Charm City , they are calling it `` Harm City Exposed . ''", "`` The streets is a monster here , '' Mr. Bell said .", "`` It can swallow up anyone .", "That 's why I want to get this door kicked down soon , because a lot of people do n't have any options . ``", "-LRB- Mr. Colion , one of this season 's most prominently featured artists , wo n't be able to see how his work is used on the show : he 's currently behind bars . -RRB-", "Using this music , Mr. Leyh said , `` is one more way ' The Wire ' can give back to Baltimore . ``", "Already , the artists attached to the `` Hamsterdam '' project are beginning to receive major-label interest .", "`` This is for us , '' Diablo said , `` and we need to make sure that it counts .", "Our only problem has been getting heard , and now we getting heard . ``", "In the final scene of the final episode of this season , one of the show 's young characters drives down a quiet street , Mullyman 's song `` The Life , the Hood , the Streetz '' blasting from the window of his stolen car .", "From Mullyman 's `` Still H.I.M. '' mixtape , it was one of the bigger Baltimore rap records of the past year , but in this new context portends a whole new life and meaning for the song and its author .", "`` In Baltimore your hood is your whole world , '' Mullyman said .", "`` ` The Wire ' inspired me , let me know we had a voice I did n't know we had .", "It showed me I might be sitting on oil . ``", "THE NEW SEASON -- TELEVISION ."], "summary": ["Article discusses how TV series The Wire will feature music from Baltimore 's rap scene , using songs only as they would be heard by the characters themselves .", "Baltimore rap music scene described .", "Photos ."], "publication": "nyt50", "label": [34], "tag": ["Arts"]} -{"id": "1788720", "text": ["THE singer-songwriter Marisa Monte has one of those supple , knowing voices that make Brazilian pop so inviting .", "After a decade in which she became one of Brazil 's top stars , Ms. Monte stopped touring in 2001 to catch her breath , have a child and re-examine her music .", "She did n't disappear .", "`` Tribalistas , '' the album she made in 2002 in a two-week collaboration with fellow Brazilian songwriters , Carlinhos Brown and Arnaldo Antunes , won a Latin Grammy award and sold a million copies in Brazil .", "Now she 's back with two very different albums , being released this week by the Metro Blue / Blue Note label in the United States : `` Infinito Particular '' and `` Universo ao Meu Redor . ''", "-LRB- They 're already hits in Brazil . -RRB-", "And on Nov . 14 she will come to the Beacon Theater as part of her first American tour since 2000 .", "`` Universo ao Meu Redor '' is about heritage : the sambas that Ms. Monte grew up hearing and studied more deeply when she produced an album for the Velha Guarda da Portela , the old guard , or elder members , of the long-running Portela samba school in Rio de Janeiro .", "-LRB- Her father is one of the school 's directors . 
-RRB-", "She sings borrowed sambas -- reaching back as far as the 1940 's and including some that have been widely sung but never recorded -- along with a few of her own songs , and she 's backed largely by acoustic instruments in cozy but untraditional arrangements .", "It 's samba carried inward .", "`` Infinito Particular '' is even more pensive .", "It features her own songs in settings that wrap her and a small band in an aura of orchestration and electronics .", "Both albums are wonderfully introspective , and it should be illuminating to hear what she does with the songs when she brings them to the stage in November .", "THE NEW SEASON -- MUSIC ."], "summary": ["Jon Pareles article profiles Brazilian singer-songwriter Marisa Monte .", "Describes her two just-released very different albums .", "Photo ."], "publication": "nyt50", "label": [0], "tag": ["Arts"]} -{"id": "1788721", "text": ["LISTENING to Young Jeezy is an immersive experience .", "Not just because he plunges listeners into a world of crack-trade narratives and gun-busting threats and grim late-night parties where no one ever seems to relax .", "-LRB- Though he does that , and does it brilliantly . -RRB-", "But also because he finds musical ways to suck in his listeners .", "He gravitates toward slow tempos , so he can sink into the beats , and so listeners have no choice but to slow down and sink with him .", "And his constant ad-libs -- `` Yeeeah '' and `` Ha-haaa ! '' and `` Thaaat 's riiiight `` and all the rest -- cleverly blur the line between sense and sound .", "Last year , after an impressive run on the mixtape circuit , Young Jeezy released `` Let 's Get It : Thug Motivation 101 `` -LRB- Island Def Jam -RRB- , a major-label debut that felt like a momentous event .", "He sounded utterly untouchable when he growled , `` You better call your crew , you gon ' need help / Whole car strapped , and I ai n't talking seat belts . ``", "The album yielded a handful of hits , most notably `` Soul Survivor , '' a collaboration with Akon .", "Maybe more important , the album established Young Jeezy as one of hip-hop ` s most unimpeachable stars : an Atlanta upstart not even narrow-minded New York listeners could deny .", "A pop hitmaker who still sounded great on mixtapes .", "And you can hear Young Jeezy 's influence in Rick Ross , the Miami rapper who clearly patterned his recent major-label debut after `` Let 's Get It . ``", "Now , barely a year later , Young Jeezy is gearing up for his second proper album .", "He has plenty of motivation : his friendly -LRB- so far -RRB- Atlanta rival T I . released an impressive CD , `` King , '' earlier this year .", "And his recent collaborations and mixtape contributions suggest he has n't lost a step .", "In `` I Do This , '' he rhymes , `` S550 , yeah , the brand-new Benz / Bought two the same color , I call ' em Siamese twins . 
''", "Now the only question is : Will his plan -- to release the album on Halloween -- hold firm .", "Young Jeezy may seem immovable , but major-label hip-hop release dates are anything but .", "THE NEW SEASON -- MUSIC ."], "summary": ["Kelefa Sanneh article discusses Young Jeezy , one of hip-hop ` s most unimpeachable stars .", "Barely a year after releasing his first album , he is gearing up for second one .", "Photo ."], "publication": "nyt50", "label": [12, 9], "tag": ["Arts"]} -{"id": "1788722", "text": ["A JAZZ concert season can suggest comparisons to a course curriculum , a museum catalog or a tasting menu .", "The new season at Merkin Concert Hall was designed with yet another model in mind .", "It aspires to the gritty , unofficial quality of a mixtape .", "That may sound slightly out of character for Merkin Hall , which has earned a reputation for avant-gardism of a cool and cerebral disposition .", "But for Brice Rosenbloom and Simon Rentner , the season 's producers , it makes sense .", "Certainly it fits their profile .", "Mr. Rosenbloom has booked music at Makor and the Knitting Factory , and Mr. Rentner has a background in radio production .", "Their commitment to eclecticism comes across as matter-of-fact , even when it carries a whiff of youthful pretension .", "DJ Spooky was one source of inspiration for their programming concept , Mr. Rentner said in a recent phone conversation .", "The 45-concert season began on Sept . 2 with a tribute to New Orleans jazz and soul .", "Its closing jazz event , on May 7 , pairs the Ron Carter Nonet and the Aaron Goldberg Trio .", "Both concerts fall under Chamber Jazz , a six-concert subscription series that will also include the free-jazz pianist Cecil Taylor -LRB- Oct . 12 -RRB- and the Mingus Orchestra -LRB- Nov . 30 -RRB- .", "The hall 's No Minimum series no longer heeds its original dual-piano premise : its first installment on Oct . 9 will feature the pianist Robert Glasper with , separately , the guitarist Lionel Loueke and the bassist Meshell Ndegeocello .", "The Zoom : Composers Close Up series will include a Feb . 15 showcase for the Argentine composer Guillermo Klein .", "And the first concert of the new Masters Reimagined program , on Sept . 9 , will feature the music of Hermeto Pascoal as performed by the Bobby Sanabria Big Band .", "Though studded with stylish contemporary references -- like Radiohead and Bj\u00f6rk , the subjects of an interpretive concert on Oct . 19 -- the season 's most promising feature has a historic tinge .", "Reissue is a series that addresses repertory in a different fashion than Jazz at Lincoln Center , which once employed both Mr. Rosenbloom and Mr. Rentner .", "On Sept . 16 Reissue pays homage to Don Cherry 's `` Symphony for Improvisers , '' with that album 's original bassist , Henry Grimes , and the trumpeters Dave Douglas and Roy Campbell .", "On Nov . 14 it will salute Andrew Hill 's `` Passing Ships , '' with Mr. Hill leading an octet as well as his trio .", "On Dec . 19 Miles Davis 's `` Bitches Brew '' will be revisited by Animation , led by the saxophonist and producer Bob Belden .", "And on Feb . 19 , `` The Connection , '' by Freddie Redd , will be revived with the alto saxophonist Lou Donaldson filling in for Jackie McLean and , after a lengthy obscurity , Mr. 
Redd at the piano .", "THE NEW SEASON -- POP MUSIC ."], "summary": ["Nate Chinen article discusses highlights of 45-concert jazz season at Merkin Concert Hall , which aspires to gritty , unofficial quality of mixtape .", "Photo ."], "publication": "nyt50", "label": [2, 1], "tag": ["Arts"]} -{"id": "1788724", "text": ["THERE is nothing more exciting than the premiere of a new work that seems destined to stick with you from the moment it starts .", "I may forever associate the 2005-6 season with the premiere of Peter Lieberson 's `` Neruda Songs , '' commissioned by James Levine and the Boston Symphony Orchestra and performed at Symphony Hall .", "That haunting , refined and emotionally revealing song cycle was like a love poem from Mr. Lieberson to his wife , the unforgettable mezzo-soprano Lorraine Hunt Lieberson , who sang it sublimely in what would be one of her last performances before her death in July .", "As I look ahead to the coming season , several intriguing premieres catch my interest , starting with Tan Dun 's ambitious opera `` The First Emperor , '' which was commissioned by the Metropolitan Opera and receives its premiere there -LRB- Dec . 21 -RRB- .", "With Mr. Tan , an immensely skilled and eclectic composer , it 's hard to know what to expect .", "He has written works that are baffling in their banality , like `` Red Forecast , '' a multimedia piece for soprano , orchestra , a battalion of percussionists , video projections and audio tracks .", "The work is like some pretentious 60 's happening , with chanting , chaos , images of students rioting , a gaggle of voices .", "Yet other pieces have been close to entrancing , notably `` Water Passion After St . Matthew . ''", "Though marred by exasperating stretches of meditative vamping , the work has some ethereal music and mesmerizing instrumental colors .", "And Mr. Tan 's Oscar-winning score for the 2000 film `` Crouching Tiger , Hidden Dragon '' is a knockout from start to finish : arresting , brutal , yet somehow charming .", "Mr. Tan 's new opera is set in the ancient court of Qin Shi Huangdi , the first emperor of China , a role conceived for Pl\u00e1cido Domingo .", "Count on the production by the film director Zhang Yimou -LRB- `` House of Flying Daggers '' -RRB- to be elaborate and flashy .", "How will it turn out .", "Not knowing is part of the fun .", "Perhaps the New York Philharmonic grew tired of reading about the excitement the conductor Esa-Pekka Salonen has been generating with the Los Angeles Philharmonic at its stunning new home , Disney Hall .", "Rather than just bring him here to conduct , the New York Philharmonic commissioned Mr. Salonen , an accomplished composer , to write a new piano concerto for Yefim Bronfman , which receives its premiere in New York -LRB- Feb . 1-3 -RRB- , with Mr. Salonen conducting .", "As a consolation prize the Los Angeles Philharmonic gets first dibs on recording the concerto .", "On a smaller scale , the eminent pianist Peter Serkin , whose passion for contemporary music is as intense as it was during his young rebel days as a member of the uncompromising quartet Tashi , is scheduled to give the premiere of a new chamber work by the British composer Oliver Knussen .", "Despite having written some bracing and ingenious scores , Mr. Knussen has had a woeful record of meeting deadlines .", "This work was supposed to have had its premiere when Mr. Serkin presented his Perspectives series at Carnegie Hall several seasons back .", "This time the delayed premiere by Mr. 
Serkin and the Zankel Band is to take place at Zankel Hall -LRB- April 13 -RRB- , with Mr. Knussen conducting .", "This season at the Chamber Music Society of Lincoln Center is the first planned completely by the ensemble 's new artistic directors , the pianist Wu Han and the cellist David Finckel .", "They have devised many programs that intriguingly mix old and new works .", "But I 'm looking forward to an all-Leon Kirchner concert .", "The society took part in commissioning this towering American composer to write his String Quartet No . 4 .", "One way to present its New York premiere would be as part of a varied program that might place the new work in a larger musical context .", "Instead the society will present the Orion String Quartet performing all four Kirchner quartets at Alice Tully Hall -LRB- March 7 -RRB- .", "They will be played in order , starting with the first , composed in 1949 .", "Here is a chance to follow Mr. Kirchner 's exploration of the quartet genre over a span of nearly 60 years .", "Out of town , admirers of the elegant composer Kaija Saariaho are looking forward to her 60-minute oratorio , `` La Passion de Simone . ''", "The work will have its premiere in Vienna this fall , and the Los Angeles Philharmonic will give the American premiere -LRB- Jan . 12-14 -RRB- .", "James Levine , who continues to champion tough-guy modernists , presents the premiere of Charles Wuorinen 's Eighth Symphony with the Boston Symphony at Symphony Hall -LRB- Feb . 15 -RRB- .", "Those who find Mr. Wuorinen 's music too off-putting in its complexity should hear Mr. Levine perform his works .", "In recent years Mr. Levine 's palpable excitement for the music has come through in stunning accounts of daunting Wuorinen scores .", "On the other end of the contemporary-music spectrum John Adams has fashioned the `` Doctor Atomic '' Symphony from his engrossing and courageous opera `` Doctor Atomic , '' which had its premiere last season in San Francisco .", "David Robertson , who is galvanizing audiences as music director of the St . Louis Symphony , conducts the premiere in St . Louis -LRB- March 16 -RRB- , then brings it to Carnegie Hall -LRB- March 31 -RRB- .", "As always with premieres from searching composers , do not assume anything .", "THE NEW SEASON -- CLASSICAL MUSIC ."], "summary": ["Anthony Tommasini article notes some classical music and opera premieres scheduled for upcoming season , including Tan Dun 's The First Emperor at the Metropolitan Opera .", "Photo ."], "publication": "nyt50", "label": [3], "tag": ["Arts"]} -{"id": "1788725", "text": ["GUSTAVO DUDAMEL has quickly come a long way from his boyhood in Barquisimeto , Venezuela , when he used to conduct toy soldiers .", "He has attracted glowing praise from Simon Rattle , Claudio Abbado and Daniel Barenboim , and a reputation as a rising star of the podium .", "And he 's still just a tender 25 .", "Mr. Dudamel is one of five musicians selected here as artists on the cusp of promising futures .", "He is a product of one of the classical music world 's most extraordinary phenomena : Venezuela 's youth orchestra system .", "Financed by the government and the Inter-American Development Bank , it has yielded scores of youth orchestras and trained hundreds of thousands of players , many from poor neighborhoods .", "Mr. 
Dudamel conducts its jewel , the Sim\u00f3n Bol\u00edvar National Youth Orchestra .", "He was also recently appointed the principal conductor of the Gothenburg Symphony in Sweden .", "He appears in America with the Los Angeles Philharmonic -LRB- Jan . 4 to 6 -RRB- , conducting Rachmaninoff 's Third Piano Concerto -LRB- with Yefim Bronfman as soloist -RRB- , Kodaly 's `` Dances of Galanta '' and Bartok 's `` Concerto for Orchestra '' .", "And with the Chicago Symphony -LRB- April 5 to 7 and 10 -RRB- , leading Bruch 's First Violin Concerto -LRB- with Pinchas Zukerman -RRB- and Mahler 's First Symphony .", "But he will be a busy guest conductor in Europe , appearing with a dozen orchestras , including the City of Birmingham Symphony , the Philharmonia of London , the Rotterdam Philharmonic and the Vienna Symphony .", "He has also been entrusted with Mozart 's `` Don Giovanni '' at La Scala Opera in Milan , Italy .", "Deutsche Grammophon , meanwhile , is about to release his recording of Beethoven 's Fifth and Seventh Symphonies with the Bol\u00edvar youngsters .", "In the vocal realm , it will be a big season for Eric Owens , 36 , a Philadelphia-born bass-baritone who took part in two of the most important opera events last season .", "He won praise for his portrayal of Gen . Leslie Groves in John Adams 's `` Doctor Atomic '' at the San Francisco Opera .", "But raves came with the title role of Elliot Goldenthal 's `` Grendel '' at the Los Angeles Opera and the Lincoln Center Festival .", "Some critics described the part as perhaps the most demanding bass-baritone role in the repertory .", "Mr. Owens was `` consistently charismatic , theatrically and vocally , '' Peter G . Davis wrote in New York magazine .", "Others said he dominated the opera .", "This season Mr. Owens continues his close association with Mr. Adams .", "He will sing in the premiere of Mr. Adams 's new opera , `` A Flowering Tree , '' inspired by Mozart 's `` Magic Flute , '' at Peter Sellars 's New Crowned Hope Festival in Vienna with the Vienna Philharmonic -LRB- Nov . 14 , 16 , 17 and 19 -RRB- .", "Other performances of the work are scheduled with the Berlin Philharmonic -LRB- Dec . 21 and 22 -RRB- and the San Francisco Symphony -LRB- March 1 to 3 -RRB- .", "Mr. Owens will sing other Adams works with the Boston Symphony -LRB- Dec . 7 to 9 -RRB- and the American Composers Orchestra at Carnegie Hall -LRB- April 27 -RRB- , and he makes his debut at the Netherlands Opera with a run of `` Doctor Atomic '' in June .", "He also has appearances with the Houston , Cincinnati and Alabama Symphonies .", "Compared with the celebrity glow of globe-trotting opera stars like Mr. Owens , contemporary composers like Michael Gandolfi lead low-key lives .", "A self-effacing 50-year-old from Cambridge , Mass . , and a friend of the much more famous composer Osvaldo Golijov , he started out in rock and jazz .", "Like many of his peers , he now survives by teaching -LRB- at the New England Conservatory and the Tanglewood Music Center -RRB- and winning foundation grants .", "His primarily tonal music , sometimes motored by Minimalism , is suffused with rhythmic vigor and vivid images .", "And he has a welcome humorous streak .", "One work is titled `` Budget Cuts : A Septet for Three Players and Conductor . ''", "Examples of his work will appear on programs by the American Composers Orchestra -LRB- Oct . 13 , Zankel Hall -RRB- and Boston Musica Viva -LRB- Feb . 
4 , Tsai Performance Center in Boston -RRB- .", "Most notably his `` Impressions From ' The Garden of Cosmic Speculation , ' '' a work still in progress , will be conducted by Robert Spano with the Houston Symphony -LRB- March 16 to 18 -RRB- , the New World Symphony in Miami Beach -LRB- April 21 -RRB- and the Atlanta Symphony -LRB- May 24 to 26 -RRB- , where Mr. Gandolfi is in residence .", "Back to performers .", "The cellist Alicia Weilerstein , 24 , has been performing for nearly 20 years .", "She would probably have had an even bigger career by now if not for the small matter of college .", "She graduated with a history degree from Columbia in 2002 , an unusual stop-off for most virtuosos .", "Ms. Weilerstein also has a penchant for chamber music , performing in the Weilerstein Trio with her parents .", "One of her first major dates this season is a trio concert with Maxim Vengerov , violinist , and Lilya Zilberstein , pianist , at Carnegie Hall -LRB- Oct . 14 -RRB- .", "She performs the Elgar Concerto with the New York Philharmonic at Avery Fisher Hall -LRB- Jan . 11 and 13 -RRB- and with the Baltimore Symphony -LRB- June 8-10 -RRB- , and drops in for a slew of dates in places like Kalamazoo , Mich .", "Poughkeepsie , N.Y.", "Toledo , Ohio .", "Helena , Mont .", "And Wichita , Kan .", "Among the crop of emerging young pianists , Simone Dinnerstein , 33 , has done it the hard way .", "Praised for her intelligent and sensitive music making , Ms. Dinnerstein has won no major competition and lives with her schoolteacher husband and 4-year-old child in the Park Slope section of Brooklyn .", "She raised the money for her first recording , Bach 's `` Goldberg Variations , '' on her own .", "She played the work -- a brave choice -- at her New York debut in 2005 , winning positive reviews .", "In January she was taken on by her first major manager , Columbia Artists .", "In October the Delos label will release her recording of the Beethoven cello sonatas with Zuill Bailey , and her career generally gathers steam in the 2006-7 season .", "Highlights include a performance of Liszt 's Piano Concerto No . 1 and Totentanz at the Bard Music Festival -LRB- Oct . 27 -RRB- in Annandale-on-Hudson , N.Y. , and a recital at the Metropolitan Museum of Art -LRB- Nov . 
19 -RRB- .", "Just as important may be a performance for an audience of one : an audition next month for Christoph Eschenbach , the music director of the Philadelphia Orchestra .", "THE NEW SEASON -- CLASSICAL MUSIC Correction : September 17 , 2006 , Sunday An article and picture caption last Sunday about five classical music artists poised for breakthroughs misspelled the given name of one .", "She is Alisa Weilerstein , not Alicia .", "The article also misstated the date she graduated from Columbia .", "It was 2004 , not 2002 ."], "summary": ["Article profiles five musicians who are poised for a breakthrough and briefly describes their careers : conductor Gustavo Dudamel , bass-baritone Eric Owens , composer Michael Gandolfi , cellist Alicia Weilerstein and pianist Simone Dinnerstein .", "Photos ."], "publication": "nyt50", "label": [33, 24, 3, 43], "tag": ["Arts"]} -{"id": "1788727", "text": ["HERE 'S the bottom line about opera : Whatever the story , the music or the length , a performance rides on the quality of the singers in the principal roles .", "Verdi 's `` Don Carlo '' is a large and ambitious piece involving love triangles , political intrigue , the Spanish Inquisition and even a mysterious ghost .", "But the main difficulty in staging it is that it has six lead parts , each requiring a major singer .", "So it is rare to see a truly satisfying `` Don Carlo , '' yet this season the Metropolitan Opera is making a strong bid to provide one on Nov . 30 .", "Patricia Racette , Johan Botha , Olga Borodina , Dmitri Hvorostovsky , Ren\u00e9 Pape and Samuel Ramey onstage , and James Levine in the pit : the lineup is worthy of an opera gala in both star power and sheer musical ability .", "The one question mark is Ms. Racette : not whether this intelligent and moving artist will provide a committed performance , but what her lyric instrument will make of a role calling for a strong Verdi soprano .", "Still , when a singer as fine as Ms. Racette is your question mark , you are in good shape .", "`` Don Carlo , '' the story of the impetuous , weak crown prince of Spain -LRB- the underrated Mr. Botha -RRB- who defies his authoritative father , Philip II -LRB- the breathtaking Mr. Pape -RRB- , was written on the generous scale of French grand opera , and it underwent numerous reworkings after its 1867 premiere -LRB- including translation into Italian -RRB- as Verdi tried to get it right .", "Some of the difficulties were there from the beginning : to tailor the complex role of Princess Eboli to the voice of its first performer , Verdi ended up creating two unequal arias , one frilly , one dramatic .", "Ms. Borodina is one of the few singers alive who , with her languid , sensual voice , can hope to do justice to both .", "Some scenes simply proved intractable , like the discussion in Act II in which the Marquis of Posa speaks truth to the wrongheaded power of the autocratic Philip , a scene Verdi struggled with for years .", "The Met has found a way to curb its long-windedness .", "When Mr. Hvorostovsky , the heartthrob baritone with the never-ending breath support , and Mr. 
Pape are onstage together , any opera lover would be happy to watch , and listen , all night .", "THE NEW SEASON -- CLASSICAL MUSIC ."], "summary": ["Anne Midgette article on difficulties of producing Verdi 's opera , Don Carlo .", "Says Metropolitan Opera is making strong bid to solve them with good singers for all six leads .", "Photo ."], "publication": "nyt50", "label": [3], "tag": ["Arts"]} -{"id": "1788729", "text": ["OF the few weeds that crop up in the well-mowed lawn that is the New York Philharmonic 's coming season , one of the more welcome is native to the Midwest .", "David Robertson , left , taking a week off from his music director 's job at the St . Louis Symphony , will conduct music by Kaija Saariaho , Debussy and Sibelius -LRB- Dec . 14 to 16 -RRB- .", "He 's not a completely unfamiliar species : the Philharmonic is a co-commissioner of Ms. Saariaho 's `` Adriana Songs , '' a cycle extracted from her recent opera `` Adriana Mater '' with new connective tissue to join the various sequences .", "On the other hand , it takes visitors like Mr. Robertson to do the kind of heavy thinking that brings these three composers together in such a provocative way .", "Ms. Saariaho represents an interesting merger of Finnish music 's gray , stony aesthetic with a deep involvement in the `` spectral '' movement and its slow-moving clouds of carefully calculated overtones .", "`` Adriana Mater '' -- a story of rape and vengeance but , most of all , motherhood -- offers moments of sonic violence uncharacteristic of her earlier work , but the songs will also turn to dream sequences and Ms. Saariaho 's dramatically conciliatory ending .", "The Irish mezzo-soprano Patricia Bardon will be the soloist .", "Debussy 's `` Martyr de St . - S\u00e9bastien '' conceals his later , leaner and more ascetic style inside a lumbering multimedia vehicle crushed by its own hyperactivity .", "When Kurt Masur did a cut-down version with the Philharmonic a few years ago , spectacle was eliminated , but the arm-waving excesses of d' Annunzio 's texts were inescapable .", "Mr. Robertson performs further liposuction by presenting what he calls `` Symphonic Fragments . ''", "If Debussy 's elusiveness has made its mark on Ms. 
Saariaho 's music , Sibelius 's `` Night Ride and Sunrise '' helps show the door she first came through .", "Brief , curious , obsessive , complex , with strings at a gallop , it is wintry in tone , a little hard-hearted and not easy to perform .", "`` La Mer , '' Debussy 's great essay on orchestral beauty , should warm us up at the end .", "After hearing this work , subtitled `` Day in the Life of the Sea , '' Erik Satie said that he liked the part at about half-past 10 .", "We should be on our way home by then .", "THE NEW SEASON -- CLASSICAL MUSIC ."], "summary": ["Bernard Holland article on David Robertson , who will conduct the New York Philharmonic in works by Saariaho , Debussy and Sibelius .", "Photo ."], "publication": "nyt50", "label": [1], "tag": ["Arts"]} -{"id": "1788741", "text": ["My state does not mandate paid maternity leave , and federal law does n't cover a business as small as where I work .", "Fortunately , my generous employer offers eight weeks of paid leave and an additional four unpaid .", "I am not sure I will return to work after that .", "May I still accept this offer .", "If I accept but do not return , must I refund the money .", "Name withheld , New Mexico Maternity benefits are not an advance payment on future work .", "They are better seen as compensation already earned , akin to health-care benefits or accumulated vacation days .", "There is nothing dishonorable in your not returning or unseemly about your current uncertainty .", "Having a child , particularly a first child , is a life-changing experience .", "You can not be sure how it will affect your feelings about the job .", "But your employer is operating out of personal generosity -- and a desire to keep employees happy and on the job -- rather than legal compulsion .", "-LRB- Where maternity leave is guaranteed by law , the issue is simple : comply .", "There is no requirement that you return to work or refund money if you do not . -RRB-", "So you should discuss the matter with your employer , to ensure that both of you clearly understand the situation and that both of you gain the peace of mind of knowing you have behaved openly , honestly and fairly .", "It may be that , learning of your indecision , your employer will amend the offer , even asking you to repay part of the money if you do not come back .", "That would be an unwise business decision -LRB- or perhaps a contract issue -RRB- , however : the policy is a model of decency and justice .", "But you should not behave deceitfully , particularly toward an employer who behaves so well toward you .", "UPDATE : A week after having her baby , the letter writer told her boss she was on the fence about returning to work .", "The boss changed the offer to two weeks of paid leave instead of eight .", "Once a week I pick up donated bakery items and deliver them to the food bank to be distributed to needy families .", "One morning I helped myself to a box of doughnuts .", "The following week I took some cookies for my grandson .", "Then it was a pecan pie , etc.", "Is this wrong .", "There seems to be plenty of food for the needy .", "Marjorie S . 
Desimone , Lansdowne , VA .", "I see where this is going : first you swipe a few doughnuts , then it 's a whole pie and before you know it you 're shooting it out with the cops at a Sara Lee factory .", "It 's an old , old story .", "Here 's one way to think about it .", "Suppose you were collecting cash for this same charity : would you help yourself to a few bucks if there were still `` plenty '' for the needy .", "If it 's wrong to take a wad of $ 10 bills , why is it right to take a wad of doughnuts .", "-LRB- Do they come by the wad .", "-RRB- I admire your work for the food bank , but you should stop skimming pastry .", "Here 's another way to think about it : If you did n't know the answer , you would n't have asked the question .", "It is often said that if you feel uneasy about your conduct , you 're probably doing wrong .", "But is that so .", "The proddings of the conscience are unreliable .", "Some people have a hypersensitive conscience and feel guilty about nearly everything .", "-LRB- Hence all those jokes that , in different versions , Jews and Catholics tell about themselves . -RRB-", "Other people do appalling things and sleep through the night untroubled .", "There 's no consistent calibration of the conscience .", "Feelings are not a reliable substitute for thought .", "That 's why therapists flourish .", "And columnists proffering ethical advice .", "THE WAY WE LIVE NOW : 9-10-06 : THE ETHICIST ."], "summary": ["Randy Cohen Ethicist column on comments on whether you may take paid maternity leave if employer is not required to provide it and you are unsure about whether you will return to work after having child .", "Also comments on whether a food bank volunteer may take the occasional box of doughnuts or cookies for herself ."], "publication": "nyt50", "label": [2, 30, 0, 32, 41], "tag": ["Health", "Magazine"]} -{"id": "1788742", "text": ["In New York City , Bohemia is determined by real estate : artists gather in raffish neighborhoods where studio space is cheap .", "The new outposts of culture and consumption they establish make the quarter desirable , thus raising the rents to prohibitive levels .", "The artists then decamp for the next shabby enclave .", "Starting in the 1960 's , what New Yorkers meant by `` downtown '' shifted from Greenwich Village to SoHo to TriBeCa to the Lower East Side .", "But Manhattan is a narrow island , and the portion of it dense enough to sustain the feeling of self-enclosure that Bohemia requires is quite small .", "And so , starting in the 1980 's , as rents skyrocketed , downtown began to migrate across the East River to Brooklyn .", "By now , those portions of Brooklyn first colonized by fleeing artists have almost completed the cycle of embourgeoisement .", "Williamsburg , the heart of Brooklyn 's gallery scene , has been thoroughly tamed by brasseries and boutiques .", "The kind of artists who are n't yet showing in those galleries are now moving to deepest , darkest Queens .", "But the middle-class householder geography of Queens offers too barren a soil for the rooting of a new Bohemia .", "Fortunately , there is lots more Brooklyn available .", "There is , of course , something wishful , or perhaps wistful , about this perpetual hunt for the urban El Dorado .", "A place that can shift around so easily sounds less like a neighborhood than a mentality , or a species of nostalgia .", "In most of America , after all , `` downtown '' simply means `` the city '' -- the place where things are close enough to one another that you 
can walk .", "But in New York , where every square inch feels urban , downtown is a refuge from -- a repudiation of -- the conventionality of Midtown , and mid-everything .", "Downtown is a concept , and perhaps an archaic one .", "The idea of Bohemia arose with the bourgeois city , against which it defined itself .", "In the Paris of `` La Boh\u00e8me '' -- the Latin Quarter , circa 1830 -- the artist willingly courts starvation and disease as the price of freedom .", "The poet Rodolfo may be giddy as he shovels his manuscript into the fire to keep warm , but it 's still the only source of fuel he has .", "Life was scarcely less desperate -- or less delightful -- in the downtown Manhattan of 1910 , when the poet and propagandist John Reed , according to one biographer , `` ate in obscure foreign restaurants , talked with the girls who walked the street in ' Satan 's Circus `` ` and caroused with Spanish longshoremen .", "Reed 's latter-day descendants are threatened not by penury but by gentrification .", "How can Bohemia contend with the twin baby stroller .", "The other day , walking around Fort Greene , one of Brooklyn 's current claimants to downtown cultural status , I stopped at an office building called 80 Arts .", "In the Museum of Contemporary African Diaspora Arts , or MoCADA , which occupies the ground floor , an exhibition of major black artists had just come down .", "I picked up some fliers from the counter .", "One , issued by a company called Downtown Babies , advertised `` Creative Play and Music Classes '' and `` Themed Birthday Parties '' to be held at MoCADA .", "Downtown Babies -- the end of Bohemia as we know it .", "80 Arts had been gutted and renovated as part of a `` cultural district '' established by a `` local development corporation '' organized around the Brooklyn Academy of Music , the cultural mega-institution of Fort Greene .", "Here was a planned Bohemia -- surely a contradiction in terms .", "Indeed , many locals , and local organizations , had protested the development -LRB- as they are now even more loudly protesting the Atlantic Yards , a nearby mega-project featuring skyscrapers and a basketball stadium to be designed by Frank Gehry -RRB- .", "But MoCADA owes its presence in Fort Greene to the cultural district .", "Should the project be fully implemented , a new theater and public library will be built as well .", "In short , downtown , or the idea of downtown , has become thoroughly implicated in the cultural and economic forces that it once resisted with every ounce of its scruffy integrity .", "The misfits and longhairs and revolutionaries deemed unassimilable by mainstream culture in John Reed 's day are now considered `` edgy '' .", "And edge , in turn , attracts sneaker stores and bistros and cultural entrepreneurs and even young couples with strollers .", "The derri\u00e8re-garde keeps catching up with the avant-garde .", "The avant-garde falls prey to its own lurid appeal .", "If Rodolfo 's artist pal Marcello were around today , his gallery opening would be catered by Absolut .", "And so the Bohemias of yesteryear have gone the way of Reed 's Spanish longshoremen .", "But is that so bad .", "Take a walk in Fort Greene , an ethnically and economically mixed neighborhood with tree-lined blocks of fine brick homes .", "A block away from 80 Arts , beyond the town house that the painter David Salle has lavishly rehabilitated , lies the lime-green Habana Outpost , an eco-friendly cafe where mothers push downtown babies on swings amid racks of 
folkloric skirts , priced to sell .", "And then , moving up Fulton Street , once a commercial swamp , there 's the wine store and the soul-food restaurant and the beloved Cake Man Raven .", "A few blocks away stands the Brooklyn Academy of Music , which has been irreproachably avant-garde since long before there was any money in it .", "Fort Greene feels less like Bohemia than what the scholar Joel Kotkin calls an `` elite urban enclave '' -- a place suited to the sophisticated tastes of the `` knowledge workers '' who now propel New York 's economy .", "But the wheel of development that brought in those young cosmopolites , and priced out a number of the area 's longtime , predominantly black residents , has not stopped turning : the Atlantic Yards project threatens to disrupt Fort Greene 's delicate ecology once again .", "We want to preserve our precious and beloved utopias like paperweight worlds .", "But the city -- at least this city -- will not permit it .", "THE WAY WE LIVE NOW : 9-10-06 James Traub is a contributing writer for the magazine ."], "summary": ["James Traub column on peculiar New York cycle by which one bohemian neighborhood after another is transformed from cheap place for artists to live , work and gather into elite urban enclave that they and other longtime residents can no longer afford .", "Says downtown , or idea of downtown , has become thoroughly implicated in cultural and economic forces it once resisted with every ounce of its scruffy integrity .", "Photos .", "Graphs ."], "publication": "nyt50", "label": [32, 0], "tag": ["Magazine"]} -{"id": "1788743", "text": ["Q : In the 20-odd years since you opened your first restaurant , Union Square Cafe , in downtown New York , the area has gone from being a place to get a 25-cent cup of diner coffee into a foodie paradise .", "Would you say New York is the restaurant capital of the country .", "Absolutely .", "For its variety , quality and hospitality , no other city comes close .", "What are the up-and-coming food cities .", "There are so many ! Both Portlands -- Maine and Oregon -- are obsessed with good food .", "So are Seattle , Boston and Birmingham , Ala .", "I 'd throw in Las Vegas , but the profusion of restaurants there is more about an obsession with money and restaurant brands than it is with the soul of good food .", "Right .", "They 're too addicted to celebrity chefs , not unlike the Food Network , with Emeril and Mario Batali and Rachael Ray and the rest .", "Do you watch their various cooking shows .", "No , because I generally do n't learn anything that I did n't already know about food .", "And I 'm not even that smart .", "Is it fair to say that your contribution to the New York scene is that you helped pioneer the comfort version of high cuisine .", "Either that or the high-cuisine version of comfort food .", "As a Midwesterner , I am always looking for the middle ground .", "And I do n't mean that in a kind of wishy-washy , milquetoast way .", "You like to present yourself as a well-scrubbed , earnest guy from St . 
Louis who came East and made New York dining less stuffy , less French , less attitudinal .", "That 's me , although I am not sure about scrubbed .", "How much do you worry about your competition , which in the restaurant business is known to be fierce .", "When I first started , I worried hugely .", "When I opened Union Square Cafe , that neighborhood did n't have the density that it now has , and every time someone would open , I literally felt like another dog had peed on my tree .", "These days you are part owner of six restaurants downtown as well as of the Modern , your deluxe eatery at the Museum of Modern Art that is relatively uptown in geography and spirit and price .", "The chef 's tasting menu is priced at $ 125 per person .", "Down-to-earth is not limited to certain price points or architectural styles .", "Or so you contend in your forthcoming book , `` Setting the Table : The Transforming Power of Hospitality in Business , '' which , surprisingly , does n't have a single recipe in it .", "What happened .", "One big debate my publisher and I had editorially was recipes or not , and the answer was no .", "Others have done narratives with recipes .", "Do something fresh , they said .", "So instead you decided to write a book on the hot new subject of hospitality .", "Yes .", "Show me three world-class art museums with equally good art and one of them will always have friendlier guards than the other two .", "That 's the one I 'm the most likely to return to .", "Have you ever been to Taco Bell .", "I went last year for the first time ever .", "I thought it was horrible .", "Whatever I am going to get in terms of a lower-priced and quicker transaction would never , ever convince me not to spend twice as much time and twice as much money getting something made by someone with a more individual point of view .", "But the price differential between Taco Bell and the Modern is not 2 to 1 .", "It 's 50 to 1 .", "Do you think that New York restaurants are overpriced .", "The Modern is not relevant to my comment .", "The value of dining at the Modern should be considered relative to dining at other luxury establishments of its ilk .", "Who cooks in your house .", "My wife , Audrey .", "To her credit , it is always fresh and always has at least four colors on the plate , and the kids are never given a choice .", "If they do n't want to experiment , they do n't eat .", "What do you see as the next food trend in New York .", "Two years ago , it was heirloom tomatoes .", "Last year it was Meyer lemons .", "It was watermelon this summer , watermelon as a replacement for tomato .", "The only ingredient that does not go out of style is hospitality .", "What about lettuce .", "Lettuce never goes out of style .", "You 've got a point .", "I 'll have to crunch on that .", "Deborah Solomon THE WAY WE LIVE NOW : 9-10-06 : QUESTIONS FOR DANNY MEYER ."], "summary": ["Deborah Solomon interview with Danny Meyer , downtown restaurateur who has ventured uptown with Modern , restaurant at Museum of Modern Art .", "Photo ."], "publication": "nyt50", "label": [22], "tag": ["Magazine"]} -{"id": "1788744", "text": ["The International Astronomical Union all but threatened to go out on strike last month unless the rest of the solar system ejected an errant , puny member from the exclusive planets ' club .", "Thus , on the vote of a few residents of the hegemonic planet Earth , Pluto -- a celestial body whose only crime was to dare to wander in orbit to the beat of a different drummer -- was stripped of its 
76-year-old classification .", "Pluto is now officially downgraded to a new category called dwarf planet , and all textbooks in all languages are ordered to refer to it with that adjectival derogation .", "World media , except a few unscientific sentimentalists -LRB- and the fictional Lois Lane 's Daily Planet -RRB- are meekly complying .", "The pejorative designation decreed by the stargazers ' union presuming to represent the solar `` Club of Eight '' is bottomed on dwarf star , defined as `` relatively small , with low mass and below-average luminosity . ''", "-LRB- Our sun falls into that demeaning category , but even so , sunblock is recommended . -RRB-", "The interest of language mavens in this astronomical rejiggering is the connotation of the words dwarf and planet .", "Dwarf , as both noun and adjective , means `` shorter than the average for the species , '' sometimes `` malformed or disproportionate . ''", "Because of cruel folklore portraying those affected by dwarfism as ugly Rumpelstiltskins , many with that genetic abnormality prefer to be called `` little people '' or `` of short stature . ''", "Midget , though well proportioned , is used to describe objects like tiny cars and submarines , and many little people take offense when the word is applied to them .", "The standard plural of dwarf is `` dwarfs , '' like Snow White 's Seven , though the novelist J.R.R. Tolkien has popularized dwarves .", "Planet and planetary are rooted in the Greek planasthai , `` to wander '' -LRB- Pluto wandered too far -RRB- .", "These words are gaining a political coloration .", "A recent visit to Australia by Mikhail Gorbachev drew a headline `` Former Soviet Supremo in Brisbane to Promote Planetary Perestroika . ''", "A Boston Globe article noted Colin Powell 's visit to the U.N. three years ago `` to convince the world of the planetary threat of Iraqi dictator Saddam Hussein . ''", "But environmentalists have been gravitating the use of planet , substituting it for world , as others cool toward global .", "`` Worried about a potential planetary crisis , '' William Broad wrote in The Times this summer , `` these leaders are calling on governments and scientific groups to study exotic ways to reduce global warming .", ". ``", "Roger Lewis of The Washington Post reported on `` green architecture '' at the National Building Museum , referring to `` atmospheric carbon dioxide and its planetary consequences . ''", "And Al Gore 's book `` An Inconvenient Truth '' was subtitled `` The Planetary Emergency of Global Warming and What We Can Do About It . ''", "To my ear , many liberals are taking up the vogue use of planet and planetary while conservatives cling to world , worldwide and global -LRB- strategy , not warming -RRB- .", "Marshall McLuhan would probably now be writing about a planetary village .", "`` To save the planet '' connotes practical environmentalism , while `` to save the world '' connotes dreamy idealism .", "`` The growing use of planetary , '' opines Tom Pitoniak , an editor at Merriam-Webster , `` might be a symptom of a changing worldview , of people getting more conscious of our situation in a multigalactic universe .", "But there are still contexts where , if you use planetary , you 'll sound as if you 're in the realm of spaceships .", "Newt Gingrich said , a few weeks ago , that we were in World War III .", "If he had said that this was ` Planetary War III , ' you would have thought that aliens were landing . 
''", "To whom , then , would the melodramatic suicide note -- `` Goodbye , cruel world ! '' -- be addressed .", "Though a planet is a physical world , the metaphoric world -- of ideas , of work , of entertainment -- is larger than the planet Earth .", "If a dwarf planet were a person , Pluto would be in a world of hurt .", "Evidence Some of our top intelligence officials are irritated at the way their analysts have been playing down reports from agents in the field of contacts between Hezbollah in Lebanon and Iran 's Revolutionary Guard .", "Gun-shy after criticism about past analyses of a series of contacts between Saddam 's Iraq and Al Qaeda , they are said to be `` unwilling to make judgment calls .", ". We 're not in a court of law , `` a source identified as '' a senior United States official `` told Mark Mazzetti of The Times .", "`` When they say there is ' no evidence , ' you have to ask them what they mean -- what is the meaning of the term ' evidence ' .", "`` -LRB- Guide to sourcemanship : `` a senior United States official '' is a political appointee over 30 .", "`` A senior agency employee '' is a midlevel careerocrat looking out a Langley window and dreaming about a book contract .", "No confidential informant is ever identified as `` junior , '' which would impugn the status of the source .", "-RRB- The job of a rhetorician is to answer rhetorical questions .", "I would sharpen the question , `` What is the meaning of the word evidence .", "`` by adding '' and how is it different from proof .", "`` Here 's an answer : First , forget the clich\u00e9 modifier credible .", "When it comes to evidence , what is believable to one analyst is incredible to another .", "Evidence may be hard or soft , conflicting or incontrovertible , it may be unpersuasive or convincing , exculpatory or damning , but with whatever qualifier it is presented , the noun evidence is neutral : it means `` a means of determining whether an assertion is truthful or an allegation is a fact . ''", "But here 's the rub that rubs so many intelligence analysts the wrong way : Evidence -- from tips , taps , tapes , testimony , confessions , weapons , documents , satellite photos and the like -- is not in itself proof .", "Only the conclusion that experienced minds draw from a weighing of all the evidence can approach proof .", "With that requirement for human judgment understood , intelligence analysts can take their best shot .", "THE WAY WE LIVE NOW : 9-10-06 : ON LANGUAGE Send comments and suggestions to : safireonlanguage@nytimes.com ."], "summary": ["William Safire column on term ` dwarf planet , ' new designation for Pluto , and trend among environmentalists to use terms ' planet ' and ` planetary ' instead of ` world ' .", "Also comments on distinction between ` evidence ' and ` proof ' ."], "publication": "nyt50", "label": [2, 15, 33], "tag": ["Magazine"]} -{"id": "1788760", "text": ["THE 80 's are back in the first part of the season , when three notable works from that decade will be on view .", "Each represented a breakthrough for its choreographer .", "Each has been infrequently seen since its debut .", "Twyla Tharp 's 1981 `` Golden Section , '' right , to be performed by Alvin Ailey American Dance Theater at City Center in December , is the final part of a larger work , `` The Catherine Wheel , '' to music by David Byrne , the lead singer of that quintessentially 80 's group Talking Heads .", "Ms. Tharp 's kinetic , whiplash movement , Mr. 
Byrne 's jerky , compulsive rhythms and Santo Loquasto 's reworking of his original sporty gold costumes turn the dancers into superheroes , raised to a higher plane through the sheer joy of movement .", "While Ms. Tharp was first garnering critical plaudits for `` The Golden Section , '' a young Belgian choreographer , Anne Teresa de Keersmaeker , was completing a piece she had started during a year at the Tisch School of the Arts at New York University .", "Her `` Fase '' -LRB- 1982 -RRB- , set to four pieces by Steve Reich , started Ms. de Keersmaeker 's career .", "A rigorously austere duet that will be performed during the Brooklyn Academy of Music 's `` Reich @ 70 '' celebration in October , `` Fase '' is minimal in its formal composition and rich in its effects .", "Simple repetitive turns and steps show two women in swirling black dresses slipping in and out of phase with each other in a tour de force of timing and memory .", "Ms. de Keersmaeker makes us hear the shifts of the musical rhythms and see the bones of her dance with a thrilling intensity .", "The French choreographer Angelin Preljocaj was hardly unknown when he created his version of `` Les Noces '' in 1989 .", "But the success of the work , to be performed by his company in November at the Joyce Theater , secured an international reputation for Mr. Preljocaj , who drew on his Albanian roots in his hard-driving vision of Stravinsky 's work .", "Five men and five women fling five dummies in wedding dresses into the air as they enact the apparently deeply instinctive rituals of courtship , marriage , sex and death with a physical energy and a violence that are at once alarming and exciting .", "Mr. Preljocaj 's chicly black-clad dancers do n't look like the peasants of Bronislava Nijinska 's 1923 original , but his piece indicates that in some ways nothing ever changes very much .", "THE NEW SEASON : DANCE ."], "summary": ["Roslyn Sulcas article describes three notable dance works from 80 's , each of which represented breakthrough for its choreographer , that will be revived in New York this season .", "Photo ."], "publication": "nyt50", "label": [0, 1], "tag": ["Arts"]} -{"id": "1788761", "text": ["AFTER the August doldrums , autumn offers more dance performances than one person could possibly attend , with various come-ons promising the sublime at each .", "Yet within months -- or even weeks -- most fade from memory , as if they had never happened .", "No , dance does n't last , but once in a while the memories do , and they keep audiences coming back .", "Whether adding to the anticipation of premieres by favorite choreographers or sweetening the return of cherished works , these afterimages , as the critic Arlene Croce called them , become part of each new performance .", "Twyla Tharp 's `` In the Upper Room '' -LRB- above , with David Hallberg and Paloma Herrera -RRB- returns for American Ballet Theater 's City Center season next month after last year 's smash run .", "Driven by a propulsive Philip Glass score , it would seem too relentless to produce specific recollections : a furious blur of bodies fades in and out of billowing smoke that , for once , does n't read as a theatrical clich\u00e9 .", "But somehow , once this 39-minute blur slows and 13 dancers stand dripping and heaving before rapturous applause , distinct personalities have emerged -- for me , most memorably in Ethan Stiefel 's fist-pumping exultation .", "It 's too soon to know if Mr. 
Stiefel , sidelined last winter with a knee injury , will dance in this grueling ballet .", "But we can hope .", "RoseAnne Spradlin cultivates an earthier physicality , one indelibly achieved with the unprettified , desperate bodies in her 2002 `` under / world . ''", "`` Survive Cycle , '' in development for a November premiere at Dance Theater Workshop , will feature original video , music and a landscape of shredded clothing .", "Unlike `` under / world , '' it includes no nudity , but I imagine moments of naked vulnerability to haunt the mind 's eye as that work still does .", "As always in dance , you had to be there , and Barbara Milberg Fisher was .", "A member of Ballet Society and New York City Ballet from 1946 to 1958 , she has written `` In Balanchine 's Company : A Dancer 's Memoir , `` coming Oct . 3 from Wesleyan University Press .", "In the introduction , Ms. Croce writes : `` One feels that , for Barbara Milberg Fisher , nothing supersedes the memory of once having been part of a magic circle .", "That memory is the treasure she imparts to us now . ``", "THE NEW SEASON : DANCE ."], "summary": ["Claudia La Rocco article describes two ballets and book that keep memories of dance performances alive .", "Photo ."], "publication": "nyt50", "label": [2], "tag": ["Arts"]} -{"id": "1788762", "text": ["TALL , red-blooded words -- the kind that wrestle big ideas to the ground -- are storming the stages of New York .", "Throughout their bracingly ambitious careers Tom Stoppard , August Wilson and David Hare have always insisted that conversation be something more exalted and exhausting than a mere after-dinner diversion .", "Now Mr. Stoppard alone bids fair to make this the most dynamically verbal theater season since Shaw was a young thing of 70 .", "-LRB- Shaw , incidentally , is fittingly represented by `` Heartbreak House , '' his rueful but energetic meditation on a social class paralyzed by world-annihilating war , in a revival from the Roundabout Theater Company . -RRB-", "In `` The Coast of Utopia , '' the first installment of which , `` Voyage , '' begins performances on Oct . 17 at the Vivian Beaumont Theater at Lincoln Center , Mr. Stoppard has filled not one but three plays with the lives of the intellectual forebears of the Russian Revolution .", "Their discussions and arguments , which span three decades of the 19th century and consume roughly nine hours of stage time , concern mind-quaking subjects like the dialectic of history , the path of nations , the impact of literature and even the limitations of their favorite weapons , words themselves .", "When the trilogy was first produced at the National Theater of London , this talk teemed with such passion that I left -LRB- to my surprise -RRB- more energized than depleted .", "The New York version is directed by Jack O'Brien , who propitiously proved his mastery of epic scope and towering language in the first-rate Lincoln Center Theater production of Shakespeare 's `` Henry IV . ''", "With `` Utopia , '' whose three parts will open sequentially , Mr. O'Brien will be overseeing -LRB- gulp ! -RRB- more than 40 actors in 70 roles .", "The ensemble includes Billy Crudup , Richard Easton , Jennifer Ehle , Josh Hamilton , Ethan Hawke , Amy Irving , Brian F . O'Byrne and Martha Plimpton , none of whom is likely to lapse into the automatic rhythms of `` yadda yadda yadda . ''", "Mr. 
Stoppard 's contemporary , David Hare , is confining himself to only one play , of conventional length .", "That 's `` The Vertical Hour , '' which begins previews at the Music Box Theater on Nov . 9 .", "The British Mr. Hare , who dissected American realpolitik in his Washington docudrama `` Stuff Happens , '' continues to focus on these United States with his drama about an American academic -LRB- and former war reporter -RRB- who experiences culture shock while on vacation abroad .", "If this all sounds a tad dry , you should know that the academic is played by the luscious -LRB- and brilliant -RRB- Julianne Moore .", "The director is Sam Mendes , whose last collaboration with Mr. Hare , `` The Blue Room , '' was a concentration of commercial catnip that brought new life to the career of Nicole Kidman , whose brief appearance in the play buck-naked is still discussed by dirty old theatergoers .", "A little less conversation has never been on the agenda for the characters of August Wilson .", "Before he died last year , Mr. Wilson completed the most ambitious cycle of American plays ever written .", "A chronicle of the African-American experience in the 20th century , the 10 plays are resonant with rich talk , both earthy and celestial , that considers nothing less than the history and destiny of a people .", "In a season celebrating Mr. Wilson , the Signature is presenting three of these plays : `` Seven Guitars '' -LRB- which opened last month and runs through Sept . 23 -RRB- , `` Two Trains Running '' -LRB- Nov . 7 to Dec . 31 -RRB- and `` King Hedley II '' -LRB- next February -RRB- .", "Lend them your ears , for in Mr. Wilson 's work , talk turns into unforgettable song .", "THE NEW SEASON THEATER ."], "summary": ["Ben Brantley article on plays ` that wrestle big ideas to the ground ' that are coming to Broadway this season .", "Discusses The Coast Of Utopia trilogy by Tom Stoppard , The Vertical Hour by David Hare and three plays by August Wilson .", "Photo ."], "publication": "nyt50", "label": [0, 1], "tag": ["Arts", "Theater"]} -{"id": "1788763", "text": ["THEATER has always been in the business of recycling and renovation .", "Even ye olde Greeks were retelling oft-told tales .", "But these days , as revivals seem to outnumber new plays , at least on Broadway , it is easy to point to their preponderance as evidence of the business 's superannuated condition .", "And yet the ability to revisit classic -- or even just old -- works is one of the major glories of the theater .", "Admit too that even stuffy old Broadway has a better track record when it comes to `` remakes '' than Hollywood , which can not manage even to improve on cheesy sitcoms when it goes foraging in the past .", "Three notable productions this fall exemplify different approaches to the craft of theatrical restoration .", "The Roundabout Theater Company tends to play it sensibly , safely and respectably .", "The formula is simple : Take a classic , add a few stars and a director of stature , and mix .", "This fall the company 's slate includes a welcome revival of Shaw 's twilight comedy-drama `` Heartbreak House , '' with a cast of reliable names like Philip Bosco , Laila Robins and Swoosie Kurtz as well as Lily Rabe , the daughter of David Rabe and Jill Clayburgh , who is displaying an impressive devotion to the stage at a time when most young actors have their sights set elsewhere .", "The director is Robin Lefevre .", "And the Roundabout deserves credit for its allegiance to Shaw , particularly as the 150th 
anniversary of his birth year comes to a close .", "Another season , another Stephen Sondheim revival .", "No reason to complain when they are exhilarating reconsiderations like John Doyle 's `` Sweeney Todd '' from last year , which boiled the ghoulish musical down to its firm bones and rattled them beautifully .", "The satisfying partnership of Mr. Doyle and Mr. Sondheim returns this season with a new look at `` Company , '' from 1970 .", "Mr. Doyle , who won a Tony for directing `` Sweeney , '' is once again using an economy of means to unearth new riches in a much-revived musical .", "In literal terms that means that once again the cast is doubling as the orchestra .", "But the presumption that Mr. Doyle 's unique musical methodology is merely a gimmick is refuted by this sleek , compelling production , which I saw at its premiere last season at the Playhouse in the Park in Cincinnati .", "Despite its treasured score , the rap on `` Company , '' in which the roving eye of a bachelor turning 30 traces the fault lines in the marriages of his friends , is that it has been permanently stuck in its period , as a sort of musical comedy answer to `` That 70 's Show . ``", "But Mr. Doyle 's production , which stars a magnetic Ra\u00fal Esparza as the chronically unattached Bobby , cleanly ushers it into a timeless present tense without playing fast and loose with George Furth 's book .", "`` My Deah , '' a new comedy by John Epperson , better known as the voiceless drag diva Lypsinka , typifies the reliable theatrical appeal of taking a fiendishly irreverent approach to a piece of antique dramaturgy -- in this case , Euripides ' revered `` Medea . ''", "I attended a reading of Mr. Epperson 's spoof sometime around 210 B.C. , it seems to me , and have been baffled by its failure to be produced in New York .", "Treading confidently a path forged by the respected auteurs Charles Ludlam and Charles Busch -LRB- possibly in similar heels -RRB- , Mr. 
Epperson sends up the original by transplanting it to the Deep South .", "Lypsinka herself will not be appearing -- a voiceless Medea is inconceivable -- but Nancy Opel , a dab hand at low-down comedy , should be perfect casting for the title role , a woman who makes your garden-variety steel magnolia look like a shrinking violet .", "The production , directed by Mark Waldrop , comes courtesy of the Abingdon Theater Company .", "THE NEW SEASON THEATER ."], "summary": ["Charles Isherwood article says that ability to revisit classic works is one of major glories of theater .", "Productions of Shaw 's Heartbreak House , Sondheim 's Company and John Epperson 's My Deah , based on Medea , described .", "Photo ."], "publication": "nyt50", "label": [3, 11], "tag": ["Arts", "Theater"]} -{"id": "1788764", "text": ["WELCOME to the new season .", "Same as the old season .", "And the season before that .", "For most of its history , Broadway was like a train station .", "Shows came and went .", "Some big hits would stick around for a few years , then leave before too long .", "But things have changed , or to be precise , stopped changing .", "September is traditionally the time when the marquees light up with new titles , productions fresh from the road tryout or from a successful run at a smaller theater .", "This year , however , 10 of the shows on Broadway have played more than 1,000 performances , 7 of them more than 2,000 .", "-LRB- Several others , like `` Spamalot , '' `` Jersey Boys '' and `` The Color Purple , '' seem capable of joining the thousand-performance club before they 're done , whenever that will be . -RRB-", "Half of the 10 longest-running shows of all time on Broadway -- `` The Phantom of the Opera , '' `` Beauty and the Beast , '' `` Rent , '' `` Chicago '' and `` The Lion King '' -- are still running , and two more -- `` Les Mis\u00e9rables '' and `` A Chorus Line '' -- are about to be revived .", "`` Somehow or other , '' said John Breglio , an entertainment lawyer and a producer of the new `` Chorus Line , '' `` Broadway has found a popular form of entertainment that keeps these shows going and going . ''", "All of which is good news for those that are already up and running , but potentially terrifying for anyone hoping to mount a new one .", "There are only 39 theaters on Broadway .", "If 10 are booked indefinitely , that means far fewer available sites for new shows , and the competition has risen accordingly .", "Nevertheless musicals or plays that are ready for the stage and have money in the bank generally find a place .", "A number of productions that proved themselves off Broadway will be making the transition this season .", "`` Spring Awakening , '' the rock musical about adolescence and its discontents that first played at the Atlantic , will open at the Eugene O'Neill on Dec . 10 .", "`` Grey Gardens , '' the musical based on the bizarre lives of Big Edie and Little Edie Beale -LRB- aunt and cousin of Jacqueline Bouvier -RRB- , will move to the Walter Kerr for a Nov . 2 opening .", "And `` The Little Dog Laughed , '' the satire of Hollywood mores that had a successful run at the Second Stage , will open on Nov . 
13 at the Cort .", "Meanwhile `` High Fidelity , '' which had been moving forward in the development process without knowing exactly where it might land , got a place to go in the Imperial when `` Dirty Rotten Scoundrels '' called it quits .", "And if `` The Wedding Singer '' ca n't make it through the less maritally inclined months of fall and winter , the Hirschfeld will be ready for its next tenant , most likely the Kander and Ebb musical `` Curtains . ''", "Rocco Landesman , the owner of the five Jujamycn theaters on Broadway , dismisses the idea that there 's any shortage of stages .", "`` If you have a show that anybody has a modicum of interest in , '' he said , `` you 're going to have a theater . ``", "What you may not have , though , is the theater you want .", "In addition to the 918-seat Walter Kerr , other small and mid-size theaters -- the 1,079-seat Gerald Schoenfeld and the 1,078-seat Bernard B . Jacobs -- which have often housed straight plays are being occupied by musicals : `` A Chorus Line '' at the Schoenfeld and `` Martin Short : Fame Becomes Me '' at the Jacobs .", "Other shows have to make do in some of Broadway 's least loved theaters , like the three that sit on cross streets -- gasp ! -- east of Broadway .", "TO some , the trend toward long-running sure things suggests that Broadway has dumbed down .", "But it can also be seen as the result of an effort , over more than 30 years , to make the showpiece of American theater a more customer-friendly business .", "`` Broadway as we know it today began in the 70 's , `` said Nancy Coyne , a founder of the Broadway marketing firm Serino Coyne , which has worked for 8 of the 10 longest-running shows .", "Back then business was off .", "The city was on the verge of bankruptcy , and Times Square was a danger zone .", "And the few Broadway and Off Broadway productions that had experimented with credit cards and phone-in reservations had limited success , said Paul Libin , the producing director at Jujamcyn , who at the time was the managing director for the Circle in the Square .", "Then in 1972 American Express was accepted , for the first time , at all Broadway box offices .", "That allowed people to book tickets by telephone , instead of having to visit the box office or put a check in the mail .", "By the early 1980 's the sales were conducted through centralized offices that could be open 24 hours a day .", "The advent of computerized ticketing made purchases easier still .", "What 's more , said Robert E . Wankel , the executive vice president of the Shubert Organization , `` computerizing tickets let us put tickets on sale for a year , which we were not able to do on a hard ticket .", "And now we can actually sell as long as we want to . ``", "In the early 1990 's customers got the opportunity -LRB- despite resistance from the Shuberts -RRB- to choose their seats when ordering tickets .", "By the late 1990 's people all over the world were able to buy tickets quickly over the Internet .", "And now , with e-ticketing , an actual ticket is no longer even necessary .", "Over the same period Broadway was learning a new word .", "In 1972 `` marketing meant going to the A & P , '' recalled Harvey Sabinson , a longtime press agent who later became the executive director of the League of American Theaters and Producers .", "And television commercials , he added , `` were talking heads repeating quotes . ''", "But in 1972 Mr. 
Sabinson and the team behind the musical `` Pippin '' started brainstorming and got the idea to include scenes from the actual show .", "A production number from `` Pippin '' was filmed on a soundstage in New Jersey that winter , and a new way of advertising Broadway was born .", "`` Pippin '' eventually played 1,944 performances , at the time the 10th-longest run ever .", "The success of the commercial introduced a whole new form of advertising .", "`` ` Grease ' was one of the very first ones , `` Ms. Coyne recalled .", "`` It was in its fifth year , and no one expected it to go much longer than that .", "The commercial gave it another three years . ``", "`` The Wiz , '' `` Evita '' and `` Cats '' followed suit .", "Still , the television commercial that may have been the most significant factor in expanding the Broadway audience was not even for a Broadway show .", "On Feb . 14 , 1978 , the New York State Department of Commerce introduced a new `` I Love New York '' commercial that focused on Broadway as a tourist attraction .", "The impact was immediate and enormous , said George Wachtel , the founder of Audience Research and Analysis .", "Before that , Broadway `` was a little provincial . ''", "But afterward , he continued , `` it was what New Yorkers did , it was what high rollers did when they came to New York .", "` I Love New York ' made Broadway Everyman 's theater . ``", "The TKTS booth , which opened in June 1973 , also brought in the Everyman , offering half-price tickets and helping to fill out the audience for many a struggling show .", "So between 1972 and 1978 Broadway had introduced itself to the credit card market , the computerized ticketing system , telephone reservations , modern television advertising and , by way of the `` I Love New York '' commercial , the rest of the country .", "The stage was almost set for the perpetual run , long before `` Beauty and the Beast '' was a gleam in Disney 's eye .", "What arose next .", "Prices .", "When `` They 're Playing Our Song `` opened in 1979 , Mr. Wachtel said , the top ticket cost $ 27 .", "Three years later the top ticket to `` Cats '' cost $ 45 , a jump of 66 percent .", "Prices continued to rise over the next decade , albeit a bit more slowly .", "But they did not end up scaring people away .", "`` Shows used to say , ' O.K. , we get to the winter , we die , ' '' said Emanuel Azenberg , a longtime producer and manager on Broadway who also cited the importance of new , more sophisticated accounting methods .", "`` But now you could sustain the bad winter as long as spring was coming again because of the prices . 
''", "Meanwhile New York was once more becoming a tourist attraction .", "Between 1991 and 2005 , according to the city 's convention and visitors bureau , the number of visitors to New York nearly doubled , to 42.7 million per year .", "And since 1985 , the league reports , attendance on Broadway has grown by almost five million , with nearly twice the proportion of out-of-towners , and a great increase in children and teenagers .", "No one took advantage of the growing -- and changing -- audience quite like the British impresario Cameron Mackintosh , who , through a combination of spectacular productions and business savvy , produced the three longest running Broadway shows in history .", "But other producers have still managed to come up with a few tricks .", "The year 2001 saw the introduction of premium-price ticketing , selling the best seats at `` The Producers '' for $ 480 .", "The Internet , and e-mail blasts , provided an advertising medium much cheaper than television .", "More complex business models evolve all the time .", "`` We have so much more sophisticated discounting , coupled with premium pricing , and we can anticipate seasonal fluctuations , '' said Kevin McCollum , a producer of the 10-year-old `` Rent , '' which created a lottery system for cheaper tickets .", "`` There 's a lot more shifting of schedules and trying to capture the audiences . ``", "And if a show does make it to the tenure track , it can take a page from the playbook of Fran and Barry Weissler , who with their 1994 revival of `` Grease '' and the 1996 revival of `` Chicago , '' perfected the star-replacement technique , giving shows that are years old a calculated injection of buzz every few months .", "-LRB- Is there a single person watching Usher 's performance as Billy Flynn who bought tickets just to see how `` Chicago '' is holding up .", "-RRB- With this ever-growing inventory of gimmicks , the question at the beginning of each season has become : Which new entry is going to join the marathoners ' club .", "This season will it be `` High Fidelity , '' the musical based on Nick Hornby 's best-selling novel .", "Or `` Mary Poppins , '' presented jointly by two heavyweights of the long run , Mr. Mackintosh and Disney Theatricals .", "Or will it be a dark horse contender , like `` Spring Awakening '' .", "After all , this time last year few could have predicted that `` Jersey Boys '' would be likely to tie up the August Wilson Theater for years .", "That 's a heartening example for producers .", "Except for the ones who want to put their shows in the Wilson .", "THE NEW SEASON : THEATER ."], "summary": ["Article on factors that have resulted in many more Broadway productions than in past running for years .", "New plays find it difficult to locate appropriate theaters .", "Reasons for long runs include new marketing techniques , television commercials and increasing percentage of audience made up of tourists .", "Photos ."], "publication": "nyt50", "label": [53, 28, 13, 15, 0], "tag": ["Arts", "Theater"]} -{"id": "1788765", "text": ["VISUAL artists who create performance works are nothing new , but Claude Wampler , a tiny woman with austere artistic intent , is a wonder .", "Ms. Wampler , who has a history of manipulating the line between audience and performer , presents her newest production , `` PERFORMANCE -LRB- career ender -RRB- '' at the Kitchen Nov . 
16 to 18 .", "As usual it is full of secrets .", "The culmination of several years of experiments , the production , as she recently explained , will `` merge a kind of visual-arts experience into a performance context , or vice versa . ''", "Much of Ms. Wampler 's performance work is about the perception of power and how swiftly it can change hands .", "For `` Bucket , '' presented at Performance Space 122 in 1999 , she hired attractive people to walk out in a huff during the show to test the real audience 's commitment .", "For `` Stable -LRB- Stupidity Project Part 10 -RRB- , '' seen at P.S. 122 in 2003 , Ms. Wampler seated the audience before a ring of Rottweilers wearing western outfits as a topless cowgirl gyrated next to the sound booth .", "As the minutes ticked by , the dogs -- huge , gentle and ridiculous -- stared at us .", "We stared back .", "-LRB- Who was more stupid .", "-RRB- As the audience reached the breaking point of fatigue and frustration , a screen was revealed onstage : Ms. Wampler had been secretly videotaping the crowd , turning the audience into performers .", "`` I 'm trying to remind the audience of their part in a performance , so that there 's a moment of instability , and then maybe some spontaneity can happen , `` she said in a recent telephone interview .", "`` But I 'm not taking advantage of the audience or using it as my material .", "It 's morally for them .", "When I 'm watching dance or theater pieces , I 'm praying that someone will do that for me . ``", "It seemed odd that Ms. Wampler -- a brave , independent and transgressive force in both the performance and the art worlds -- was excluded from last year 's Performa , a biennial of visual art performance in New York .", "But for her , the very word `` performance , '' purposely crossed out in her work 's title , is both used up and a little vague .", "`` This could be my last work , and I do n't even mean in the theater , but forever , `` she said .", "`` That 's the way I have to think about it .", "I 'm suspending my own disbelief and saying , ` If I had to make a final piece , what would it be .", "` That 's the career-ender . ``", "THE NEW SEASON : DANCE ."], "summary": ["Gia Kourlas article describes newest production of Claude Wampler , PERFORMANCE -LRB- career ender -RRB- , which will merge visual arts experience into performance arts context or vice versa .", "Wampler 's previous works and philosophy of dance discussed .", "Photo ."], "publication": "nyt50", "label": [3, 1], "tag": ["Arts"]} -{"id": "1788769", "text": ["FEW directorial debuts in recent years have raised such high expectations as `` In the Bedroom , '' Todd Field 's adaptation of the Andre Dubus short story `` Killings . ''", "That drama won five Oscar nominations , including best picture , actor and actress -LRB- for Tom Wilkinson and Sissy Spacek -RRB- .", "And Mr. Field received an award from the New York Film Critics Circle for best first film .", "Now , five years later , comes `` Little Children , '' his screen adaptation of Tom Perrotta 's satirically edged 2004 novel of contemporary suburban life and its discontents .", "In light of its forerunner the choice makes perfect sense .", "As he demonstrated with `` In the Bedroom , '' Mr. Field , an actor turned director , scans the depths of his characters ' souls and sees them whole .", "This psychological radar is a gift he shares with Mr. Perrotta , with whom he wrote the screenplay for `` Little Children . 
''", "The double-edged title refers not only to the suburban kids who more or less run their parents ' lives , but also to the 30 - and 40-something adults whose needy inner children cry out for release .", "One of its wistful themes is that , like it or not , having children brings your own youth to a crashing halt .", "You 're no longer at the center of things .", "The story 's emotional lightning rod is Ronald James McGorvey , a damaged middle-aged man who has recently returned to live with his mother after serving time for exposing himself to children .", "He is a pariah in their suburban Boston community and a focus of its collective fears , embodied by Larry Hedges , a disgraced former cop with a violent streak who wants to hound him out of town .", "In one of the novel 's most disturbing scenes , Ronald 's presence at a public swimming pool on a brutally hot day precipitates mass panic among the assembled mothers watching over their children .", "In the film Ronald is played by the former child star Jackie Earle Haley .", "His powerfully creepy portrayal is a pointed departure from the standard screen image of the child molester or flasher as an ordinary guy with an unfortunate kink .", "Two mismatched couples fill out the rest of the film , which will be shown Sept . 30 at the New York Film Festival and is set to open on Oct . 6 .", "Kate Winslet and Gregg Edelman play Sarah and Richard Pierce , and Jennifer Connelly and Patrick Wilson are Kathy and Brad Adamson .", "Noah Emmerich plays the former cop .", "The abundance of subplots in `` Little Children '' -- more than are in `` In the Bedroom '' -- is one reason that the film incorporates a sporadic voice-over -LRB- by an authoritative male narrator -RRB- to push the story along .", "A major subplot in the novel , and its most satirical thread -- Richard 's deepening addiction to Internet pornography -- is alluded to , then dropped .", "The movie concentrates on the affair between Sarah and Brad -LRB- called Todd in the novel -RRB- , who is so handsome the women refer to him as `` the Prom King '' when he shows up at the playground with his son .", "Brad , who plays quarterback on the local football team and is entranced by teenage skateboarding culture , is a classic case of a man in the throes of Peter Pan syndrome .", "But in the movie 's kind and patient view , we all carry varying shades and degrees of arrested development into adult life .", "THE NEW SEASON : FILM ."], "summary": ["Stephen Holden article discusses Todd Fields 's new film Little Children , his screen adaptation of Tom Perrotta 's satirically edged 2004 novel of contemporary suburban life and its discontents .", "Photo ."], "publication": "nyt50", "label": [3], "tag": ["Movies", "Arts", "Books"]} -{"id": "1788770", "text": ["THE Myth of Crazy Mel began seeping out of Hollywood long before he was arrested for drunken driving six weeks ago and burst out with the ugly , anti-Semitic comments that have put him in extreme damage-control mode .", "A 2004 episode of `` South Park '' about `` The Passion of the Christ '' depicts him as a looney-tunes guy bouncing off the walls in his underwear and whooping .", "Mel Gibson is `` crazy , dude , '' one South Park kid tells another .", "`` Mel 's crazy , but I like him , `` a name-dropping billionaire says in Bruce Wagner 's latest Hollywood novel , `` Memorial '' -LRB- released this month but written pre-meltdown -RRB- .", "And Mr. 
Gibson 's new film , `` Apocalypto , '' was already one of the most talked-about of the season , largely because of the Crazy Mel factor .", "Even for him , the oddball quotient is high .", "An action movie set in the dying days of the Maya civilization , the 15th century , `` Apocalypto '' was made in the Yucatec dialect without a single recognizable actor , and shot in the jungles of Mexico , where heavy rains slowed production and postponed its planned release from this summer .", "Photos from the set showed that Mr. Gibson had grown a full beard and let its central white streak grow longer than the rest , as if defiantly choosing to look like an aging eccentric .", "As a director , he has been some kind of mad genius so far , anticipating what audiences want with startling clarity : making a sword-and-sandals epic when it was no longer fashionable , yet winning Oscars -LRB- including best director -RRB- for `` Braveheart '' -LRB- 1995 -RRB- .", "Turning what seemed a gigantic folly -- a gruesome , subtitled , self-financed passion play -- into a $ 600 million worldwide blockbuster with `` The Passion '' -LRB- 2004 -RRB- .", "But those photos from Mexico and the subject of `` Apocalypto '' -- the hero , called Jaguar Paw , is chosen as a human sacrifice and makes a fast-paced escape through the rain forest -- were enough to make anyone wonder whether Mr. Gibson had finally gone around the bend and turned into some cinematic Kurtz , lost in the dark jungle .", "The film is still being edited , so there 's no way to know whether `` Apocalypto '' might be crazy-brilliant or just crazed .", "But we know from his recent mug shot that Mr. Gibson has lost the beard .", "-LRB- As those things go , it 's a glamour shot , showing that some actors can play to the camera no matter how high their blood alcohol level . -RRB-", "And we know that his drunken Malibu tirade casts an inescapable shadow over the film 's opening , raising many questions , including : Will `` Apocalypto '' really arrive on Dec . 8 .", "As recently as last week Touchstone , the Disney division releasing it , insisted it would .", "That 's about all the studio will say about a movie that must have become an albatross , because the crucial question is : How can this film be marketed .", "Mr. Gibson 's name and ability to chat up `` Apocalypto '' was its only real selling point .", "Now he trails apologies and questions about bigotry wherever he goes , which will make it pretty hard to stay on message about old Jaguar Paw .", "Whatever happens with `` Apocalypto , '' it would be unfair if his personal debacle were to overshadow Mr. Gibson 's immense gifts and accomplishments as a director .", "Apart from commercial success , his films have been rich with action , emotion and visual interest .", "`` The Man Without a Face '' -LRB- 1993 -RRB- was n't the safest or easiest choice for a first-time director .", "He cast himself as a former teacher whose face is horribly disfigured on one side , and whose innocent relationship as mentor to a teenage boy is questioned .", "The film may not be as gripping as it should be , but the camera moves fluidly in this pretty-looking period piece , set in Maine in 1968 , and the delicate subject is not overplayed until Mr. 
Gibson gives himself one scenery-chewing monologue near the end .", "You can almost feel him finding has way as a director while yearning to burst the limits of the movie 's small scale .", "By the time he got to `` Braveheart , '' just two years later , even the logo for his production company , Icon , looked better .", "By far the best of his three pre - `` Apocalypto '' films , this epic sounds as silly as ever when described , and the warrior 's blue face paint that Mr. Gibson wears as William Wallace , the 13th-century Scottish freedom fighter , has become the laziest of Mel jokes .", "Yet `` Braveheart '' still works as a big , enormously satisfying popcorn movie .", "Mr. Gibson brings all his star power to the screen , convincingly taking Wallace from romance to brutal vengeance when his wife is killed .", "The scope and relentless pace of the battle sequences remain thrilling .", "And in hindsight we can spot two elements that have become his trademark : torture scenes on screen and controversy off .", "When Wallace is tortured and martyred before a crowd in a prolonged scene at the end , the visceral depiction of suffering leads straight to `` The Passion of the Christ . ''", "`` The Passion '' remains difficult to sit through because of its extremely graphic scenes of Jesus ' torture : we see his flesh ripped off as he is scourged .", "Early in the film one eye is swollen shut , and by the end his face glows red with blood .", "But this is exactly the film Mr. Gibson set out to make -- unsubtle , grisly and disturbing -- and it 's easy to respect him for his uncompromising vision .", "You ca n't miss how deeply felt and eccentric a project it was , with a spark of zealotry that goes beyond simple faith .", "Yet `` The Passion of the Christ '' also gives new meaning to preaching to the converted .", "The film never proselytizes .", "It simply speaks forcefully to an audience of believers .", "The charges of anti-Semitism leveled at `` The Passion , '' primarily because of a scene in which the Jewish crowd calls for Jesus ' death , have come back to haunt Mr. Gibson now .", "There were equally wrongheaded attacks calling `` Braveheart '' homophobic because of a scene in which the English king , Longshanks , pushes his gay son 's lover out a tower window .", "In both cases Mr. Gibson 's critics confused the characters with the director .", "Any homophobia in `` Braveheart '' comes from Longshanks , who also resented the political influence the lover was gaining .", "And both Caiphas , the Jewish high priest , and Pilate , the weak-willed Roman governor , bear responsibility for Jesus ' death in `` The Passion . ''", "The fault line is not between Jews and Romans but between believers and nonbelievers .", "Of course , during his highway arrest , it was Mel Gibson himself , not some character , who spouted anti-Semitic remarks .", "And in the way that news reports about Mr. Gibson 's ultraconservative Catholicism and comments by his father , Hutton Gibson , denying the extent of the Holocaust bled into the reception for `` The Passion , '' so his current off-screen problems are likely to deflect attention from `` Apocalypto . ''", "The film 's Web site , put up months ago , still heralds it as `` a heart-stopping mythic action-adventure , '' and the trailer -LRB- a notoriously unreliable guide , but all we have -RRB- suggests it is squarely aimed at fans of `` Braveheart . 
''", "As Jaguar Paw races through the jungle pursued by torch-bearing warriors , the movie seems fraught with the kind of action that makes Yucatec or any other language superfluous .", "There will be subtitles , but Mr. Gibson , who wrote the screenplay with his former assistant , Farhad Safinia , has said there is n't much dialogue anyway .", "Some actors have extravagantly painted faces , while others are caked with white powder from a lime quarry .", "There is romance , or at least there has been sex : we see Jaguar Paw look tenderly at a pregnant woman .", "And a huge crowd scene at a Mayan temple is presided over by a man with clawlike nails straight from a horror film .", "More mysteriously , in May Mr. Gibson told Time magazine , `` The fearmongering we depict in this film reminds me a little of President Bush and his guys . ''", "That adds an intriguing , media-ready frisson , but now even attacks on the Bush administration ca n't displace the Mel Meltdown in any discussion of `` Apocalypto . ''", "Mr. Gibson 's drunken comments and his two public statements of apology have landed in a changed world of celebrity gossip and Internet speculation , which wo n't let this story fade .", "Internet chatter and celebrity magazines probably wo n't lead many people to decide whether to see `` Apocalypto '' or not , though .", "Loyal Gibson fans will view this as a sad story of alcoholism , forgive him and buy their tickets .", "Others will reject the apologies as mere spin and think his anti-Semitic outburst proves what `` The Passion '' led them to suspect .", "But then , any viewer incensed by `` The Passion '' was n't likely to go to `` Apocalypto '' in the first place .", "Disney 's marketing problem is more complicated : how to reach the vast middle ground of people who are simply looking for entertainment .", "`` Apocalypto '' is not as big a financial nightmare for Disney as it could have been .", "Mr. Gibson 's production company financed the film -LRB- the budget is reportedly under $ 50 million -RRB- , and Disney has only domestic distribution rights .", "But that arrangement is costly enough .", "Legally bound to release the film , the studio has no choice but to tough it out .", "And in the end , just as Mr. Gibson 's actions have made him his own worst enemy , he may have no choice but to become his own greatest asset .", "If the arrest report could travel with lightning speed , arriving on the Web site TMZ.com two days later , so a Mel Gibson apology tour could move as fast .", "-LRB- Alan Nierob , Mr. Gibson 's publicist , said plans for promoting the film have not been addressed yet , partly because the movie is n't ready to be seen . -RRB-", "In his second apology Mr. 
Gibson said there was `` no excuse '' for his remarks .", "And the issue of whether he is truly anti-Semitic is ultimately between him and his conscience .", "But that statement , asking for forgiveness and help from Jews in his recovery , was the shrewdest public relations gambit he could have made in a dire situation .", "It puts anyone who doubts his sincerity or refuses his apology in the camp of the unforgiving .", "Who wants to be there .", "And while the moviegoing audience might be tickled by celebrity gossip , it does n't really want to believe the worst of its stars .", "The Meltdown might even become a blip in his career .", "After all , atonement and forgiveness are as crucial to the Judeo-Christian tradition as money is to Hollywood 's .", "THE NEW SEASON : FILM ."], "summary": ["Caryn James article profiles director Mel Gibson .", "His new film Apocalypto , set in 15th century Mayan civilization and made in Yucatec dialect without single recognizable actor , is set to open in December .", "Interest in film is high not only because of its subject but because of controversies surrounding Gibson .", "Photos ."], "publication": "nyt50", "label": [6, 4, 5], "tag": ["Movies", "Arts"]} -{"id": "1788773", "text": ["IT was Monday at the Chateau de Versailles , the gates closed to tourists , and Sofia Coppola was camped out in a quiet corner of the grounds , resurrecting Marie Antoinette .", "A cold spring afternoon had been transformed into dawn with a spotlight that mimicked the rising sun .", "Wildflowers from an adjacent field had been replanted in the tall grass .", "Ms. Coppola arranged strands of a foot-high hairdo on the actress Kirsten Dunst , then stepped back and took a photo .", "Then the cameras started rolling , and the young queen sat on the edge of a reflecting pool , tipsily sipping the last of her Champagne with some hangers -on , her royal husband tucked away in bed .", "So this was what it must have been like for Marie Antoinette to have the place all to herself .", "Versailles administrators granted Ms. Coppola , the 35-year-old writer-director , unprecedented access to the chateau and its grounds , allowing her to film scenes for `` Marie Antoinette '' over 12 weeks in the spring of 2005 .", "Based on a best-selling book by Lady Antonia Fraser , this stylized , impressionistic portrait of the controversial French queen had its premiere this year at the Cannes Film Festival to mixed reviews .", "Even the two critics for The New York Times who saw the movie there came down on opposite sides of the fence .", "Since then , it has attracted more than a million moviegoers in France .", "It is set to open in the United States on Oct . 20 .", "`` I 'm so glad we were n't in Budapest or whatever , like , trying to fake it , `` Ms. Coppola said a few weeks after wrapping , upstairs at that Right Bank institution the Caf\u00e9 de Flore .", "Once favored by Jean-Paul Sartre and now the canteen of choice for American expatriates in the St . - Germain-des-Pr\u00e9s neighborhood , it is next door to the apartment she rented while making the movie .", "`` It 's so cool to be in the real places .", "There 's something that just gets you into the mood .", "They let us shoot in places people were n't allowed to normally , like Marie Antoinette 's private theater .", "They were like , ` This is your home . '", "`` The queenly welcome had to do with the fact that Ms. 
Coppola is something of a cult figure in France .", "The French admire her talent : she won a best-foreign-film C\u00e9sar , the French Academy Award , in 2005 for her last film , `` Lost in Translation , '' about a young American who spends most of a trip to Tokyo holed up in the Park Hyatt .", "But they also esteem her much-photographed , tastefully chic personal style .", "And her status as Hollywood royalty does n't hurt : her father , the director Francis Ford Coppola , is a demigod in France .", "So her decision to make Marie Antoinette the star of her latest film has resulted in a grand comeback for the much-maligned queen .", "Along with director and star , Marie Antoinette herself now ranks as a fashion icon .", "Magazines have devoted special issues to her , featuring her portrait on the cover .", "French luxury houses have issued Marie Antoinette merchandise .", "Several books have appeared , tied not to the 250th anniversary of her birth last year but to the opening of the film , filling many an hour on both high - and lowbrow talk shows .", "It is as if the French needed the hype of a Hollywood movie to get them excited about their own history .", "But Ms. Coppola said she was more interested in the emotional life of her young heroine .", "`` I wanted to make a personal story and not a big epic historical biopic , '' she said , adding that she wanted to tell the story from the point of view of a 14-year-old Austrian girl who is shipped off to France in 1770 to marry the future King Louis XVI , who is 16 .", "She used Lady Antonia 's dense , anecdotal book as her primary source .", "`` I would get bored when it would get sort of too detailed , '' she said of the book .", "`` I did n't want to get bogged down with history , but to focus on the personal relations between these people .", "Louis would n't sleep with her , so she wanted to go out and party -- like someone in a bad marriage going shopping .", "It just seemed like the same old story . ``", "This Marie Antoinette is a party girl with a gay hairdresser and a shoe fetish .", "She drowns her sorrows in bonbons and Champagne while , beyond the castle walls , the people starve .", "As for her famous response when told that the masses had no bread -- `` Let them eat cake '' -- both Lady Antonia and Ms. Coppola dismiss it as gossip .", "-LRB- In the film , Marie Antoinette herself laughs it off . -RRB-", "Speculation that she had a passionate affair with the Swedish count Axel Fersen is portrayed as fact .", "Ms. Coppola 's film takes other liberties : she eschews the often stately colors used in portraits of the French court for pastels inspired by the famous macaroons of the Parisian pastry house Ladur\u00e9e .", "She relied on the costume department to vet dress styles or advise on the appropriate size of a bow -- but only to a point .", "`` I want it to be believable , so that it does n't take you out of the story , `` Ms. Coppola said , '' but I 'd rather pick a heel that is more appealing to me that maybe was invented 50 years later .", "I 'm not a fetishist about historical accuracy .", "I 'm just , like , making it my thing . ``", "Anyway , as she points out , `` they did n't speak English in Versailles , either . ``", "The actors speak in their own mostly American voices .", "`` I was trying to make it sound normal , '' she said , `` although I 'm a little afraid of it ever sounding a little too much California Valley Girl .", "I 'm trying to get them to say ` all right ' instead of ` O.K. 
, ' to make it a little more formal than we would be , but not to feel like you were in a stiff period movie . ''", "For several days in April 2005 , the production moved into the H\u00f4tel de Soubise , a city palace that is now part of the National Archives in Paris .", "Hairdressers carrying 18th-century powdered wigs on plastic heads walked the narrow streets of the Marais district , on their way to and from the set .", "The actor Jason Schwartzman , who is Ms. Coppola 's cousin , was coming out of his trailer dressed as Louis XVI when a teenager in oversize shorts dropped to the sidewalk and began to bow in mock homage to the king , crying , `` Le roi ! Le roi ! '' Girls in long gowns , powdered wigs and sunglasses smoked cigarettes and talked on their cellphones between takes .", "In an ornate 18th-century salon , Ms. Coppola huddled by a monitor in wordless conference with her brother , Roman , who is also a filmmaker , and who was on the set shooting secondary scenes .", "`` I do n't have to say anything .", "He can go shoot something and he 'll get exactly what I want , `` Ms. Coppola said .", "`` It 's like having another brain . ``", "Ms. Coppola said she was just following in her father 's footsteps by hiring family members .", "Her mother , Eleanor , who filmed the `` Marie Antoinette '' making-of documentary , did the same job on Francis Ford Coppola 's `` Apocalypse Now . ''", "And he acted as executive producer for his daughter on this film .", "`` I mean , they 're just doing what they want .", "They sort of have a little bit of a bratty attitude . ``", "Mr. Schwartzman said he appreciated the mood on the set .", "`` One thing that 's really nice about Sofia is , like , you do n't realize you 're working , `` he said .", "`` And she talks to you about your character in a modern context , which you almost need .", "Because they were people -- they 're not just facts and dates and that kind of stuff -- so she gives you something you can relate to . ``", "Ms. Coppola said she wrote the lead roles for Mr. Schwartzman and Ms. Dunst -LRB- who starred in her directorial debut , `` The Virgin Suicides , '' in 1999 -RRB- .", "`` Kirsten to me has just , like , a fun , bubbly , effervescent quality , and that 's how I think of Marie Antoinette , `` she said .", "`` And she also has a depth .", "And she 's German , so I thought she had the coloring and the features . ``", "Ms. Dunst , who has been acting in films since she was 7 , said that she empathized with the young queen .", "`` She was a girl surrounded by grown-ups who wanted things from her and judged her , and she did n't exactly know what people expected from her , `` Ms. Dunst said during a lunch break , in sweats and her pink-cheeked Marie Antoinette makeup and giant hair .", "`` I could relate to that kind of loneliness . ''", "By the time the film opened at Cannes , Marie Antoinette mania had reached such a fevered pitch that the French news media -- which had helped to generate it -- seemed stunned that the movie itself might not live up to the hype .", "`` I was a little bit disappointed , '' the normally gushy Cannes veteran Laurent Weill said apologetically during one of his nightly television reports from the Croisette .", "After some of the Cannes audience booed the film , another national newscaster told her audience , with a dash of understatement , `` Sometimes the most anticipated films are not the most appreciated . ''", "For her part , Ms. 
Coppola calmly repeated in every interview that a strong reaction -- good or bad , anything but indifference -- was what she hoped for .", "Many critics and observers saw the film as a comment on modern celebrity youth culture , with Marie Antoinette as an 18th-century Paris Hilton .", "Others wondered aloud if Ms. Coppola 's sympathetic portrait of her heroine as a poor little rich girl had more to do with her own experience as a child of Hollywood and privilege .", "Why , they asked , did Ms. Coppola focus on the queen 's frivolous lifestyle and teenage psyche , ending the movie well before she meets her destiny at the guillotine .", "All her films have dealt with child-women during painful , alienated moments in their young lives .", "`` I see them like a trilogy , and this is the final chapter , '' she said at the Caf\u00e9 de Flore .", "`` It 's a continuation of the other two films -- sort of about a lonely girl in a big hotel or palace or whatever , kind of wandering around , trying to grow up .", "But in the other ones , you know , they 're always sort of on the verge .", "This is a story about a girl becoming a woman .", "And in this , I feel like she does . ``", "THE NEW SEASON : FILM Correction : September 17 , 2006 , Sunday An article last Sunday about the making of the film `` Marie Antoinette '' misstated the location of Caf\u00e9 de Flore , where the director , Sofia Coppola , was interviewed .", "It is in Paris on the Left Bank , not the Right ."], "summary": ["Article discusses director Sofia Coppola 's new film Marie Antoinette , which premiered at Cannes Film Festival to mixed reviews .", "Coppola was granted twelve weeks of unprecented access to chateau and grounds of Versailles for filming .", "Film portrays Marie Antoinette as party girl .", "Photos ."], "publication": "nyt50", "label": [6, 7], "tag": ["Movies", "Arts"]} -{"id": "1788776", "text": ["PERHAPS partly because it was made at a time when movies often threw in a little something for everyone , there 's a sappy love story tucked into the corners of `` Frankenstein . ''", "But it 's inconsequential : there could be nothing in `` Frankenstein '' more romantic than Boris Karloff 's monster , right .", "With his heavy-lidded gaze and his twisted smile , he 's not quite human and yet more human than we can almost bear .", "To honor the movie 's 75th anniversary , Universal unveils this special edition with extras that have not appeared on previous releases .", "-LRB- On the same date Universal will also release the 75th anniversary edition of `` Dracula . ''", "And Sept . 19 marks the release of the Boris Karloff Collection , featuring some of his lesser-known films , including `` Tower of London '' and `` The Strange Door , '' which co-stars Charles Laughton . 
-RRB-", "We learn that Karloff would entertain his fellow actors on-set with Cockney ditties , and insisted on breaking for tea .", "This charming actor , underneath layers of carefully conceived makeup , created a character whose resonance has not diminished over the years .", "The monster 's cries of anguish , frustration and finally pain are the evidence of his curse : he is allowed to walk among men but not invited to be of them .", "His clumsy underestimation of the fragility of life results in the death of a little girl , and the scene in which she drowns , cut from the version of the movie we all used to see on television , is heartbreaking and shocking even today .", "How could any of us not know how this monster feels .", "-LRB- Universal Home Entertainment , Sept . 26 , $ 26.98. -RRB-", "STEPHANIE ZACHAREK THE NEW SEASON : FILM / DVD 'S ."], "summary": ["Article discusses new DVD of film Frankenstein , starring Boris Karloff , on the 75th anniversary of film 's original release .", "Photo ."], "publication": "nyt50", "label": [4, 1], "tag": ["Movies", "Arts"]} -{"id": "1788778", "text": ["G . W . PABST 'S tragic fable , from two plays by Frank Wedekind about a prostitute whose love for -- and conquest of -- a married man begins her spiral of decline , is one of the most beautifully filmed of all silent movies .", "Pabst 's unobtrusive but masterly compositions and disarmingly delicate lighting effects are the stuff of rapture .", "Then again , when Louise Brooks , above , is your star , it 's your duty to place her in a context of perfection .", "Brooks plays the doomed , exquisite Lulu , who , with her sable bob and mischievous , calculating smile , became an enduring symbol of jazz-age freedom and joyousness .", "If beauty and saucy charm were all Brooks had to offer , she would have ended up a caricature .", "But this performance is so vital and so infinitely shaded that it inspires wonder each time you see it .", "Brooks 's Lulu is an image of relaxed modernity : she may be willful , petulant and manipulative , but she is also a woman striding toward an uncertain future in a world that does n't provide easy comforts .", "On the night of her disastrous wedding to the rich Dr. Sch\u00f6n -LRB- Fritz Kortner -RRB- , who believes he adores her but really wants to possess her , she stands in front of the mirror , preparing to remove her wedding finery .", "The first thing to come off is a new strand of pearls , which represent the safe , pampered life she has been striving for .", "She lets the glowing beads pool in the palm of her hand , and we see her face in the mirror , an ivory moon framed by darkness .", "The faint smile that crosses her lips is not one of greed or catlike satisfaction but of quiet relief : she has set herself up for a life without worry and strife , not yet knowing that such a life is impossible .", "We have seen how frivolous and thoughtless she can be , and we have witnessed her gentle treachery , but judging her is unthinkable .", "We ca n't trust Lulu .", "We can only believe her .", "In addition to a new , restored transfer of the film , this two-disc set has four different musical scores -LRB- two of which were commissioned for this release -RRB- and a booklet that includes an essay by J . Hoberman , the Village Voice film critic , and Kenneth Tynan 's essential Brooks profile , `` The Girl in the Black Helmet . ''", "-LRB- Criterion Collection , Nov . 10 , $ 39.95. 
-RRB-", "STEPHANIE ZACHAREK THE NEW SEASON : FILM / DVD 'S ."], "summary": ["Article discusses new DVD of G W Pabst film Pandora 's Box starring Louise Brooks .", "Photo ."], "publication": "nyt50", "label": [2], "tag": ["Movies", "Arts"]} -{"id": "1788779", "text": ["KATHRYN BIGELOW made `` Point Break '' as if she were determined to show up all the big boys as sissies .", "She pumps this movie full of adrenaline like someone determined to see how much helium a balloon can hold before it bursts .", "This is the sort of picture in which the hero -LRB- Keanu Reeves , above left -RRB- is named Johnny Utah , and a bad guy about to meet his maker exclaims , `` I 'll see you in hell , Johnny . ``", "It would be ridiculous if it were n't so delirious .", "Utah is an F.B.I. agent who goes undercover to bring down a group of surfers financing their pursuit of the big waves with a string of bank robberies .", "His nemesis is Bodhi , the surfing guru-bank robber who , as played by the marvelous Patrick Swayze , above right , is like a stoned version of the chest-thumping he-man Robert Shaw portrayed in `` Jaws . ''", "The Carlos Castaneda of machismo , Bodhi is ready with a quasi-mystical justification of everything from riding a tube to planning a heist .", "Ms. Bigelow is in love with macho thrill seeking , but she ca n't resist kidding it .", "So as `` Point Break '' grows more intense , it becomes funnier .", "She keeps the movie in perpetual motion , whether speeding along in a car , gently bobbing on a surfboard or , in the movie 's stunning visual set piece , sky diving .", "The cinematographer , Donald Peterman , makes you feel as if you 're hovering beside the sky divers as they caper around in midair before pulling the rip cord .", "When the five come together to form a floating circle , it 's as if you 're looking at some wacko version of King Arthur 's knights , and it 's elating .", "This is the action movie as goofy rapture .", "-LRB- 20th Century Fox Home Entertainment , Oct . 3 , $ 19.98. -RRB-", "CHARLES TAYLOR THE NEW SEASON : FILM / DVD 'S ."], "summary": ["Article discusses new DVD of film Point Break directed by Kathryn Bigelow .", "Photo ."], "publication": "nyt50", "label": [8], "tag": ["Movies", "Arts"]} -{"id": "1788780", "text": ["DUMPED into theaters as an exploitation cheapie in 1968 , this lyrical thriller is a minor American classic .", "As Dennis , a young man trying to get his feet on the ground after being released from a reformatory , Anthony Perkins , right , gives perhaps his richest performance , certainly his most touching .", "Just as Perkins was trying to leave behind the juvenile roles that had typecast him , Dennis , a basically decent fellow , is trying to become an adult .", "But even when he succeeds in hiding his past , he ca n't resist playing the smart aleck or slipping into a world of make-believe .", "Dennis persuades the town golden girl Sue Ann -LRB- Tuesday Weld , right -RRB- to slip into that world with him .", "The twist is that she 's every bit the psychopath people assume Dennis is .", "And since she 's bored with the small town and hates her mother , she 's ready for anything .", "Lorenzo Semple Jr . 
` s screenplay is beautifully worked out , and the director , Noel Black , does a superb job of modulating the film 's conflicting elements : the coming-of-age story and the thriller .", "Sensitive and unsettling , `` Pretty Poison '' at times suggests a smaller-scale version of `` Splendor in the Grass , '' without the Freudian gush .", "And when violence breaks out in the suburban setting , Mr. Black plays it straight , not for the cheap irony that won so much praise for Terrence Malick 's phony , condescending `` Badlands . ''", "A large part of what makes `` Pretty Poison '' chilling is Ms. Weld 's amazing performance .", "It is no stretch to cast her as the prettiest girl in town , but resisting the urge to telegraph a character 's craziness takes real discipline .", "Ms. Weld pulls off the neat trick of making Sue Ann seem even more like a normal , carefree teenager after she kills .", "Pointing a gun , as she 's preparing to commit a murder she has long dreamed of , Ms. Weld 's smile has never been sweeter .", "-LRB- 20th Century Fox Home Entertainment , Sept . 5 , $ 14.98. -RRB-", "CHARLES TAYLOR THE NEW SEASON : FILM / DVD 'S ."], "summary": ["Article discusses new DVD of 1968 thriller Pretty Poison starring Anthony Perkins and Tuesday Weld .", "Photo ."], "publication": "nyt50", "label": [10], "tag": ["Movies", "Arts"]} -{"id": "1788781", "text": ["PLENTY of Jane Austen devotees nearly fainted in horror at the liberties Joe Wright took with his 2005 adaptation of `` Pride & Prejudice '' .", "The DVD release of Robert Z . Leonard 's far more outlandish 1940 version with Greer Garson and Laurence Olivier , above , is likely to set many more Regency bonnets quivering with indignation .", "Forget that Austen 's dialogue has been zingered up and accented with a whimsical score , and that the costumes , by the Hollywood gown god Adrian , have a distinct and bizarre antebellum flair .", "-LRB- Georgian , Southern -- what 's the difference .", "-RRB- The picture is deeply , ridiculously pleasurable not in spite of its anachronisms but because of them .", "As the critic Robin Wood said , `` It is not a very good film , but at least it is alive . ''", "Garson makes a highly unbelievable Lizzy Bennet -- her satiny coolness comes off mostly as indifference -- but at least Olivier 's Darcy is there to make up the deficit .", "Olivier may be playing a matinee idol as much as a literary character , but he manages to meld the two seamlessly , playing Darcy 's propriety and reserve as a subtle , erotic mating dance .", "Maybe Austen herself would have approved of that , but even if not , this `` Pride and Prejudice '' at least has a crazy , joyful sheen .", "The trailer , included here , gives us a shot of the five Bennet girls chattering away in their drawing room as the words `` Five love-hungry sisters and how they got their husbands ! '' splash across the screen .", "Yet even the boldness of that ad campaign has a certain charm .", "Sure , MGM was trying to sell a classic to the masses , but sometimes , the movies are the crossroads where schoolgirls and scholars meet .", "-LRB- Warner Home Video , Oct . 10 , $ 19.98. 
-RRB-", "STEPHANIE ZACHAREK THE NEW SEASON : FILM / DVD 'S ."], "summary": ["Article discusses new DVD of 1940 film of Jane Austen 's Pride and Prejudice starring Greer Garson and Laurence Olivier .", "Photo ."], "publication": "nyt50", "label": [1], "tag": ["Movies", "Arts"]} -{"id": "1788782", "text": ["BEFORE the accident that paralyzed Christopher Reeve , it was all you could do to convince people that such a charming , good-looking guy was also a wonderful actor .", "After the accident he became so beloved that his performances were almost beside the point .", "It would be fitting if this eight-disc collection -LRB- two discs for each of his four Superman movies -RRB- focused attention on the beauty of Reeve 's performances as the man from Krypton and his alter ego , Clark Kent .", "Reeve was blessed with some of the best comic timing the movies have seen since the heyday of screwball .", "His Superman takes great pleasure in playacting the super nerd Kent .", "You feel a prankster 's joy behind Kent 's every klutzy move , every whinging bit of jealousy he admits to feeling over the adoration Margot Kidder 's Lois Lane , above , has for Superman .", "In `` Superman II '' -LRB- 1980 -RRB- , directed by Richard Lester and the best entry in the series , Superman gives up his powers so he can love Lois as a man .", "Reeve enacts a remarkable scene when , for the first time , Superman finds himself a vulnerable mortal after getting into a fight with a bully at a roadside diner .", "Crumpled to the ground , Kent notices the blood coming from his mouth and stares at it , giving a sick little laugh reverberating with a fear he never before felt .", "To watch it is to feel all the security ever inspired by a superhero shaken to the core .", "The extras in this set were not available for preview -LRB- nor was the cut of `` Superman II '' directed by Richard Donner , being released on a second disc .", "Mr. Donner left after disputes with the producers , and Mr. Lester was brought in to finish the movie -RRB- .", "It is a nice tribute to Reeve that in this year 's `` Superman Returns , '' the director , Bryan Singer , captures a mournful , moving spirit that keeps faith with the deepest and most painful places Reeve ventured to go during his time in the cape .", "-LRB- `` Superman : The Christopher Reeve Collection , '' Warner Home Video , Nov . 28 , $ 79.98.", "`` Superman II : The Richard Donner Cut , '' Warner Home Video , Nov . 28 , $ 24.98. -RRB-", "CHARLES TAYLOR THE NEW SEASON : FILM / DVD 'S ."], "summary": ["Article discusses new eight-DVD collection of four Superman movies starring Christopher Reeve .", "Photo ."], "publication": "nyt50", "label": [2], "tag": ["Movies", "Arts"]} -{"id": "1788789", "text": ["WHEN Natalie Wells bought a home in Englewood , N.J. , a year ago , she was unaware that her American pit bull terrier was illegal to own in the city .", "Shortly after moving in , she was told by one of her daughters about a city law that banned the breed , commonly called pit bulls , along with several similar breeds and Rottweilers .", "Under the 1999 law , even this year 's best-in-show winner at the prestigious Westminster Kennel Club Dog Show , Rufus , a colored bull terrier from Holmdel , N.J. , would be banned in Englewood .", "`` I pretty much knew in my gut it was n't right , `` Ms. Wells said .", "In July , Ms. 
Wells filed a challenge to the law in Bergen County Superior Court along with Mia Rodriguez , a neighbor who also owns a pit bull , and the American Dog Owner 's Association of Castleton , N.Y.", "Last month , Superior Court Judge Jonathan N . Harris agreed with Ms. Wells and ordered the city to stop enforcing the law because it was in conflict with a New Jersey statute that prohibits restricting dogs by breed .", "`` Cities do n't have the right to make laws that violate state law , `` said Flora Edwards , the lawyer who represented the plaintiffs .", "`` If the legal drinking age is 21 under state law , the City of Englewood or Montclair ca n't say it 's 25 or 18 . ``", "According to a Centers for Disease Control study , the pit bull breed was responsible for more dog-bite fatalities than any other breed from 1979 to 1998 , the latest year for which figures were available .", "The breed was responsible for 66 of 238 dog-bite fatalities during that period .", "Rottweilers were next , with 39 .", "The New Jersey Vicious and Potentially Dangerous Dog Act sets out criteria for dealing with aggressive dogs , but prohibits breed discrimination .", "New York has a similar statute .", "Connecticut 's law does not ban breed discrimination .", "Despite such laws , some communities still have restrictions on specific breeds .", "They range from outright bans to requiring property insurance coverage and the use of shorter leashes and muzzles in public .", "Tanya Ford , village clerk in Hempstead , N.Y. , said she was aware of no challenges to its law , which categorizes American pit bull terriers and several related breeds as vicious dogs , requiring that they be muzzled when walked and kept on a chain with a minimum strength of 300 pounds and not exceeding three feet in length .", "Owners must also have liability insurance of $ 100,000 .", "Mahlon Goer , a pit bull owner who tracks legislation in New York for the American Dog Owner 's Association , said the state still allowed insurance companies to drop customers or deny property insurance to prospective customers based on the breed of dog they own .", "Underwriting policies vary , according to the group , but beyond pit bulls and related breeds , the list includes Siberian huskies , Great Danes , German shepherds , St . 
Bernards and Dalmatians .", "Opponents of breed-specific laws say it is difficult to know how many communities have such laws because keeping tabs at the local level can be difficult unless laws are highly publicized .", "According to the American Kennel Club , last year it tracked 105 communities around the nation where breed-specific legislation was pending , enacted or defeated .", "The group had tracked another 76 through July .", "Among the municipalities in the region that have breed-specific laws are Larchmont , Sands Point and Hempstead in New York and Millville and Atlantic City in New Jersey .", "Numerous communities across the United States have such laws .", "One of the most controversial is in Denver , where authorities have euthanized more than 1,000 pit bulls since the reinstatement of a ban on the breed in May 2005 .", "The city 's animal control division had suspended enforcement of the ban in 2004 after the governor signed a bill restricting local governments from outlawing certain breeds .", "But the city successfully sued , arguing that the bill violated its home-rule authority .", "In Englewood , Douglas Bern , a lawyer who served on the City Council when the law was passed , said the council was responding to incidents in a public park where the dogs were being used to intimidate people .", "He said the police had also felt threatened by pit bulls when responding to a call at a home .", "The city argued that the municipal law complemented state statute , which was designed to address situations where `` existing local laws inadequately address the problem '' of aggressive dogs .", "`` The city of Englewood 's ordinance in this regard actually furthers and is consistent with the legislative intent , which is to address a void where local governments have not addressed the area of vicious or potentially dangerous dogs , `` the city said in a court brief .", "Under the ordinance , bull terriers , Staffordshire bull terriers , American pit bull terriers , American Staffordshire terriers , Rottweilers or `` any dogs of mixed breed which has the appearance or characteristics of being predominantly of the breeds , '' were banned from the city .", "Some summonses had been issued under the law , but city officials did not know how many .", "`` It 's like there 's a stigma for having one of these kinds of dog , `` said Ms. Rodriguez , who owns an ailing 8-year-old pit bull named Cyrus .", "The Englewood City Council will discuss the law at its Sept . 19 meeting , said Scott Reddin , the council president .", "He said he did not expect the council to challenge the court 's decision .", "`` We were profiling certain breeds and that was found to be unconstitutional , '' he said .", "`` I do n't think the council will have any problem rescinding that . ``", "Numerous national dog owner and veterinarian associations have come out against breed-specific laws , saying they are unfair and do not address the problem of aggressive and dangerous dogs .", "`` As we like to say , punish the deed , not the breed , '' said Lisa Peterson , a spokeswoman for the American Kennel Club .", "`` We think breed-specific laws are unfair to responsible dog owners . ''", "Barbara Bishop , who owns Rufus , the top dog at the Westminster show , said she was trying to use the dog 's success to highlight the unfairness of breed-specific bans .", "`` We want to let people know that every dog has teeth and every dog can bite , whether it 's a Chihuahua or a bull mastiff , `` Ms. 
Bishop said .", "`` Every dog will be a product of what it 's brought up to do . ``", "Ms. Bishop attributed much of the image problem of the pit bull breeds to people who train them to be vicious , including drug dealers who use them as guard dogs .", "`` We have Rufus , who 's the top winning colored terrier of all time , and we still have people stop in the street and say , ` There 's a pit bull , ' `` she said .", "For Ms. Wells , the law seemed even more absurd because her 12-year-old pit bull , Sentry , has cataracts and has had cancer , heart surgery and a hysterectomy .", "`` She is a member of the family , '' said Ms. Wells , who has two daughters , ages 34 and 32 .", "`` My kids tease me all the time and say she 's my favorite daughter . `` ."], "summary": ["Article on legal challenges pit bull owners have been making against local laws in New York City metropolitan area that ban or restrict certain dog breeds .", "Opponents of breed-specific laws say it is difficult to know how many communities have such laws .", "Numerous national dog owner and veterinarian associations oppose breed-specific laws , saying they are unfair and do not address problem of aggressive and dangerous dogs .", "Photos ."], "publication": "nyt50", "label": [39, 20], "tag": ["New York and Region"]} -{"id": "1788791", "text": ["HE sits in his wheelchair as the family rushes around him .", "He can not move much , or say more than hello .", "He can not participate in the summer activities that everyone pursues with great vigor day after day .", "If it 's warm enough , he goes out onto the deck and snoozes in the sun with the dogs .", "Unlike them , however , he does n't jump up and make excited noises when people come .", "At most , he slowly turns his head and smiles .", "Everyone speaks to him politely , but not for long .", "What 's the point .", "He ca n't say more than a couple of words , and it 's hard to tell how much he understands .", "He is my stepfather , Peter , an 88-year-old man who in the last decade has been transformed from a lively and dynamic person into not much more than a body occupying space .", "He has post-polio syndrome , a condition that seeps the strength from his upper body as steadily as it weakened his legs when he was a teenager .", "A couple of strokes have further debilitated him .", "As my son , Asher , said to my mother one day , it 's as if he 's hardly a person anymore .", "And yet this is n't how Asher , 14 , behaves toward him .", "He constantly monitors Peter 's feet to see if they 've slipped off the footrests of his wheelchair .", "He always asks if Peter wants something to drink .", "His recognition of the full extent of what it means to be a person goes beyond his frustration at Peter 's limitations .", "Asher is concerned with Peter 's comfort , his feeling of inclusion .", "Peter 's situation brings out Asher 's own humanity .", "Peter is certainly a person to my mother , Addy , though she has no illusions about his abilities .", "He is her third husband , the one who was finally her friend .", "She does what she can to make him comfortable and to replicate his old habits .", "Since his only real pleasure is food , she makes him good meals for lunch and dinner .", "At night they listen to Amy Goodman on NPR and then watch Chris Matthews and a couple of episodes of `` Seinfeld '' or `` Curb Your Enthusiasm . 
''", "On Tuesdays , Peter 's longtime men 's lunch group comes over to eat with him and discuss books and politics .", "Peter does n't participate , but he enjoys the routine .", "Last summer he could still join them at the local restaurant .", "He would motor up the street in his Jazzy wheelchair with an orange pennant waving above his head to warn cars away .", "He is far from being able to do anything like that now .", "Peter needs to be cared for at the most basic custodial level .", "When my friend Anne visited , her 9-year-old son , Nick , was interested in what this entailed .", "Over the course of a five-day stay , Nick asked many questions of Stacey , the woman who comes in to get Peter out of bed in the morning -- the very practical questions that most adults prefer not to think about .", "Several times Stacey saw Nick looking in the window when it was changing time .", "He was n't fazed by what he saw .", "He accepted Peter 's condition and presence in the house as natural .", "He was right about that .", "My mother and Peter live on the lip of a harbor in Maine .", "All summer , family members passed through , usually for a week or so .", "I stayed the longest -- six weeks .", "On some days there were enough people staying to fulfill my old fantasy of a big house full of people , bursting with robust togetherness .", "This was a new phenomenon here .", "For many years we were only welcome to stay for a short time .", "The stepparents had limited tolerance for each other 's children , especially the noisy grandchildren .", "I often rented nearby to make visits to my mother less stressful .", "Other sons and daughters did the same .", "We rarely overlapped or had the sense of a beloved summer house , full of traditions passed down through generations .", "We each had a private relationship with Maine , and with Peter and my mother .", "But an unexpected side effect of Peter 's deterioration has been a lessening of the feeling that anyone beyond my mother and stepfather creates a crowd .", "Now Peter seems to enjoy the bustle that my mother used to believe was an imposition on him .", "He is no longer an aging intellectual who requires quiet for reading and writing .", "The grandchildren are older , and he is younger , babylike .", "After breakfast , he sleeps for a couple of hours in the kitchen , no matter the amount of dish washing or screen-door banging .", "So family life swirled around him this summer .", "We spent the kind of easy time together that I like best , quantity rather than quality .", "Just hanging out .", "Siblings , nieces and nephews trooped through with significant others in tow .", "They each had a relationship with Peter while they were there .", "Some spent time talking to him even if he could n't reply .", "Others made sure he was comfortable at the table during meals .", "Though it was easy to forget he was in the room , everyone was delighted when he broke into a conversation with a responsive remark .", "The old Peter ! 
It was good to see him again , if only for a moment .", "Starting the last week of July , my mother began to say fall was in the air .", "I bridled against this , though I knew what she meant .", "I felt it too , a change in the light from white to yellow , a softening of the wind , a resignation of the leaves on certain trees .", "But I did n't want to skip ahead , so I pretended not to notice .", "It 's summer , I insisted .", "This is what summer is like in Maine .", "It is tempting to make this whisper of fall a metaphor for Peter 's diminishing presence .", "September brings up memories of how the end of summer felt during childhood , a loss .", "Yet I find myself resisting the comparison .", "Peter is alive , and summer does n't officially end for 10 more days .", "I 'm still wearing white .", "GENERATIONS ."], "summary": ["Alice Elliott Dark Generations essay on summer spent in Maine with her family and her stepfather , Peter , 88 , who is debilitated with post-polio syndrome and effects of strokes .", "Drawing ."], "publication": "nyt50", "label": [9, 66, 11], "tag": ["New York and Region"]} -{"id": "1788795", "text": ["Last spring Robert Ehrlich , a restaurateur and food entrepreneur from Sea Cliff , posted a notice on Craigslist that read : `` Looking for out-of-the-box innovative sushi chefs who want to try anything and who have a sense of humor . ''", "Dhani Diastika , formerly of Nobu 57 in Manhattan , answered the call and soon installed his team of six chefs at Mr. Ehrlich 's little coffeehouse .", "The result is a curious hybrid .", "The coffeehouse , the Sea Cliff Coffee Company , offers breakfast fare like cheese omelets -LRB- $ 8.95 -RRB- and salads -LRB- $ 7 -RRB- and sandwiches -LRB- $ 7.50 -RRB- for lunch daily , just as it has done for five years .", "But now , Wednesday through Sunday evenings , it becomes the Sea Cliff Sushi Company , serving Asian fusion cuisine worthy of a big-city hot spot .", "Mr. Ehrlich also runs his snack food empire , Robert 's American Gourmet , from an office next door , selling his line of Pirate 's Booty puffed rice and corn snacks .", "`` I travel a lot and I always find a funky local place that 's kind of off the beaten path , `` he said .", "`` Now I can live that every day . ''", "Tucked away in a cul-de-sac off Sea Cliff 's main street , the cafe is washed in vibrant shades of mango and pomegranate and furnished with vintage Grateful Dead posters and about 10 mismatched tables , with more seating on the front patio .", "On sushi nights , the tiny kitchen turns out a big-flavored spicy tuna sandwich , topped with tempura crunch , avocado and tobiko -LRB- roe -RRB- -LRB- $ 14.95 -RRB- , rich marinated black cod in sweet miso glaze -LRB- $ 15.95 -RRB- and pristine yellowtail sashimi with jalape\u00f1o , yuzu-soy and cilantro -LRB- $ 15.95 -RRB- .", "Customers can bring their own wine or beer -LRB- no hard liquor allowed -RRB- , and reservations are suggested .", "Sea Cliff Coffee Company and Sea Cliff Sushi Company , 100 Roslyn Avenue , Sea Cliff , N.Y. -LRB- 516 -RRB- 671-4411 .", "SUSAN M . 
NOVICK ."], "summary": ["Article on Sea Cliff Coffee Co , coffeehouse in Sea Cliff , NY , that converts to Sea Cliff Sushi Co restaurant at night ."], "publication": "nyt50", "label": [11], "tag": ["New York and Region"]} -{"id": "1788796", "text": ["IT may have left no physical damage in its wake , but the recent communal storm in this oceanfront city over the future of its beaches has realigned the political and environmental landscape .", "Despite fears about the city 's vulnerability to a major hurricane , the five-member City Council , three Democrats and two Republicans , voted unanimously in May to reject a $ 98.5 million beach preservation project by the Army Corps of Engineers that was designed to protect Long Beach from ocean flooding .", "The plan would have placed a berm of dredged sand along the beach 10 feet high , with a 5-foot dune on top , from the western end of Long Beach to Point Lookout , more than six miles to the east .", "Point Lookout agreed to a separate plan after the Long Beach project was rejected .", "A major opponent of the corps ' plan was an environmental and surfer-advocacy group , the Surfrider Foundation , whose members said the project would create dangerous riptides and harm the look of the beach , with no guarantee that the city would be better protected , as the corps and the proponents of the plan claimed .", "The group held meetings to get its message to the public and the council alike , and produced testimony by a coastal engineer and several representatives from local communities whose beaches had undergone similar projects .", "All testified against the corps ' proposals for Long Beach .", "Jeff Kupferman , the chairman of Surfrider 's Long Beach Action Committee and a 45-year city resident , said that while rejection of the plan was a `` major victory '' for Surfrider , surfing was far from the only issue .", "`` We had concerns about swimming safety , as well as surfing , about fishing , kayaking , aesthetics -- any use of the beach , '' he said .", "James P . Hennessy , a Republican council member , agreed .", "`` It was never just about surfing , '' he said .", "`` The council does n't agree about much , but it did agree that the beach fill part of the project was wrong . ``", "What annoyed Mr. Kupferman was that Surfrider was portrayed negatively by those who favored the plan .", "`` Their attitude was we were , ' Yo , just a bunch of surfer dudes out to get a wave , ' '' he said .", "`` And they used that as the hook to try and discredit us .", "The fact that we prevailed has sent a lot of ripples out into this community . ``", "Alison Johnson , a Long Beach resident and vice chairwoman of Surfrider 's New York City chapter , which worked closely with the Central Long Island chapter in opposing the plan , said that the decision had ramifications beyond Long Beach .", "`` It will make the powers that be look at storm protection on the East Coast in a different way , '' she said , `` which is the biggest success you can ask from any project . ''", "Assemblyman Harvey Weisenberg , a lifelong Long Beach resident and a vocal supporter of the Corps of Engineers ' project , was less sanguine about the outcome .", "`` How did people get elected to office that are so ignorant .", "`` he said of the City Council .", "`` I just pray hard and hope to God we do n't get hit by anything . ``", "Even with the beach issue decided , the officials ' alliance with activists may continue .", "Mr. Hennessy and the other Republican council member , Thomas R . 
Sofield Jr . , have proposed an alternative storm-management plan , which includes working with advisory groups like Surfrider , and the city has asked independent coastal engineers for ways to address beach protection .", "Mr. Hennessy said he still had hopes of working with the Corps of Engineers should it agree to return to Long Beach , but he is adamant about his vote to reject the project .", "`` I can count on the fingers of one hand the number of people who came up to me and said we 'd made a mistake , `` he said .", "STORM PROTECTION ."], "summary": ["Article on controversy over rejection by City Council in Long Beach , NY , to beach preservation project by Army Corps of Engineers designed to protect beach from ocean flooding .", "Surfrider Foundation contended plan to build 15-foot-high berm and dune from Long Beach to Point Lookout would create dangerous riptides and harm look of beach and would not protect beach .", "Photos ."], "publication": "nyt50", "label": [1, 4], "tag": ["New York and Region"]} -{"id": "1788798", "text": ["ON the morning of Sept . 11 , 2001 , I was scheduled to fly from La Guardia Airport to Columbus , Ohio .", "At home in Babylon , I put on my navy blue American Airlines uniform , which made me feel as if I represented the guarantee of safety and security to all the passengers on my flight .", "I always knew that I wanted to be a flight attendant .", "As a child growing up in Queens , I would ride my bicycle down 79th Street and Astoria Boulevard , about 500 feet from La Guardia Airport , to watch the planes take off .", "Sometimes I could see people 's faces in the windows of the planes .", "I 'd imagine them going to exciting places in a jet filled with important people .", "I wanted to be on those planes .", "Training for my dream job as a flight attendant was an awesome experience .", "I learned how to evacuate an aircraft in less than 60 seconds , and how to use and find all the emergency equipment aboard different planes .", "I was trained in first aid and judged fit to withstand G-force turbulence and abrupt cabin pressure changes .", "I loved flying to all those different places .", "Still , there was nothing I loved more than flying home to Babylon , seeing Long Island 's twin forks , being thankful that I was n't stuck in the bumper-to-bumper traffic I could see on the Long Island Expressway and spotting my old elementary school , my old street and finally my old house in Queens as the pilot approached runway 13 Right .", "My mother , a security worker at MacArthur Airport in Islip , would often drive me to the airport , just as she was preparing to do on the morning of Sept . 11 .", "But on this morning , we stopped in front of the television only to learn that American Airlines Flight 77 had crashed into the Pentagon , and that American Airlines Flight 11 and United Airlines Flight 175 had already crashed into the World Trade Center .", "As the south tower collapsed , news came that United Flight 93 had crashed somewhere in Pennsylvania .", "Faces , voices and names of colleagues flashed through my mind as the television anchors spoke .", "I called my supervisor , who told me that I should stay home until further notice .", "That week was a blur and then it was time to go back to work .", "I got up and put on my blue uniform .", "My stomach tightened and my heart grew heavy , not because I was afraid to fly , but because I was afraid of being weak in front of my passengers .", "My mother drove me to John F . 
Kennedy International Airport .", "She looked over at me as we drove west on the Southern State Parkway .", "`` Do you want to do this .", "`` she asked .", "I said yes , but she knew better .", "I got to Kennedy barely able to control my emotions .", "Security was tight , and the airport was quiet , not because there were n't any passengers for the few scheduled flights that morning but because everyone was nervously silent .", "Like me , people seemed afraid of being forced back into the routine of living .", "The television set in the crew 's flight room ran news of box cutters found on some passengers , and rumors of breaches in security were flying around .", "That 's when I broke down .", "I thought I could control myself .", "I was supposed to be strong , yet there I was crying like a child .", "My supervisor consoled me , but she took me off the flight and told me to go home .", "I called my mother to pick me up .", "She had never left the airport and was waiting for my call .", "Silently , we drove back home to Babylon .", "I eventually did get back in the swing of things , but layoffs kicked in and my flight schedule became erratic and unpredictable .", "Eventually , I left my job .", "How do I feel about memorials and tributes and prayers for 9/11 .", "Then , as now , my way of dealing with them is simple : I can not be associated with them in any way .", "Save me from the prayers , monuments , movies and television specials .", "It 's too emotional for me , and I have too much respect for my colleagues , those who died in those planes , to pretend that I could possibly know what they went through .", "I do know , however that they worked hard that day to save lives and calm nerves .", "When I go back to 79th Street , I can still see into the windows of planes taking off from the airport .", "Maybe one day I 'll return to the airline industry , but it will never be the same -- safety and security are no longer something that I or anyone else can guarantee .", "Op-Ed Contributor ."], "summary": ["Op-Ed article by former flight attendant Michelle Henderson , as told to Prof Howard Gold , on her reaction to terrorist attacks of September 11 , 2001 ."], "publication": "nyt50", "label": [0, 2, 45, 6], "tag": ["New York and Region", "Opinion"]} -{"id": "1788801", "text": ["Every year , tens of thousands of children pass through Suffolk County 's Family Court , the second busiest in New York State .", "Many of them are troubled teenagers accused of crimes , and a few hundred have to be locked up while awaiting trial or sentencing for serious offenses , or to serve time for lesser violations , like truancy or vandalism .", "Suffolk has no place of its own to put these children , and has n't since 1974 , when its juvenile detention center was shut down by the state .", "It has dealt with the issue by shipping children out of the county -- to Nassau County , whose center has troubles of its own , and as far away as Syracuse and Buffalo .", "None of this would be a problem if the only concerns were controlling costs and shuttling people from place to place .", "The juvenile court machinery has shown itself to be perfectly capable of dealing with the logistical challenges and caseloads .", "But the bureaucracy needs to remember that these cases involve children , immature by definition and often fragile .", "They need close contact with their families and lawyers and ready access to medical treatment , mental-health care and other social services .", "They are not hardened criminals , and 
if the goal is to make sure they stay that way , the county urgently needs to find a way to keep children in custody that is safe , humane and more accommodating to the particular needs of young defendants .", "As it is , the juveniles themselves -- last year , Suffolk held 608 of them in secure custody for a combined 4,055 days -- and their families have done most of the accommodating .", "State law requires that juvenile cases be resolved within three days of a youth 's being charged .", "This means children can be bused out of Suffolk to a detention center as much as 12 hours away , only to return in a day or two .", "The distant centers offer only a room and meals , so treatment like counseling has to be suspended while the young people are being shuffled around .", "Officials in Nassau and Suffolk have talked about building a new center together , but the classic Long Island predicament -- where do you put it . -- caused the effort to fall apart .", "One obvious solution is also close at hand : Nassau 's juvenile center in Westbury .", "That building is 50 years old and in bad shape .", "In April the state urged the county to take `` immediate action '' to fix serious deficiencies , including fire code violations , inadequate perimeter fencing and staffing shortfalls .", "A dining room ceiling was sagging and near collapse , and there was no system to open all the cell locks at once in a fire .", "Nassau has set aside $ 2.8 million in its capital budget to make repairs , and is hoping for a matching amount from Albany .", "The action is welcome but late , and we will waste no ink applauding the county for suddenly deciding , under pressure , to make sure that the center 's heating and ventilation systems , locks and fire alarms , among other absolutely essential things , are finally adequate .", "Nassau 's action on its own juvenile-center problems opens the possibility of a solution to Suffolk 's .", "Rather than build something new , and endure the predictable local resistance to choosing a location for it , the smarter tack may be for Suffolk to help Nassau put the center in Westbury into decent shape and for both counties to use it jointly .", "There may be room in Nassau 's center -- which has a licensed capacity of 32 , and 46 rooms in all -- to handle the needs of both counties .", "The shabby treatment of juvenile defendants has been ignored for too long .", "The Suffolk County executive , Steve Levy , should join his Nassau counterpart , Thomas Suozzi , in putting the needs of juvenile defendants on the front burner .", "Long Island ."], "summary": ["Editorial , scoring lack of juvenile detention facility in Suffolk County , suggests it join forces with Nassau County to renovate center in Westbury and to use it jointly ."], "publication": "nyt50", "label": [21], "tag": ["New York and Region", "Opinion"]} -{"id": "1788802", "text": ["JESSE FRIEDMAN bit into a bagel with lox last Sunday and ripped into the criminal justice system in Nassau County as if he had just been charged that morning , and not in 1987 , with hundreds of sex crimes against children .", "Convicted as a sexually violent predator -- the highest-risk category , Level 3 -- he is barred from parks and places with many children .", "Bagel stores are not off limits , so he sat in one on the Upper East Side of Manhattan and discussed his case and how it was affected by the 2003 film `` Capturing the Friedmans . 
''", "`` It 's the trial I never had , `` he said of the Oscar-nominated documentary , which portrayed the decline and fall of the Friedmans of Great Neck after Jesse and his father , Arnold , were arrested in 1987 , accused of molesting children during after-school computer classes in the basement of their Picadilly Road home .", "The film contained interviews indicating flaws in the prosecution of Jesse Friedman and raising questions about his culpability , despite his guilty plea .", "Mr. Friedman has used this information to try to overturn his conviction .", "An appeal failed in the state courts earlier this year , but he retained the civil rights lawyer Ronald L . Kuby and filed another motion , in federal court , in July .", "A central claim of the motion is that prosecutors , lacking physical evidence , built their case on the false accusations of children who admitted to being abused only after investigators used `` high pressure , manipulative and result-oriented interrogation techniques with child witnesses that produced false allegations . ''", "These techniques included intimidation , threats and hypnosis to create false memories of abuse , the motion claims .", "It also contends that investigators unlawfully withheld exonerating material , including denials from students -- some of whom said in the film that they were pressured into fabricating abuse claims .", "But there are also accusers who stand by their charges and are infuriated at Mr. Friedman 's continued denials .", "Frances Galasso , who led the Nassau police sex crimes squad in the Friedman investigation , said in an interview on Wednesday that coercion was not used with the children .", "`` They gave detailed statements that upset my most hardened detectives , '' said Ms. Galasso , who is now retired .", "`` The parents were brought in as soon as we were able to break the ice , and they co-signed the statement . ''", "She called the documentary one-sided and `` a disgrace and insult to the victims and their parents . ''", "`` I 'm really happy it did n't get the Academy Award , because it was n't accurate , `` she said .", "`` Both Jesse and his father are guilty as sin .", "Remember , he pled guilty , and you do n't plead guilty to a crime like this if you did n't do it . ``", "Jesse Friedman was 17 when he and his father were arrested and charged with hundreds of counts of sex crimes against children .", "Arnold Friedman pleaded guilty to sexually abusing children .", "-LRB- He committed suicide in prison in 1995 . -RRB-", "Jesse Friedman said that he wanted to fight the charges in court but felt that a successful defense would be impossible to mount .", "His father had already decided to plead guilty , and Jesse believed that the judge and jury would be biased against him , he said .", "According to his recent motion , he was `` condemned in the press and vilified by a community hungry for retribution . 
''", "Fearing life in prison , he said , he accepted a deal , admitting guilt and receiving a 6 - to 18-year sentence .", "After serving 13 years in prison , he was released in December 2001 and has since lived in Manhattan .", "He is now 37 and will be finished with his parole in three months , making him a free man , in a sense -- except for his Level 3 status , which he will always maintain , unless the conviction is overturned .", "`` That follows me till the day I die , '' he said .", "Upon release from prison , Jesse moved in with his brother , David , who works as Silly Billy , one of the most prominent party clowns in the city .", "But when the co-op board found out who Jesse was , they objected , and Jesse moved out .", "He now lives with his fianc\u00e9e , Elisabeth Walsh , in an East Harlem building with no child residents , a condition ordered by his parole officer .", "He said his landlord and neighbors accept his situation .", "He may not leave New York City and must observe a 9 p.m. curfew .", "Ms. Walsh , 27 , is also from Great Neck .", "She never knew Mr. Friedman growing up , but she knew of him , especially because she attended his alma mater , a small charter high school called the Village School .", "`` I 'm the age of his accusers , `` she said , adding that she never believed the allegations .", "After seeing the documentary , she said , she wrote him a letter of support .", "They met and are now engaged , despite her family 's misgivings .", "`` My parents love Jesse , but they feel like I 'm taking on all his problems , `` she said , nuzzling him .", "`` But for me , he 's worth it . ``", "Mr. Friedman says he and Ms. Walsh plan to move someplace where he is less likely to be recognized .", "He laughed hard when asked if he would ever settle down on Long Island .", "`` I think if I set foot on Long Island , I 'd have my house burned down , `` he said , though he added that one thing would make him welcome a return : if a federal court ordered hearings in his case .", "`` To go back there and be able to grill my prosecutors , '' he said .", "`` I 'd take that . ``", "THE ISLAND E-mail : theisland@nytimes.com ."], "summary": ["Corey Kilgannon The Island column on Jesse Friedman , arrested in 1987 , along with his father , on charges of molesting children during after-school computer classes at their home in Great Neck , NY . Friedman , convicted as sexually violent predator , has filed second motion to try to overturn his conviction .", "Photo ."], "publication": "nyt50", "label": [3, 5], "tag": ["New York and Region"]} -{"id": "1788803", "text": ["FOR failing to meet performance standards , the Clara T . O'Connell elementary school in Bristol , Conn . , spent three years on the `` in need of improvement '' list under the federal No Child Left Behind program .", "When a new list came out last month , Connecticut had 290 elementary and middle schools on it , but the O'Connell School was not among them .", "It had achieved what no other school in the state had managed under the four-year-old program : It had worked itself off the list .", "`` For three years , the headline was that we were on the list , '' said Michael F . Audette , O'Connell 's principal .", "`` Human nature being what it is , people would ask the teachers , ' What school do you teach at .", "` And when the teachers would say , ' O'Connell , ' they 'd say , ` Is n't that the school that 's on the list .", "` And the teachers would say , ' Yeah , but we 're doing a lot of good things . 
'", "But nobody sticks around for the ` yeah , but . '", "Now it 's nice to have a different headline , and now we can say , ` Yes , we 're that school . '", "`` Henry Garcia , a spokesman for the State Department of Education , said O'Connell 's achievement was a testament to its hard work .", "`` It takes schools that are in need of improvement time to see the progress once they develop curriculum and other strategies that improve student achievement , '' he said .", "The number of Connecticut schools failing to meet what the program calls adequate yearly progress doubled in the 2005-6 academic year , up from 145 schools the year before .", "The results were reached using scores from the Connecticut Mastery Tests , then figuring them into a host of categories and subcategories , including the number of children living in poverty who attend a school .", "At the O'Connell School 80 percent of the students are poor , Mr. Audette said .", "The tests require that at least 74 percent of students demonstrate proficiency in math , 68 percent in reading and 70 percent in writing .", "In the 2002-3 school year , O'Connell passed all categories except reading , getting a score of 44 percent .", "It also failed to meet the reading goal in 2003-4 , but reached it the next year .", "In 2005-6 , it scored 61 percent in reading .", "That was not high enough to meet the No Child Left Behind requirements , but federal officials put O'Connell in the `` safe harbor '' category , for schools that have significantly improved , and removed it from the `` in need of improvement '' list .", "To raise the reading scores , Mr. Audette said , he and his staff reviewed the pupils ' reading data for weak areas .", "The Mastery Tests require that pupils read passages and answer questions about what they have read .", "To prepare , the children were asked to answer a reading question each day until they showed proficiency in expressing their comprehension .", "Mr. Audette also hired additional reading support staff members and trained teaching assistants , assigning them to particular grades , where they placed the children into small groups and gave them a second instructional reading period each day .", "Mr. Audette signed on with the Teachers College Reading and Writing Project at Columbia University .", "The Bristol School District paid for consultants from Columbia to teach faculty members at O'Connell .", "The effort paid off , especially for the third graders .", "They scored 90 percent in writing proficiency in the 2005-6 Mastery Tests .", "`` If I was to pinpoint exactly what we did , I would say we really looked at our reading instruction , '' Mr. Audette said .", "`` It 's kind of common sense .", "If you want to be a good pianist , you practice the piano . ``", "EDUCATION ."], "summary": ["Article on Clara T O'Connell elementary school in Bristol , Conn , which spent three years on ` in need of improvement ' list under No Child Left Behind program and has managed to work itself off list .", "Photo ."], "publication": "nyt50", "label": [0], "tag": ["Education", "New York and Region"]} -{"id": "1788804", "text": ["IN the 50 years he has lived in Montclair , N.J. , Rob Bianco has seen his share of monsoon-like downpours , blizzards , ice storms , even the remnants of hurricanes .", "But Mr. 
Bianco , the superintendent of public works for Montclair , had never seen anything like the storm that ravaged the leafy landscape of his hometown on July 18 .", "`` We literally had trees and telephone poles , some as high as 20 to 30 feet in the air , that were sheared and cut off , '' he said .", "`` It was a treetop tornado , meaning it never hit the ground , but it still caused a great amount of destruction . ''", "The storm , which hit the northeast corner of Verona , N.J. , before blowing with a vengeance -- about a mile wide -- through a swath of Montclair for roughly a half-hour , destroyed about 200 trees on public property in Montclair , an Essex County township of about six square miles .", "The most heavily damaged areas , Mr. Bianco said , were Brookdale Park , which covers 121 acres in Montclair and Bloomfield , and the township 's Edgemont Park .", "`` We had some cars smashed and a lot of people running for cover , '' he said .", "`` It was a miracle that no one got hurt . ''", "But what about all of those damaged oak and pine trees , some of which Mr. Bianco said were 250 years old .", "`` Cleaning it all up was quite a daunting task , '' said Matthew A . Vastano , the executive vice president of Nature 's Choice Corporation , a yard-waste recycling company in Union , N.J. , hired by Montclair to clear the debris caused by the storm .", "`` Montclair is not your normal town where vegetative waste is concerned , '' Mr. Vastano said .", "`` The town , which has huge , huge trees , is very environmentally conscious , and it wants to keep all the trees it can . ''", "The trees it could not keep were hauled away by Nature 's Choice to a temporary storage site .", "Any piece of wood 16 to 18 inches long was put onto chipper trucks and into machines that turned it into chips .", "Anything larger was cut into logs .", "Some 25 truckloads of those logs were placed into 40-foot containers on trucks -- at a cost of $ 350 per container -- for eventual mulching .", "In the end , about 600 cubic yards of mulch and topsoil , or 300 tons , were produced -- enough to cover about 100,000 square feet , according to Mr. Vastano .", "Mr. Bianco said that Nature 's Choice would give Montclair some of the mulch or topsoil for free if the town needed it for a special project , but that the company was free to sell it to landscapers and other businesses .", "`` We are a business , not a charity , '' Mr. Vastano said .", "`` We 'll take most of that mulch and turn it into hardwood mulch or dye it either black or a shade of red before selling it . ``", "Dianne Marus , the director of Montclair 's Department of Finance , said that the cost of the storm cleanup came to $ 366,950 but that the price , tallied by the Department of Community Services , did not include overtime costs for the Police -LRB- $ 74,983 -RRB- and Fire Departments -LRB- $ 4,650 -RRB- .", "All told , Montclair has spent $ 446,583 on storm-related services , and the job is not yet finished .", "`` There are still a number of stumps to be removed and lots of re-planting to do , '' Mr. Bianco said .", "`` By the time all is said and done , this entire project is going to cost us more money and continue for at least another month . 
''", "STORM CLEANUP ."], "summary": ["Article on work of Nature 's Choice Corp , yard-waste recycling company hired by Montclair , NJ , to clear debris caused by July 18 storm that destroyed about 200 trees on public property .", "Company turned trees into about 600 cubic yards of mulch and topsoil .", "Photo ."], "publication": "nyt50", "label": [9, 16], "tag": ["New York and Region"]} -{"id": "1788805", "text": ["DEEP into suburbia , on a sound barrier that runs along the Taconic State Parkway here , a graffiti artist announces his presence with a single word painted in yellow and black : `` Me . ''", "Officials said that graffiti had reached new heights this summer .", "And in a town that bills itself as a retreat from more urban locales , politicians and police officers are taking the problem seriously .", "`` Whether you grew up here all your life , or whether you moved here from the Bronx or Yonkers or Long Island , you do n't want to see that , `` said Linda G . Cooper , the town supervisor .", "`` And so we 're trying to take a very firm position . ``", "In June , the Yorktown police began graffiti patrols as a deterrent .", "They also began photographing graffiti they found to create a catalog of the work of local vandals for use in investigations , Lt . Donald Schuck said .", "Since July , Lieutenant Schuck said , the police have arrested nine boys on graffiti-related charges .", "The most recent came on Aug . 28 , with the arrest of a 14-year-old from Mohegan Lake , a hamlet of Yorktown .", "The police said he had sprayed a wall at a town-owned sports club , causing about $ 400 in damage .", "The boy , charged with making graffiti and possession of graffiti instruments , both misdemeanors , was released to his mother and was scheduled to appear on Friday in Westchester County Family Court in White Plains .", "The town , which has seen stop signs , park buildings , businesses and even a police radar unit defaced this summer , is also considering new legislation .", "One proposed law would require vandals to pay restitution .", "Another would mandate that residents who discover graffiti on their property clean it within 72 hours .", "Ms. Cooper said that rapid removal discouraged vandals .", "Town officials and youth advocates said there were a number of reasons for the surge in graffiti .", "Lieutenant Schuck said increased access to the tools of graffiti had played a role .", "Young people , previously stymied by a county law forbidding the sale of spray paint to anyone under 18 , have begun ordering the paint over the Internet , he said .", "Joan Valenstein , chairwoman of the advisory board for the Yorktown Teen Center , a branch of the Boys and Girls Club of Northern Westchester , said the increase might be the byproduct of boredom in a town that she said did not provide enough youth activities .", "Ms. 
Cooper said some of the graffiti included gang insignia previously seen in the southern , more urban part of the county .", "Whatever the source of the graffiti , the town seems determined to stamp it out .", "But out on the Taconic State Parkway , high above the cars rushing by , defiance is etched in yellow and black .", "VANDALISM ."], "summary": ["Officials in Yorktown , NY , say graffiti has reached new heights this summer .", "Police have begun graffiti patrols as deterrent and have arrested nine boys on graffiti-related charges since July .", "Photo ."], "publication": "nyt50", "label": [7, 1, 5], "tag": ["New York and Region"]} -{"id": "1788813", "text": ["MODERN dance from beyond the borders of the United States has always flowed into New York , but this season the flow will become a torrent as presenters all over the region place a new emphasis on internationalism , even with tighter visa restrictions .", "Some might fret that this is bad : every slot allotted to a foreigner deprives an American , they reason , and life for American dancers is hard enough .", "But the gain in cosmopolitanism will more than offset any loss , enlivening local choreographic creativity and broadening the audience 's perspectives .", "This season the mainstream midlevel New York dance theaters are augmenting their foreign offerings , and so are presenters in the broader New York region .", "Dance Theater Workshop has been emphasizing international dance of late , but now the Joyce Theater and Danspace Project at St . Mark 's Church are joining in .", "And there will be even more to come as the winter and spring schedules fill out .", "Three events this fall stand out for me , maybe in part because I 've already seen these artists or these very pieces in Europe and know they 're good .", "This month the riveting French conceptualist Boris Charmatz , in partnership with another choreographer and dancer , Dimitri Chamblas , will make one of his rare New York appearances .", "Mr. Charmatz is charming and full of ideas .", "Even when they do n't work , they 're interesting .", "He and Mr. Chamblas will be at St . Mark 's Church , in a piece called `` \u00c0 Bras le Corps , '' described as a `` private perspective on their strenuous tumbling , lunging and grappling , '' all in a boxing-ring setting , Their appearance is part of a citywide multiarts festival called European Dream .", "The French ballerina Sylvie Guillem is one of the biggest stars in the dance world .", "But partly because she 's restlessly curious and maybe also because she 's in her early 40 's now and seeking -LRB- as Mikhail Baryshnikov has done for so long -RRB- new , nonballetic challenges , she is moving into what the British call contemporary dance .", "This month she has a collaboration in London with the choreographer Akram Khan .", "Two years ago she undertook a similar collaboration with Russell Maliphant , full of stark movements seemingly designed for the leggy Ms. Guillem , angular gestures and vivid lighting .", "That program will be seen next month at City Center , the first offering in the center 's new partnership with the innovative , dance-oriented Sadler 's Wells Theater in London .", "Jedediah Wheeler , the longtime New York dance manager and producer , is making the performing-arts series at Montclair State University in New Jersey into an innovative force that , with money and persistence , may one day rival the Brooklyn Academy of Music as a beyond-Manhattan destination for new work .", "For November Mr. 
Wheeler has booked one of the most exciting of all the new British physical theater troupes : the Vincent Dance Theater from Sheffield , England , led by Charlotte Vincent , in a piece I saw last spring called `` Broken Chords . ''", "It is theatrically wrenching and hilarious , choreographically intense , musically compelling .", "-LRB- One of the dancers is also a first-rate violinist , and the title refers to both marital breakup and the need to arpeggiate chords in Baroque string music with a modern bow . -RRB-", "I will be curious to see if the New York dance audience shares my enthusiasm for this piece .", "Finally , a quick note on two Japanese-flavored imports .", "Yubiwa Hotel in `` Candies : girlish hardcore '' is at the Japan Society this week , part of the society 's `` Girl , Girly , Girlish '' series , and Kota Yamazaki / Fluid Hug-Hug will be at Dance Theater Workshop in November .", "Ms. Yamazaki , Japanese-born and New York-based , conducted research for her new `` Rise : Rose '' in Senegal .", "Who needs world travel .", "Titles like that are a trip all by themselves .", "THE NEW SEASON : DANCE ."], "summary": ["John Rockwell article on how flow of modern dance presentations into New York from around the world will become torrent this year .", "Notable visitors from France , United Kingdom and Japan discussed .", "Photo ."], "publication": "nyt50", "label": [0, 24], "tag": ["Arts"]} -{"id": "1788814", "text": ["NEW YORK dance is a wildly inventive scene , and I would n't be surprised if sometime during the coming season we had the chance to see naked stiltwalkers expertly dancing hip-hop to Arvo P\u00e4rt on a float on the Gowanus Canal .", "But the monuments of dance , the long-standing sources from which all this recent innovation has flowed , will also be represented , and returning to them regularly will be good for the soul .", "To start , the Merce Cunningham Dance Company will perform at the Joyce Theater in mid-October , at Mr. Cunningham 's most slyly playful , it would seem , in a premiere called `` eyeSpace , '' to which audience members will be encouraged to take their iPods to hear the dance 's downloadable score by Mikel Rouse .", "-LRB- Technophobes will be provided with loaners . -RRB-", "Also on the program will be one of Mr. Cunningham 's signature site-specific `` events '' and a revival of his 1960 `` Crises , '' which John Cage once described as a harsh and erotic piece about a man and a woman bound together in part by elastic bands .", "It is hard to imagine today 's performers matching the wildness and ferocity that Mr. Cunningham , Viola Farber and Carolyn Brown once brought to it .", "Will they make it new .", "Paul Taylor 's `` Troilus and Cressida -LRB- reduced -RRB- , '' a premiere to be performed by his company in March at City Center , sounds like a typically outrageous Taylor cartoon .", "Its Shakespearean characters include a schlub of a Troilus , a worn Cressida and a swarm of macaroni-headed Cupids to egg the lovers on .", "The music .", "Ponchielli , of `` Dance of the Hours '' fame .", "Finally , this season and next will be the last time to savor the great artistry of Kyra Nichols , a ballerina of incomparable musicality and nuanced technical expertise .", "Ms. Nichols retires from the New York City Ballet in June , but she will dance in ballets including Balanchine 's `` Vienna Waltzes '' and `` Liebeslieder Walzer , '' and she 'll also play Carabosse in `` The Sleeping Beauty . 
''", "THE NEW SEASON : DANCE ."], "summary": ["Jennifer Dunning article notes three monuments of dance , long-standing sources from which all recent innovation has flowed , who will be represented in New York this season : Merce Cunningham , Paul Taylor and Kyra Nichols .", "Photo ."], "publication": "nyt50", "label": [1], "tag": ["Arts"]} -{"id": "1788815", "text": ["BUILDINGS are looking prettier than ever , thanks to the freedom that contemporary architects have these days to play with form .", "Meanwhile , many of our cities are falling to pieces , as the infrastructure that once bound them into functioning communities crumbles from years of neglect .", "New Orleans exposed a breakdown in infrastructure and social policy that will not be repaired by a conventional formula of tourism , architectural nostalgia and gated communities .", "And the atomization of cities as diverse as Dubai , Beijing and Beirut , where the construction of glistening new urban centers masks growing social inequities at their edges , have only further exposed the hollowness of some contemporary urban-planning strategies .", "So the most promising trend this year is a renewed emphasis in architectural circles on urbanism as a field for creative exploration .", "Architects like Eyal Weisman -LRB- in London -RRB- , Teddy Cruz -LRB- San Diego -RRB- , Philipp Oswalt -LRB- Berlin -RRB- and Rem Koolhaas -LRB- Rotterdam and just about everywhere else -RRB- have been striving to bridge the gap between architectural fantasy and stark political and social realities .", "Seeking to distance themselves from the current obsession with `` star '' buildings , they proceed from the assumption that we can not create valid new architectural forms until we arrive at a deeper understanding of the era we live in .", "The 10th Venice Biennale of Architecture , which opens this weekend , is the first to focus on entire cities rather than uncovering the latest architectural trends .", "Organized by Ricky Burdett , the exposition examines the effect of design in cities as diverse as Cairo , Mumbai , S\u00e3o Paolo , Johannesburg , Mexico City and Caracas .", "Among the highlights is Al-Azhar Park in Cairo , conceived as an `` urban lung '' to provide relief for a city that has only one square meter of green space for each inhabitant , and a series of schools in S\u00e3o Paolo that function as around-the-clock community centers to help reduce violence among youths in the poorest slums .", "-LRB- Simply getting children off the streets is a starting point . -RRB-", "`` I think projects like this give a raison d' \u00eatre to architecture again , which is what the profession is looking for , '' Mr. Burdett said .", "In `` Lago : How It Works , '' a book to be published this fall , Mr. Koolhaas turns his penetrating gaze to the Nigerian city , a dense matrix of congested slums and infill markets that in many cases have devised their own court systems and electricity and water utilities .", "In the 1970 's Lagos was the nexus of a stirring intellectual renaissance and a wave of sprawling , megalomaniacal urban planning projects .", "That optimism evaporated with the drop in oil prices at the end of the decade , and the city was left to fend for itself .", "Today , the Nigerian government is trying to resurrect some of the old planning projects , clamp down on illegal street trade and rein in urban indiscipline in general .", "Yet in eight years of research , Mr. 
Koolhaas realized that what seems like chaos to outsiders is a complex and highly organized social organism .", "His analysis suggests that a democratic , informal urban planning model could be combined with aggressive planning to lift Lagos out of poverty without destroying the spontaneous freedom of daily urban life .", "`` In Lagos there is no choice , but there are countless ways to articulate the condition of no choice , '' Mr. Koolhaas has said .", "`` In New York , on the other hand , there 's a sense of infinite choice , but a very conventional set of options from which to choose . ``", "Of course Mr. Koolhaas , now 62 , has been known for countering conventional wisdom about how cities really work since the publication of `` Delirious New York , '' a 1978 book casting the `` city of congestion '' as an antidote to the sterility of Modernist planning conventions .", "Today , he is joined by a younger generation of architects who are no longer content to consider architecture in isolation from larger urban patterns .", "Among them is Mr. Oswalt , 42 , who has organized a show that arrives in December at the Van Alen Institute in New York and the Pratt Manhattan Gallery .", "Titled `` Shrinking Cities , '' it examines the shrinking industrial centers on the fringes of the emerging global economy .", "The show sheds light on the abysmal failure of planners to avert the gradual disintegration of cities like Leipzig , Germany .", "Ivanovo , Russia .", "And Detroit .", "The phenomenon of decay is often juxtaposed with a different form of assault : the insidious encroachment of suburban values .", "New Yorkers need only stroll through SoHo to get the point .", "The exhibition , which originally opened in Berlin 2004 , also resurrects some largely forgotten critiques of urbanization .", "It touches on the Disurbanist proposals of Soviet Constructivists like Moisei Ginzburg and Mikhail Barshch , who challenged the thinking behind Western urban traditions in favor of a more rural Russian model , and Frank Lloyd Wright 's Broadacre City , in which each family would be allotted an acre of land and the agglomeration would serve as a decentralized metropolis .", "But the show 's most penetrating attacks are reserved for more recent urban strategies , particularly the argument that the salvation of cities rests in a so-called `` creative class '' that leads the way to gentrification .", "`` Shrinking Cities '' is to travel in February to Detroit , where , a bit paradoxically , it will go on view in an abandoned 21,000-square - foot warehouse that will be the temporary home of the Museum of Contemporary Art Detroit .", "Designed by Andrew Zago , the museum is being opened , in part , with the goal of revitalizing the city center .", "Finally , Mr. 
Weisman , an Israeli-born architect who is the recipient this year of the prestigious Stirling Prize for architecture , will open a series of lectures this fall at the Canadian Center for Architecture in Montreal .", "The talks are pegged to the release of `` Hollow Land : The Architecture of Israeli Occupation , '' a chilling book in which he explores the way the military selects targets in bombing and fortifying cities and how those strategies can re-emerge in civilian planning practices during peacetime .", "His analysis is ideally timed .", "If the Modernist mass-housing programs of a half-century ago reduced a generation of urban poor to mere numbers in a machine , many of those projects are now being wiped away to make room for an equally troubling formula : gated communities , open-air malls and sanitized tourist enclaves that have exacerbated social inequities by making destitute children invisible .", "Acknowledging the complexity of these issues is not enough .", "Thankfully , some architects have assumed the challenge of binding us back into a civilization whose fabric often seems on the verge of unraveling .", "THE NEW SEASON -- ARCHITECTURE ."], "summary": ["Nicolai Ouroussoff article says most promising trend of year is renewed emphasis among architects on urbanism as field for creative exploration .", "Says attempt is being made to bridge gap between architectural fantasy and social reality .", "Photos ."], "publication": "nyt50", "label": [4], "tag": ["Arts"]} -{"id": "1788840", "text": ["FOR failing to meet performance standards , the Clara T . O'Connell elementary school in Bristol , Conn . , spent three years on the `` in need of improvement '' list under the federal No Child Left Behind program .", "When a new list came out last month , Connecticut had 290 elementary and middle schools on it , but the O'Connell School was not among them .", "It had achieved what no other school in the state had managed under the four-year-old program : It had worked itself off the list .", "`` For three years , the headline was that we were on the list , '' said Michael F . Audette , O'Connell 's principal .", "`` Human nature being what it is , people would ask the teachers , ' What school do you teach at .", "` And when the teachers would say , ' O'Connell , ' they 'd say , ` Is n't that the school that 's on the list .", "` And the teachers would say , ' Yeah , but we 're doing a lot of good things . '", "But nobody sticks around for the ` yeah , but . '", "Now it 's nice to have a different headline , and now we can say , ` Yes , we 're that school . '", "`` Henry Garcia , a spokesman for the State Department of Education , said O'Connell 's achievement was a testament to its hard work .", "`` It takes schools that are in need of improvement time to see the progress once they develop curriculum and other strategies that improve student achievement , '' he said .", "The number of Connecticut schools failing to meet what the program calls adequate yearly progress doubled in the 2005-6 academic year , up from 145 schools the year before .", "The results were reached using scores from the Connecticut Mastery Tests , then figuring them into a host of categories and subcategories , including the number of children living in poverty who attend a school .", "At the O'Connell School 80 percent of the students are poor , Mr. 
Audette said .", "The tests require that at least 74 percent of students demonstrate proficiency in math , 68 percent in reading and 70 percent in writing .", "In the 2002-3 school year , O'Connell passed all categories except reading , getting a score of 44 percent .", "It also failed to meet the reading goal in 2003-4 , but reached it the next year .", "In 2005-6 , it scored 61 percent in reading .", "That was not high enough to meet the No Child Left Behind requirements , but federal officials put O'Connell in the `` safe harbor '' category , for schools that have significantly improved , and removed it from the `` in need of improvement '' list .", "To raise the reading scores , Mr. Audette said , he and his staff reviewed the pupils ' reading data for weak areas .", "The Mastery Tests require that pupils read passages and answer questions about what they have read .", "To prepare , the children were asked to answer a reading question each day until they showed proficiency in expressing their comprehension .", "Mr. Audette also hired additional reading support staff members and trained teaching assistants , assigning them to particular grades , where they placed the children into small groups and gave them a second instructional reading period each day .", "Mr. Audette signed on with the Teachers College Reading and Writing Project at Columbia University .", "The Bristol School District paid for consultants from Columbia to teach faculty members at O'Connell .", "The effort paid off , especially for the third graders .", "They scored 90 percent in writing proficiency in the 2005-6 Mastery Tests .", "`` If I was to pinpoint exactly what we did , I would say we really looked at our reading instruction , '' Mr. Audette said .", "`` It 's kind of common sense .", "If you want to be a good pianist , you practice the piano . 
``", "EDUCATION ."], "summary": ["Article on Clara T O'Connell elementary school in Bristol , Conn , which spent three years on ` in need of improvement ' list under No Child Left Behind program and has managed to work itself off list .", "Photo ."], "publication": "nyt50", "label": [0], "tag": ["Education", "New York and Region"]} -{"id": "1788841", "text": ["IT may have left no physical damage in its wake , but the recent communal storm in this oceanfront city over the future of its beaches has realigned the political and environmental landscape .", "Despite fears about the city 's vulnerability to a major hurricane , the five-member City Council , three Democrats and two Republicans , voted unanimously in May to reject a $ 98.5 million beach preservation project by the Army Corps of Engineers that was designed to protect Long Beach from ocean flooding .", "The plan would have placed a berm of dredged sand along the beach 10 feet high , with a 5-foot dune on top , from the western end of Long Beach to Point Lookout , more than six miles to the east .", "Point Lookout agreed to a separate plan after the Long Beach project was rejected .", "A major opponent of the corps ' plan was an environmental and surfer-advocacy group , the Surfrider Foundation , whose members said the project would create dangerous riptides and harm the look of the beach , with no guarantee that the city would be better protected , as the corps and the proponents of the plan claimed .", "The group held meetings to get its message to the public and the council alike , and produced testimony by a coastal engineer and several representatives from local communities whose beaches had undergone similar projects .", "All testified against the corps ' proposals for Long Beach .", "Jeff Kupferman , the chairman of Surfrider 's Long Beach Action Committee and a 45-year city resident , said that while rejection of the plan was a `` major victory '' for Surfrider , surfing was far from the only issue .", "`` We had concerns about swimming safety , as well as surfing , about fishing , kayaking , aesthetics -- any use of the beach , '' he said .", "James P . Hennessy , a Republican council member , agreed .", "`` It was never just about surfing , '' he said .", "`` The council does n't agree about much , but it did agree that the beach fill part of the project was wrong . ``", "What annoyed Mr. Kupferman was that Surfrider was portrayed negatively by those who favored the plan .", "`` Their attitude was we were , ' Yo , just a bunch of surfer dudes out to get a wave , ' '' he said .", "`` And they used that as the hook to try and discredit us .", "The fact that we prevailed has sent a lot of ripples out into this community . ``", "Alison Johnson , a Long Beach resident and vice chairwoman of Surfrider 's New York City chapter , which worked closely with the Central Long Island chapter in opposing the plan , said that the decision had ramifications beyond Long Beach .", "`` It will make the powers that be look at storm protection on the East Coast in a different way , '' she said , `` which is the biggest success you can ask from any project . ''", "Assemblyman Harvey Weisenberg , a lifelong Long Beach resident and a vocal supporter of the Corps of Engineers ' project , was less sanguine about the outcome .", "`` How did people get elected to office that are so ignorant .", "`` he said of the City Council .", "`` I just pray hard and hope to God we do n't get hit by anything . 
``", "Even with the beach issue decided , the officials ' alliance with activists may continue .", "Mr. Hennessy and the other Republican council member , Thomas R . Sofield Jr . , have proposed an alternative storm-management plan , which includes working with advisory groups like Surfrider , and the city has asked independent coastal engineers for ways to address beach protection .", "Mr. Hennessy said he still had hopes of working with the Corps of Engineers should it agree to return to Long Beach , but he is adamant about his vote to reject the project .", "`` I can count on the fingers of one hand the number of people who came up to me and said we 'd made a mistake , `` he said .", "STORM PROTECTION ."], "summary": ["Article on controversy over rejection by City Council in Long Beach , NY , to beach preservation project by Army Corps of Engineers designed to protect beach from ocean flooding .", "Surfrider Foundation contended plan to build 15-foot-high berm and dune from Long Beach to Point Lookout would create dangerous riptides and harm look of beach and would not protect beach .", "Photos ."], "publication": "nyt50", "label": [1, 4], "tag": ["New York and Region"]} -{"id": "1788842", "text": ["IN the 50 years he has lived in Montclair , N.J. , Rob Bianco has seen his share of monsoon-like downpours , blizzards , ice storms , even the remnants of hurricanes .", "But Mr. Bianco , the superintendent of public works for Montclair , had never seen anything like the storm that ravaged the leafy landscape of his hometown on July 18 .", "`` We literally had trees and telephone poles , some as high as 20 to 30 feet in the air , that were sheared and cut off , '' he said .", "`` It was a treetop tornado , meaning it never hit the ground , but it still caused a great amount of destruction . ''", "The storm , which hit the northeast corner of Verona , N.J. , before blowing with a vengeance -- about a mile wide -- through a swath of Montclair for roughly a half-hour , destroyed about 200 trees on public property in Montclair , an Essex County township of about six square miles .", "The most heavily damaged areas , Mr. Bianco said , were Brookdale Park , which covers 121 acres in Montclair and Bloomfield , and the township 's Edgemont Park .", "`` We had some cars smashed and a lot of people running for cover , '' he said .", "`` It was a miracle that no one got hurt . ''", "But what about all of those damaged oak and pine trees , some of which Mr. Bianco said were 250 years old .", "`` Cleaning it all up was quite a daunting task , '' said Matthew A . Vastano , the executive vice president of Nature 's Choice Corporation , a yard-waste recycling company in Union , N.J. , hired by Montclair to clear the debris caused by the storm .", "`` Montclair is not your normal town where vegetative waste is concerned , '' Mr. Vastano said .", "`` The town , which has huge , huge trees , is very environmentally conscious , and it wants to keep all the trees it can . 
''", "The trees it could not keep were hauled away by Nature 's Choice to a temporary storage site .", "Any piece of wood 16 to 18 inches long was put onto chipper trucks and into machines that turned it into chips .", "Anything larger was cut into logs .", "Some 25 truckloads of those logs were placed into 40-foot containers on trucks -- at a cost of $ 350 per container -- for eventual mulching .", "In the end , about 600 cubic yards of mulch and topsoil , or 300 tons , were produced -- enough to cover about 100,000 square feet , according to Mr. Vastano .", "Mr. Bianco said that Nature 's Choice would give Montclair some of the mulch or topsoil for free if the town needed it for a special project , but that the company was free to sell it to landscapers and other businesses .", "`` We are a business , not a charity , '' Mr. Vastano said .", "`` We 'll take most of that mulch and turn it into hardwood mulch or dye it either black or a shade of red before selling it . ``", "Dianne Marus , the director of Montclair 's Department of Finance , said that the cost of the storm cleanup came to $ 366,950 but that the price , tallied by the Department of Community Services , did not include overtime costs for the Police -LRB- $ 74,983 -RRB- and Fire Departments -LRB- $ 4,650 -RRB- .", "All told , Montclair has spent $ 446,583 on storm-related services , and the job is not yet finished .", "`` There are still a number of stumps to be removed and lots of re-planting to do , '' Mr. Bianco said .", "`` By the time all is said and done , this entire project is going to cost us more money and continue for at least another month . ''", "STORM CLEANUP ."], "summary": ["Article on work of Nature 's Choice Corp , yard-waste recycling company hired by Montclair , NJ , to clear debris caused by July 18 storm that destroyed about 200 trees on public property .", "Company turned trees into about 600 cubic yards of mulch and topsoil .", "Photo ."], "publication": "nyt50", "label": [9, 16], "tag": ["New York and Region"]} -{"id": "1788843", "text": ["DEEP into suburbia , on a sound barrier that runs along the Taconic State Parkway here , a graffiti artist announces his presence with a single word painted in yellow and black : `` Me . ''", "Officials said that graffiti had reached new heights this summer .", "And in a town that bills itself as a retreat from more urban locales , politicians and police officers are taking the problem seriously .", "`` Whether you grew up here all your life , or whether you moved here from the Bronx or Yonkers or Long Island , you do n't want to see that , `` said Linda G . Cooper , the town supervisor .", "`` And so we 're trying to take a very firm position . ``", "In June , the Yorktown police began graffiti patrols as a deterrent .", "They also began photographing graffiti they found to create a catalog of the work of local vandals for use in investigations , Lt . Donald Schuck said .", "Since July , Lieutenant Schuck said , the police have arrested nine boys on graffiti-related charges .", "The most recent came on Aug . 
28 , with the arrest of a 14-year-old from Mohegan Lake , a hamlet of Yorktown .", "The police said he had sprayed a wall at a town-owned sports club , causing about $ 400 in damage .", "The boy , charged with making graffiti and possession of graffiti instruments , both misdemeanors , was released to his mother and was scheduled to appear on Friday in Westchester County Family Court in White Plains .", "The town , which has seen stop signs , park buildings , businesses and even a police radar unit defaced this summer , is also considering new legislation .", "One proposed law would require vandals to pay restitution .", "Another would mandate that residents who discover graffiti on their property clean it within 72 hours .", "Ms. Cooper said that rapid removal discouraged vandals .", "Town officials and youth advocates said there were a number of reasons for the surge in graffiti .", "Lieutenant Schuck said increased access to the tools of graffiti had played a role .", "Young people , previously stymied by a county law forbidding the sale of spray paint to anyone under 18 , have begun ordering the paint over the Internet , he said .", "Joan Valenstein , chairwoman of the advisory board for the Yorktown Teen Center , a branch of the Boys and Girls Club of Northern Westchester , said the increase might be the byproduct of boredom in a town that she said did not provide enough youth activities .", "Ms. Cooper said some of the graffiti included gang insignia previously seen in the southern , more urban part of the county .", "Whatever the source of the graffiti , the town seems determined to stamp it out .", "But out on the Taconic State Parkway , high above the cars rushing by , defiance is etched in yellow and black .", "VANDALISM ."], "summary": ["Officials in Yorktown , NY , say graffiti has reached new heights this summer .", "Police have begun graffiti patrols as deterrent and have arrested nine boys on graffiti-related charges since July .", "Photo ."], "publication": "nyt50", "label": [7, 1, 5], "tag": ["New York and Region"]} -{"id": "1788845", "text": ["Little by little , the cafe inside Atticus Bookstore in New Haven has swallowed more and more of the space .", "It began with a couple of tables , a counter and display cases in 1981 , when the cafe opened .", "There are now 20 tables occupying about a third of the store , and according to Caleb Fraser , an assistant manager , plans are slowly evolving for even more cafe space .", "The cafe has proved to be wildly popular for its hearty soups , generous sandwiches and salads , coffees and other beverages , as well as baked goods , including cookies , tarts , muffins and scones .", "A popular order is soup and half a sandwich or salad for $ 8.20.", "Half a sandwich and half a salad at the same price is also a favorite combination , with eight sandwiches or panini and seven salads to choose from .", "I 'm partial to the lemon chicken salad and grilled tomato panino with pesto , but have n't been disappointed with any of my choices .", "Black bean soup is a menu regular , and there 's always a meat soup like chicken Florentine with spinach and a vegetarian soup like lentil -LRB- or try the cold avocado-yogurt pur\u00e9e if it 's on the day 's menu -RRB- .", "The cafe is open daily from 8 a.m. to 10 p.m. and is especially crowded at lunchtime , between 11 a.m. 
and 2 p.m.", "Throughout the day , from breakfast on , students , professors and others congregate for coffee , espresso , cappuccino and a pick-me-up nibble .", "Matineegoers to the Yale Repertory Theater down the street often drop by for a quick lunch or post-performance snack .", "But no one needs an excuse -- the sweets are incredibly delicious , whether raspberry or blackberry Danish , sticky buns , plain or whole wheat croissants , lemon tarts , bread pudding , chocolate cheesecake brownies or almond bars .", "All baked goods , like the soups and everything else at the cafe , are made at the local Chabaso Bakery , the bookstore 's parent company .", "Atticus Bookstore / Caf\u00e9 , 1082 Chapel Street , New Haven .", "-LRB- 203 -RRB- 776-4040 .", "PATRICIA BROOKS ."], "summary": ["Patricia Brooks reviews cafe inside Atticus Bookstore in New Haven , Conn , photo ."], "publication": "nyt50", "label": [0], "tag": ["New York and Region"]} -{"id": "1788846", "text": ["HE sits in his wheelchair as the family rushes around him .", "He can not move much , or say more than hello .", "He can not participate in the summer activities that everyone pursues with great vigor day after day .", "If it 's warm enough , he goes out onto the deck and snoozes in the sun with the dogs .", "Unlike them , however , he does n't jump up and make excited noises when people come .", "At most , he slowly turns his head and smiles .", "Everyone speaks to him politely , but not for long .", "What 's the point .", "He ca n't say more than a couple of words , and it 's hard to tell how much he understands .", "He is my stepfather , Peter , an 88-year-old man who in the last decade has been transformed from a lively and dynamic person into not much more than a body occupying space .", "He has post-polio syndrome , a condition that seeps the strength from his upper body as steadily as it weakened his legs when he was a teenager .", "A couple of strokes have further debilitated him .", "As my son , Asher , said to my mother one day , it 's as if he 's hardly a person anymore .", "And yet this is n't how Asher , 14 , behaves toward him .", "He constantly monitors Peter 's feet to see if they 've slipped off the footrests of his wheelchair .", "He always asks if Peter wants something to drink .", "His recognition of the full extent of what it means to be a person goes beyond his frustration at Peter 's limitations .", "Asher is concerned with Peter 's comfort , his feeling of inclusion .", "Peter 's situation brings out Asher 's own humanity .", "Peter is certainly a person to my mother , Addy , though she has no illusions about his abilities .", "He is her third husband , the one who was finally her friend .", "She does what she can to make him comfortable and to replicate his old habits .", "Since his only real pleasure is food , she makes him good meals for lunch and dinner .", "At night they listen to Amy Goodman on NPR and then watch Chris Matthews and a couple of episodes of `` Seinfeld '' or `` Curb Your Enthusiasm . 
''", "On Tuesdays , Peter 's longtime men 's lunch group comes over to eat with him and discuss books and politics .", "Peter does n't participate , but he enjoys the routine .", "Last summer he could still join them at the local restaurant .", "He would motor up the street in his Jazzy wheelchair with an orange pennant waving above his head to warn cars away .", "He is far from being able to do anything like that now .", "Peter needs to be cared for at the most basic custodial level .", "When my friend Anne visited , her 9-year-old son , Nick , was interested in what this entailed .", "Over the course of a five-day stay , Nick asked many questions of Stacey , the woman who comes in to get Peter out of bed in the morning -- the very practical questions that most adults prefer not to think about .", "Several times Stacey saw Nick looking in the window when it was changing time .", "He was n't fazed by what he saw .", "He accepted Peter 's condition and presence in the house as natural .", "He was right about that .", "My mother and Peter live on the lip of a harbor in Maine .", "All summer , family members passed through , usually for a week or so .", "I stayed the longest -- six weeks .", "On some days there were enough people staying to fulfill my old fantasy of a big house full of people , bursting with robust togetherness .", "This was a new phenomenon here .", "For many years we were only welcome to stay for a short time .", "The stepparents had limited tolerance for each other 's children , especially the noisy grandchildren .", "I often rented nearby to make visits to my mother less stressful .", "Other sons and daughters did the same .", "We rarely overlapped or had the sense of a beloved summer house , full of traditions passed down through generations .", "We each had a private relationship with Maine , and with Peter and my mother .", "But an unexpected side effect of Peter 's deterioration has been a lessening of the feeling that anyone beyond my mother and stepfather creates a crowd .", "Now Peter seems to enjoy the bustle that my mother used to believe was an imposition on him .", "He is no longer an aging intellectual who requires quiet for reading and writing .", "The grandchildren are older , and he is younger , babylike .", "After breakfast , he sleeps for a couple of hours in the kitchen , no matter the amount of dish washing or screen-door banging .", "So family life swirled around him this summer .", "We spent the kind of easy time together that I like best , quantity rather than quality .", "Just hanging out .", "Siblings , nieces and nephews trooped through with significant others in tow .", "They each had a relationship with Peter while they were there .", "Some spent time talking to him even if he could n't reply .", "Others made sure he was comfortable at the table during meals .", "Though it was easy to forget he was in the room , everyone was delighted when he broke into a conversation with a responsive remark .", "The old Peter ! 
It was good to see him again , if only for a moment .", "Starting the last week of July , my mother began to say fall was in the air .", "I bridled against this , though I knew what she meant .", "I felt it too , a change in the light from white to yellow , a softening of the wind , a resignation of the leaves on certain trees .", "But I did n't want to skip ahead , so I pretended not to notice .", "It 's summer , I insisted .", "This is what summer is like in Maine .", "It is tempting to make this whisper of fall a metaphor for Peter 's diminishing presence .", "September brings up memories of how the end of summer felt during childhood , a loss .", "Yet I find myself resisting the comparison .", "Peter is alive , and summer does n't officially end for 10 more days .", "I 'm still wearing white .", "GENERATIONS ."], "summary": ["Alice Elliott Dark Generations essay on summer spent in Maine with her family and her stepfather , Peter , 88 , who is debilitated with post-polio syndrome and effects of strokes .", "Drawing ."], "publication": "nyt50", "label": [9, 66, 11], "tag": ["New York and Region"]} -{"id": "1788848", "text": ["FIVE years ago my brother and I spoke regularly .", "Five years ago we drank together , teased each other without mercy and , occasionally , even expressed feelings of affection .", "After 9/11 we did not .", "My brother is a 20-plus-year veteran of the New York City Fire Department , a former marine with a degree in history whose politics are conservative .", "He is four years older and quite a bit larger than me .", "I am a 20-plus-year veteran of big-agency advertising , a creative director turned fiction writer whose politics , not surprisingly , do not lean conservatively .", "Until five years ago , this was not such a big problem .", "There were plenty of other things to talk about , and we knew that despite our differences , there was still much to appreciate about each other .", "On Sept . 11 , 2001 , I was in Boca Raton , Fla . , on business accompanied by my wife , my 3-year-old daughter and my mother-in-law -LRB- yes , my mother-in-law came on business trips with us back then -RRB- at , of all things , a sales convention for a yogurt company .", "At 9 a.m. , we were granted a 10-minute respite from the executive Vince Lombardi-isms and the Roman Colosseum-inspired motivational d\u00e9cor .", "The first thing I did was check the messages on my personal communications device du jour , because in 2001 , I was convinced that the more I checked the quotidian drivel it contained the more it seemed that my ad agency , yogurt convention , frequent-flier-miles-financed-family-business-trip life mattered .", "But this time what the messages told me were hardly drivel .", "I immediately called New York to check on my brother who was not supposed to be working , but with firefighters you never know .", "He was n't working .", "He 'd soon go to the site , but luckily he would n't be one of the 343 firefighters killed .", "For the next hour , I 'm fairly sure that he watched what I watched , that he looked away when I looked away , and I am fairly sure that at 9:59 a.m. 
, when the south tower of the World Trade Center collapsed , he felt exactly what I felt .", "What we all felt .", "I am also sure that those were the last pure , nonpoliticized thoughts any of us would have about that day , and the last time that my brother and I would feel the same way about anything for some time .", "Renting a car and driving home was our only option .", "At first I wanted to push through , to rush the family home .", "`` To what .", "`` my wife asked .", "`` To have our daughter watch her parents sit paralyzed in front of a television set .", "`` So we took our time , taking a scenic , non-traditional route back to New York .", "I got my information from fellow travelers and National Public Radio .", "I found comfort in the measured voices of `` All Things Considered , '' solace in `` I love New York '' signs in Georgia , inspiration in the words on Jefferson 's tombstone at Monticello , near Charlottesville , Va . , which noted that he was also one of the fathers of religious tolerance in this country .", "Meanwhile , my brother was getting his information from rescue workers and fellow firefighters .", "Because of his military background , his job in the years before the attack had included training recruits at the Fire Academy at Randalls Island .", "His job in the months ahead would be to coordinate funerals .", "Dozens of funerals .", "For friends and friends of friends , each with a story more tragic than the last .", "Late at night on Sept . 14 , my family slept as I drove through Pennsylvania .", "With no NPR to be had for the time being , I listened to sports guys weighing in on the Northern Alliance , D.J. ` s explaining the tenuous Pakistani-Afghani relationship .", "With each passing mile , more and more proselytizing and hate seeped into the views of the syndicated giants .", "Driving near Port Jervis , N.Y. , a state trooper pulled alongside our car and shined a spotlight inside while the rest of my family was sleeping .", "Four strangers in a red rental car with Florida plates .", "Suspects .", "To think that 9/11 drove a stake between my brother and me is as na\u00efve as thinking that it drove one through the country .", "Red and blue staters had been at each other 's throats for a while , and my brother and I had clashed on and off over lots of things for years .", "But this took it farther .", "He had been affected by it in ways I could not imagine .", "Of the 343 firefighters killed , he knew dozens .", "No one that I knew had died .", "Within a week , I would go back to work .", "For more than a year , he would go to funerals and I imagine that in addition to grief , a man with my brother 's courage and sense of duty must also have been dealing with a serious case of survivor 's guilt .", "But did that make his opinions -- which had become increasingly angry and pronounced -- right .", "Over the last five years we 've disagreed about everything from the 2000 and 2004 elections to the war in Iraq , radical Islam and of course , the liberal news media .", "For a while we tiptoed around politics but when we were together everything seemed political .", "For a while we did n't speak at all .", "But lately we 've been talking .", "I care too strongly for him to let politics destroy our relationship and I think he feels the same .", "The other day I called him .", "He had just gotten home from the hospital where a fellow firefighter , Lt . Howard Carpluk Jr . 
lay in critical condition from injuries suffered when the floor had given way in a burning Bronx building .", "Another firefighter , 25-year-old Michael Reilly , who had served in the Marines in Iraq , had already died .", "My brother told me he was there near Mayor Michael Bloomberg as the doctors gave them an update .", "-LRB- Lieutenant Carpluk died the following day . -RRB-", "My brother sounded tired .", "After some time , while discussing Labor Day plans , I told him that I 'd been invited to discuss my book on a conservative talk show in Boston and joked that I feared an ambush .", "He told me to tell them that my brother was a New York City firefighter , and maybe they 'd go easy on me .", "Op-Ed Contributor James P . Othmer is the author of the novel `` The Futurist . '' ."], "summary": ["Op-Ed article by James Othmer describes ways in which his relationship with his brother , veteran New York City firefighter who worked at World Trade Center site , changed after 9/11 ."], "publication": "nyt50", "label": [3, 59], "tag": ["New York and Region", "Opinion"]} -{"id": "1788850", "text": ["C . Lee Hanson , of Easton , rarely plans ahead to visit Connecticut 's memorial to the victims of Sept . 11 .", "But at odd moments he finds himself going to the site , on Long Island Sound at Sherwood Island State Park in Westport .", "There , he sees once again the three names carved in stone : those of his son , Peter .", "His son 's wife , Sue .", "And their 2-year-old daughter , Christine .", "They were passengers on United Flight 175 and died when their airplane hit the World Trade Center five years ago tomorrow .", "Paula Clifford Scott of Mystic goes there , too , mostly during annual memorial services .", "There are two familiar names etched in stone , and on her heart : those of her daughter , Ruth McCourt , and 4-year-old granddaughter , Juliana .", "They were on the same flight as Mr. Hanson 's family , and were among the 152 from Connecticut who died that day .", "States with the largest losses from the attacks have had difficulty memorializing those who died .", "New York only recently awarded the first big construction contract for the footings of a memorial at the World Trade Center site .", "New Jersey 's state memorial , under construction at Liberty State Park in Jersey City , has run into opposition because it obscures views of Lower Manhattan from the park .", "But Connecticut , at former Gov . John Rowland 's urging , moved swiftly to create a state tribute to the victims of Sept . 11 .", "Within seven months a site overlooking the Sound had been chosen .", "At the first anniversary of the attacks , the memorial was taking shape , with nearly every element donated -- including the design , done with the participation of victims ' families .", "Best of all is the location .", "In a place where sea meets sky , the memorial offers the promise of tranquillity , even in the face of loss .", "`` I like simple things , '' Mr. Hanson said , and it pleases him that the design of the memorial is not complicated .", "The names of the victims are carved in granite bricks , scattered randomly , to symbolize the senselessness of the act that took their lives .", "Beach roses bloom nearby , and benches invite contemplation .", "On a clear day , the Manhattan skyline is visible .", "It is from here that horrified Connecticut residents watched the billowing smoke from New York City after the attacks , and mobilized to help .", "Asked what the state memorial meant to her , Ms. 
Scott said : `` Remembrance -- and life .", "Life stopped for a little while that day .", "It was up to us to begin again . ``", "Connecticut 's lovely memorial to Sept . 11 keeps alive memories of the victims , comforts the families and offers solace to visitors .", "It stands also as a monument in proud contradiction to those who say government can not do anything right .", "Connecticut ."], "summary": ["Editorial holds Connecticut 's memorial to September 11 , at Sherwood Island State Park in Westport , keeps alive memories of victims , comnforts families , offers solace to visitors and stands as monument that contradicts those who say government can not do anything right ."], "publication": "nyt50", "label": [25, 26, 1], "tag": ["New York and Region", "Opinion"]} -{"id": "1788855", "text": ["WHEN affluent suburbanites travel , they can be quite fussy , demanding that airplanes show up and take off like clockwork and that anything less is -- to use a word the fastidious favor -- unacceptable .", "But when they 're lounging in their backyards , they want the skies to be jet-free , or at least noise-free .", "If jets must be seen , they should not be heard .", "The problem in hubs like New York is that the two desires are fast becoming irreconcilable .", "American airlines handled 739 million passengers last year .", "By 2015 that number is expected to balloon to 1 billion .", "No one wants an airport sprouting next door , with all the attendant whines , drones and roars , so the Federal Aviation Administration has been trying to squeeze in more flights by rejiggering the way planes use air space in the corridor between Philadelphia and Connecticut .", "But the redesign has set off a fury among people living around Westchester County Airport , which includes thousands of Connecticut residents .", "It turns out that flight patterns around the airport may have to be tinkered with because of the spillover from changes in patterns at La Guardia Airport , meaning more whines and roars for those living below .", "Until recently , the issue of rerouting seldom flashed on the public 's radar , mostly because the F.A.A. had not always been forthcoming .", "In February , Janet Lockton , president of the Greenwich-based Air Conservation Trust , went to an F.A.A. slide show in Stamford and came away reassured that Westchester 's airport would be untouched , and that La Guardia flights would spend more time over the uncomplaining waters of Long Island Sound .", "But then she ordered three CD 's containing 1,226 pages of the preliminary environmental statement and discovered in the nearly impenetrable appendixes that planes taking off from the county airport would indeed loop over Westchester and Connecticut for longer periods .", "She alerted officials across the state border , and in June , County Executive Andrew J . Spano issued a letter to the F.A.A. , based on an environmental consultant 's study .", "Mr. Spano 's letter rebuffed the F.A.A. ` s preferred proposal for flight patterns because the plan would have '' a significant impact `` on places like Rye Brook , Pleasantville and Briarcliff .", "Some aircraft , Mr. Spano bristled , would `` incredibly '' be rerouted over the Indian Point nuclear plant .", "He did n't mention Connecticut , but the new flight paths would raise decibel levels in Greenwich , Stamford and New Canaan -- though how unhinging the noise would be has been disputed , with the F.A.A. minimizing any spikes in volume and the consultant , Harris Miller Miller & Hanson Inc . 
, saying the changes would probably be `` highly noticeable . ''", "The county airport already has limits on noise enshrined in county law .", "In any half-hour-period , no more than four commercial planes can land or take off at the airport , and those planes together can carry no more than 240 passengers .", "Flights are practically banned between 11 p.m. and 6 a.m.", "The airport has intentionally been kept a backwater , with such anomalies as propeller-plane flights to Toronto .", "`` There are people who persist in talking about how Westchester could , should or would expand , '' said Robert Funicello , director for environmental projects at Westchester 's transportation department .", "`` It is n't going to happen . ``", "He and Ms. Lockton argue that planners need to think about alternatives like high-speed trains .", "The Port Authority thinks the area needs a fourth full-scale airport to go with Kennedy , Newark and La Guardia , probably at Stewart Airport outside of Newburgh , N.Y.", "Other regional planners think a day of reckoning can be put off if everybody sacrifices , including Westchester Airport and its neighbors .", "It 's hard to argue that officials should not do all they can to block noise in the suburbs , where people pay a premium for tranquillity .", "But advocates for quiet skies may have to start pondering a day not too far distant when the major airports ca n't handle any more planes , and travelers ca n't get flights they need .", "Changes will have to be made that would raise the number of flights and ratchet up the noise .", "We live in an era , after all , when flying on a plane has become almost as routine as riding a subway .", "Many of the people who can afford spreads in Westchester and Connecticut earn much of their living traveling for business , which could slow down if air travel gridlocks .", "`` We 're going to run out of room at airports or room in the skies unless people get the big picture view , `` said Kevin Mitchell , chairman of the Business Travel Coalition , a group that represents corporate buyers of travel services .", "`` The upshot is much higher business fares and even higher leisure fares . 
''", "In the past half century , airplane travel has changed from an indulgence of the well-to-do to a convenience that most people can enjoy .", "If airports stop growing , travel could become far less democratic , and just visiting an aunt in Missouri may become prohibitive .", "That 's a trade-off well worth thinking about by those carrying on the worthy fight against noise .", "E-mail : joeberg@nytimes.com ."], "summary": ["Joseph Berger column on dispute between Federal Aviation Administration and tens of thousands of Westchester County and Connecticut residents over FAA plan to alter flight patterns at Westchester County Airport , which has potential to increase noise .", "Photo ."], "publication": "nyt50", "label": [7, 27], "tag": ["New York and Region"]} -{"id": "1788856", "text": ["Fans of the lyrical qualities of 19th-century paintings -- purity , serenity , sentiment -- will find a visit to the Bush-Holley Historic Site in Cos Cob immensely appealing .", "There , the latest installation in a series of exhibitions devoted to artists who lived and worked in the Greenwich area presents the work of John Henry Twachtman -LRB- 1853-1902 -RRB- , one of America 's most genteel and popular Impressionist painters .", "Twachtman is a particularly appropriate choice for this museum .", "He was a cornerstone of a lively late-19th-century art colony centered on the Bush-Holley House -- where he lived from time to time as a guest and painted and taught other artists .", "He also bought a 17-acre property in Greenwich , where he did his finest work .", "The current exhibition is a selection of paintings produced on his Greenwich property , coupled with a handful of works done on his travels to Massachusetts , California and Europe .", "The exhibition is subtitled a painter 's painter , for Twachtman was admired more by other painters than by collectors for much of his career .", "Several of the early paintings here were owned by his peers , including a lovely Venetian cityscape that once belonged to J . 
Alden Weir , a fellow Impressionist .", "Twachtman devoted himself to teaching , which also helped to buoy his reputation among other artists .", "The show opens with a picture of the artist 's home in Greenwich , showing a small farmhouse surrounded by acres of largely uncultivated land .", "Twachtman set the farmhouse high up in the canvass , at the crest of a hill and shaded by a light clump of autumnal trees .", "It works on all levels , being neither ponderous like his earlier tonalist pastiches nor as formulaic as some later Impressionist paintings .", "Twachtman also liked to paint the back facade of his house , viewed close up from above and below or framed through the tangled greenery of his overgrown garden .", "`` Artist 's Home Seen From the Back `` -LRB- circa 1894 -RRB- is perhaps the most accomplished of these pieces , the contrasts of color and light in the garden areas augmented with some pleasantly pedantic detailing .", "Though it shows the artist 's obvious painterly gifts , the most interesting works are views from the Bush-Holley House , looking out over the quiet bay below .", "A great deal has changed in the century since Twachtman painted here , not the least of which is the development of the shoreline .", "Gone is much of the natural beauty and tranquillity of the setting , which is what drew Twachtman and other artists to the Bush-Holley house .", "Two of the most curious paintings are also the least successful .", "They are scenes of Yellowstone Park , where Twachtman traveled to paint on commission for a patron .", "One of the works depicts a gushing waterfall , the other a ravine , but neither is entirely convincing .", "Both are painted from an implausible point of view , that of someone hovering midair or standing in the middle of a river .", "Nor do the tones of pink , lavender-gray , powder blue and green seem quite right .", "Twachtman was painting in Yellowstone in 1895 , by which time he was an accomplished and much admired artist .", "So why were his Yellowstone paintings so off the mark .", "Perhaps he had become accustomed to painting at home , focusing on the lush greenery , brooks and barns of Connecticut , and was just unsure of himself and his relationship with the new subject matter .", "Whatever the explanation for these aberrant images , they pale in comparison to his Greenwich and Cos Cob paintings , of which there are several excellent examples here .", "`` Autumn Mists '' -LRB- circa 1890 -RRB- , which depicts the pond on his property , clearly shows Twachtman 's skill at painting water .", "Passages of blue and green paint are thickened in parts with white to give the surface some texture , but they also capture subtle gradations of light .", "It is thoroughly cinematic .", "`` John Twachtman : A Painter 's Painter , `` William Hegarty Gallery , Bush-Holley Historic Site , 39 Strickland Road , Cos Cob , through Oct . 29 .", "Information : -LRB- 203 -RRB- 869-6899 or www.hstg.org.", "ART REVIEW ."], "summary": ["Benjamin Genocchio reviews works by John Twachtman at Bush-Holley Historic Site in Cos Cob , Conn .", "Photos ."], "publication": "nyt50", "label": [29], "tag": ["New York and Region"]} -{"id": "1788857", "text": ["Alma Winemiller is the old maid 's old maid .", "And Amanda Plummer is all over her like a swarm of drugged Southern bees in Hartford Stage 's heartfelt and heart-wrenching new production of `` Summer and Smoke . 
''", "Tennessee Williams often said that Miss Alma , as almost everyone calls her , was his finest female character and the one most like him .", "She is a Mississippi minister 's daughter , still relatively young in the years just before World War I but already spinsterish , admiring of the `` everlasting struggle '' between good and evil represented by Gothic cathedrals , and eventually dependent on and grateful for `` little mercies , '' like her prescription for sleeping tablets .", "Ever since her mother turned into a mental infant , Alma has been responsible for the running of the household .", "She is dutiful but angry -LRB- `` You act like a child , but you have the Devil in you , '' she tells her mother -RRB- and occasionally vengeful .", "There is not an uncomplicated or stereotypical bone in Alma 's body , and Ms. Plummer adds complexities and vulnerabilities of her own .", "Alma 's problem is that she has been in love since childhood with the golden boy next door , John Buchanan Jr . , played with easy Redfordish charm by Marc Kudisch .", "That 's Doctor Buchanan .", "But John 's problem is that he is n't much interested in taking over his respected father 's medical practice .", "He prefers to spend his time drinking and carousing with fast women like Rosa Gonzalez -LRB- Stephanie Beatriz -RRB- , whose father owns the local casino .", "Williams almost always throws an exotic `` foreign '' character or two into his company of mostly genteel white Southerners .", "In this case he does n't have anything nice to say about Latinos .", "Rosa exists only as a giggly sexual object , and Papa Gonzales -LRB- Mateo G\u00f3mez -RRB- is depicted as loud , drunken , greedy and violent .", "The only two characters who really matter , however , are Alma and John .", "The obvious question is why hunky John bothers with pitiful Alma at all , so real chemistry between the two characters is crucial .", "This production , smoothly directed by Michael Wilson , delivers that in no uncertain terms .", "Mr. Kudisch 's John is sincerely fascinated by Alma , if only because of her enlightened ideals .", "Ms. Plummer 's Alma is absolutely breathless when John unbuttons her blouse to place the stethoscope on her chest .", "John 's sexual interest in her is real , too .", "At this point in his life , he wants to sleep with every woman who crosses his path .", "If `` Summer and Smoke '' is one of Williams 's lesser-known works , there are unfortunate reasons .", "Although the critic Brooks Atkinson praised it to high heaven when it opened on Broadway in 1948 , it was a flop , running only three months or so .", "Maybe that was a result of perceived overkill : Williams 's Pulitzer Prize-winning drama `` A Streetcar Named Desire '' had opened less than a year before and was playing two blocks away .", "It is also possible that Margaret Phillips was not the right Miss Alma .", "The 1952 Off Broadway revival , at Circle in the Square , with the remarkable Geraldine Page in the role , received a warmer welcome .", "Ms. Page recreated her role for the imperfect 1961 film version , opposite Laurence Harvey as John .", "-LRB- Ms. Page and Ms. Plummer starred in the 1982 Broadway production of `` Agnes of God , '' for which Ms. Plummer won a Tony Award . 
-RRB-", "The brilliance of `` Summer and Smoke '' lies in its final scenes -- Alma and John 's last encounter -LRB- in which his line `` It has only been three or four times '' is an almost unbearably painful realization -RRB- , followed by Alma 's unexpected conversation with a traveling salesman .", "If this were John Buchanan 's story , it becomes clear , Alma would play a very minor role , if she appeared in it at all .", "`` Summer and Smoke '' is at Hartford Stage , 50 Church Street , through Oct . 1 .", "Information : www.hartfordstage.org or -LRB- 860 -RRB- 527-5151 .", "THEATER REVIEW ."], "summary": ["Anita Gates reviews Hartford Stage production of Tennessee Williams 's play Summer and Smoke , directed by Michael Wilson .", "Amanda Plummer and Marc Kudisch star .", "Photo ."], "publication": "nyt50", "label": [1, 16], "tag": ["Theater", "New York and Region"]} -{"id": "1788859", "text": ["IT may have left no physical damage in its wake , but the recent communal storm in this oceanfront city over the future of its beaches has realigned the political and environmental landscape .", "Despite fears about the city 's vulnerability to a major hurricane , the five-member City Council , three Democrats and two Republicans , voted unanimously in May to reject a $ 98.5 million beach preservation project by the Army Corps of Engineers that was designed to protect Long Beach from ocean flooding .", "The plan would have placed a berm of dredged sand along the beach 10 feet high , with a 5-foot dune on top , from the western end of Long Beach to Point Lookout , more than six miles to the east .", "Point Lookout agreed to a separate plan after the Long Beach project was rejected .", "A major opponent of the corps ' plan was an environmental and surfer-advocacy group , the Surfrider Foundation , whose members said the project would create dangerous riptides and harm the look of the beach , with no guarantee that the city would be better protected , as the corps and the proponents of the plan claimed .", "The group held meetings to get its message to the public and the council alike , and produced testimony by a coastal engineer and several representatives from local communities whose beaches had undergone similar projects .", "All testified against the corps ' proposals for Long Beach .", "Jeff Kupferman , the chairman of Surfrider 's Long Beach Action Committee and a 45-year city resident , said that while rejection of the plan was a `` major victory '' for Surfrider , surfing was far from the only issue .", "`` We had concerns about swimming safety , as well as surfing , about fishing , kayaking , aesthetics -- any use of the beach , '' he said .", "James P . Hennessy , a Republican council member , agreed .", "`` It was never just about surfing , '' he said .", "`` The council does n't agree about much , but it did agree that the beach fill part of the project was wrong . ``", "What annoyed Mr. Kupferman was that Surfrider was portrayed negatively by those who favored the plan .", "`` Their attitude was we were , ' Yo , just a bunch of surfer dudes out to get a wave , ' '' he said .", "`` And they used that as the hook to try and discredit us .", "The fact that we prevailed has sent a lot of ripples out into this community . 
``", "Alison Johnson , a Long Beach resident and vice chairwoman of Surfrider 's New York City chapter , which worked closely with the Central Long Island chapter in opposing the plan , said that the decision had ramifications beyond Long Beach .", "`` It will make the powers that be look at storm protection on the East Coast in a different way , '' she said , `` which is the biggest success you can ask from any project . ''", "Assemblyman Harvey Weisenberg , a lifelong Long Beach resident and a vocal supporter of the Corps of Engineers ' project , was less sanguine about the outcome .", "`` How did people get elected to office that are so ignorant .", "`` he said of the City Council .", "`` I just pray hard and hope to God we do n't get hit by anything . ``", "Even with the beach issue decided , the officials ' alliance with activists may continue .", "Mr. Hennessy and the other Republican council member , Thomas R . Sofield Jr . , have proposed an alternative storm-management plan , which includes working with advisory groups like Surfrider , and the city has asked independent coastal engineers for ways to address beach protection .", "Mr. Hennessy said he still had hopes of working with the Corps of Engineers should it agree to return to Long Beach , but he is adamant about his vote to reject the project .", "`` I can count on the fingers of one hand the number of people who came up to me and said we 'd made a mistake , `` he said .", "STORM PROTECTION ."], "summary": ["Article on controversy over rejection by City Council in Long Beach , NY , to beach preservation project by Army Corps of Engineers designed to protect beach from ocean flooding .", "Surfrider Foundation contended plan to build 15-foot-high berm and dune from Long Beach to Point Lookout would create dangerous riptides and harm look of beach and would not protect beach .", "Photos ."], "publication": "nyt50", "label": [1, 4], "tag": ["New York and Region"]} -{"id": "1788860", "text": ["HE sits in his wheelchair as the family rushes around him .", "He can not move much , or say more than hello .", "He can not participate in the summer activities that everyone pursues with great vigor day after day .", "If it 's warm enough , he goes out onto the deck and snoozes in the sun with the dogs .", "Unlike them , however , he does n't jump up and make excited noises when people come .", "At most , he slowly turns his head and smiles .", "Everyone speaks to him politely , but not for long .", "What 's the point .", "He ca n't say more than a couple of words , and it 's hard to tell how much he understands .", "He is my stepfather , Peter , an 88-year-old man who in the last decade has been transformed from a lively and dynamic person into not much more than a body occupying space .", "He has post-polio syndrome , a condition that saps the strength from his upper body as steadily as it weakened his legs when he was a teenager .", "A couple of strokes have further debilitated him .", "As my son , Asher , said to my mother one day , it 's as if he 's hardly a person anymore .", "And yet this is n't how Asher , 14 , behaves toward him .", "He constantly monitors Peter 's feet to see if they 've slipped off the footrests of his wheelchair .", "He always asks if Peter wants something to drink .", "His recognition of the full extent of what it means to be a person goes beyond his frustration at Peter 's limitations .", "Asher is concerned with Peter 's comfort , his feeling of inclusion .", "Peter 's situation brings out Asher 's own 
humanity .", "Peter is certainly a person to my mother , Addy , though she has no illusions about his abilities .", "He is her third husband , the one who was finally her friend .", "She does what she can to make him comfortable and to replicate his old habits .", "Since his only real pleasure is food , she makes him good meals for lunch and dinner .", "At night they listen to Amy Goodman on NPR and then watch Chris Matthews and a couple of episodes of `` Seinfeld '' or `` Curb Your Enthusiasm . ''", "On Tuesdays , Peter 's longtime men 's lunch group comes over to eat with him and discuss books and politics .", "Peter does n't participate , but he enjoys the routine .", "Last summer he could still join them at the local restaurant .", "He would motor up the street in his Jazzy wheelchair with an orange pennant waving above his head to warn cars away .", "He is far from being able to do anything like that now .", "Peter needs to be cared for at the most basic custodial level .", "When my friend Anne visited , her 9-year-old son , Nick , was interested in what this entailed .", "Over the course of a five-day stay , Nick asked many questions of Stacey , the woman who comes in to get Peter out of bed in the morning -- the very practical questions that most adults prefer not to think about .", "Several times Stacey saw Nick looking in the window when it was changing time .", "He was n't fazed by what he saw .", "He accepted Peter 's condition and presence in the house as natural .", "He was right about that .", "My mother and Peter live on the lip of a harbor in Maine .", "All summer , family members passed through , usually for a week or so .", "I stayed the longest -- six weeks .", "On some days there were enough people staying to fulfill my old fantasy of a big house full of people , bursting with robust togetherness .", "This was a new phenomenon here .", "For many years we were only welcome to stay for a short time .", "The stepparents had limited tolerance for each other 's children , especially the noisy grandchildren .", "I often rented nearby to make visits to my mother less stressful .", "Other sons and daughters did the same .", "We rarely overlapped or had the sense of a beloved summer house , full of traditions passed down through generations .", "We each had a private relationship with Maine , and with Peter and my mother .", "But an unexpected side effect of Peter 's deterioration has been a lessening of the feeling that anyone beyond my mother and stepfather creates a crowd .", "Now Peter seems to enjoy the bustle that my mother used to believe was an imposition on him .", "He is no longer an aging intellectual who requires quiet for reading and writing .", "The grandchildren are older , and he is younger , babylike .", "After breakfast , he sleeps for a couple of hours in the kitchen , no matter the amount of dish washing or screen-door banging .", "So family life swirled around him this summer .", "We spent the kind of easy time together that I like best , quantity rather than quality .", "Just hanging out .", "Siblings , nieces and nephews trooped through with significant others in tow .", "They each had a relationship with Peter while they were there .", "Some spent time talking to him even if he could n't reply .", "Others made sure he was comfortable at the table during meals .", "Though it was easy to forget he was in the room , everyone was delighted when he broke into a conversation with a responsive remark .", "The old Peter ! 
It was good to see him again , if only for a moment .", "Starting the last week of July , my mother began to say fall was in the air .", "I bridled against this , though I knew what she meant .", "I felt it too , a change in the light from white to yellow , a softening of the wind , a resignation of the leaves on certain trees .", "But I did n't want to skip ahead , so I pretended not to notice .", "It 's summer , I insisted .", "This is what summer is like in Maine .", "It is tempting to make this whisper of fall a metaphor for Peter 's diminishing presence .", "September brings up memories of how the end of summer felt during childhood , a loss .", "Yet I find myself resisting the comparison .", "Peter is alive , and summer does n't officially end for 10 more days .", "I 'm still wearing white .", "GENERATIONS ."], "summary": ["Alice Elliott Dark Generations essay on summer spent in Maine with her family and her stepfather , Peter , 88 , who is debilitated with post-polio syndrome and effects of strokes .", "Drawing ."], "publication": "nyt50", "label": [9, 66, 11], "tag": ["New York and Region"]} -{"id": "1788862", "text": ["It 's exciting to wander the stalls of foreign bazaars , food halls and markets , with their stocks of rich and strange ingredients that promise , perhaps , a thrill of sweet discovery .", "Some of that excitement can also be had in White Plains , where Yaranush packs its shelves and cases with foods from the Mediterranean and the Middle East .", "Even shoppers familiar with the wares grab a basket and trawl the aisles of this small shop .", "But old-timers might head right for the clear boxes of dried fruit and shelled and unshelled nuts , just beyond the breads heaped at the entrance .", "Some wander to the back for sesame cakes and for glistening , honey-dripping baklava .", "Then there 's the refrigerated case .", "Next to items like stuffed grape leaves , eight kinds of olives , savory pastries , yogurts , hummus , tabbouleh and tzatziki are the bins of feta , why I usually come here .", "Yaranush has three types of feta bobbing in a brine bath : Greek , French and Bulgarian .", "And they 're all made from sheep 's milk .", "The crumbly Bulgarian 's sour , salty taste is good on a salad .", "The Greek style , smooth and somewhat less salty , goes well with tomatoes and in sandwiches .", "The mild and creamy French spreads nicely on crackers and can take a sweet or savory topping .", "Customers unsure of which feta to choose can request samples .", "Between the breads and the baklava , shelves burst with coffees , all sorts of preserves -LRB- including quince and pumpkin -RRB- , vegetables , herbs , spices , cookies , condiments , biscuits , candy and even mortars and pestles and sets of dainty coffee cups .", "Most labels are in English , but if a jar of mysterious syrup or a bag of Persian dried lemons catches the eye , the gracious owners are always on hand for explanations and advice .", "Yaranush Mediterranean Foods , 322 Central Avenue , White Plains .", "-LRB- 914 -RRB- 682-8449 .", "M . H . 
REED Westchester ."], "summary": ["Article on Yaranush Mediterranean Foods in White Plains , NY ."], "publication": "nyt50", "label": [15], "tag": ["New York and Region"]} -{"id": "1788865", "text": ["FIVE years ago my brother and I spoke regularly .", "Five years ago we drank together , teased each other without mercy and , occasionally , even expressed feelings of affection .", "After 9/11 we did not .", "My brother is a 20-plus-year veteran of the New York City Fire Department , a former marine with a degree in history whose politics are conservative .", "He is four years older and quite a bit larger than me .", "I am a 20-plus-year veteran of big-agency advertising , a creative director turned fiction writer whose politics , not surprisingly , do not lean conservatively .", "Until five years ago , this was not such a big problem .", "There were plenty of other things to talk about , and we knew that despite our differences , there was still much to appreciate about each other .", "On Sept . 11 , 2001 , I was in Boca Raton , Fla . , on business accompanied by my wife , my 3-year-old daughter and my mother-in-law -LRB- yes , my mother-in-law came on business trips with us back then -RRB- at , of all things , a sales convention for a yogurt company .", "At 9 a.m. , we were granted a 10-minute respite from the executive Vince Lombardi-isms and the Roman Colosseum-inspired motivational d\u00e9cor .", "The first thing I did was check the messages on my personal communications device du jour , because in 2001 , I was convinced that the more I checked the quotidian drivel it contained the more it seemed that my ad agency , yogurt convention , frequent-flier-miles-financed-family-business-trip life mattered .", "But this time what the messages told me were hardly drivel .", "I immediately called New York to check on my brother who was not supposed to be working , but with firefighters you never know .", "He was n't working .", "He 'd soon go to the site , but luckily he would n't be one of the 343 firefighters killed .", "For the next hour , I 'm fairly sure that he watched what I watched , that he looked away when I looked away , and I am fairly sure that at 9:59 a.m. , when the south tower of the World Trade Center collapsed , he felt exactly what I felt .", "What we all felt .", "I am also sure that those were the last pure , nonpoliticized thoughts any of us would have about that day , and the last time that my brother and I would feel the same way about anything for some time .", "Renting a car and driving home was our only option .", "At first I wanted to push through , to rush the family home .", "`` To what .", "`` my wife asked .", "`` To have our daughter watch her parents sit paralyzed in front of a television set .", "`` So we took our time , taking a scenic , non-traditional route back to New York .", "I got my information from fellow travelers and National Public Radio .", "I found comfort in the measured voices of `` All Things Considered , '' solace in `` I love New York '' signs in Georgia , inspiration in the words on Jefferson 's tombstone at Monticello , near Charlottesville , Va . 
, which noted that he was also one of the fathers of religious tolerance in this country .", "Meanwhile , my brother was getting his information from rescue workers and fellow firefighters .", "Because of his military background , his job in the years before the attack had included training recruits at the Fire Academy at Randalls Island .", "His job in the months ahead would be to coordinate funerals .", "Dozens of funerals .", "For friends and friends of friends , each with a story more tragic than the last .", "Late at night on Sept . 14 , my family slept as I drove through Pennsylvania .", "With no NPR to be had for the time being , I listened to sports guys weighing in on the Northern Alliance , D.J. 's explaining the tenuous Pakistani-Afghan relationship .", "With each passing mile , more and more proselytizing and hate seeped into the views of the syndicated giants .", "Driving near Port Jervis , N.Y. , a state trooper pulled alongside our car and shined a spotlight inside while the rest of my family was sleeping .", "Four strangers in a red rental car with Florida plates .", "Suspects .", "To think that 9/11 drove a stake between my brother and me is as na\u00efve as thinking that it drove one through the country .", "Red and blue staters had been at each other 's throats for a while , and my brother and I had clashed on and off over lots of things for years .", "But this took it farther .", "He had been affected by it in ways I could not imagine .", "Of the 343 firefighters killed , he knew dozens .", "No one that I knew had died .", "Within a week , I would go back to work .", "For more than a year , he would go to funerals and I imagine that in addition to grief , a man with my brother 's courage and sense of duty must also have been dealing with a serious case of survivor 's guilt .", "But did that make his opinions -- which had become increasingly angry and pronounced -- right .", "Over the last five years we 've disagreed about everything from the 2000 and 2004 elections to the war in Iraq , radical Islam and of course , the liberal news media .", "For a while we tiptoed around politics but when we were together everything seemed political .", "For a while we did n't speak at all .", "But lately we 've been talking .", "I care too strongly for him to let politics destroy our relationship and I think he feels the same .", "The other day I called him .", "He had just gotten home from the hospital where a fellow firefighter , Lt . Howard Carpluk Jr . lay in critical condition from injuries suffered when the floor had given way in a burning Bronx building .", "Another firefighter , 25-year-old Michael Reilly , who had served in the Marines in Iraq , had already died .", "My brother told me he was there near Mayor Michael Bloomberg as the doctors gave them an update .", "-LRB- Lieutenant Carpluk died the following day . -RRB-", "My brother sounded tired .", "After some time , while discussing Labor Day plans , I told him that I 'd been invited to discuss my book on a conservative talk show in Boston and joked that I feared an ambush .", "He told me to tell them that my brother was a New York City firefighter , and maybe they 'd go easy on me .", "James P . Othmer is the author of the novel `` The Futurist . 
'' ."], "summary": ["Op-Ed article by James Othmer describes ways in which his relationship with his brother , veteran New York City firefighter who worked at World Trade Center site , changed after 9/11 ."], "publication": "nyt50", "label": [3, 59, 2], "tag": ["New York and Region", "Opinion"]} -{"id": "1788867", "text": ["`` The Rising , '' Westchester County 's towering memorial to the victims of 9/11 , did not get off to a promising start .", "The drawings were lovely but the design was flawed .", "It called for 109 polished steel cables , one for each Westchester resident killed that day , to rise from a circular base and intertwine into a pillar soaring magically to the sky .", "But the plan had to be rethought when the engineers informed the architect that his vision was likely to collapse under its own weight .", "Fixing it meant spending a lot more money , and as the Board of Legislators debated the issue and a crash fund-raising effort began , it seemed possible that this project could end up stalling and floundering as badly as its hexed counterpart at ground zero in Lower Manhattan .", "That it did not -- and that a completed memorial is scheduled to be dedicated today at Kensico Dam Plaza in Valhalla -- is a testament to the virtues of flexibility and cooperative good will , and to the power of a beautiful idea .", "`` The Rising '' was the unanimous choice of a committee of victims ' family members overseeing the design competition .", "The architect , Frederic Schwartz , and his engineering team recovered quickly from the technical setback .", "They replaced the cables with thicker stainless-steel rods , adjusted the swooping profile and came up with a structure that honored the original concept while promising to stand up to wind and gravity .", "The cost overrun for the redesigned memorial was not insignificant -- its original $ 200,000 price tag ended up at about $ 770,000 .", "By all accounts , that was the only real mix-up .", "Under the leadership of County Executive Andrew Spano , who first proposed the memorial in a State of the County address in 2002 , `` The Rising '' arose in a process that seems to have been a near-miraculous convergence of vision and efficient follow-through .", "The victims ' families were consulted at every step .", "They chose not only the design but also the site , in the shadow of the mighty Kensico Dam .", "They were invited to submit messages to be etched beneath the victims ' names on the granite plaques encircling the memorial , and many did .", "The product of their collaboration is remarkable , a web of polished steel that recalls but does not mimic the elegant geometry of the World Trade Center .", "To walk around and beneath it -- to read a name and family tribute on a marker and then follow the path of a single steel strand that pulls your gaze up and into the endless depth of a blue sky -- is to participate in an act of remembrance that can upend your expectations of what a memorial can do .", "`` The Rising '' proves that adding a bit of group participation and democracy to the creation of a work of art is not always fatal to an artist 's vision -- that the work of committees is not doomed to end in blandness and incoherence .", "It shows that a memorial incorporating personal tributes -- one that lets loved ones write the text -- does not have to succumb to maudlin sentiment .", "The messages are powerful in their simple dignity , and remind us that the heroism and suffering of that September morning were not confined to a few 
blocks of Lower Manhattan , but radiated throughout the New York region , and beyond .", "Finally , and most strikingly , `` The Rising '' shows that a memorial necessitated by an act of mass murder need not be morbid .", "This strange , shimmering tower , which aims skyward and lifts the heart with it , seems likely to be simultaneously a place for solemn remembrance and a source of delight .", "That 's a paradox , but one we will be only too glad to puzzle over .", "Westchester ."], "summary": ["Editorial praises The Rising , Westchester County 's towering memorial to victims of 9/11 at Kensico Dam Plaza in Valhalla , NY , as testament to virtues of flexibility and cooperation and power of a beautiful idea ."], "publication": "nyt50", "label": [5, 0], "tag": ["New York and Region", "Opinion"]} -{"id": "1788870", "text": ["TO those caught up in the loose-hipped calypso beat wafting over Peekskill this time last year , the One World Arts and Culture Fest at the Paramount Center for the Arts might have seemed little more than well-organized fun .", "But like a good many events masquerading as simple entertainment , the festival , which returns for a second run on Sept . 13 , was the beginning of a larger mission for Jon Yanofsky and his staff , who had watched the region metamorphose and had not liked what they believed was a limited cultural response to the changes .", "`` We saw a demographic that was not really respected in arts programming , '' Mr. Yanofsky , the center 's executive director , said recently in a phone interview .", "`` Looking around , we were seeing a void in the kind of really broad-spectrum festival that would meet the interests and needs of an increasingly diverse regional demographic .", "There were lots of heritage festivals , but no one that put out a really big umbrella and looked at a large spectrum of cultural and artistic expressions . ``", "And so , building upon an idea from a board member , Geneive Brown Metzger , Mr. Yanofsky and his team created what they thought was missing : a festival whose scope was vast enough to entertain the masses in one of Westchester County 's most diverse areas and to provide the Paramount with a means of more accurately defining its potential audience and ultimately connecting with it .", "`` We did n't know what to expect , `` Mr. Yanofsky said .", "`` But one of the best ways to learn about and reach your market is to offer a festival and learn everything for free . ''", "What they got was a great turnout , he said , and events , like an Afro-Puerto Rican dance and music workshop , that were attended by people aged 7 to 65 .", "`` We were able to fund-raise enough last year that we could honor our original vision that the majority of activities be free , '' Mr. Yanofsky said , noting that the festival charges admission only to films .", "`` That proved to be a really important component . ''", "This year 's festival , which has been expanded to five days from four , begins Wednesday at 5:30 p.m. with an awards ceremony and the opening of a visual arts show .", "Two films -- `` The Harder They Come , '' Perry Henzell 's 1972 cult hit starring Jimmy Cliff as a reggae singer-turned-political outlaw amid the brutal exploitation of the Jamaican music scene , and `` Calle 54 , '' Fernando Trueba 's 2000 love song to Latin jazz -- will be shown at 8 p.m. on Thursday and Friday , respectively .", "-LRB- Admission is $ 8 for adults , $ 5 for children . 
-RRB-", "The pianist Arturo O'Farrill , whose father , the Afro-Cuban jazz pioneer Chico , is the subject of an entire segment of `` Calle 54 , '' will introduce that film and answer questions afterward , and even play a few riffs .", "Saturday is Family and Youth Day , a new addition to the festival with seven hours of free performances and workshops in capoeira , the Brazilian martial art .", "African dancing and drumming .", "South American storytelling .", "And flamenco dance and steel drum playing , starting at noon .", "`` We wanted to specifically provide programs to bring kids to so they could experience the very diverse expressions of our region 's very diverse communities , `` Mr. Yanofsky said .", "Last year 's Free Music Day was headlined by the calypso giant the Mighty Sparrow .", "This year 's , from 1 to 7 p.m. on Sunday , features performances by Bakithi Kumalo of South Africa , the Adehye African Dance Group from Ghana , the Ecuadorian altiplano group Runahurco , and samba and capoeira ensembles , before segueing into the day 's top draw , the reggae stalwarts the Wailers .", "`` People have been coming up to me and saying : ' The Wailers in Peekskill for free .", "Are you kidding me .", "' `` Mr. Yanofsky said .", "`` And I tell them , ' Yes , it 's free . '", "Thank our sponsors when you see them . ``", "Paramount Center for the Arts , 1008 Brown Street , Peekskill .", "Information : -LRB- 914 -RRB- 739-2333 .", "Www.paramountcenter.org."], "summary": ["Article on second annual One World Arts and Culture Fest at Paramount Center for the Arts in Peekskill , NY , which is set to open on September 13 .", "Photos ."], "publication": "nyt50", "label": [0], "tag": ["New York and Region"]} -{"id": "1788871", "text": ["THE Jacob Burns Film Center 's annual fund-raising awards dinner and auction is a flashy affair , featuring dignitaries from the film industry and bidders who pool together to try to win big prizes , like a ride in the Fuji blimp .", "At this year 's event , which will celebrate the nonprofit center 's fifth anniversary , the items up for auction include a weeklong vacation in Nantucket and , perhaps the most-sought-after prize , an in-home concert performed by the Tokyo String Quartet .", "Stephen Apkon , executive director of the Burns Center , said that items offered at the event generally go for $ 5,000 and up , and bidding for the quartet usually starts at about $ 10,000 .", "The Tokyo Quartet , widely considered one of the world 's premier chamber music groups , has a long-running connection with Westchester County and the Burns Center , based in Pleasantville .", "The first violinist , Martin Beaver , and the cellist , Clive Greensmith , live in Dobbs Ferry .", "The second violinist , Kikuei Ikeda , lives in Chappaqua .", "When the quartet needed a local rehearsal space a few years ago , Brian Skarstad , a Pleasantville violin maker , offered a room in his downtown workshop .", "The quartet used the space for a couple of years , and town residents would often stop outside the door to listen .", "After the Burns Center opened in 2001 , Mr. Skarstad 's wife , Louise Beach , a composer , asked the quartet if they would be willing to play a house concert for the film center 's first benefit auction , in 2002 .", "They agreed , and found the intimacy of the performance refreshing .", "`` We really love doing these concerts , '' Mr. 
Greensmith said in a telephone interview .", "`` More often than not , the audience is made up of incredibly interesting people , real chamber music lovers .", "The Burns Center attracts a very cultural audience . ``", "The Tokyo Quartet has performed a house concert for every Burns Center fund-raising auction .", "At the start of each year , the quartet sets aside two or three potential concert dates -LRB- the quartet 's regular touring schedule is arranged at least a year in advance -RRB- .", "Mr. Greensmith and Mr. Ikeda are also regulars at the film center , which each year presents more than 450 films from 50 different countries .", "`` It 's a different spread of films than you 'd get at the blockbuster-type places , `` Mr. Greensmith said .", "`` We live near this cultural center of New York City , but it 's important to celebrate the ingenuity of local places like this . ``", "Last year 's house concert was held at the Chappaqua home of Marvin Israelow and Dorian Goldman , classical music fans who had unsuccessfully bid on the Tokyo Quartet in two previous years .", "`` When we were outbid the first time , we were invited to join the performance , '' Mr. Israelow said in a telephone interview .", "`` After that , we became even more enchanted by the possibility of having the Tokyo in our home . ''", "On their third try , Mr. Israelow and Ms. Goldman joined forces , and funds , with six friends and won the Tokyo 's services in a lively bidding session .", "They invited nearly 20 friends for the concert and a pot-luck dinner .", "The quartet performed works by Mozart and Bartok , discussing the pieces before playing .", "At intermission , Mr. Beaver offered to let a teenage violin student in the audience play his Stradivarius -LRB- all four members of the Tokyo play Stradivariuses that were once owned by the virtuoso Niccol\u00f2 Paganini -RRB- .", "`` His parents ' jaws dropped to the floor , but he played it well , `` Mr. Israelow said .", "`` The performers were extraordinarily gracious and generous , and the living room vibrated for weeks and months afterwards with the sounds of the concert . ''", "The Jacob Burns Film Center 's Fifth Anniversary Celebration and Awards Dinner will be held on Saturday , Sept . 16 , at 7:30 p.m. , across from the center , at 364 Manville Road , Pleasantville .", "For information , call -LRB- 914 -RRB- 773-7663 or visit burnsfilmcenter.org."], "summary": ["Article on Jacob Burns Film Center 's annual fund-raising awards dinner and auction in Pleasantville , NY . 
Items for auction include in-home concert performed by Tokyo String Quartet .", "Photo ."], "publication": "nyt50", "label": [0, 1], "tag": ["Movies", "New York and Region"]} -{"id": "1788874", "text": ["WHEN affluent suburbanites travel , they can be quite fussy , demanding that airplanes show up and take off like clockwork and that anything less is -- to use a word the fastidious favor -- unacceptable .", "But when they 're lounging in their backyards , they want the skies to be jet-free , or at least noise-free .", "If jets must be seen , they should not be heard .", "The problem in hubs like New York is that the two desires are fast becoming irreconcilable .", "American airlines handled 739 million passengers last year .", "By 2015 that number is expected to balloon to 1 billion .", "No one wants an airport sprouting next door , with all the attendant whines , drones and roars , so the Federal Aviation Administration has been trying to squeeze in more flights by rejiggering the way planes use air space in the corridor between Philadelphia and Connecticut .", "But the redesign has set off a fury among the tens of thousands of Westchester and Connecticut residents living around Westchester County Airport .", "It turns out that flight patterns around the airport may have to be tinkered with because of the spillover from changes in patterns at LaGuardia Airport , meaning more whines and roars for those living below .", "Until recently , the issue of rerouting seldom flashed on the public 's radar , mostly because the F.A.A. had not always been forthcoming .", "In February , Janet Lockton , president of the Greenwich-based Air Conservation Trust , went to an F.A.A. slide show in Stamford and came away reassured that Westchester 's airport would be untouched , and that LaGuardia flights would spend more time over the uncomplaining waters of Long Island Sound .", "But then she ordered three CD 's containing 1,226 pages of the preliminary environmental statement and discovered in the nearly impenetrable appendixes that planes taking off from the county airport would indeed loop over Westchester and Connecticut for longer periods .", "She alerted officials across the state border , and in June , County Executive Andrew J . Spano issued a letter to the F.A.A. , based on an environmental consultant 's study .", "Mr. Spano 's letter rebuffed the F.A.A. 's preferred proposal for flight patterns because the plan would have `` a significant impact '' on places like Rye Brook , Pleasantville and Briarcliff .", "Some aircraft , Mr. Spano bristled , would `` incredibly '' be rerouted over the Indian Point nuclear plant .", "How unhinging the noise would be has been disputed , with the F.A.A. minimizing any spikes in volume and the consultant , Harris Miller Miller & Hanson Inc . saying the changes would probably be `` highly noticeable . ''", "The county airport already has limits on noise enshrined in county law .", "In any half-hour-period , no more than four commercial planes can land or take off at the airport , and those planes together can carry no more than 240 passengers .", "Flights are practically banned between 11 p.m. and 6 a.m.", "The airport has intentionally been kept a backwater , with such anomalies as propeller-plane flights to Toronto .", "`` There are people who persist in talking about how Westchester could , should or would expand , '' said Robert Funicello , director for environmental projects at Westchester 's transportation department .", "`` It is n't going to happen . 
``", "He and Ms. Lockton argue that planners need to think about alternatives like high-speed trains .", "The Port Authority thinks the area needs a fourth full-scale airport to go with Kennedy , Newark and La Guardia , probably at Stewart Airport outside of Newburgh , N.Y.", "Other regional planners think a day of reckoning can be put off if everybody sacrifices , including Westchester Airport and its neighbors .", "It 's hard to argue that officials should not do all they can to block noise in the suburbs , where people pay a premium for tranquillity .", "But advocates for quiet skies may have to start pondering a day not too far distant when the major airports ca n't handle any more planes , and travelers ca n't get flights they need .", "Changes will have to be made that would raise the number of flights and ratchet up the noise .", "We live in an era , after all , when flying on a plane has become almost as routine as riding a subway .", "Many of the people who can afford spreads in Westchester and Connecticut earn much of their living traveling for business , which could slow down if air travel gridlocks .", "`` We 're going to run out of room at airports or room in the skies unless people get the big picture view , `` said Kevin Mitchell , chairman of the Business Travel Coalition , a group that represents corporate buyers of travel services .", "`` The upshot is much higher business fares and even higher leisure fares . ''", "In the past half century , airplane travel has changed from an indulgence of the well-to-do to a convenience that most people can enjoy .", "If airports stop growing , travel could become far less democratic , and just visiting an aunt in Missouri may become prohibitive .", "That 's a trade-off well worth thinking about by those carrying on the worthy fight against noise .", "E-mail : joeberg@nytimes.com ."], "summary": ["Joseph Berger column on dispute between Federal Aviation Administration and tens of thousands of Westchester County and Connecticut residents over FAA plan to alter flight patterns at Westchester County Airport , which has potential to increase noise .", "Photo ."], "publication": "nyt50", "label": [7], "tag": ["New York and Region"]} -{"id": "1788875", "text": ["WHEN Natalie Wells bought a home in Englewood , N.J. , a year ago , she was unaware that her American pit bull terrier was illegal to own in the city .", "Shortly after moving in , she was told by one of her daughters about a city law that banned the breed , commonly called pit bulls , along with several similar breeds and Rottweilers .", "Under the 1999 law , even this year 's best-in-show winner at the prestigious Westminster Kennel Club Dog Show , Rufus , a colored bull terrier from Holmdel , N.J. , would be banned in Englewood .", "`` I pretty much knew in my gut it was n't right , `` Ms. Wells said .", "In July , Ms. Wells filed a challenge to the law in Bergen County Superior Court along with Mia Rodriguez , a neighbor who also owns a pit bull , and the American Dog Owner 's Association of Castleton , N.Y.", "Last month , Superior Court Judge Jonathan N . Harris agreed with Ms. 
Wells and ordered the city to stop enforcing the law because it was in conflict with a New Jersey statute that prohibits restricting dogs by breed .", "`` Cities do n't have the right to make laws that violate state law , `` said Flora Edwards , the lawyer who represented the plaintiffs .", "`` If the legal drinking age is 21 under state law , the City of Englewood or Montclair ca n't say it 's 25 or 18 . ``", "According to a Centers for Disease Control study , the pit bull breed was responsible for more dog-bite fatalities than any other breed from 1979 to 1998 , the latest year for which figures were available .", "The breed was responsible for 66 of 238 dog-bite fatalities during that period .", "Rottweilers were next , with 39 .", "The New Jersey Vicious and Potentially Dangerous Dog Act sets out criteria for dealing with aggressive dogs , but prohibits breed discrimination .", "New York has a similar statute .", "Connecticut 's law does not ban breed discrimination .", "Despite such laws , some communities still have restrictions on specific breeds .", "They range from outright bans to requiring property insurance coverage and the use of shorter leashes and muzzles in public .", "Tanya Ford , village clerk in Hempstead , N.Y. , said she was aware of no challenges to its law , which categorizes American pit bull terriers and several related breeds as vicious dogs , requiring that they be muzzled when walked and kept on a chain with a minimum strength of 300 pounds and not exceeding three feet in length .", "Owners must also have liability insurance of $ 100,000 .", "Mahlon Goer , a pit bull owner who tracks legislation in New York for the American Dog Owner 's Association , said the state still allowed insurance companies to drop customers or deny property insurance to prospective customers based on the breed of dog they own .", "Underwriting policies vary , according to the group , but beyond pit bulls and related breeds , the list includes Siberian huskies , Great Danes , German shepherds , St . 
Bernards and Dalmatians .", "Opponents of breed-specific laws say it is difficult to know how many communities have such laws because keeping tabs at the local level can be difficult unless laws are highly publicized .", "According to the American Kennel Club , last year it tracked 105 communities around the nation where breed-specific legislation was pending , enacted or defeated .", "The group had tracked another 76 through July .", "Among the municipalities in the region that have breed-specific laws are Larchmont , Sands Point and Hempstead in New York and Millville and Atlantic City in New Jersey .", "Numerous communities across the United States have such laws .", "One of the most controversial is in Denver , where authorities have euthanized more than 1,000 pit bulls since the reinstatement of a ban on the breed in May 2005 .", "The city 's animal control division had suspended enforcement of the ban in 2004 after the governor signed a bill restricting local governments from outlawing certain breeds .", "But the city successfully sued , arguing that the bill violated its home-rule authority .", "In Englewood , Douglas Bern , a lawyer who served on the City Council when the law was passed , said the council was responding to incidents in a public park where the dogs were being used to intimidate people .", "He said the police had also felt threatened by pit bulls when responding to a call at a home .", "The city argued that the municipal law complemented state statute , which was designed to address situations where `` existing local laws inadequately address the problem '' of aggressive dogs .", "`` The city of Englewood 's ordinance in this regard actually furthers and is consistent with the legislative intent , which is to address a void where local governments have not addressed the area of vicious or potentially dangerous dogs , `` the city said in a court brief .", "Under the ordinance , bull terriers , Staffordshire bull terriers , American pit bull terriers , American Staffordshire terriers , Rottweilers or `` any dogs of mixed breed which has the appearance or characteristics of being predominantly of the breeds , '' were banned from the city .", "Some summonses had been issued under the law , but city officials did not know how many .", "`` It 's like there 's a stigma for having one of these kinds of dog , `` said Ms. Rodriguez , who owns an ailing 8-year-old pit bull named Cyrus .", "The Englewood City Council will discuss the law at its Sept . 19 meeting , said Scott Reddin , the council president .", "He said he did not expect the council to challenge the court 's decision .", "`` We were profiling certain breeds and that was found to be unconstitutional , '' he said .", "`` I do n't think the council will have any problem rescinding that . ``", "Numerous national dog owner and veterinarian associations have come out against breed-specific laws , saying they are unfair and do not address the problem of aggressive and dangerous dogs .", "`` As we like to say , punish the deed , not the breed , '' said Lisa Peterson , a spokeswoman for the American Kennel Club .", "`` We think breed-specific laws are unfair to responsible dog owners . ''", "Barbara Bishop , who owns Rufus , the top dog at the Westminster show , said she was trying to use the dog 's success to highlight the unfairness of breed-specific bans .", "`` We want to let people know that every dog has teeth and every dog can bite , whether it 's a Chihuahua or a bull mastiff , `` Ms. 
Bishop said .", "`` Every dog will be a product of what it 's brought up to do . ``", "Ms. Bishop attributed much of the image problem of the pit bull breeds to people who train them to be vicious , including drug dealers who use them as guard dogs .", "`` We have Rufus , who 's the top winning colored terrier of all time , and we still have people stop in the street and say , ` There 's a pit bull , ' `` she said .", "For Ms. Wells , the law seemed even more absurd because her 12-year-old pit bull , Sentry , has cataracts and has had cancer , heart surgery and a hysterectomy .", "`` She is a member of the family , '' said Ms. Wells , who has two daughters , ages 34 and 32 .", "`` My kids tease me all the time and say she 's my favorite daughter . `` ."], "summary": ["Article on legal challenges pit bull owners have been making against local laws in New York City metropolitan area that ban or restrict certain dog breeds .", "Opponents of breed-specific laws say it is difficult to know how many communities have such laws .", "Numerous national dog owner and veterinarian associations oppose breed-specific laws , saying they are unfair and do not address problem of aggressive and dangerous dogs .", "Photos ."], "publication": "nyt50", "label": [39, 20], "tag": ["New York and Region"]} -{"id": "1788876", "text": ["FOR failing to meet performance standards , the Clara T . O'Connell elementary school in Bristol , Conn . , spent three years on the `` in need of improvement '' list under the federal No Child Left Behind program .", "When a new list came out last month , Connecticut had 290 elementary and middle schools on it , but the O'Connell School was not among them .", "It had achieved what no other school in the state had managed under the four-year-old program : It had worked itself off the list .", "`` For three years , the headline was that we were on the list , '' said Michael F . Audette , O'Connell 's principal .", "`` Human nature being what it is , people would ask the teachers , ' What school do you teach at .", "` And when the teachers would say , ' O'Connell , ' they 'd say , ` Is n't that the school that 's on the list .", "` And the teachers would say , ' Yeah , but we 're doing a lot of good things . '", "But nobody sticks around for the ` yeah , but . '", "Now it 's nice to have a different headline , and now we can say , ` Yes , we 're that school . '", "`` Henry Garcia , a spokesman for the State Department of Education , said O'Connell 's achievement was a testament to its hard work .", "`` It takes schools that are in need of improvement time to see the progress once they develop curriculum and other strategies that improve student achievement , '' he said .", "The number of Connecticut schools failing to meet what the program calls adequate yearly progress doubled in the 2005-6 academic year , up from 145 schools the year before .", "The results were reached using scores from the Connecticut Mastery Tests , then figuring them into a host of categories and subcategories , including the number of children living in poverty who attend a school .", "At the O'Connell School 80 percent of the students are poor , Mr. 
Audette said .", "The tests require that at least 74 percent of students demonstrate proficiency in math , 68 percent in reading and 70 percent in writing .", "In the 2002-3 school year , O'Connell passed all categories except reading , getting a score of 44 percent .", "It also failed to meet the reading goal in 2003-4 , but reached it the next year .", "In 2005-6 , it scored 61 percent in reading .", "That was not high enough to meet the No Child Left Behind requirements , but federal officials put O'Connell in the `` safe harbor '' category , for schools that have significantly improved , and removed it from the `` in need of improvement '' list .", "To raise the reading scores , Mr. Audette said , he and his staff reviewed the pupils ' reading data for weak areas .", "The Mastery Tests require that pupils read passages and answer questions about what they have read .", "To prepare , the children were asked to answer a reading question each day until they showed proficiency in expressing their comprehension .", "Mr. Audette also hired additional reading support staff members and trained teaching assistants , assigning them to particular grades , where they placed the children into small groups and gave them a second instructional reading period each day .", "Mr. Audette signed on with the Teachers College Reading and Writing Project at Columbia University .", "The Bristol School District paid for consultants from Columbia to teach faculty members at O'Connell .", "The effort paid off , especially for the third graders .", "They scored 90 percent in writing proficiency in the 2005-6 Mastery Tests .", "`` If I was to pinpoint exactly what we did , I would say we really looked at our reading instruction , '' Mr. Audette said .", "`` It 's kind of common sense .", "If you want to be a good pianist , you practice the piano . ``", "EDUCATION ."], "summary": ["Article on Clara T O'Connell elementary school in Bristol , Conn , which spent three years on ` in need of improvement ' list under No Child Left Behind program and has managed to work itself off list .", "Photo ."], "publication": "nyt50", "label": [0], "tag": ["Education", "New York and Region"]} -{"id": "1788877", "text": ["IN the 50 years he has lived in Montclair , N.J. , Rob Bianco has seen his share of monsoon-like downpours , blizzards , ice storms , even the remnants of hurricanes .", "But Mr. Bianco , the superintendent of public works for Montclair , had never seen anything like the storm that ravaged the leafy landscape of his hometown on July 18 .", "`` We literally had trees and telephone poles , some as high as 20 to 30 feet in the air , that were sheared and cut off , '' he said .", "`` It was a treetop tornado , meaning it never hit the ground , but it still caused a great amount of destruction . ''", "The storm , which hit the northeast corner of Verona , N.J. , before blowing with a vengeance -- about a mile wide -- through a swath of Montclair for roughly a half-hour , destroyed about 200 trees on public property in Montclair , an Essex County township of about six square miles .", "The most heavily damaged areas , Mr. Bianco said , were Brookdale Park , which covers 121 acres in Montclair and Bloomfield , and the township 's Edgemont Park .", "`` We had some cars smashed and a lot of people running for cover , '' he said .", "`` It was a miracle that no one got hurt . ''", "But what about all of those damaged oak and pine trees , some of which Mr. 
Bianco said were 250 years old .", "`` Cleaning it all up was quite a daunting task , '' said Matthew A . Vastano , the executive vice president of Nature 's Choice Corporation , a yard-waste recycling company in Union , N.J. , hired by Montclair to clear the debris caused by the storm .", "`` Montclair is not your normal town where vegetative waste is concerned , '' Mr. Vastano said .", "`` The town , which has huge , huge trees , is very environmentally conscious , and it wants to keep all the trees it can . ''", "The trees it could not keep were hauled away by Nature 's Choice to a temporary storage site .", "Any piece of wood 16 to 18 inches long was put onto chipper trucks and into machines that turned it into chips .", "Anything larger was cut into logs .", "Some 25 truckloads of those logs were placed into 40-foot containers on trucks -- at a cost of $ 350 per container -- for eventual mulching .", "In the end , about 600 cubic yards of mulch and topsoil , or 300 tons , were produced -- enough to cover about 100,000 square feet , according to Mr. Vastano .", "Mr. Bianco said that Nature 's Choice would give Montclair some of the mulch or topsoil for free if the town needed it for a special project , but that the company was free to sell it to landscapers and other businesses .", "`` We are a business , not a charity , '' Mr. Vastano said .", "`` We 'll take most of that mulch and turn it into hardwood mulch or dye it either black or a shade of red before selling it . ``", "Dianne Marus , the director of Montclair 's Department of Finance , said that the cost of the storm cleanup came to $ 366,950 but that the price , tallied by the Department of Community Services , did not include overtime costs for the Police -LRB- $ 74,983 -RRB- and Fire Departments -LRB- $ 4,650 -RRB- .", "All told , Montclair has spent $ 446,583 on storm-related services , and the job is not yet finished .", "`` There are still a number of stumps to be removed and lots of re-planting to do , '' Mr. Bianco said .", "`` By the time all is said and done , this entire project is going to cost us more money and continue for at least another month . ''", "STORM CLEANUP ."], "summary": ["Article on work of Nature 's Choice Corp , yard-waste recycling company hired by Montclair , NJ , to clear debris caused by July 18 storm that destroyed about 200 trees on public property .", "Company turned trees into about 600 cubic yards of mulch and topsoil .", "Photo ."], "publication": "nyt50", "label": [9, 16], "tag": ["New York and Region"]} -{"id": "1788878", "text": ["DEEP into suburbia , on a sound barrier that runs along the Taconic State Parkway here , a graffiti artist announces his presence with a single word painted in yellow and black : `` Me . ''", "Officials said that graffiti had reached new heights this summer .", "And in a town that bills itself as a retreat from more urban locales , politicians and police officers are taking the problem seriously .", "`` Whether you grew up here all your life , or whether you moved here from the Bronx or Yonkers or Long Island , you do n't want to see that , `` said Linda G . Cooper , the town supervisor .", "`` And so we 're trying to take a very firm position . ``", "In June , the Yorktown police began graffiti patrols as a deterrent .", "They also began photographing graffiti they found to create a catalog of the work of local vandals for use in investigations , Lt . 
Donald Schuck said .", "Since July , Lieutenant Schuck said , the police have arrested nine boys on graffiti-related charges .", "The most recent came on Aug . 28 , with the arrest of a 14-year-old from Mohegan Lake , a hamlet of Yorktown .", "The police said he had sprayed a wall at a town-owned sports club , causing about $ 400 in damage .", "The boy , charged with making graffiti and possession of graffiti instruments , both misdemeanors , was released to his mother and was scheduled to appear on Friday in Westchester County Family Court in White Plains .", "The town , which has seen stop signs , park buildings , businesses and even a police radar unit defaced this summer , is also considering new legislation .", "One proposed law would require vandals to pay restitution .", "Another would mandate that residents who discover graffiti on their property clean it within 72 hours .", "Ms. Cooper said that rapid removal discouraged vandals .", "Town officials and youth advocates said there were a number of reasons for the surge in graffiti .", "Lieutenant Schuck said increased access to the tools of graffiti had played a role .", "Young people , previously stymied by a county law forbidding the sale of spray paint to anyone under 18 , have begun ordering the paint over the Internet , he said .", "Joan Valenstein , chairwoman of the advisory board for the Yorktown Teen Center , a branch of the Boys and Girls Club of Northern Westchester , said the increase might be the byproduct of boredom in a town that she said did not provide enough youth activities .", "Ms. Cooper said some of the graffiti included gang insignia previously seen in the southern , more urban part of the county .", "Whatever the source of the graffiti , the town seems determined to stamp it out .", "But out on the Taconic State Parkway , high above the cars rushing by , defiance is etched in yellow and black .", "VANDALISM ."], "summary": ["Officials in Yorktown , NY , say graffiti has reached new heights this summer .", "Police have begun graffiti patrols as deterrent and have arrested nine boys on graffiti-related charges since July .", "Photo ."], "publication": "nyt50", "label": [7, 1, 5], "tag": ["New York and Region"]}
Bishop said .", "`` Every dog will be a product of what it 's brought up to do . ``", "Ms. Bishop attributed much of the image problem of the pit bull breeds to people who train them to be vicious , including drug dealers who use them as guard dogs .", "`` We have Rufus , who 's the top winning colored terrier of all time , and we still have people stop in the street and say , ` There 's a pit bull , ' `` she said .", "For Ms. Wells , the law seemed even more absurd because her 12-year-old pit bull , Sentry , has cataracts and has had cancer , heart surgery and a hysterectomy .", "`` She is a member of the family , '' said Ms. Wells , who has two daughters , ages 34 and 32 .", "`` My kids tease me all the time and say she 's my favorite daughter . `` ."], "summary": ["Article on legal challenges pit bull owners have been making against local laws in New York City metropolitan area that ban or restrict certain dog breeds .", "Opponents of breed-specific laws say it is difficult to know how many communities have such laws .", "Numerous national dog owner and veterinarian associations oppose breed-specific laws , saying they are unfair and do not address problem of aggressive and dangerous dogs .", "Photos ."], "publication": "nyt50", "label": [39, 20], "tag": ["New York and Region"]} -{"id": "1788881", "text": ["FOR failing to meet performance standards , the Clara T . O'Connell elementary school in Bristol , Conn . , spent three years on the `` in need of improvement '' list under the federal No Child Left Behind program .", "When a new list came out last month , Connecticut had 290 elementary and middle schools on it , but the O'Connell School was not among them .", "It had achieved what no other school in the state had managed under the four-year-old program : It had worked itself off the list .", "`` For three years , the headline was that we were on the list , '' said Michael F . Audette , O'Connell 's principal .", "`` Human nature being what it is , people would ask the teachers , ' What school do you teach at .", "` And when the teachers would say , ' O'Connell , ' they 'd say , ` Is n't that the school that 's on the list .", "` And the teachers would say , ' Yeah , but we 're doing a lot of good things . '", "But nobody sticks around for the ` yeah , but . '", "Now it 's nice to have a different headline , and now we can say , ` Yes , we 're that school . '", "`` Henry Garcia , a spokesman for the State Department of Education , said O'Connell 's achievement was a testament to its hard work .", "`` It takes schools that are in need of improvement time to see the progress once they develop curriculum and other strategies that improve student achievement , '' he said .", "The number of Connecticut schools failing to meet what the program calls adequate yearly progress doubled in the 2005-6 academic year , up from 145 schools the year before .", "The results were reached using scores from the Connecticut Mastery Tests , then figuring them into a host of categories and subcategories , including the number of children living in poverty who attend a school .", "At the O'Connell School 80 percent of the students are poor , Mr.
Audette said .", "The tests require that at least 74 percent of students demonstrate proficiency in math , 68 percent in reading and 70 percent in writing .", "In the 2002-3 school year , O'Connell passed all categories except reading , getting a score of 44 percent .", "It also failed to meet the reading goal in 2003-4 , but reached it the next year .", "In 2005-6 , it scored 61 percent in reading .", "That was not high enough to meet the No Child Left Behind requirements , but federal officials put O'Connell in the `` safe harbor '' category , for schools that have significantly improved , and removed it from the `` in need of improvement '' list .", "To raise the reading scores , Mr. Audette said , he and his staff reviewed the pupils ' reading data for weak areas .", "The Mastery Tests require that pupils read passages and answer questions about what they have read .", "To prepare , the children were asked to answer a reading question each day until they showed proficiency in expressing their comprehension .", "Mr. Audette also hired additional reading support staff members and trained teaching assistants , assigning them to particular grades , where they placed the children into small groups and gave them a second instructional reading period each day .", "Mr. Audette signed on with the Teachers College Reading and Writing Project at Columbia University .", "The Bristol School District paid for consultants from Columbia to teach faculty members at O'Connell .", "The effort paid off , especially for the third graders .", "They scored 90 percent in writing proficiency in the 2005-6 Mastery Tests .", "`` If I was to pinpoint exactly what we did , I would say we really looked at our reading instruction , '' Mr. Audette said .", "`` It 's kind of common sense .", "If you want to be a good pianist , you practice the piano . 
``", "EDUCATION ."], "summary": ["Article on Clara T O'Connell elementary school in Bristol , Conn , which spent three years on ` in need of improvement ' list under No Child Left Behind program and has managed to work itself off list .", "Photo ."], "publication": "nyt50", "label": [0], "tag": ["Education", "New York and Region"]} -{"id": "1788882", "text": ["IT may have left no physical damage in its wake , but the recent communal storm in this oceanfront city over the future of its beaches has realigned the political and environmental landscape .", "Despite fears about the city 's vulnerability to a major hurricane , the five-member City Council , three Democrats and two Republicans , voted unanimously in May to reject a $ 98.5 million beach preservation project by the Army Corps of Engineers that was designed to protect Long Beach from ocean flooding .", "The plan would have placed a berm of dredged sand along the beach 10 feet high , with a 5-foot dune on top , from the western end of Long Beach to Point Lookout , more than six miles to the east .", "Point Lookout agreed to a separate plan after the Long Beach project was rejected .", "A major opponent of the corps ' plan was an environmental and surfer-advocacy group , the Surfrider Foundation , whose members said the project would create dangerous riptides and harm the look of the beach , with no guarantee that the city would be better protected , as the corps and the proponents of the plan claimed .", "The group held meetings to get its message to the public and the council alike , and produced testimony by a coastal engineer and several representatives from local communities whose beaches had undergone similar projects .", "All testified against the corps ' proposals for Long Beach .", "Jeff Kupferman , the chairman of Surfrider 's Long Beach Action Committee and a 45-year city resident , said that while rejection of the plan was a `` major victory '' for Surfrider , surfing was far from the only issue .", "`` We had concerns about swimming safety , as well as surfing , about fishing , kayaking , aesthetics -- any use of the beach , '' he said .", "James P . Hennessy , a Republican council member , agreed .", "`` It was never just about surfing , '' he said .", "`` The council does n't agree about much , but it did agree that the beach fill part of the project was wrong . ``", "What annoyed Mr. Kupferman was that Surfrider was portrayed negatively by those who favored the plan .", "`` Their attitude was we were , ' Yo , just a bunch of surfer dudes out to get a wave , ' '' he said .", "`` And they used that as the hook to try and discredit us .", "The fact that we prevailed has sent a lot of ripples out into this community . ``", "Alison Johnson , a Long Beach resident and vice chairwoman of Surfrider 's New York City chapter , which worked closely with the Central Long Island chapter in opposing the plan , said that the decision had ramifications beyond Long Beach .", "`` It will make the powers that be look at storm protection on the East Coast in a different way , '' she said , `` which is the biggest success you can ask from any project . ''", "Assemblyman Harvey Weisenberg , a lifelong Long Beach resident and a vocal supporter of the Corps of Engineers ' project , was less sanguine about the outcome .", "`` How did people get elected to office that are so ignorant .", "`` he said of the City Council .", "`` I just pray hard and hope to God we do n't get hit by anything . 
``", "Even with the beach issue decided , the officials ' alliance with activists may continue .", "Mr. Hennessy and the other Republican council member , Thomas R . Sofield Jr . , have proposed an alternative storm-management plan , which includes working with advisory groups like Surfrider , and the city has asked independent coastal engineers for ways to address beach protection .", "Mr. Hennessy said he still had hopes of working with the Corps of Engineers should it agree to return to Long Beach , but he is adamant about his vote to reject the project .", "`` I can count on the fingers of one hand the number of people who came up to me and said we 'd made a mistake , `` he said .", "STORM PROTECTION ."], "summary": ["Article on controversy over rejection by City Council in Long Beach , NY , of beach preservation project by Army Corps of Engineers designed to protect beach from ocean flooding .", "Surfrider Foundation contended plan to build 15-foot-high berm and dune from Long Beach to Point Lookout would create dangerous riptides and harm look of beach and would not protect beach .", "Photos ."], "publication": "nyt50", "label": [1, 4], "tag": ["New York and Region"]} -{"id": "1788883", "text": ["IN the 50 years he has lived in Montclair , N.J. , Rob Bianco has seen his share of monsoon-like downpours , blizzards , ice storms , even the remnants of hurricanes .", "But Mr. Bianco , the superintendent of public works for Montclair , had never seen anything like the storm that ravaged the leafy landscape of his hometown on July 18 .", "`` We literally had trees and telephone poles , some as high as 20 to 30 feet in the air , that were sheared and cut off , '' he said .", "`` It was a treetop tornado , meaning it never hit the ground , but it still caused a great amount of destruction . ''", "The storm , which hit the northeast corner of Verona , N.J. , before blowing with a vengeance -- about a mile wide -- through a swath of Montclair for roughly a half-hour , destroyed about 200 trees on public property in Montclair , an Essex County township of about six square miles .", "The most heavily damaged areas , Mr. Bianco said , were Brookdale Park , which covers 121 acres in Montclair and Bloomfield , and the township 's Edgemont Park .", "`` We had some cars smashed and a lot of people running for cover , '' he said .", "`` It was a miracle that no one got hurt . ''", "But what about all of those damaged oak and pine trees , some of which Mr. Bianco said were 250 years old .", "`` Cleaning it all up was quite a daunting task , '' said Matthew A . Vastano , the executive vice president of Nature 's Choice Corporation , a yard-waste recycling company in Union , N.J. , hired by Montclair to clear the debris caused by the storm .", "`` Montclair is not your normal town where vegetative waste is concerned , '' Mr. Vastano said .", "`` The town , which has huge , huge trees , is very environmentally conscious , and it wants to keep all the trees it can .
''", "The trees it could not keep were hauled away by Nature 's Choice to a temporary storage site .", "Any piece of wood 16 to 18 inches long was put onto chipper trucks and into machines that turned it into chips .", "Anything larger was cut into logs .", "Some 25 truckloads of those logs were placed into 40-foot containers on trucks -- at a cost of $ 350 per container -- for eventual mulching .", "In the end , about 600 cubic yards of mulch and topsoil , or 300 tons , were produced -- enough to cover about 100,000 square feet , according to Mr. Vastano .", "Mr. Bianco said that Nature 's Choice would give Montclair some of the mulch or topsoil for free if the town needed it for a special project , but that the company was free to sell it to landscapers and other businesses .", "`` We are a business , not a charity , '' Mr. Vastano said .", "`` We 'll take most of that mulch and turn it into hardwood mulch or dye it either black or a shade of red before selling it . ``", "Dianne Marus , the director of Montclair 's Department of Finance , said that the cost of the storm cleanup came to $ 366,950 but that the price , tallied by the Department of Community Services , did not include overtime costs for the Police -LRB- $ 74,983 -RRB- and Fire Departments -LRB- $ 4,650 -RRB- .", "All told , Montclair has spent $ 446,583 on storm-related services , and the job is not yet finished .", "`` There are still a number of stumps to be removed and lots of re-planting to do , '' Mr. Bianco said .", "`` By the time all is said and done , this entire project is going to cost us more money and continue for at least another month . ''", "STORM CLEANUP ."], "summary": ["Article on work of Nature 's Choice Corp , yard-waste recycling company hired by Montclair , NJ , to clear debris caused by July 18 storm that destroyed about 200 trees on public property .", "Company turned trees into about 600 cubic yards of mulch and topsoil .", "Photo ."], "publication": "nyt50", "label": [9, 16], "tag": ["New York and Region"]} -{"id": "1788884", "text": ["DEEP into suburbia , on a sound barrier that runs along the Taconic State Parkway here , a graffiti artist announces his presence with a single word painted in yellow and black : `` Me . ''", "Officials said that graffiti had reached new heights this summer .", "And in a town that bills itself as a retreat from more urban locales , politicians and police officers are taking the problem seriously .", "`` Whether you grew up here all your life , or whether you moved here from the Bronx or Yonkers or Long Island , you do n't want to see that , `` said Linda G . Cooper , the town supervisor .", "`` And so we 're trying to take a very firm position . ``", "In June , the Yorktown police began graffiti patrols as a deterrent .", "They also began photographing graffiti they found to create a catalog of the work of local vandals for use in investigations , Lt . Donald Schuck said .", "Since July , Lieutenant Schuck said , the police have arrested nine boys on graffiti-related charges .", "The most recent came on Aug . 
28 , with the arrest of a 14-year-old from Mohegan Lake , a hamlet of Yorktown .", "The police said he had sprayed a wall at a town-owned sports club , causing about $ 400 in damage .", "The boy , charged with making graffiti and possession of graffiti instruments , both misdemeanors , was released to his mother and was scheduled to appear on Friday in Westchester County Family Court in White Plains .", "The town , which has seen stop signs , park buildings , businesses and even a police radar unit defaced this summer , is also considering new legislation .", "One proposed law would require vandals to pay restitution .", "Another would mandate that residents who discover graffiti on their property clean it within 72 hours .", "Ms. Cooper said that rapid removal discouraged vandals .", "Town officials and youth advocates said there were a number of reasons for the surge in graffiti .", "Lieutenant Schuck said increased access to the tools of graffiti had played a role .", "Young people , previously stymied by a county law forbidding the sale of spray paint to anyone under 18 , have begun ordering the paint over the Internet , he said .", "Joan Valenstein , chairwoman of the advisory board for the Yorktown Teen Center , a branch of the Boys and Girls Club of Northern Westchester , said the increase might be the byproduct of boredom in a town that she said did not provide enough youth activities .", "Ms. Cooper said some of the graffiti included gang insignia previously seen in the southern , more urban part of the county .", "Whatever the source of the graffiti , the town seems determined to stamp it out .", "But out on the Taconic State Parkway , high above the cars rushing by , defiance is etched in yellow and black .", "VANDALISM ."], "summary": ["Officials in Yorktown , NY , say graffiti has reached new heights this summer .", "Police have begun graffiti patrols as deterrent and have arrested nine boys on graffiti-related charges since July .", "Photo ."], "publication": "nyt50", "label": [7, 1, 5], "tag": ["New York and Region"]} -{"id": "1788885", "text": ["HE sits in his wheelchair as the family rushes around him .", "He can not move much , or say more than hello .", "He can not participate in the summer activities that everyone pursues with great vigor day after day .", "If it 's warm enough , he goes out onto the deck and snoozes in the sun with the dogs .", "Unlike them , however , he does n't jump up and make excited noises when people come .", "At most , he slowly turns his head and smiles .", "Everyone speaks to him politely , but not for long .", "What 's the point .", "He ca n't say more than a couple of words , and it 's hard to tell how much he understands .", "He is my stepfather , Peter , an 88-year-old man who in the last decade has been transformed from a lively and dynamic person into not much more than a body occupying space .", "He has post-polio syndrome , a condition that seeps the strength from his upper body as steadily as it weakened his legs when he was a teenager .", "A couple of strokes have further debilitated him .", "As my son , Asher , said to my mother one day , it 's as if he 's hardly a person anymore .", "And yet this is n't how Asher , 14 , behaves toward him .", "He constantly monitors Peter 's feet to see if they 've slipped off the footrests of his wheelchair .", "He always asks if Peter wants something to drink .", "His recognition of the full extent of what it means to be a person goes beyond his frustration at Peter 's limitations .", "Asher is 
concerned with Peter 's comfort , his feeling of inclusion .", "Peter 's situation brings out Asher 's own humanity .", "Peter is certainly a person to my mother , Addy , though she has no illusions about his abilities .", "He is her third husband , the one who was finally her friend .", "She does what she can to make him comfortable and to replicate his old habits .", "Since his only real pleasure is food , she makes him good meals for lunch and dinner .", "At night they listen to Amy Goodman on NPR and then watch Chris Matthews and a couple of episodes of `` Seinfeld '' or `` Curb Your Enthusiasm . ''", "On Tuesdays , Peter 's longtime men 's lunch group comes over to eat with him and discuss books and politics .", "Peter does n't participate , but he enjoys the routine .", "Last summer he could still join them at the local restaurant .", "He would motor up the street in his Jazzy wheelchair with an orange pennant waving above his head to warn cars away .", "He is far from being able to do anything like that now .", "Peter needs to be cared for at the most basic custodial level .", "When my friend Anne visited , her 9-year-old son , Nick , was interested in what this entailed .", "Over the course of a five-day stay , Nick asked many questions of Stacey , the woman who comes in to get Peter out of bed in the morning -- the very practical questions that most adults prefer not to think about .", "Several times Stacey saw Nick looking in the window when it was changing time .", "He was n't fazed by what he saw .", "He accepted Peter 's condition and presence in the house as natural .", "He was right about that .", "My mother and Peter live on the lip of a harbor in Maine .", "All summer , family members passed through , usually for a week or so .", "I stayed the longest -- six weeks .", "On some days there were enough people staying to fulfill my old fantasy of a big house full of people , bursting with robust togetherness .", "This was a new phenomenon here .", "For many years we were only welcome to stay for a short time .", "The stepparents had limited tolerance for each other 's children , especially the noisy grandchildren .", "I often rented nearby to make visits to my mother less stressful .", "Other sons and daughters did the same .", "We rarely overlapped or had the sense of a beloved summer house , full of traditions passed down through generations .", "We each had a private relationship with Maine , and with Peter and my mother .", "But an unexpected side effect of Peter 's deterioration has been a lessening of the feeling that anyone beyond my mother and stepfather creates a crowd .", "Now Peter seems to enjoy the bustle that my mother used to believe was an imposition on him .", "He is no longer an aging intellectual who requires quiet for reading and writing .", "The grandchildren are older , and he is younger , babylike .", "After breakfast , he sleeps for a couple of hours in the kitchen , no matter the amount of dish washing or screen-door banging .", "So family life swirled around him this summer .", "We spent the kind of easy time together that I like best , quantity rather than quality .", "Just hanging out .", "Siblings , nieces and nephews trooped through with significant others in tow .", "They each had a relationship with Peter while they were there .", "Some spent time talking to him even if he could n't reply .", "Others made sure he was comfortable at the table during meals .", "Though it was easy to forget he was in the room , everyone was delighted when he broke 
into a conversation with a responsive remark .", "The old Peter ! It was good to see him again , if only for a moment .", "Starting the last week of July , my mother began to say fall was in the air .", "I bridled against this , though I knew what she meant .", "I felt it too , a change in the light from white to yellow , a softening of the wind , a resignation of the leaves on certain trees .", "But I did n't want to skip ahead , so I pretended not to notice .", "It 's summer , I insisted .", "This is what summer is like in Maine .", "It is tempting to make this whisper of fall a metaphor for Peter 's diminishing presence .", "September brings up memories of how the end of summer felt during childhood , a loss .", "Yet I find myself resisting the comparison .", "Peter is alive , and summer does n't officially end for 10 more days .", "I 'm still wearing white .", "GENERATIONS ."], "summary": ["Alice Elliott Dark Generations essay on summer spent in Maine with her family and her stepfather , Peter , 88 , who is debilitated with post-polio syndrome and effects of strokes .", "Drawing ."], "publication": "nyt50", "label": [9, 66, 11], "tag": ["New York and Region"]} -{"id": "1788886", "text": ["WHEN Natalie Wells bought a home in Englewood , N.J. , a year ago , she was unaware that her American pit bull terrier was illegal to own in the city .", "Shortly after moving in , she was told by one of her daughters about a city law that banned the breed , commonly called pit bulls , along with several similar breeds and Rottweilers .", "Under the 1999 law , even this year 's best-in-show winner at the prestigious Westminster Kennel Club Dog Show , Rufus , a colored bull terrier from Holmdel , N.J. , would be banned in Englewood .", "`` I pretty much knew in my gut it was n't right , `` Ms. Wells said .", "In July , Ms. Wells filed a challenge to the law in Bergen County Superior Court along with Mia Rodriguez , a neighbor who also owns a pit bull , and the American Dog Owner 's Association of Castleton , N.Y.", "Last month , Superior Court Judge Jonathan N . Harris agreed with Ms. Wells and ordered the city to stop enforcing the law because it was in conflict with a New Jersey statute that prohibits restricting dogs by breed .", "`` Cities do n't have the right to make laws that violate state law , `` said Flora Edwards , the lawyer who represented the plaintiffs .", "`` If the legal drinking age is 21 under state law , the City of Englewood or Montclair ca n't say it 's 25 or 18 . ``", "According to a Centers for Disease Control study , the pit bull breed was responsible for more dog-bite fatalities than any other breed from 1979 to 1998 , the latest year for which figures were available .", "The breed was responsible for 66 of 238 dog-bite fatalities during that period .", "Rottweilers were next , with 39 .", "The New Jersey Vicious and Potentially Dangerous Dog Act sets out criteria for dealing with aggressive dogs , but prohibits breed discrimination .", "New York has a similar statute .", "Connecticut 's law does not ban breed discrimination .", "Despite such laws , some communities still have restrictions on specific breeds .", "They range from outright bans to requiring property insurance coverage and the use of shorter leashes and muzzles in public .", "Tanya Ford , village clerk in Hempstead , N.Y. 
, said she was aware of no challenges to its law , which categorizes American pit bull terriers and several related breeds as vicious dogs , requiring that they be muzzled when walked and kept on a chain with a minimum strength of 300 pounds and not exceeding three feet in length .", "Owners must also have liability insurance of $ 100,000 .", "Mahlon Goer , a pit bull owner who tracks legislation in New York for the American Dog Owner 's Association , said the state still allowed insurance companies to drop customers or deny property insurance to prospective customers based on the breed of dog they own .", "Underwriting policies vary , according to the group , but beyond pit bulls and related breeds , the list includes Siberian huskies , Great Danes , German shepherds , St . Bernards and Dalmatians .", "Opponents of breed-specific laws say it is difficult to know how many communities have such laws because keeping tabs at the local level can be difficult unless laws are highly publicized .", "According to the American Kennel Club , last year it tracked 105 communities around the nation where breed-specific legislation was pending , enacted or defeated .", "The group had tracked another 76 through July .", "Among the municipalities in the region that have breed-specific laws are Larchmont , Sands Point and Hempstead in New York and Millville and Atlantic City in New Jersey .", "Numerous communities across the United States have such laws .", "One of the most controversial is in Denver , where authorities have euthanized more than 1,000 pit bulls since the reinstatement of a ban on the breed in May 2005 .", "The city 's animal control division had suspended enforcement of the ban in 2004 after the governor signed a bill restricting local governments from outlawing certain breeds .", "But the city successfully sued , arguing that the bill violated its home-rule authority .", "In Englewood , Douglas Bern , a lawyer who served on the City Council when the law was passed , said the council was responding to incidents in a public park where the dogs were being used to intimidate people .", "He said the police had also felt threatened by pit bulls when responding to a call at a home .", "The city argued that the municipal law complemented state statute , which was designed to address situations where `` existing local laws inadequately address the problem '' of aggressive dogs .", "`` The city of Englewood 's ordinance in this regard actually furthers and is consistent with the legislative intent , which is to address a void where local governments have not addressed the area of vicious or potentially dangerous dogs , `` the city said in a court brief .", "Under the ordinance , bull terriers , Staffordshire bull terriers , American pit bull terriers , American Staffordshire terriers , Rottweilers or `` any dogs of mixed breed which has the appearance or characteristics of being predominantly of the breeds , '' were banned from the city .", "Some summonses had been issued under the law , but city officials did not know how many .", "`` It 's like there 's a stigma for having one of these kinds of dog , `` said Ms. Rodriguez , who owns an ailing 8-year-old pit bull named Cyrus .", "The Englewood City Council will discuss the law at its Sept . 
19 meeting , said Scott Reddin , the council president .", "He said he did not expect the council to challenge the court 's decision .", "`` We were profiling certain breeds and that was found to be unconstitutional , '' he said .", "`` I do n't think the council will have any problem rescinding that . ``", "Numerous national dog owner and veterinarian associations have come out against breed-specific laws , saying they are unfair and do not address the problem of aggressive and dangerous dogs .", "`` As we like to say , punish the deed , not the breed , '' said Lisa Peterson , a spokeswoman for the American Kennel Club .", "`` We think breed-specific laws are unfair to responsible dog owners . ''", "Barbara Bishop , who owns Rufus , the top dog at the Westminster show , said she was trying to use the dog 's success to highlight the unfairness of breed-specific bans .", "`` We want to let people know that every dog has teeth and every dog can bite , whether it 's a Chihuahua or a bull mastiff , `` Ms. Bishop said .", "`` Every dog will be a product of what it 's brought up to do . ``", "Ms. Bishop attributed much of the image problem of the pit bull breeds to people who train them to be vicious , including drug dealers who use them as guard dogs .", "`` We have Rufus , who 's the top winning colored terrier of all time , and we still have people stop in the street and say , ` There 's a pit bull , ' `` she said .", "For Ms. Wells , the law seemed even more absurd because her 12-year-old pit bull , Sentry , has cataracts and has had cancer , heart surgery and a hysterectomy .", "`` She is a member of the family , '' said Ms. Wells , who has two daughters , ages 34 and 32 .", "`` My kids tease me all the time and say she 's my favorite daughter . `` ."], "summary": ["Article on legal challenges pit bull owners have been making against local laws in New York City metropolitan area that ban or restrict certain dog breeds .", "Opponents of breed-specific laws say it is difficult to know how many communities have such laws .", "Numerous national dog owner and veterinarian associations oppose breed-specific laws , saying they are unfair and do not address problem of aggressive and dangerous dogs .", "Photos ."], "publication": "nyt50", "label": [39, 20], "tag": ["New York and Region"]} -{"id": "1788887", "text": ["A selection of New Jersey events scheduled in honor of the victims of Sept . 11 : Bernards Township -- Memorial service .", "Monday at 7 p.m. Liberty Corner Presbyterian Church , 45 Church Street .", "-LRB- 908 -RRB- 647-0340 .", "Camden -- `` For the Healing of the Nations '' concert , featuring Nnenna Freelon , Andy Bey , Mark Johnson , Sandra-Turner Barnes , the Afro Blue Vocal Ensemble of Howard University and others .", "Sunday at 3 p.m. $ 25 .", "Walter K . Gordon Theater , Camden Center for the Arts , Rutgers University , Third and Pearl Streets .", "-LRB- 856 -RRB- 225-2700 .", "Hamilton -- Free admission to sculpture park for Sept . 11 .", "Monday , from 10 a.m. to 8 p.m. Grounds for Sculpture , 18 Fairgrounds Road .", "-LRB- 609 -RRB- 586-0616 .", "Manalapan -- The Heart of New Jersey Chorus , a chapter of Sweet Adelines International , will perform .", "Monday at 7:30 p.m. 
Monmouth County Library , 125 Symmes Drive .", "-LRB- 732 -RRB- 431-7242 .", "Montville -- `` George Washington 's Indispensable Men : The 32 Aides-de-Camp Who Helped Win American Independence , `` memorial program presented by the author Arthur Lefkowitz .", "Monday at 7:30 p.m.", "Senior House , 356 Main Street .", "-LRB- 973 -RRB- 394-0554 .", "New Brunswick -- Evensong : works by Walmisley , Handl , Williams and Bainton .", "Sunday at 6 p.m. Christ Church Episcopal , 5 Paterson Street .", "-LRB- 732 -RRB- 545-6262 .", "Oradell -- `` The Guys , '' a 9/11-inspired drama by Anne Nelson , free for survivors of Sept . 11 and members of victims ' families .", "Sept . 16 and 17 .", "Bergen County Players , 298 Kinderkamack Road .", "-LRB- 201 -RRB- 261-4200 .", "West Orange -- Remembrance ceremony , at the Essex County Sept . 11 memorial .", "Monday at 8 a.m.", "Eagle Rock Reservation , Eagle Rock Avenue .", "-LRB- 973 -RRB- 621-4404 .", "West Windsor -- `` Remembering 9/11 Through Dance , '' choreographic tribute to West Windsor victims .", "Sunday at 6 p.m. Ron R . Rogers Arboretum , Route 571 and Clarksville Road .", "-LRB- 609 -RRB- 799-6141 .", "RECALLING SEPT . 11 ."], "summary": ["Selection of New Jersey events scheduled to honor victims of September 11 .", "Photo ."], "publication": "nyt50", "label": [0], "tag": ["New York and Region"]} -{"id": "1788888", "text": ["AS Jamaica station came into view , Joe Singh of Deer Park stood by the train door and looked out the window .", "What he and other commuters on the Long Island Rail Road see these days is much different from what they saw before a $ 300 million renovation of the station that began more than five and a half years ago .", "`` It 's much better than it was before , `` Mr. Singh said .", "`` There is a better covering from the rain .", "The platforms were covered before , but it was n't as good . ``", "Mr. Singh , who was about to step off the 7:47 a.m. train from Deer Park , said he takes the Long Island Rail Road to Jamaica and then takes the E subway to his job in Forest Hills , Queens , where he is a sales representative for Quest Diagnostics .", "As part of the renovation , a new walkway was built to connect the railroad station to the E , J and Z lines .", "Mr. Singh said that he liked the walkway and had always found it clean but that the front entrance of the railroad station was `` very , very dirty . ''", "`` They never clean it , '' he said .", "`` Now they 're fixing the first-floor bathrooms behind the ticket booth .", "Let 's see how they do . ``", "Another commuter , Monte Colbert of Merrick , a certified public accountant with Marcum & Kliegman in Melville , said the station `` feels more modern . ''", "Mr. Merrick , who was waiting for a train to Pennsylvania Station , added , `` It lends itself to commuting . ''", "An estimated 98,000 Long Islanders travel through Jamaica station during the morning rush , said Susan McGowan , a spokeswoman for the L.I.R.R.", "She said that about 14 percent of the riders switch trains at Jamaica for either Brooklyn or Hunters Point Avenue trains .", "The renovation was `` substantially completed '' in April , said Richard C . Oakley , director of capital program management at the railroad , although some odds and ends must still be wrapped up , like the reconstruction of Sutphin Boulevard .", "That work , which involves installing catch basins , is being overseen by the Port Authority of New York and New Jersey , Mr. 
Oakley said .", "The Port Authority and the Metropolitan Transportation Authority , the parent of the railroad , shared in the cost of the Jamaica station renovation .", "It included the AirTrain , an 8.1-mile light rail system that opened in December 2003 to connect the station to Kennedy International Airport .", "Mr. Oakley said a key feature of the newly renovated Jamaica station was the replacement of wooden beams , waiting rooms and mezzanines with steel beams and laminated glass .", "The result is a more open feel .", "The mezzanines , for example , are no longer enclosed , and the east-end mezzanine , formerly 16 feet wide , is now 80 feet wide .", "UPDATE ."], "summary": ["Update on $ 300 million renovation to Long Island Rail Road 's Jamaica Station .", "Photo ."], "publication": "nyt50", "label": [1], "tag": ["New York and Region"]} -{"id": "1788889", "text": ["What was supposed to have been summer 's last hurrah instead became another quest for any available stretch of sand and surf .", "Many Suffolk County residents found themselves warned off their neighborhood beaches over the Labor Day weekend , just as Long Islanders had been periodically throughout the rainy summer .", "Downpours and the elevated bacterial levels in the water that resulted led Suffolk and Nassau County health officials to close or issue warnings against swimming at a patchwork of beaches and parks for days at a stretch in June , July and August .", "Most recently in Nassau , 23 beaches were shut down on Aug . 25 .", "They reopened six days later .", "Suffolk authorities issued an advisory covering 62 beaches on Aug . 28 .", "Two days later , two beaches were closed but the advisory was lifted for 57 beaches and their waters again cleared for swimming .", "The fun was short-lived .", "On Sept . 1 , the Friday of the long holiday weekend , the remnants of Hurricane Ernesto struck .", "The arrival of Ernesto , which had been downgraded to a tropical storm before it hit Long Island , prompted Suffolk to issue a new advisory that covered 52 beaches .", "According to Suffolk 's beach hot line , that advisory was lifted Monday at 24 beaches but remained in effect at 28 -- in Centerport , Cold Spring Harbor , the Great South Bay , Huntington , Long Island Sound and Northport -- for an additional 24 hours .", "Although Nassau shut down beaches on its North and South Shores four times over the summer , Ernesto dropped too little rain there to raise bacteria counts beyond acceptable levels , said Cynthia Brown , a Nassau health department spokeswoman .", "That , however , does not mean Nassau residents can make up for lost time by spending this weekend or next at a county beach .", "`` The beaches are now officially closed until next year , '' said Ms. Brown , noting that residents ' user permits expired on Tuesday , the day after Labor Day .", "First Plant to Turn Grease Into Fuel Opens The Northeast 's first plant to convert restaurant grease into biofuel has officially opened .", "The pilot plant , which is in Bohemia , is expected at first to produce up to 1,000 gallons a day of biofuel that could be used in diesel-powered cars and trucks , as well as home-heating systems .", "North American Biofuels , founded last year by C . David Butler II and Alan Ellenbogen , opened the plant on Aug . 
29 .", "By December , its output should reach about 4,000 gallons a day , the company said .", "Suffolk County 's sewage-treatment plant stopped accepting restaurant grease in 2002 after its system clogged up , and Nassau 's plant accepts grease only from restaurants inside the county .", "For the past four years , Suffolk restaurant owners have had to pay to ship their grease to New Jersey , where plants have developed their own problems , or dump it illegally down the drain .", "`` This facility takes a costly problem -- the disposal of waste grease -- and turns it into an energy solution by producing a clean , renewal energy product , which will reduce harmful emissions into the atmosphere , '' Steve Levy , the Suffolk County executive , said in a statement .", "Mr. Butler , an engineer , developed the processing system and its computerized operating technology .", "Russell Reid , a New Jersey-based waste management firm , will transport the used grease to the plant .", "Island Biofuel , based in Center Moriches , will be the major distributor of the end product , Mr. Ellenbogen said .", "Producers of biodiesel and other forms of renewable energy can be eligible for federal , state and local tax incentives .", "But North American Biofuels was not because it relies on a technology that is cheaper to build than plants that process `` virgin '' sources , like soy oil -- about $ 7 million compared with $ 20 million to $ 30 million .", "But the company will pass along a 50-cent-a-gallon federal subsidy to its customers , Mr. Ellenbogen said .", "That would lower the wholesale price per gallon to less than $ 1.90 , compared with about $ 2.25 for home-heating fuel and about $ 3.10 for biodiesel made from soy , he said .", "Bill to Penalize Inmates For Spitting at Guards Under a year-old state law , a jail inmate who throws blood , semen , urine or feces at a corrections officer can be charged with a felony .", "Now a bill , sponsored in the State Assembly by Thomas P . DiNapoli , a Democrat from Great Neck , would add saliva and phlegm to that list .", "Corrections officials in Nassau and Suffolk said inmates know that the law established tougher penalties for throwing certain bodily fluids at guards but also know that it did not include saliva , so they have spit at guards more .", "The bill , which passed the Senate in June , would make that a Class E felony punishable by one and a third to four years in state prison , not in a county jail .", "The sister bill in the Assembly , sponsored by Mr. DiNapoli , is in committee .", "The unions representing corrections officers in Nassau and Suffolk support its passage .", "`` One hundred percent , '' said Vito Dagnello , the president of the Suffolk County Correction Officers Association .", "`` It 's become part of the job that you go home at night not knowing what you 've contracted . ``", "Saliva or phlegm , particularly if it contains blood , can carry infectious pathogens , including those that cause hepatitis , tuberculosis or H.I.V. , the unions said .", "Mr. Dagnello said it was difficult to get worker 's compensation or disability payments for illnesses that guards believe were contracted from inmates , `` because the government 's position is that you could have contracted it somewhere else . 
``", "John Duer , the president of the Nassau County Sheriff Officers Association , said the same compensation battle exists for corrections officers statewide .", "He said that one of his members was recently spit at by an inmate whose saliva landed in the officer 's mouth .", "`` Now he 's being tested for a whole list of contagions , `` Mr. Duer said of the officer .", "THE WEEK ."], "summary": ["The Week column .", "Remnants of Hurricane Ernesto and elevated bacterial levels in water prompt Nassau and Suffolk County health officials to close or issue warnings against swimming at several beaches and parks .", "C David Butler and Alan Ellenbogen open American Biofuels , Northeast 's first plant that converts restaurant grease into biofuel , in Bohemia , NY . State Assemblyman Thomas DiNapoli sponsors bill that would establish tougher penalties against prison inmates who spit at guards .", "Photo ."], "publication": "nyt50", "label": [2, 14, 16], "tag": ["New York and Region"]} -{"id": "1788890", "text": ["The Shore Institute of the Contemporary Arts in Long Branch has unveiled its big fall exhibition titled `` Stuff It ! '' -- a riotous ensemble of artworks employing stuffed objects .", "It is a victory of a kind for this plucky institution , for in May it nearly closed for lack of funds .", "But it was saved by an anonymous donation of $ 20,000 from a famous Shore rock musician .", "It has also just received a grant of $ 22,000 from the New Jersey State Council on the Arts .", "The exhibition was organized by Doug Ferrari , an artist who founded the institute and mortgaged his home two years ago to open the nonprofit space .", "For this exhibition he has chosen artists from all over the country , drawing on submissions through an informal national open call , along with recommendations from friends and other artists .", "Most of the artists are women , and all of the contributors seem to make art that involves filling , stuffing , inflating , upholstering or padding of one kind or another .", "Stuffed sculpture goes back to the 1960 's , when pop artist Claes Oldenburg began to make oversized everyday objects -- a floor fan , a hamburger , a sofa -- with woven and sometimes painted fabric left flaccid or stuffed with soft materials .", "Some of the art here is in that vein , but there are also pieces that defy categorization -- a collection of antique sock monkeys , say , or a video by Matt Barton that shows one of his kinetic art installations with stuffed animals .", "Not surprisingly , much sewing , needlepoint , darning and embroidery is evident throughout this sweeping survey of nearly 50 works , including everything from upholstered abstract geometric canvases by Suzanne Brady of Manalapan to arch , titillating sculptures made from discarded socks by Tracey Featherstone from Hamilton , Ohio .", "There are also all kinds of stylish orchestrations of fabric and stuffing by Bethany Jean Fancher , Audrey Chibbaro and Joan Wheeler .", "Nothing is what it seems on Orly Cogan 's table of confections , for her edible-looking cakes , doughnuts , flans , slices , muffins , tarts and other treats are actually hand-sewn and crocheted from pieces of old socks , clothes and blankets .", "The subterfuge is far from obvious , but I guess that is the point behind much of this kind of trompe l' oeil sculpture .", "It is also what makes it incredibly popular with viewers , who can appreciate an act of craftsmanship that fools the eye .", "Rebeca Raney from New York City has contributed a wonderfully 
eccentric installation -- a tripped-out tableau of messy psychedelic wall drawings and radioactive-looking rocks encased in fake fur along with a stuffed , life-size cartoon figure in a space helmet and shimmering gold jumpsuit .", "It looks likes Versace crossed with the Wiggles crossed with `` Star Trek . ''", "Gaping in disbelief , I stood in front of the work for several minutes .", "The eccentricity of the tableau is ingratiating , but it would n't count for much if the pieces themselves were n't well made .", "Happily , they have been constructed with the utmost care , especially the central figure , whose face and hands are delineated by a jumble of colorful miniature embroidery .", "This formal detail makes for a lovely , unexpected surprise , representing hours of long , hard work by the artist .", "Not all contemporary art is as junky as it sometimes looks .", "The aim of the Shore Institute of the Contemporary Arts is to increase public access to , and awareness of , the contemporary arts in central New Jersey -- visual art , music , theater and dance .", "This exhibition is another example of how the institution is meeting its mission .", "I know of no other nonprofit art center in New Jersey that does so much with so little .", "`` Stuff It ! '' is at the Shore Institute of the Contemporary Arts , 20 Third Avenue , Long Branch , through Oct . 6 .", "-LRB- 732 -RRB- 263-1121 or www.sica.org.", "ART REVIEW ."], "summary": ["Benjamin Genocchio reviews Stuff It -RSB- , exhibition of artworks employing stuffed objects on view at Shore Institute of the Contemporary Arts in Long Branch , NJ . Photo ."], "publication": "nyt50", "label": [0], "tag": ["New York and Region"]} -{"id": "1788891", "text": ["SEAN O'CASEY 'S one-act play `` The Cooing of Doves , '' typed and marked up by the playwright , lay on a reading table at Princeton University 's Firestone Library .", "Neither yellowed by history 's aura nor piously unblemished , the unbound stack of 18 pages looked more like a term paper than a century-old artifact in the university 's new Leonard L . Milberg Irish Theater Collection .", "In fact , the manuscript , which became the second act of O'Casey 's seminal work `` The Plough and the Stars , '' looked so ordinary that Paul Muldoon , the poet and Princeton professor , at first overlooked it while perusing other samples of the collection , which was donated in his honor .", "The university announced the gift Aug . 29 .", "`` Fantastic , '' Mr. Muldoon , 55 , murmured with hushed excitement , leafing through the manuscript .", "`` Fantastic . ''", "The collection will be on public display at the library from Oct . 13 to April 22 .", "The opening of the exhibition will be observed with a series of events , including a production of Brian Friel 's `` Translations '' at the McCarter Theater .", "A symposium including the Irish actors Stephen Rea and Gabriel Byrne .", "And a lecture by Joe Dowling , the former artistic director of the Abbey Theater in Dublin , Ireland 's national theater , where many of the collection 's works were first staged .", "Irish drama provides `` a case study in the inextricability of literature and politics , '' said Prof . 
Michael Cadden , who teaches Irish theater at Princeton , because so many plays articulated the desire for independence from Britain .", "The collection 's 1,200 works include playbills from `` The Hostage '' and `` The Quare Fellow , '' by Brendan Behan , and a yellowed 1952 first edition of Samuel Beckett 's `` En Attendant Godot , '' in the original French , which he later translated as `` Waiting for Godot . ''", "The collection is named for its donor , the financier Leonard L . Milberg , a member of Princeton 's class of 1953 who lives in Rye , N.Y.", "Mr. Milberg had already given the university three collections of works by American poets , Irish poets and Jewish-American writers , before he was inspired by Mr. Muldoon to add Irish drama to the list .", "`` Paul is a friend , a wonderful man , '' Mr. Milberg said in a telephone interview .", "`` I 'm a fan of his poetry , and I 'm a fan of Paul Muldoon . ``", "Mr. Milberg acquired the collection over the past five years , he said , through dealers and at auction in New York , Ireland and England , with the help of a dealer , J . Howard Woolmer .", "Being in the presence of such manuscripts is a thrill and an inspiration , said Mr. Muldoon , who was born in County Armagh , Northern Ireland , and who won the Pulitzer Prize in 2003 for his collection of poems `` Moy Sand and Gravel . ''", "`` It 's quite touching , really , to be in the presence of O'Casey 's DNA , `` he said .", "`` It does help to be reminded that these plays in this case -- like poems , novels -- are indeed written by , in some profound sense , ordinary people -- people with a particular gift , of course . ''", "The manuscripts can also be insightful .", "For example , in a scene in `` The Cooing of Doves , '' O'Casey crossed out some characters ' proper names and identified them by occupation : `` Mrs. Macineely '' became `` The Orange Seller . ''", "The change stripped her of the personality or the geographic associations the name might carry , Mr. Muldoon suggested , and transformed her into a `` type . ''", "He added : `` It 's almost as if one is engaged , in some sense , in a bit of detective work , you know -- if not the scene of the crime , at least some evidence that an event has taken place here .", "In this case , the making of the play . `` ."], "summary": ["Article on Princeton University 's newly acquired Leonard L Milberg Irish Theater Collection , donated by Leonard Milberg , member of class of 1953 who lives in Rye , NY . Collection was donated in honor of poet and Princeton Prof Paul Muldoon .", "Photo ."], "publication": "nyt50", "label": [12, 15], "tag": ["Theater", "New York and Region"]} -{"id": "1788892", "text": ["THE Bridgeport Police Department has joined a handful of other departments around the state in putting officers on Segways , the futuristic-looking battery-powered upright scooters .", "Police Chief Bryan T . Norwood said the department bought four Segways last month for $ 5,500 each to use on regular patrols and at events like concerts .", "He said officers from the bicycle unit were trained to ride the Segways first .", "Chief Norwood said that besides giving officers better mobility , the scooters were good public relations .", "`` It 's phenomenal , `` he said .", "`` I 've seen officers who are normally quite reserved become animated when they 're on this vehicle .", "And the people just flock to them out of curiosity . ``", "Officer Caleb E . 
Lopez , a spokesman for the South Windsor Police , said the department began using its only Segway earlier this year as a test .", "If the Segway proves useful , Officer Lopez said , the department will consider buying more .", "`` The officer on it is not as physically taxed as he or she would be on a bicycle , '' he said .", "`` And anytime you have a conversation piece , it 's a great way of getting to know the public . ``", "A spokeswoman for Segway Inc . of Bedford , N.H. , said the police in Hartford and the state police at Bradley International Airport in Windsor Locks have Segways .", "Mayor John M . Fabrizi of Bridgeport tried one and said they were easy to ride .", "`` After about 10 minutes of practice , I was able to handle it , '' he said .", "Carla M . Vallone , a spokeswoman for Segway , said about 20 percent of the vehicle 's sales were to police departments and security agencies .", "She said they liked the scooters for a variety of reasons .", "`` They can cover twice the amount of ground that they could walking , '' she said .", "`` They 're also eight inches off the ground when they 're on the scooters , so they have a sight line that 's above a crowd .", "And when they 're working in an area with a dense population , they can move as fast or as slow as the people around them . ``", "Ms. Vallone said the model Bridgeport bought , a police edition with a front cargo bag and extra reflectors , has a top speed of 12.5 miles per hour and can travel about 24 miles on 6 to 8 hours of charging .", "Chief Norwood said he hoped to buy four new Segways , with wider tires designed for off-road use , in the next few weeks .", "NOTICED ."], "summary": ["Police Department in Bridgeport joins handful of others around state in putting officers on Segways .", "Four battery-powered upright scooters will be used on regular patrols and at events like concerts .", "Photo ."], "publication": "nyt50", "label": [0, 1], "tag": ["New York and Region"]} -{"id": "1788893", "text": ["The remnants of Tropical Storm Ernesto hit Fairfield County last weekend , bringing winds that downed trees and knocked out power to tens of thousands of homes .", "The storm had been downgraded to a tropical depression , and its center had already veered toward Pennsylvania by the time it struck Connecticut last Saturday , but winds in some areas in the state were gusting at nearly 60 miles per hour , said Todd Miner , a meteorologist at Pennsylvania State University .", "The storm also dropped nearly three inches of rain on some parts of southwestern Connecticut , Mr. Miner said .", "More than 20,000 United Illuminating customers lost power during the storm , 5,800 of them in Fairfield .", "By Tuesday morning , power had been restored , said Al Carbone , a company spokesman .", "Last Saturday night , a peak of about 54,400 Connecticut Light and Power customers were without power , the vast majority of them in Fairfield County .", "Stamford was hit hardest , with 14,300 people losing power on Saturday .", "In Greenwich , 11,100 lost power , according to Mitch Gross , a spokesman for the company .", "On Sunday morning , 10,250 customers in Norwalk had no power .", "More than 100 crews went out over the weekend to make repairs , including outside contractors and workers from other states , Mr. Gross said .", "About two dozen poles had been snapped , he said .", "All power was restored by Tuesday night .", "Mayor Dannel P . 
Malloy of Stamford said he was concerned that Connecticut Light and Power has had to scramble the past few months to restore power after the failures .", "`` I think they were overwhelmed again , '' he said .", "`` They have a staffing problem in lower Fairfield County .", "I think they 're going to be overwhelmed a lot unless they place more resources here . ``", "Last month , Gov . M . Jodi Rell ordered the Department of Public Utility Control to examine the power supply in the state and review plans by power companies for handling failures .", "THE WEEK ."], "summary": ["Remnants of Tropical Storm Ernesto hit Fairfield County , downing trees , knocking out power to tens of thousands of homes and bringing heavy rains to some parts of southwestern Connecticut .", "Photo ."], "publication": "nyt50", "label": [0], "tag": ["New York and Region"]} -{"id": "1788896", "text": ["Steve Lonegan , the mayor of Bogota -LRB- the one in New Jersey , not Colombia -RRB- , began the summer on an ugly note by demanding that McDonald 's remove a billboard that is printed in Spanish .", "The fast-food giant stood by its guns -- or maybe its hamburgers -- and the billboard still stands .", "But Mr. Lonegan is not to be deterred .", "He is ending the summer season with an effort to allow local voters to say whether they want English to be the town 's official language .", "Wisely , the Bergen County clerk , Kathleen Donovan , whose office has the final say over what goes on ballots , asked for a legal opinion on whether the nonbinding referendum question was allowable under the law .", "The answer was no .", "A lawyer , John Carbone , wrote that questions on municipal ballots must pertain to a matter a town can act on .", "State law does not authorize Bogota or any other municipality to declare an official language .", "Mr. Lonegan predicts he will win when he appeals the ruling to the courts .", "Judges , he says , are reluctant `` to take away the right '' of voters to express themselves .", "He also says he will raise money privately to pay for the appeals .", "That is consistent with his belief as a conservative Republican that government spending should be kept as low as possible .", "But the entire effort , which seems like a slap in the face to Hispanics , is also consistent with his practice of seeking widespread approval from ultraconservatives whose opposition to illegal immigrants is at a fevered high .", "As Mr. Lonegan admits , even a successful referendum would change nothing in Bogota .", "The town would still be required to abide by state and federal law stipulating that some documents be printed in more than one language .", "What is likely to change is Mr. Lonegan 's reputation : he may grow in stature among bigots , while losing credibility among minorities and fair-minded citizens .", "The choice is his .", "New Jersey ."], "summary": ["Editorial opposes effort by Mayor Steve Lonegan of Bogota , NJ , to introduce referendum on making English town 's official language ."], "publication": "nyt50", "label": [3, 0], "tag": ["New York and Region", "Opinion"]} -{"id": "1788899", "text": ["PROCLAIMING the beginning of a new era in Atlantic City gaming , Pinnacle Entertainment Inc . 
has agreed to buy the Sands Casino Hotel , the smallest of the 12 casinos in the city , for $ 250 million , with the intention of tearing it down and building an upscale casino-hotel complex .", "`` It 's the right thing for Atlantic City , `` said George Toth , the president of the Sands .", "`` I just think to try to run the small type of gaming house we were running no longer fits the model in a destination city .", "We did n't have shopping or a swimming pool , so we were just limited .", "Today in the gaming field , you have to offer a lot more things . ``", "Pending regulatory approvals , the deal should close by November , with demolition of the existing buildings starting then and construction taking as long as three or four years .", "The deal was announced Tuesday .", "The Borgata Hotel Casino and Spa , the most recent casino to open in Atlantic City , in 2003 , cost $ 1.1 billion to build .", "Estimates for the new Pinnacle operation , which will be on 18 acres adjacent to the Boardwalk , have run as high as $ 1.5 billion .", "`` What it does is take an afterthought of a property and suddenly makes it the talk of the town , '' said Joe Weinert of the Spectrum Gaming Group , a casino consultant that also publishes The Gaming Industry Observer .", "`` This is terrific news for Atlantic City , for it takes a property which has no impact on Atlantic City and will transform it into the next great casino here . ''", "The Sands was built 26 years ago but ran into severe debt problems before being bought by Carl Icahn in 2000 .", "After a series of transfers into holding companies , the Sands went into bankruptcy in 2005 .", "The case is in bankruptcy court in Camden .", "The Sands has 2,100 employees , and they will be out of work if the plans for demolition are completed .", "Given a statement by Daniel R . Lee , Pinnacle 's chairman and chief executive , that appears likely .", "`` The success of recent Atlantic City developments has proven that customers in the Northeast respond positively to state-of-the-art gaming resort design and amenities , '' Mr. Lee said .", "`` While we regret the necessity of closing the Sands to create an exciting new resort , we look forward to working with gaming regulators , state and local authorities on this project to create more jobs , tax revenues and other lasting benefits for the region . ''", "Pinnacle owns casinos in Nevada , Louisiana , Indiana , Argentina and the Bahamas , but not in the top two American casino cities , Atlantic City and Las Vegas .", "Mr. Lee , though , worked for Steven Wynn when he was considering opening a casino in Atlantic City in the late 1990 's , so he is familiar with that landscape , Mr. Weinert said .", "`` This is the beginning of an arms race , '' said Mr. Weinert , noting that three other potential operators are looking at sites around Atlantic City .", "`` It is only a question of which of these four groups will make the standard for the next-generation concept in Atlantic City . ''", "Jeff Vasser , the executive director of the Atlantic City Convention and Visitors Authority , which works to bring conventions to town , said he was not ecstatic about losing the 600 rooms at the Sands as the prime convention season comes around , but was happy to look at the future , with as many as 2,000 rooms planned for the new hotel .", "`` It 's a giant game of leapfrog here right now , and it is the time for Sands and Pinnacle to do the leaping , `` Mr. 
Vasser said .", "`` This is the Sands ' reaction to the Borgata , and now the other casino companies like Harrah 's and Trump will respond . `` ."], "summary": ["Pinnacle Entertainment Inc agrees to buy Sands Casino Hotel in Atlantic City , NJ , for $ 250 million .", "Plans to tear it down and build upscale casino-hotel complex ."], "publication": "nyt50", "label": [0], "tag": ["New York and Region"]} -{"id": "1788900", "text": ["IF you were an employee of a company and were disgruntled with its performance , who would you want to hold accountable .", "The chief executive , who is popular with key constituents and wields clout over your financial well-being .", "Or at least one , if not two , of the chief executive 's top lieutenants , both unpopular , and presumably easy to scapegoat .", "That , essentially , is the question that faces State Senator Thomas H . Kean Jr . as he grapples with the biggest political issue in his race to unseat United States Senator Robert Menendez : the war in Iraq .", "Mr. Kean is a Republican -- and a fairly moderate one at that , in contrast to the conservatives who run Washington .", "And as a member of the New Jersey Legislature , he had no role in any decisions related to Iraq .", "But he does say that he would have voted for the initial resolution authorizing the use of force , just as Senators Hillary Rodham Clinton and Joseph I . Lieberman , both Democrats , did .", "But because of his party affiliation , Mr. Kean has been flayed by Mr. Menendez and other Democrats -- and the polls show that Iraq is a problem for Republicans .", "Yet Mr. Kean ca n't join Democrats in pummeling President Bush , because he ca n't win without the backing of the national Republican Party in a traditionally Democratic state and with only a third of the money that his opponent has raised .", "So he has tried , with varying success , to distance himself from the chief executive , President Bush , without directly criticizing him , while taking issue with two key underlings , Vice President Dick Cheney and Defense Secretary Donald H . Rumsfeld .", "A few months ago , Mr. Kean experienced one of the low points of his campaign , when Mr. Cheney came to New Jersey for a fund-raiser on his behalf .", "Rather than show up at the event , where photos of the two would have been instantly transformed into Democratic red meat , Mr. Kean appeared after Mr. Cheney had left -- blaming traffic between Trenton and Newark .", "But many people wondered whether he had calculated his late arrival by traveling on Route 1 , known for its horrific traffic , rather than the New Jersey Turnpike .", "Then , a week ago , Mr. Kean became one of the highest profile Republicans to call for Mr. Rumsfeld 's resignation .", "He said that it was detrimental that Mr. Rumsfeld had `` politicized the war through some of his comments , '' including a recent speech touching on those who advocated appeasing Nazi Germany in the 1930 's -- a speech that some people interpreted -LRB- incorrectly , according to the Pentagon -RRB- to be a veiled criticism of the war 's detractors .", "But throughout a one-hour-plus interview , Mr. Kean declined to criticize Mr. Bush .", "`` I just do n't think that the president is very well served by Secretary Rumsfeld staying in that position , `` he said .", "`` I think that the president needs a constant flow of information if he 's going to have his best ability possible to make the decisions that he needs to make . ``", "Democrats have derided Mr. 
Kean 's attempts to blame everyone but Mr. Bush .", "It is analogous , they say , to criticizing everyone at Enron other than Kenneth L . Lay , or attributing the famines in China in the 1950 's and 1960 's to someone other than Mao Zedong .", "Indeed , in a news conference this past week , Mr. Menendez likened it to `` throwing the first mate of the Titanic overboard while standing side by side with the captain as he steers straight toward the iceberg . ''", "It is hard to tell whether people will find Mr. Kean 's balancing act credible or whether they may find him disingenuous .", "But suffice it to say , the more undecided people are about his intentions , the better it is for Mr. Kean .", "`` I think it 's very smart , I think it 's very tricky , and I think that there 's no alternative , `` said Peter J . Woolley , a professor of politics at Fairleigh Dickinson University .", "Mr. Woolley is also executive director of the university 's PublicMind poll .", "It recently found that Mr. Kean had a small lead among registered voters that widened to 11 percentage points if Iraq were not a factor .", "And while many believe that Mr. Menendez should still be considered a slight favorite because of New Jersey 's Democratic tendencies , Mr. Woolley said that Mr. Kean should be encouraged that his favorability ratings have been higher than most Republicans ' in recent statewide races and that Mr. Menendez 's ratings have been a bit lower than most Democrats ' .", "Still , Mr. Woolley says he does n't think that Mr. Kean will invite Mr. Bush to New Jersey anytime soon .", "But the Bush family is another matter .", "On Wednesday , Mr. Bush 's father , former President George H . W . Bush , stumped for Mr. Kean in Bridgewater .", "So , too , did the first lady , Laura Bush , a couple of months ago .", "So maybe Mr. Bush 's mother and twin daughters will come next .", "POLITICAL MEMO David W . 
Chen is Trenton bureau chief of The New York Times ."], "summary": ["David Chen Political Memo column on issue of war in Iraq as it applies to New Jersey State Sen Thomas Kean Jr , moderate Republican who is seeking to unseat Democratic US Sen Robert Menendez .", "Notes Kean has tried , with varying success , to distance himself from Pres Bush without directly criticizing him , while taking issue with Vice Pres Dick Cheney and Defense Sec Donald Rumsfeld .", "Photos ."], "publication": "nyt50", "label": [9, 3], "tag": ["New York and Region"]} -{"id": "1788980", "text": ["The vibrant restaurant scene along Hudson Street , which includes longtime crowd pleasers like Nobu and Chanterelle , has continued to grow in the past year , with these five newcomers .", "DANI * -LSB- Rating : One Star -RSB- -LRB- 212 -RRB- 633-9333 .", "333 Hudson Street -LRB- Charlton Street -RRB- .", "$ $ .", "Review : 4/12/06 .", "Dani aims for a bit of distinction in a crowded New York marketplace of Italian restaurants by taking a few steps toward an often unexplored area of Italy : Sicily .", "That region 's fruits , nuts and proximity to northern Africa are reflected in a menu that also hedges its bets with the kind of fare found at many other New York restaurants , Italian and otherwise .", "The best dishes , like bucatini with sardines , and the desserts are very good indeed , and the noisy room is visually appealing .", "FATTY CRAB -LRB- 212 -RRB- 352-3590 .", "643 Hudson Street -LRB- Horatio Street -RRB- .", "$ .", "$ 25 and Under : 10/26/05 .", "Zak Pelaccio 's rollicking Malaysian roadhouse is done up in dark woods and reds .", "Ornately carved chairs with stiff backs are crammed around small tables that fill the tiny dining room .", "The signature chili crab consists of a Dungeness crab served in a delicious pool of spicy , crab-drenched sauce with broiled white bread to sop it up .", "MR . CHOW TRIBECA -LRB- 212 -RRB- 965-9500 .", "121 Hudson Street -LRB- North Moore Street -RRB- .", "$ $ $ .", "Review : 6/28/06 .", "Mr. Chow Tribeca opened more than a quarter-century after the Mr. 
Chow on East 57th Street , but it stays true to its predecessor 's formula : a mixture of familiar and less familiar Chinese food in a slick setting with formally attired servers and an aura of clubby exclusivity .", "It can be expensive , if amusing , but a few dishes , like the Peking duck , are terrific .", "NOVO -LRB- 212 -RRB- 989-6410 .", "290 Hudson Street -LRB- Spring Street -RRB- .", "$ .", "Article : 2/1/06 .", "This Latino place has Alex Garcia as chef and partner .", "The dinner menu is divided into categories like ceviches , grilled meats and fish , hot and cold tapas plates , and salads .", "The lunch menu offers sandwiches including a Cuban pressed sandwich with chicken croquettes and crispy mariquitas , and colorful limeades , smoothies and aqua fresca -- water infused with fresh fruit like melon , papaya and guava .", "SETACCI -LRB- 212 -RRB- 675-0810 .", "420 Hudson Street -LRB- Leroy Street -RRB- .", "$ $ .", "Article : 5/3/06 .", "Lisa Cannistraci , who owns Henrietta Hudson , and Joey D' Angelo , a chef , formerly of Union Square Caf\u00e9 and Esca , own this stylish Italian place in the West Village .", "The name is Italian for sift .", "The menu has crudo like oysters or clams on the half-shell , antipasti like grilled baby octopus , or house cured sardines , and entree selections that include grilled whole branzino and brick-roasted baby chicken .", "GOOD EATING / HUDSON STREET E-mail : eating@nytimes.com ."], "summary": ["Excerpts from previously published reviews of restaurants along Manhattan 's Hudson Street -LRB- Good Eating column -RRB- ."], "publication": "nyt50", "label": [2], "tag": ["New York and Region"]} -{"id": "1788983", "text": ["That 's Mr. Gettys to You Q . `` Citizen Kane '' is my absolute favorite movie .", "I know that the Charles Foster Kane character , who builds an empire around a New York newspaper , is modeled after William Randolph Hearst , and that Susan Alexander resembled Hearst 's companion , Marion Davies .", "But which politician was Boss Jim Gettys supposed to be .", "A . That one 's not quite as obvious .", "Jim Gettys , played by Ray Collins , could have been a composite of figures from Tammany Hall and New York City politics in the early 20th century .", "There were plenty to choose from , with names like Johnny Oakley and Big Tim Sullivan .", "But one clue is the scene in which Gettys chews out Kane for printing a cartoon showing him in prison stripes .", "That really happened to Charles F . Murphy , a Tammany leader who started as a horsecar driver and owned several bars while building political power .", "Hearst , a Tammany opponent who twice ran for mayor himself , had his papers regularly denouncing Murphy as a latter-day Boss Tweed .", "One Hearst cartoon in 1903 showed Murphy dressed in striped prison clothes .", "A caption , referring to the restaurant where the Boss and his sachems often huddled , read : `` Look out , Murphy .", "It 's a short lock-step from Delmonico 's to Sing Sing . 
``", "Politics has a way of uniting enemies , though , and three years later , the ever-calculating Murphy helped Hearst get the Democratic nomination for governor .", "He lost to Charles Evans Hughes .", "Sparrow Snatching Q .", "While traveling to Yankee Stadium on the D train , a friend and I noticed a woman carrying a live sparrow in a plastic shopping bag .", "The next day , in Union Square , I saw a young couple carrying a transparent take-out container with a live sparrow .", "Is there something going on that involves the sacrifice of a small bird .", "And is it legal to capture sparrows .", "A .", "We asked people at the Bronx Zoo and the New York Companion Bird Club about this , and they ruled out a cult or ritual .", "Your sightings seem to be a coincidence .", "One club member noted that while the birds may have been unprotected English sparrows , there are also dozens of sparrow species protected by the Migratory Bird Treaty Act , many of them in New York , and capturing them is illegal .", "One respondent added that the housing described in the note was awful .", "Another animal advocate wrote : `` I think the people were just picking up fledglings thinking they were injured or orphaned . ''", "New York City Audubon offers guidelines on how to handle injured birds and baby birds on its Web site , www.nycaudubon.org.", "Look for Project Safe Flight under Programs .", "Double Digits Down There Q . Am I misremembering , or did there use to be subway trains with names like CC , AA , BB , etc.", "What happened to them .", "A . You 're not imagining things .", "Double-letter designations survived until 1986 , when they were phased out .", "Double letters once identified local routes , and single letters meant express , but as routes and designations changed over the years , that distinction became almost meaningless .", "Unless you 're an optometrist , this all may be too much information , but in 1986 , the CC became the C , the GG became the G , the LL became the L , the QB became the Q , the RR became the R , and the AA became the K , which was absorbed two years later by the B and C .", "There also used to be a KK train on the J line , an HH train -LRB- Rockaway shuttle -RRB- , a T and TT -LRB- West End express and local -RRB- and a QT -LRB- Brighton Beach local -RRB- .", "MICHAEL POLLAK F . Y .", "I . 
E-mail : fyi@nytimes.com ."], "summary": ["FYI column answers questions about which politician Boss Jim Gettys was modeled after in movie Citizen Kane , why people are seen in city with live sparrows in shopping bags and take-out containers and whether there used to be subway trains with double letters .", "Drawing ."], "publication": "nyt50", "label": [2, 27, 0], "tag": ["New York and Region"]} -{"id": "1788984", "text": ["WHEN we first met in 2003 , Eve , to me , epitomized TriBeCa loftiness .", "I was making a hasty move from Washington to Manhattan , where I had grown up on the Upper West Side .", "All the remaining parts of my immediate family still resided there , my sister and cousins , nearly all within talking-out-the-window distance from one another , exactly like our parents ' and grandparents ' generation .", "I desperately wanted to try something new and to see if , at nearly 50 , I could become a downtown girl .", "I found a great loft to sublet in TriBeCa .", "But before I could move into the small loft building on Hubert Street near Greenwich Street , I had to pass muster with the six other people who already lived there .", "Some of them , like Eve , had been in the building for decades , when the neighborhood was filled with artists , struggling writers and illegal squatters instead of brokers with bonuses .", "For my co-op sublet interview , I was prepared to answer all manner of questions about my personal probity and financial condition , none of which got asked that night .", "Instead , Eve , a birdlike , somewhat frail-looking woman in her 60 's , had some sharp questions for me about coverage of culture and politics at The New York Times , where I work .", "I learned that she was on the faculty at Hunter College and was a teacher of poetry .", "I found I loved the old , low industrial architecture and cultural edginess of TriBeCa , even with borrowed belongings .", "It made me feel as if I were coming to live in a new city , not returning to a familiar one .", "My landlord 's library , I found , was superior to my own .", "I loved the big open space , especially the windows that faced out onto the Hudson River .", "I loved taking walks on the river with Buddy , my 13-year-old dog , whose adjustment to Manhattan life , after years spent running around our suburban yard , was immediate and joyous .", "And of course some things took getting used to , like having no front door , just an elevator that opened directly into my place .", "The elevator , in fact , played a big part in my fateful encounter with the professor , her poetry and , later , her painting .", "One winter Sunday , when I decided to run out for a manicure , Buddy became outraged that I was going for a walk without him .", "He growled as I put on my coat and boots .", "Then , suddenly , the elevator opened without warning .", "-LRB- The elevator generally came to our floor only when I summoned it , or when it was opened by a special key .", "Its opening this time is still a mystery . 
-RRB-", "Buddy , a sweet-tempered Westie , went into guard-dog mode and , before I realized what was happening , flew into the elevator , sensing an intruder muscling onto our turf .", "I heard a ruckus and , horrified , saw that Eve was in the elevator , barefoot and in her cotton nightgown .", "Buddy had nipped her on the knee .", "I had instant visions of lawsuits and eviction letters .", "Instead , when I followed Eve up to her loft on the top floor to help her clean the bite , I found a serene woman who felt worse about the situation than I did .", "Her loft was crammed with books and brightly colored canvases she had painted .", "Her place was a bit of a mess , and completely unrenovated .", "The floors were not buffed and gleaming the way mine were .", "Eve 's place was utterly unchic , unlike so many of the lofts I had seen elsewhere in TriBeCa , which sometimes had the look of sleek boutique hotels , stylish but coldly anodyne .", "She did , however , have stunning views of the river .", "I kept vigil over Eve , bringing her fresh Band-Aids and antibiotic ointment .", "I urged her to let me take her to a doctor .", "She reluctantly agreed , and we went uptown to see an internist friend .", "In our Town Car ride , we chatted about books and our neighbors .", "She was n't partial to the new restaurants that were constantly opening -LRB- and usually soon closing -RRB- in our neighborhood .", "She loved the graceful dowager , Capsouto Fr\u00e8res .", "`` It 's wonderful in the spring when they open all the doors , `` she told me .", "Although the bite turned out to be harmless , I found that I enjoyed checking in on Eve .", "I learned that she had grown up near Boston and gone to the University of Chicago and grad school at Columbia .", "Romantic poetry was her love , and she had a wonderful library .", "She painted .", "She alluded to a husband , but since she was always alone when I saw her , I assumed the husband was an ex .", "COME spring , Eve invited me to her retirement party .", "Some of her graduate students showed up , and so did her husband , who appeared to be much younger and was a good-looking English teacher at a military school out west .", "In fact , the two of them were heading west for the summer and intended to sublet Eve 's loft .", "By this time , the owner of my sublet had plans to return to New York , and although the prices were insane , I bought a loft two blocks from my sublet .", "After I moved , I lost track of my neighbor .", "Then , a few weeks ago , strolling with Buddy , I spotted Eve and her husband entering their building .", "She was in a wheelchair , hobbled by bad knees .", "`` I 'm having knee replacement surgery , `` Eve happily announced .", "Her husband told me they had sold the loft and were under pressure to clean out all of Eve 's belongings over the weekend .", "The next time I passed the building , there were cartons of books by the curb , including a Modern Library edition of Plutarch 's `` Lives , '' Samuel Pepys 's diaries , and a hardcover edition of Yeats 's collected poems , many with Eve 's jottings from her grad school days .", "An old paperback of Damon Runyon 's `` Guys and Dolls '' especially beckoned me .", "Eve 's name was written on the title page in fountain pen ink , with the notation `` London July , 1960 . ''", "When her husband appeared , I asked him if it was O.K. 
to take some of the books .", "He said that was what they were there for .", "I also told him I would love to have one of Eve 's paintings if she was n't moving them all .", "`` She would love that , '' he said in a noncommittal voice .", "Then one Saturday my cell rang .", "`` Jill , it 's Eve . ``", "She was calling from out west .", "She told me that the new owner of her loft was moving in and that she had left three of her paintings in the hallway for me to choose from .", "Two nights later , I went back and rode up to her place in the elevator in which Buddy had committed his crime .", "The loft was empty , and I imagined it would soon be completely renovated and buffed to Architectural Digest standard .", "I agonized over the canvases , all of which were large and extremely colorful oil paintings , all abstract and beautiful .", "I picked one that was heavy with sapphire and orange .", "I carried the painting over my head down Greenwich Street , past the spanking new Wolfgang 's Steakhouse filled with brokers ordering $ 50 hangers and strips .", "I went past the new Robert De Niro boutique hotel going up on the corner of North Moore .", "I thought about that scene in `` An Unmarried Woman '' in which Jill Clayburgh , at the end of the film , is carrying Alan Bates 's painting through the streets of SoHo .", "Eve 's painting has a home on one of my brick walls , not far from the bookcase with her books and Buddy 's dog bed .", "But , sadly , TriBeCa has lost one of its truly original loft girls .", "NEW YORK OBSERVED ."], "summary": ["Jill Abramson New York Observed essay on her former TriBeCa loft neighbor , Eve , somewhat frail-looking woman in her 60 's , who had wonderful library and is a painter .", "Photo ."], "publication": "nyt50", "label": [8], "tag": ["New York and Region"]} -{"id": "1788985", "text": ["JULIAN MONTAGUE has spent seven years spotting shopping carts buried in undergrowth or pond muck .", "An artist who lives in Buffalo , he has also taken thousands of photographs of carts that ended up far from their original homes .", "Mr. Montague , utterly deadpan , classifies the artifacts by location type and likely cause of demise for a Web site -LRB- www.strayshoppingcart.com -RRB- and in his new book , `` The Stray Shopping Carts of Eastern North America : A Guide to Field Identification '' -LRB- Abrams Image -RRB- .", "His categories can be self-explanatory -LRB- `` bus stop discard , '' `` plow crush '' -RRB- or cryptic : `` open true '' -LRB- abandoned on pavement or lawn -RRB- , `` gap marginalization '' -LRB- between buildings -RRB- .", "Happier subtypes , like `` alternative usage '' and `` structurally modified , '' are for carts adapted as things like souvenir stands or driveway barriers .", "`` This language of scientific classification can be very powerful , '' Mr. Montague said .", "`` It affects your perceptions .", "It brings this peripheral stuff into focus .", "And I like to speculate on what happened to the carts .", "How many people were involved , and is it in a permanent or ephemeral state .", "`` Through Oct . 14 , 40 of Mr. Montague 's photos , taken in five cities , will be shown at the Black and White Gallery , 636 West 28th Street , near 11th Avenue .", "The two New York examples , spotted in Dumpsters in Coney Island and Brighton Beach , fall into the `` in / as refuse '' category .", "EVE M . 
KAHN NEIGHBORHOOD REPORT : NEW YORK IN FOCUS ."], "summary": ["Selection of photographs by Julian Montague of abandoned shopping carts .", "Exhibition of Montague 's photos will be shown at Black and White Gallery in Manhattan ."], "publication": "nyt50", "label": [10], "tag": ["New York and Region"]} -{"id": "1788986", "text": ["In New York , even monks face the vagaries of hectic schedules and urban congestion .", "Case in point , Pema Wangdak , a Tibetan Buddhist monk , who the other day could be found sitting in traffic that crept along the Cross Bronx Expressway at 10 miles an hour .", "A police car cruised shot by , its alarms screaming .", "Drivers glared .", "Pema Wangdak remained unrattled , smiling as if he were sitting at the edge of a mountain stream .", "`` When I 'm in the car , somehow it has a soothing effect on me , `` he said .", "`` It 's comforting . ``", "Pema Wangdak , 52 , who came to the United States from Dharamsala , India , in 1982 , drives to the city from his home half an hour away in Cresskill , N.J. , several days a week .", "His appointments are as far-flung as the city 's largely decentralized Tibetan community .", "Recently , he gave talks at Tibet House near Union Square in Manhattan and at the Jacques Marchais Museum of Tibetan Art in Richmond on Staten Island , offered spiritual counseling to Tibetan friends in Sunnyside and Jackson Heights in Queens , and visited a Tibetan nun recovering from surgery in Washington Heights in Upper Manhattan .", "Between appointments , he has encountered people puzzled by the sight of a small , reedy monk in burgundy robes smiling his way through city traffic , often with a cellphone earpiece in place .", "`` I usually tell them that we lamas do n't have enough money to hire a driver , `` he said , '' so I have to drive myself . ``", "His serenity in the face of everyday frustrations has inspired some of his fellow commuters .", "`` I used to resist driving in the city , '' said Peter Arcese , an adjunct professor of literature at New York University and a student of Pema Wangdak 's .", "`` You do n't feel that enlightened on a bad day dealing with traffic .", "`` Then I saw that the lama is constantly driving .", "Driving can be its own meditation . ``", "On a recent Monday afternoon , Pema Wangdak traveled to Calvary Hospital in Pelham Bay , the Bronx , where a 32-year-old Tibetan woman had died of liver cancer that morning .", "A dozen of her relatives greeted him in the waiting room .", "By the time Pema Wangdak had finished praying for the dead woman to be reborn in the Buddha realm , it was 2:59 p.m.", "His next appointment , with a student on the Upper West Side , was at 3 .", "He got back into his car , resigned to being late .", "`` You always think , ' I have to do this or that , ' '' he said .", "`` But we forget that the process of getting there is equally important .", "Everything is a dharma teaching . ``", "JENNIFER BLEYER NEIGHBORHOOD REPORT : ON THE ROAD ."], "summary": ["Tibetan Buddhist monk Pema Wangdak comments on soothing effect he experiences while driving in New York City .", "Photo ."], "publication": "nyt50", "label": [13, 5], "tag": ["New York and Region"]} -{"id": "1788987", "text": ["AT midnight on the evening of Thursday , Aug . 
31 , WLIB-AM , one of the city 's most storied radio stations and one that for years broadcast from the fabled Hotel Theresa in Harlem , ushered in a new era .", "A staff member cued up a rousing hymn called `` High Praise , '' and with the push of a button , WLIB was now `` Your praise and inspiration station , '' playing gospel music round the clock .", "Until that moment , and for the previous two and a half years , the station had been the home of Air America , the liberal talk-radio network .", "For the previous decade , the station had broadcast Caribbean music .", "Radio stations switch formats all the time .", "But for a station like WLIB , long regarded as the `` Voice of Harlem , '' moving forward takes place in the shadow of a long and illustrious past .", "With its lively call-in shows that gave voice to the concerns of the Harlem community , WLIB spoke to and for black New Yorkers .", "The format change , first reported by The Amsterdam News , has made many avid listeners remember what this heartbeat of black life once was .", "`` Switching to gospel , I ca n't be against that , of course , `` said the Rev . Herbert Daughtry , a veteran civil rights advocate and preacher at the House of the Lord Church in Boerum Hill , Brooklyn , who used to telephone WLIB to report news of interest to the station 's listeners , like reports of police brutality .", "`` We surely need the Gospel .", "But knowing how vital it was , I feel we are missing something . ``", "In 1971 , WLIB became the city 's first black-owned radio station , and the crown jewel of Inner City Broadcasting Corporation .", "A co-founder of Inner City was Percy Sutton , a former Manhattan borough president and long one of the city 's most powerful black leaders .", "Perhaps paradoxically , the new gospel format is in keeping with the station 's history .", "WLIB played gospel in the 1950 's and 60 's .", "Even so , the new format has received a tepid response from community leaders who , like Mr. Daughtry , say that the community needs a black-talk format .", "`` We do n't really have a format right now for information on social , political and economic change in Harlem , `` said Councilwoman Inez Dickens , who represents Harlem .", "According to Deon Levingston , the station 's general manager , and Vinny Brown , the operations manager , the station will reintroduce talk shows at some point but will remain music-intensive .", "The station , which now operates out of an office at Park Avenue and 34th Street , broke ties with Air America when WLIB could not reach an agreement on the terms of a renewal of Air America 's lease , according to representatives of the station and Air America .", "`` We saw the move to gospel as a way to keep viable programming , '' Mr. Levingston said .", "`` Our goal is to serve the community , and you ca n't do that if you ca n't keep your lights on . ``", "The gospel format , he said , was in keeping with WLIB 's history and meshed well with the soul format of its sister station , WBLS-FM .", "A return to the old talk show format , he added , would not have been financially viable , especially in what is one of the nation 's most competitive radio markets .", "WLIB has an average audience of 24,900 listeners at any given 15-minute period , according to the station 's ratings report for the spring of this year .", "When WLIB was broadcasting Air America , a popular talk show , `` Mindflight , '' whose host was a D.J. 
, Gary Byrd , was broadcast in the wee hours .", "And on weekends , the station broadcast boisterous public affairs shows , like the Rev . Al Sharpton 's `` Sharp Talk . ''", "With the switch to all gospel , both programs are off the air , at least temporarily .", "At its peak , WLIB represented , as Mr. Daughtry put it , `` the heartbeat and the drumbeat '' of New York 's black community .", "Its era began in 1949 , when two brothers , Morris and Harry Novik , purchased the station from The New York Post .", "As general manager , Harry Novik jettisoned the station 's classical-music format , replacing it with rhythm-and-blues and gospel music and community affairs programs aimed at black audiences .", "It was during this period that the station moved to the Hotel Theresa .", "In the following years , WLIB broadcast live from conferences of the National Association for the Advancement of Colored People and staged music festivals at the Savoy Ballroom in Harlem .", "By 1967 , the station dedicated seven and a half hours each weekday to gospel and rhythm and blues .", "The most popular program was a call-in show , `` Community Opinion , '' moderated by Leon Lewis , the station 's program director .", "During the turbulent 60 's , the station 's airwaves crackled as black listeners shared heated opinions on subjects like the civil rights struggle , the Vietnam War and the crushing poverty in the nation 's ghettos , Harlem chief among them .", "When the Rev . Dr. Martin Luther King Jr . was assassinated in April 1968 , WLIB interrupted its regular programming and served as a public bereavement forum for grieving callers .", "ITS political role continued into the 1980 's and the early 90 's .", "`` I may get up in the morning and get a call that someone in the community had been killed , '' Mr. Daughtry said .", "`` I could pick up the phone to WLIB , and the word went out . ''", "Other radio programs focus on local issues and have a call-in segment , but to WLIB fans , the station offered something more .", "`` Folks want to hear a concentrated discussion of what 's going on in the hood , `` said Bill Perkins , a lifelong Harlemite , former councilman and now a Democratic candidate for the State Senate .", "`` What 's going on in the City Council as it relates to the hood .", "How do we look at Iraq from the view of the hood .", "On these other shows , you might hear that incidentally , but it 's not the mission . ``", "On the streets of Harlem the other day , the consensus among WLIB listeners seemed to favor gospel over talk .", "`` Gospel is good every day , all day long , '' said a white-haired man named Alfred Solomon , who was standing on 125th Street .", "Was he worried about the absence from the airwaves of a black talk station .", "`` What are they going to talk about .", "`` Mr. Solomon replied .", "`` That 's been going on for years and it has n't accomplished anything .", "We need gospel music that can do something for the spirit . ``", "Radio Days November 1926 The predecessor of WLIB , called WBKN , signs on from a studio on Pitkin Avenue in Brownsville , Brooklyn , the fi rst of several homes throughout the city .", "1930 's - 1940 's The station has varied and changing offerings , including the city 's fi rst program in Chinese .", "Eventually , it becomes known as `` The Voice of the Negro Community . ''", "April 1968 When the Rev . Dr. Martin Luther King Jr . 
is assassinated , WLIB opens the airwaves to angry and bereaved callers , helping , in the opinion of many , to prevent rioting in the city .", "1969 As the black power movement gains strength , the Black Panthers are the host of their own program , `` The People 's Information Slot . ``", "October 1970 WLIB goes off the air for several days during a labor dispute that involves the fi ring of an employee and picketing by station workers .", "July 1971 WLIB becomes the city 's fi rst black-owned radio station when it is bought by a consortium that includes Percy Sutton , the former Manhattan borough president .", "URBAN TACTICS ."], "summary": ["Article traces history of WLIB-AM , radio station that has broadcast for years from fabled Hotel Theresa in Harlem and recently switched from call-in shows to playing gospel music round the clock .", "Timeline .", "Photos ."], "publication": "nyt50", "label": [0, 3], "tag": ["New York and Region"]} -{"id": "1788988", "text": ["Slashing diagonally across Manhattan 's street grid , Broadway is responsible for memorable irregularities like Times Square , Union Square and of course the Flatiron Building , one of the city 's best-known and most-photographed structures .", "In Brooklyn , Flatbush Avenue follows a similar wayward course , though the lower-slung buildings on the irregular lots that line the street are more likely to be gas stations , garages and warehouses than works of architectural distinction .", "But now , in a borough where comparisons with Manhattan remain a fact of life , some optimists are saying that a building planned for a triangular lot near the Manhattan Bridge could be a flatiron building of Brooklyn 's own .", "The project 's architect , Ismael Leyva , does not shy away from the comparisons .", "`` To me , when I saw this site being triangular-shaped , the first thing that came to my mind was that this is going to be similar to the Flatiron Building , '' he said on Thursday , adding that his team had rounded off the front of the planned building in an homage to the famous skyscraper .", "Construction of Brooklyn 's flatiron building , a project of the developer Isaac Hager , is scheduled to begin in a few weeks and to be completed in the summer of 2008 .", "The differences between the buildings are considerable .", "The 22-story Flatiron Building , at 23rd Street , Broadway and Fifth Avenue , has a limestone and terra-cotta facade , while the new building , a 21-story structure at 85 Flatbush Avenue Extension , on Tillary Street , will be glass .", "Moreover , the Flatiron Building is commercial , while the new building will hold luxury condominiums .", "-LRB- Mount Hope Court , a 10-story 1914 structure on the Grand Concourse that is known as the Bronx 's Flatiron Building , is also home to apartments . -RRB-", "Most significant , the Flatiron Building of 1902 is a revered architectural icon , while the Brooklyn building , even optimistically speaking , has an uphill climb to reach such status .", "Still , the comparisons have come , beginning with those in The Brooklyn Papers , a weekly , which described the structure as looking `` like the lovechild of the Flatiron Building and a spaceship . ''", "Mr. 
Leyva said he was gratified that people saw similarities between the buildings but demurred .", "`` Most of the shape is dictated by the site and the zoning requirements , '' he said of his project .", "One way in which the two buildings really could be similar has to do with their role as a catalyst to development .", "The Flatiron Building helped open a heady era of construction , and in Downtown Brooklyn , Mr. Leyva 's project is one of several buildings to emerge from zoning changes made in recent years .", "A few other tall buildings are planned for the immediate neighborhood .", "Two of them , on Gold Street , are from Mr. Leyva 's designs , and a third , at Flatbush and Myrtle Avenues , will sit on a similar triangular lot that could spawn another high-rise wedge .", "In the opinion of Simeon Bankoff , executive director of the Historic Districts Council , an advocacy group , the planned building at 85 Flatbush Extension appears to be out of context with what is in the area now .", "But he added , tongue only partly in cheek , `` Maybe it 's in context with the new Brooklyn . ``", "JAKE MOONEY NEIGHBORHOOD REPORT : DOWNTOWN BROOKLYN ."], "summary": ["Article on plans by developer Isaac Hager to build luxury condominium building on Flatbush Avenue in Brooklyn designed to emulate Flatiron Building in Manhattan .", "Drawing ."], "publication": "nyt50", "label": [5, 8], "tag": ["New York and Region"]} -{"id": "1788989", "text": ["Over the last decade , when it came to choosing who would represent Staten Island and southwest Brooklyn in Congress , Anita Lerman has been the unchallenged standard-bearer of the Independence Party , a small but growing group with 6,703 members on the island .", "But in the primary elections on Tuesday , Ms. Lerman will face a rival for her party 's mantle , an opponent who is better known to voters : Vito Fossella , the Republican who has held the House seat in the 13th District since 1997 .", "Mr. Fossella and Ms. Lerman , the 62-year-old former host of a public access television show called `` Anita Lerman Presents News and Views for Staten Island , '' are squaring off for a line on the ballot that in the general election typically attracts 1,000 to 2,000 votes , just 1 or 2 percent of the total .", "But local officials of the Independence Party of New York -- there are others in Florida , Michigan and elsewhere -- say they are not surprised that Mr. Fossella is seeking their party 's nomination .", "`` There are going to be more and more primaries on the Independence line , '' said Sarah Lyons , chairwoman of the party 's Staten Island chapter .", "`` The party has grown , and it 's become recognized as a thing of great value .", "We 've been working at the grass roots to build a voting base underneath it . ``", "Mr. Fossella , 41 , who will run unopposed on the Republican line in November and in the past has won election by tens of thousands of votes , said he was seeking the Independence nomination because of his personal beliefs .", "`` I pride myself on being an independent fighter for people I represent , '' Mr. Fossella said last week .", "`` The core of the word ' independence ' is to step up and do what 's right , regardless of the fact that sometimes it means going against your own party . ``", "Mr. Fossella cited the examples of his battles against the Bush administration to increase homeland security funds for New York and to preserve deductions for home mortgage interest and for state and local taxes on federal income taxes .", "Ms. 
Lerman , who is the vice chairwoman of the local party , said her top issue was to make energy policy more environmentally friendly and to reduce consumers ' energy costs .", "As for her chances in the primary , Ms. Lerman said she did not see her campaigns in terms of winning and losing .", "Rather , she said , her campaigns are about meeting the people of Staten Island as she goes door to door and talks to them about their views .", "`` People say , ' When I was in the big party , nobody ever came to my door with a petition for a candidate , ' '' Ms. Lerman said .", "`` Every person who feels more involved and is heard , and says , ' Wait , this political process can have something to do with me ' -- that 's a victory . ``", "JEFF VANDAM NEIGHBORHOOD REPORT : STATEN ISLAND UP CLOSE Correction : September 17 , 2006 , Sunday Because of an editing error , an article last Sunday about the Congressional race for the seat that represents Staten Island and southwest Brooklyn referred incorrectly to the general election in November .", "Representative Vito Fossella will face Stephen A . Harrison .", "He will not run unopposed ."], "summary": ["Article on New York City 's Independence Party primary election that will pit party standard-bearer Anita Lerman against Republican Vito Fossella , who has held House seat in 13th District since 1997 .", "Photos ."], "publication": "nyt50", "label": [1], "tag": ["New York and Region"]} -{"id": "1788990", "text": ["SHELDON GRUBER never met his grandfather Joseph Punn , a ceiling man from an era when pressed-tin ceilings could be found all over New York .", "But he has heard the stories .", "Too poor to afford a truck in the 1920 's , Mr. Punn used to hang panels of decorative tin around his waist on ropes and travel by subway across the city to install them .", "`` I think eventually he might have gotten a horse and wagon , '' Mr. Gruber said .", "A result of the labors of Mr. Punn and others in his industry was an array of baroque ceilings around the city .", "It was possible to glance upward in countless barrooms , parlors and factories and see a textured latticework of flowers , torches or other flourishes .", "Although pressed-tin ceilings are sold by stores like Home Depot , Mr. Gruber says he believes that his company , AA-Abbingdon Affiliates , is the city 's last remaining business that sells them exclusively .", "Even as recently as the 1950 's , he says , a handful of other tin-ceiling specialists could be found in New York , but no longer .", "Mr. 
Gruber , a ruddy 48-year-old with slick black hair , fills his orders from a small showroom and warehouse on an industrial stretch of Utica Avenue in East Flatbush , Brooklyn .", "He recently shipped pressed-tin ceiling panels to South Korea for a hotel , to the Western United States for a chain of billiard halls , and to Mexico and Ireland for restaurants .", "Pressed-tin ceilings in New York date to the 1860 's , when thin iron sheets coated with tin were used in buildings for fire protection .", "According to some accounts , immigrants from Europe began having the sheets stamped with repeating ornamental patterns to recreate the look of molded plaster ceilings popular in their native lands .", "With metal , they could do so at a fraction of the cost , using a material more durable than plaster and less likely to crack .", "The appetite for stamped metal has only increased in recent years as deteriorating pieces from the 19th century need to be restored or replaced , according to Richard Pieper , director of preservation for Jan Hird Pokorny Associates , a Manhattan-based architecture firm .", "`` There are certainly other manufacturers of metal ceilings and cornices , '' Mr. Pieper said , `` but as far as I know , Abbingdon is the only one still in New York . ''", "Though commonly described simply as tin ceilings , they are now usually panels made of steel , chrome , brass or copper , which are specially manufactured for Abbingdon outside the city .", "The ceilings come in great variety .", "Abbingdon , for instance , offers 41 ceiling designs at up to $ 12.50 a square foot , along with 15 cornice designs .", "Some make use of the same vintage patterns that Mr. Gruber 's grandfather sold 80 years ago .", "While the company ships around the world , about 15 percent of its sales are in New York , Mr. Gruber says , a proportion that may reflect New Yorkers ' appetite for decorative touches evoking the city 's past .", "Walking through his warehouse one morning recently , Mr. Gruber lifted the tops of large cardboard boxes to inspect goods being readied for shipping .", "Inside one box was a pile of shiny chrome panels decorated with concentric squares .", "The panels in the next box bore a quaint Victorian pattern .", "A third shipment featured a diamond pattern emitting sunburst rays .", "`` The main factor , '' Mr. Gruber said , `` is that what 's old is new . 
``", "NEIGHBORHOOD REPORT : EAST FLATBUSH ."], "summary": ["Article on AA-Abbingdon Affiliates in East Flatbush , Brooklyn , which may be New York City 's last remaining business that sells press-tin ceilings exclusively .", "Photos ."], "publication": "nyt50", "label": [6], "tag": ["New York and Region"]} -{"id": "1788991", "text": ["WHEN the planes hit the twin towers , I was on the last day of a weeklong fishing trip in that part of the country where Montana , Wyoming and Idaho come together in a kind of north-woods wonderland , a patch of the West that is home to Yellowstone National Park , the Tetons , gleaming rivers and herds of elk .", "It is hard to imagine a place more removed from the successive horrors of that day .", "Early the next morning , the woman at the check-in counter at the small airport in Butte seemed genuinely sorry for me when she saw the Bronx address on my driver 's license .", "`` Well , you 're going right back into it , are n't you .", "`` she said , her '' it `` heavy with portent .", "I sensed that what she was really saying was : `` You poor thing .", "I sure am glad I live out here and not in that East Coast hellhole of yours . ``", "Of course , it turned out I was going nowhere that day , at least not on a plane .", "But the thought of remaining in Montana while my hometown bled was unacceptable .", "The previous day , watching those horrific images on television in the lounge of a motel just outside Yellowstone , I had felt absurdly out of place .", "I 'm not sure I had fully appreciated until that morning what a deep-dyed New Yorker I had become over the years .", "Those were my neighbors in those buildings , the people I sat with on the subway and exchanged glares with after bumping shoulders in Times Square .", "I might even have known some of them .", "It was essential to get home .", "Like many travelers stranded around the country that week , I rented a car .", "In my haste to get to New York I did n't pay much attention to the speed limits on the interstate , but every once in a while another car would leave me in the dust and I 'd wonder if it was another New Yorker hurrying home , perhaps having learned bad news .", "I wondered how many hundreds or thousands of us there were , all racing across the continent in rental cars toward the wounded city .", "So it was east through brown and yellow Montana , then south through the Crow Indian Reservation into Wyoming , then east again through the spectacular reddish emptiness of northeastern Wyoming and into endless South Dakota , where walls of rain came down in the night and lightning bolts provided eerie glimpses of the Plains .", "Then the farms of Iowa and Illinois , then Chicago and night again , the relentlessly heavy traffic of Interstate 80 in Indiana and Ohio .", "Then morning again and the most beautiful landscape since Wyoming , the densely green hills of western Pennsylvania , a green so deep in some places as to be almost black .", "Finally , that Friday afternoon , Sept . 
14 , the short sprint across New Jersey and then , 2,400 miles and 49 hours after I left Butte , the smoldering hole in the skyline , the smoke and dust still rising from the rubble .", "Trifling thoughts occur even at solemn moments , and I could not help thinking how , after many years of regarding the towers as brutish and ugly , I had only recently developed a grudging fondness for them and was going to miss them .", "Some New Yorkers like to ridicule the tourists who stand gawking at the big buildings .", "But here 's a little secret : There are New Yorkers who , despite a lifelong familiarity with the city , sometimes find themselves standing in the middle of the sidewalk and staring up , momentarily whacked by the amazingness of Manhattan .", "More than once , a sidewalk hustler , observing my skyward gaze , has taken me for a rube easily separated from his money .", "As I looked at the skyline that afternoon , having driven most of the length of the country , one of the first things that came to mind was the Tetons , in whose blue-skied grandeur I had just spent a couple of days .", "The association seemed entirely natural .", "One was made by man and the other by nature , but both reached high into the sky , and both were powerful symbols of the country .", "As I look back , that seems to have been a particularly good time to drive across the United States , a chance to see the American flags breaking out on lampposts and front porches from region to region , and to listen on the car radio as people with continually changing accents struggled to understand what had happened .", "For me , Sept . 11 will always evoke certain place names along the American highway -- Billings , Rapid City , Davenport , Toledo -- and a long , fast drive from one extreme to another , from the simple joys of Yellowstone to the smoldering epicenter of the real world .", "FIVE YEARS ON : LANDSCAPE ."], "summary": ["Mitch Keller essay recalls his trip across country from fishing trip out West to return to New York City after September 11 , 2001 .", "Photo ."], "publication": "nyt50", "label": [0, 20], "tag": ["New York and Region"]} -{"id": "1788994", "text": ["FIVE years ago my brother and I spoke regularly .", "Five years ago we drank together , teased each other without mercy and , occasionally , even expressed feelings of affection .", "After 9/11 we did not .", "My brother is a 20-plus-year veteran of the New York City Fire Department , a former marine with a degree in history whose politics are conservative .", "He is four years older and quite a bit larger than me .", "I am a 20-plus-year veteran of big-agency advertising , a creative director turned fiction writer whose politics , not surprisingly , do not lean conservatively .", "Until five years ago , this was not such a big problem .", "There were plenty of other things to talk about , and we knew that despite our differences , there was still much to appreciate about each other .", "On Sept . 11 , 2001 , I was in Boca Raton , Fla . , on business accompanied by my wife , my 3-year-old daughter and my mother-in-law -LRB- yes , my mother-in-law came on business trips with us back then -RRB- at , of all things , a sales convention for a yogurt company .", "At 9 a.m. 
, we were granted a 10-minute respite from the executive Vince Lombardi-isms and the Roman Colosseum-inspired motivational d\u00e9cor .", "The first thing I did was check the messages on my personal communications device du jour , because in 2001 , I was convinced that the more I checked the quotidian drivel it contained the more it seemed that my ad agency , yogurt convention , frequent-flier-miles-financed-family-business-trip life mattered .", "But this time what the messages told me were hardly drivel .", "I immediately called New York to check on my brother who was not supposed to be working , but with firefighters you never know .", "He was n't working .", "He 'd soon go to the site , but luckily he would n't be one of the 343 firefighters killed .", "For the next hour , I 'm fairly sure that he watched what I watched , that he looked away when I looked away , and I am fairly sure that at 9:59 a.m. , when the south tower of the World Trade Center collapsed , he felt exactly what I felt .", "What we all felt .", "I am also sure that those were the last pure , nonpoliticized thoughts any of us would have about that day , and the last time that my brother and I would feel the same way about anything for some time .", "Renting a car and driving home was our only option .", "At first I wanted to push through , to rush the family home .", "`` To what .", "`` my wife asked .", "`` To have our daughter watch her parents sit paralyzed in front of a television set .", "`` So we took our time , taking a scenic , non-traditional route back to New York .", "I got my information from fellow travelers and National Public Radio .", "I found comfort in the measured voices of `` All Things Considered , '' solace in `` I love New York '' signs in Georgia , inspiration in the words on Jefferson 's tombstone at Monticello , near Charlottesville , Va . , which noted that he was also one of the fathers of religious tolerance in this country .", "Meanwhile , my brother was getting his information from rescue workers and fellow firefighters .", "Because of his military background , his job in the years before the attack had included training recruits at the Fire Academy at Randalls Island .", "His job in the months ahead would be to coordinate funerals .", "Dozens of funerals .", "For friends and friends of friends , each with a story more tragic than the last .", "Late at night on Sept . 14 , my family slept as I drove through Pennsylvania .", "With no NPR to be had for the time being , I listened to sports guys weighing in on the Northern Alliance , D.J. ` s explaining the tenuous Pakistani-Afghani relationship .", "With each passing mile , more and more proselytizing and hate seeped into the views of the syndicated giants .", "Driving near Port Jervis , N.Y. 
, a state trooper pulled alongside our car and shined a spotlight inside while the rest of my family was sleeping .", "Four strangers in a red rental car with Florida plates .", "Suspects .", "To think that 9/11 drove a stake between my brother and me is as na\u00efve as thinking that it drove one through the country .", "Red and blue staters had been at each other 's throats for a while , and my brother and I had clashed on and off over lots of things for years .", "But this took it farther .", "He had been affected by it in ways I could not imagine .", "Of the 343 firefighters killed , he knew dozens .", "No one that I knew had died .", "Within a week , I would go back to work .", "For more than a year , he would go to funerals and I imagine that in addition to grief , a man with my brother 's courage and sense of duty must also have been dealing with a serious case of survivor 's guilt .", "But did that make his opinions -- which had become increasingly angry and pronounced -- right .", "Over the last five years we 've disagreed about everything from the 2000 and 2004 elections to the war in Iraq , radical Islam and of course , the liberal news media .", "For a while we tiptoed around politics but when we were together everything seemed political .", "For a while we did n't speak at all .", "But lately we 've been talking .", "I care too strongly for him to let politics destroy our relationship and I think he feels the same .", "The other day I called him .", "He had just gotten home from the hospital where a fellow firefighter , Lt . Howard Carpluk Jr . lay in critical condition from injuries suffered when the floor had given way in a burning Bronx building .", "Another firefighter , 25-year-old Michael Reilly , who had served in the Marines in Iraq , had already died .", "My brother told me he was there near Mayor Michael Bloomberg as the doctors gave them an update .", "-LRB- Lieutenant Carpluk died the following day . -RRB-", "My brother sounded tired .", "After some time , while discussing Labor Day plans , I told him that I 'd been invited to discuss my book on a conservative talk show in Boston and joked that I feared an ambush .", "He told me to tell them that my brother was a New York City firefighter , and maybe they 'd go easy on me .", "James P . Othmer is the author of the novel `` The Futurist . '' ."], "summary": ["Op-Ed article by James Othmer describes ways in which his relationship with his brother , veteran New York City firefighter who worked at World Trade Center site , changed after 9/11 ."], "publication": "nyt50", "label": [3, 59, 2], "tag": ["New York and Region", "Opinion"]} -{"id": "1788996", "text": ["To the Editor : In a Sept . 3 letter -LRB- `` New York Is Bike-Friendly '' -RRB- , Ryan Russo , the Department of Transportation 's director for street management and safety , states that he and his colleagues `` have to work hard to win local support for the bike lanes , as there is often significant community board and elected official opposition to these plans . 
''", "We have been working for two years with Queens Community Board 9 to establish a bike path on the long-abandoned city-owned former Long Island Rail Road Rockaway Beach Branch right of way .", "Returning it to public use as a greenway would seem to be a no-brainer , especially considering it was on the city 's 1997 greenway master plan .", "The city , however , has been less than helpful .", "Last year the Department of City Planning obtained funding for a feasibility study for this proposal , but could not go ahead because they were unable to secure a required `` implementation partner , '' even though they approached the obvious choices : the Parks Department and Department of Transportation .", "We ourselves were rebuffed in a meeting with officials from the Parks Department , who suggested that we assume full responsibility for conducting a study and amassing the funds for implementation .", "Mr. Russo has our support .", "Do we have his .", "Jordan Sandke Richmond Hill , Queens The writer is chairman , Rockaway Beach Branch Greenway Committee .", "To the Editor : It 's great that the Department of Transportation is adding bike lanes to the streets of the city .", "Now if it could just persuade cyclists to use them instead of ignoring red lights , riding the wrong way on one-way streets , or on the sidewalk , and in general behaving as if traffic regulations do not apply to them .", "David Vaughan East Village ."], "summary": ["Letters from Greenway Committee Rockaway Beach branch chairman Jordan Sandke and David Vaughan on Ryan Russo 's September 3 letter on difficulty in winning local support for bike lanes in New York City ."], "publication": "nyt50", "label": [0, 8, 11], "tag": ["New York and Region", "Opinion"]} -{"id": "1788997", "text": ["Mayor Michael Bloomberg and his schools chancellor , Joel Klein , believe there 's a great deal of savings to be had in restructuring the financial operations of the school system -- $ 200 million is a frequently quoted number .", "To obtain it , Mr. Klein has awarded a $ 15.8 , 18-month no-bid contract to a management consulting firm to do the overhauling .", "Mr. Bloomberg , who took control of the city 's schools four years ago , has always invited a `` buck stops here '' accountability for his efforts .", "We 're going to be waiting to see how well he and Mr. Klein deliver on his promise to make this questionable deal pay off .", "As of right now , the arguments that the contract could not be put out to competitive bidding seem thin .", "Mr. Klein 's choice was the consulting firm Alvarez & Marsal , a New York-based group that has stepped in to reorganize everything from the government of Orange County , Calif . , to the company that makes Twinkies .", "The firm also has experience working with school systems in St . Louis and New Orleans .", "The city 's Public Advocate , Betsy Gotbaum , is demanding an investigation of the consultants ' previous work with those two educational systems .", "That 's not unreasonable , but even if the work in St . Louis and New Orleans went well , finding ways to save money and streamline systems in a medium-sized city with completely dysfunctional schools is very different from working in New York , with its $ 15 billion budget , 1.1.", "million students and 80,000 teachers .", "For all its problems , New York 's system has also been the target of reform efforts by several different chancellors over the last decade .", "Many of the more obvious fixes have been made .", "But Mr. 
Klein , who first hired Alvarez & Marsal for a few months of work that was paid for by private donations , quickly signed the firm for the big project , which is being paid for by taxpayers and involves more money than all the department 's no-bid contracts awarded in the last fiscal year .", "He says the firm has already produced savings that has sent nearly $ 50 million to the schools for the hiring of more than 300 teachers and other workers .", "As they 've tackled a myriad of education problems , Mr. Bloomberg and Mr. Klein -- who both lack previous education management experience -- have frequently shown a lack of respect for the education department 's own experts .", "While both men clearly have the schools ' best interests at heart , it 's sometimes hard to tell whether their eagerness to reach outside for help is based on real need or a simple impatience with people who do n't fit the corporate model .", "And in the past they have not always been right .", "In 2003 , they rushed a no-bid contract to put Snapple soft drinks in the schools , a deal criticized by Comptroller Bill Thompson .", "It produced only a fraction of the profit promised .", "In the not-so-long-ago bad old days , whenever the Department of Education wanted to enter into a contract over $ 50,000 , an approval required a request for bids , a public hearing and a vote by the school board .", "Nobody wants to go back to that cumbersome process again .", "But in this case , the mayor and his schools chancellor have n't done the argument for expediency much good .", "The City ."], "summary": ["Editorial questions Mayor Michael Bloomberg and schools chancellor Joel Klein 's decision to award $ 15.8 million , 18-month no-bid contract to Alvarez & Marsal to overhaul financial operations of New York City school system ."], "publication": "nyt50", "label": [0, 1, 22], "tag": ["New York and Region", "Opinion"]} -{"id": "1788999", "text": ["STEPPING out of the 207th Street subway stop in Inwood , the last station on the A train , visitors from downtown Manhattan -LRB- that is , anywhere else on the island -RRB- can be forgiven for thinking they have reached the end of the earth .", "A mosaic on the station wall commemorates the end of the city 's longest subway line with the words `` At long last . 
''", "And while the apartment buildings , at five or six stories , are just as tall as the ones beyond Inwood , the sky seems somehow bigger , or at least closer .", "Outside the station , the narrow strip of neighborhood is surrounded by river and parkland .", "Looming at the corner of Isham and Cooper Streets , behind a statue of Christ with a lamb on his shoulders , is the 70-year-old Church of the Good Shepherd , made of thick stone blocks that look as if they have risen out of the earth themselves .", "That is not what happened , of course .", "The Paulist Fathers , an order of Catholic priests , were there from 1911 on , there for the church 's construction , for its opening , for baptisms , weddings , funerals and everything else .", "Everything , that is , until 10 days ago .", "Shocking many parishioners , Paulist leaders announced in April that in the face of staffing problems , they would be leaving Good Shepherd behind .", "The decision led the Roman Catholic Archdiocese of New York to assume control of the struggling Good Shepherd School next door , replacing the principal , who had worked there for more than 20 years .", "The archdiocese has replaced the Paulists in the mostly Latino parish with members of the Capuchin Franciscan order , many of whose priests speak Spanish .", "The shift was first reported in The Manhattan Times , a bilingual weekly newspaper .", "For James Hamilton , a 34-year-old parishioner who is a member of the church 's men 's club and its Knights of Columbus chapter , the surprise was doubled .", "Late last year , he served on a committee aimed in part at helping the Paulists decide which of their churches to leave .", "At the time , the order decided to remain in Inwood .", "`` We would have understood more last year if they had made the decision as part of a process , but they decided to stay , '' Mr. Hamilton said last week .", "`` We were very shocked when this abrupt turnaround came : ' By the way , we 're leaving . '", "`` The man who made the surprise announcement at Sunday Masses in April , the Rev . John F . Duffy , president of the Paulist Fathers , said it came with '' a tremendous sense of sadness `` but was unavoidable .", "The order has dwindled to only 150 priests in the United States , Canada , Rome and Jerusalem , down from a high of 262 in the early 1960 's .", "And because the order 's priorities include evangelization and interfaith relations , he said , the everyday business of running churches is too time-consuming .", "`` The pastoral demand is such that you may have no time to do anything outside of parish work , '' Father Duffy said .", "`` Or , as in this case , you may think that in a couple more years you may not even be able to do the parish work adequately . ''", "The archdiocese hopes that the change in administration will also help shore up the school , which runs through eighth grade .", "According to Father Duffy , the school is in debt , and enrollment has dropped to barely 200 students , down from more than 1,200 in the mid-70 ` s .", "At noon on Wednesday , the sixth day of the Capuchin priests ' residence at the church , the Rev . 
Martin Curtin , the new pastor , celebrated Mass for about 60 people .", "They were scattered about the pews in the church 's cavernous interior , under an arching stone ceiling .", "Father Curtin was still moving his things to the church from his previous post in East New York , and the other priests in his order were moving from East Harlem , where their previous church , Our Lady Queen of the Angels , was awaiting final word on whether it will close .", "The words Father Curtin read , from the Gospel of Luke , were a goodbye of a sort that felt familiar , from Jesus to a group of his loyal followers who had tried to stop him from moving on .", "`` He said to them , ' To the other towns also I must proclaim good news of the Kingdom of God , ' '' Father Curtin read , `` ` because for this purpose I have been sent . '", "`` Street Level | Inwood E-mail : streetlevel@nytimes.com ."], "summary": ["Street Level column on decision by Paulist Fathers , order of Catholic priests , to leave Church of the Good Shepherd in Inwood section of Manhattan .", "Archdiocese of New York , assuming control of church and school , has replaced Paulists with members of Capuchin Franciscan order .", "Photo ."], "publication": "nyt50", "label": [10, 9, 29], "tag": ["New York and Region"]} -{"id": "1789037", "text": ["Gussie Moran felt pretty before Maria Sharapova did .", "Cheekily pretty , for 1949 .", "In the face of a Wimbledon ban on all but the whitest attire , she flashed color and lace -- under her white hemline , on her naughty knickers .", "So began a decades-long line of ball-bashing beauties dressed by Ted Tinling with an eye toward putting a little after-hours into the afternoon .", "The glittery black cocktail number modeled by Sharapova in the last two weeks is less the latest mutation in the evolution of the tennis outfit than a return to the play-and-then-party frock of earlier generations .", "Which dress , then , belongs with which decade and player .", "It 's no longer a cinch to tell . a . -RRB- 1966 .", "Tinling may have loved to dress the Brazilian champion Maria Bueno most of all .", "`` She was an actress , '' he said in an interview with The Washington Post in 1989 .", "`` I would leave her alone for a few minutes and she would do her poses in front of the mirror and please herself about how the dress was going to look on her . ''", "Bueno won the Wimbledon singles title three times .", "b . -RRB- 1981 .", "Tracy Austin wore this Tinling dress when she beat Martina Navratilova to win her second United States Open singles title .", "c . -RRB- 1971 .", "Virginia Wade , the last Briton to win a Wimbledon singles title -LRB- in 1977 -RRB- , was a Tinling gal too . d . -RRB- 2006 .", "Do all Marias like to `` feel pretty , '' like the `` West Side Story '' Maria whose song accompanies Sharapova 's new Nike commercial .", "Maria Kirilenko , seeded No . 20 in the current Open , wore this pretty dress , designed by Stella McCartney for Adidas , before curtseying out in the third round .", "Answers : 1 .", "b . 2 .", "d . 3 .", "c . 
4 .", "a MARY JO MURPHY The Basics ."], "summary": ["Women 's tennis outfits have evolved over past four decades .", "Photos of tennis dresses designed by Ted Tinling and Stella McCartney for Adidas ."], "publication": "nyt50", "label": [16, 7], "tag": ["Week in Review"]} -{"id": "1789043", "text": ["OVER the last year , as Iran , Iraq and Lebanon have dominated headlines , hopes of gaining firmer control of a largely forgotten corner of the war on terrorism -- the lawless Pakistan-Afghanistan border region -- have quietly evaporated .", "On Tuesday , the Pakistani government signed a `` truce '' with militants who have resisted Pakistani military efforts to gain control of the region , which is roughly the size of Delaware .", "The agreement , which lets militants remain in the area as long as they promised to halt attacks , immediately set off concern among American analysts .", "Al Qaeda 's surviving leadership is suspected of using the border areas as a base of operation to support international terrorist attacks , including possibly the July 2005 London subway bombings .", "Meanwhile , the Taliban leadership is widely believed to be using another border area to direct spiraling attacks in Afghanistan .", "`` There 's a link with broader international terrorism , `` said Robert Grenier , the former top counterterrorism official for the Central Intelligence Agency .", "`` There 's a link with what is happening in Afghanistan .", "Al Qaeda , such as it is now , really has its center of gravity in the area . ``", "Last week 's truce agreement covers North Waziristan , an area on the Pakistani side of the border .", "After the Taliban fell in 2001 , senior Qaeda and Taliban leaders are believed to have fled there from Afghanistan and to other remote border areas in Pakistan .", "The locations of Osama bin Laden and his deputy Ayman al-Zawahiri remain unknown .", "But American officials suspect that they are somewhere along the border .", "After two attempts to assassinate President Pervez Musharraf in December 2003 were linked to the tribal areas , Pakistani officials expanded the military effort to subdue the region .", "But after suffering heavy casualties in 2004 and early 2005 , they began negotiating with local militants .", "Last year , Pakistan signed a separate agreement with militants in South Waziristan , but the move failed to slow the killing of government supporters .", "`` If you look at the number of deaths in the region , it 's not clear that they 've dropped , `` said Xenia Dormandy , former director for South Asia for the National Security Council .", "Signing such truces , she said , `` is a potentially dangerous route to take because there is little pressure that you can bring to bear to make sure they can follow through on the agreements . 
''", "Two hundred miles to the south , the Taliban leadership is believed to have established a base of operations in and around the Pakistani city of Quetta , according to American analysts .", "Afghan officials say the Taliban used the area to plan and carry out sweeping attacks in southern Afghanistan in the spring .", "Pakistan has largely turned a blind eye to Taliban activities , American officials say , because it sees the group as a tool to counter growing Indian influence in Afghanistan .", "The Pakistanis have longed viewed a friendly Afghanistan as critical to their survival and fears India may be trying to encircle their country .", "At the same time , a separate uprising in Baluchistan province has tied up Pakistani soldiers .", "Ethnic Baluch tribesmen complain that Pakistan 's military government is not sharing enough of the profits from natural gas exploration with the locals .", "The killing last month of a charismatic tribal elder who was a rebel leader set off riots in several cities .", "`` Pakistan is essentially trying to put down a civil war in Baluchistan , '' said Ms. Dormandy , now an analyst at the Kennedy School of Government at Harvard .", "`` At the same time , it 's trying to monitor its border with India , monitor the border of Afghanistan and bring down the Taliban and Al Qaeda . ``", "In Afghanistan , NATO forces that took control of security in the south from American forces this summer have been surprised by the size and strength of the Taliban insurgency .", "Roadside bomb attacks have doubled this year , and suicide bombings have tripled .", "Yesterday , a suicide bombing in Kabul killed at least 2 American soldiers and 14 Afghan civilians .", "All told this year , heavy clashes in eastern and southern Afghanistan have killed more than 100 American and NATO soldiers , roughly twice the number killed in the same period in 2005 .", "Since Aug . 1 alone , 28 NATO soldiers have been killed .", "Analysts say the problem in the border region is an explosive mix of conditions : a lack of government authority , a vast amount of weaponry and the rise of Islamic militancy .", "Until the 1980 's , the area was ruled by local tribes , whose brute self-government kept the population isolated and impoverished but allowed for a degree of stability .", "In the 1980 's , the American-backed anti-Soviet jihad unfolded in the region and began to wear away longstanding tribal structures .", "Huge piles of weapons and cash empowered Islamist organizations to open dozens of training camps , hard-line mosques and conservative religious schools along the border .", "In the 1990 's , the Taliban emerged there .", "Today , said Mr. Grenier , the former C.I.A. official , the only way to increase government authority in the rural areas on both sides of the Pakistan-Afghanistan border was to develop the impoverished rural areas over time .", "`` But that 's a generational process , `` said Mr. Grenier , now a managing director at Kroll Inc . 
, a security firm based in New York .", "This summer , local people interviewed in southern Afghanistan said they were unsure that the United States and NATO would remain committed to the long , expensive process of stabilizing the border region .", "This year , the United States cut its aid to Afghanistan by 30 percent .", "Al Qaeda and the Taliban are no doubt betting that time is on their side .", "THE WORLD ."], "summary": ["Pakistani government signs ` truce ' with militants who have resisted Pakistani military efforts to gain control of North Waziristan , lawless Pakistan-Afghanistan border region .", "Agreement allows militants to remain in area as long as they promise to halt attacks .", "American analysts are concerned because Al Qaeda 's surviving leadership is suspected of using border areas as base to support international terrorist attacks .", "Taliban leadership is widely believed to be using another border area to direct attacks in Afghanistan .", "Photo .", "Map ."], "publication": "nyt50", "label": [3, 4, 1, 2], "tag": ["Washington", "Week in Review"]} -{"id": "1789044", "text": ["WHEN Steve Irwin died last week , the world lost an entertainer who could command an audience by jumping on crocodiles and manhandling snakes .", "But Mr. Irwin , who was 44 when he was killed by a stingray on the Great Barrier Reef , was more than a showman .", "For better and worse , as the star of `` The Crocodile Hunter '' and other nature programs he was the modern face of wildlife conservation .", "`` I sort of see him as the ' Mutual of Omaha ' for Generation X , `` said Eric Dinerstein , chief scientist and vice president for science of the World Wildlife Fund .", "Since 1992 , when he filmed his first wildlife special , Mr. Irwin had helped set the standard for a new genre of nature shows , one where the host shared the stage with the animals .", "Mr. Irwin poked and grabbed crocodiles , snakes and other wild creatures , often seeming to defy death in the process .", "His in-your-croc ` s-face approach was different from conventional documentary fare that typically kept the camera and the host at zoom-lens distance from the wildlife .", "`` He 's a far cry from David Attenborough , `` said Thane Maynard , interim director of the Cincinnati Zoo and host of the National Public Radio program '' 90 Second Naturalist . ``", "`` Steve Irwin 's more at the level of the kid 's wildlife program , laced with danger and adventure . ``", "Jack Hanna , the longtime host of wildlife shows , said that Mr. Irwin 's program `` was a whole new thing '' when it first appeared on the cable channel Animal Planet in 1996 .", "`` The Crocodile Hunter '' attracted younger viewers who might be bored by the staid style of `` Mutual of Omaha 's Wild Kingdom `` or an Attenborough documentary , but who would eagerly watch Mr. Irwin sloshing around with his charges .", "`` You can either like how he does it , or not like it , '' said Mr. Hanna , who added that Mr. Irwin always laced his programs with information about animals and conservation .", "Mr. Irwin 's approach was part of an evolution of how nature was portrayed on television , Dr. Dinerstein said .", "`` Twenty years ago , you almost never saw the predator catch the prey on film .", "Now , it 's a requirement . ``", "Conservationists said that in his interactions with animals , Mr. 
Irwin seemed to know what he was doing .", "But Steven Sanderson , president and chief executive of the Wildlife Conservation Society , said his biggest contribution was his enthusiasm .", "`` One of the greatest challenges we face as conservationists is people do n't care , `` Dr. Sanderson said .", "`` He really made people crazy about encounters with the wild . ''", "But other than inspiring viewers to care about wildlife , what kind of conservation message did Mr. Irwin deliver with his antic showmanship .", "`` There 's a circus-like atmosphere to it , `` said Eugene Linden , whose books on animals and intelligence include '' The Octopus and the Orangutan . ``", "`` How does that make for conservation .", "The implicit message is that animals are only interesting if they are dangerous or extreme . ``", "Alan Thornhill , executive director of the Society for Conservation Biology , knows how that message can sink in .", "When he was a teacher , Dr. Thornhill said , he used to travel with students to tropical rainforests .", "Before leaving he would often get calls from parents concerned about safety .", "`` They 'd have seen all the poisonous snakes on TV shows and they knew how dangerous it is , `` Dr. Thornhill said .", "`` My response always was , ' Your child is safer with me in a tropical rainforest than he is driving on the freeway . '", "`` Conservationists should be able to consider the merits of Mr. Irwin 's approach for a long time , as his shows will no doubt continue in reruns .", "But like the animals they showcase , nature shows keep evolving .", "That , Mr. Hanna said , is a problem .", "Mr. Irwin 's success has spawned imitators who have taken his approach even farther .", "In MTV 's `` Wildboyz , '' for example , the stars have been intentionally stung by a scorpion and bitten by a snapping turtle .", "`` My big concern is the Steve Irwin wannabes , '' Mr. Hanna said .", "`` I do n't know if that 's the thing we want to portray about conservation if it 's really blood and guts out there . ``", "Animal Nature ."], "summary": ["Steve Irwin was showman , entertainer and face of wildlife conservation for new generation .", "Some conservationists say his biggest contribution was his enthusiasm .", "Others say Irwin 's success spawned troubling imitators who do not have sufficient knowledge or consider safety .", "Photo ."], "publication": "nyt50", "label": [16, 31], "tag": ["Week in Review"]} -{"id": "1789045", "text": ["THE Web site for an outfit called Term Paper Relief features a picture of a young college student chewing her lip .", "`` Damn ! '' a little comic-strip balloon says .", "`` I 'll have to cancel my Saturday night date to finish my term paper before the Monday deadline . ``", "Well , no , she wo n't -- not if she 's enterprising enough to enlist Term Paper Relief to write it for her .", "For $ 9.95 a page she can obtain an `` A-grade '' paper that is fashioned to order and `` completely non-plagiarized . 
''", "This last detail is important .", "Thanks to search engines like Google , college instructors have become adept at spotting those shop-worn , downloadable papers that circulate freely on the Web , and can even finger passages that have been ripped off from standard texts and reference works .", "A grade-conscious student these days seems to need a custom job , and to judge from the number of services on the Internet , there must be virtual mills somewhere employing armies of diligent scholars who grind away so that credit-card-equipped undergrads can enjoy more carefree time together .", "How good are the results .", "With first semester just getting under way at most colleges , bringing with it the certain prospect of both academic and social pressure , The Times decided to undertake an experiment in quality control of the current offerings .", "Using her own name and her personal e-mail address , an editor ordered three English literature papers from three different sites on standard , often-assigned topics : one comparing and contrasting Huxley 's `` Brave New World '' and Orwell 's `` 1984 '' .", "One discussing the nature of Ophelia 's madness in `` Hamlet '' .", "And one exploring the theme of colonialism in Conrad 's `` Lord Jim . ''", "A small sample , perhaps , but one sufficient , upon perusal , to suggest that papers written to order are just like the ones students write for themselves , only more so -- they 're poorly organized , awkwardly phrased , thin on substance , but masterly in the ancient arts of padding and stating and restating the obvious .", "If they 're delivered , that is .", "The `` Lord Jim '' essay , ordered from SuperiorPapers.com, never arrived , despite repeated entreaties , and the excuse finally offered was a high-tech variant of `` The dog ate my homework . ''", "The writer assigned to the task , No . 3323 , was `` obviously facing some technical difficulties , '' an e-mail message explained , `` and can not upload your paper . ''", "The message went on to ask for a 24-hour extension , the wheeziest stratagem in the procrastinator 's arsenal , invented long before the electronic age .", "The two other papers came in on time , and each grappled , more or less , with the assigned topic .", "The Orwell / Huxley essay , prepared by Term Paper Relief and a relative bargain at $ 49.75 for five pages , begins : `` Although many similarities exist between Aldous Huxley 's ` A Brave New World ' and George Orwell 's ` 1984 , ' the works books -LSB- sic -RSB- though they deal with similar topics , are more dissimilar than alike . ''", "That 's certainly a relief , because we could n't have an essay if they were n't .", "Elsewhere the author proves highly adept with the `` on the one hand / on the other '' formula , one of the most valuable tools for a writer concerned with attaining his assigned word count , and says , for example , of `` Brave New World '' : `` Many people consider this Huxley 's most important work : many others think it is his only work .", "This novel has been praised and condemned , vilified and glorified , a source of controversy , a subject for sermons , and required reading for many high school students and college undergraduates .", "This novel has had twenty-seven printings in the United States alone and will probably have twenty-seven more . 
``", "The obvious point of comparison between the two novels is that where Orwell 's world is an authoritarian , police-state nightmare , Huxley 's dystopia is ostensibly a paradise , with drugs and sex available on demand .", "A clever student might even pick up some extra credit by pointing out that while Orwell meant his book as a kind of predictive warning , it is Huxley 's world , much more far-fetched at the time of writing , that now more nearly resembles our own .", "The essay never exactly makes these points , though it gets close a couple of times , declaring at one point that `` the two works vary greatly . ''", "It also manages to remind us that Orwell 's real name was Eric Blair and that both he and his book `` are misunderstood to this day . ''", "The paper does makes a number of embarrassing spelling errors -LRB- `` dissention , '' `` anti-semetic '' -RRB- but William H . Pritchard , an English professor at Amherst , who read the paper at The Times 's request , shrewdly suggested that , in this day of spell check , they may have been included deliberately , to throw suspicious teachers off the track .", "If confronted with such a paper from one of his own students , he wrote in an e-mail message , he probably would n't grade it at all but would instead say `` come see me '' -LRB- shuddering at the prospect -RRB- .", "The Hamlet essay was a trick assignment , or perhaps a poorly worded one .", "Ophelia 's genuine madness , as opposed to Hamlet 's feigned craziness , has become a touchstone in Shakespeare studies , especially among feminist and gender studies scholars who read in Ophelia 's songs and fragmentary utterances a coded response to the irrationality and sexual repression of the Elizabethan patriarchy .", "The author of the four-page paper , supplied by Go-Essays for $ 127.96 , approaches the question more literally and concludes , not incorrectly , that Ophelia is literally driven crazy by her father , brother and lover -- or as the essay puts it : `` Thus , in critical review of the play , Ophelia mentally suffers from the scars of unwanted love and exploitation rather than any singular or isolated cause . ''", "The paper goes on to repeat this point with so much plot summary and quotation from the text that it soars right to the assigned length .", "It 's also written in language so stilted and often ungrammatical -LRB- `` Hamlet is obviously hurt by Ophelia 's lack of affection to his vows of love `` -RRB- that it suggests the author may not be a native speaker of English , and even makes you suspect that some of these made-to-order term papers are written by the very same people who pick up the phone when you call to complain about your credit card bill .", "He added : `` If I had paid for this , I would demand my money back . ''", "As it happens , a refund is just what Superior Papers offered , along with a 10 percent discount on a new paper .", "Term paper writing is an arduous business , we need to remember , and we should n't expect too much .", "As the author of the Orwell / Huxley essay says : `` It is so often that one wants something and in wanting romanticizes it , thus bringing disappointment when the end is finally obtained .", "They serve as a reminder that it is necessary to have pain to compare with joy , defeat to compare with victory , and problems in order to have solutions . 
``", "Outsourcing Homework ."], "summary": ["Web sites claim that college students can buy customized , ` completely non-plagiarized ' term papers online .", "College instructors are becoming adept at using search engines to identify these papers .", "Three papers purchased for article are found to be poorly organized and unsubstantial ."], "publication": "nyt50", "label": [6, 8], "tag": ["Technology", "Education", "Week in Review"]} -{"id": "1789046", "text": ["SOME debates are so polarized , with the competing sides so certain that any compromise would be dangerous , that it 's hard to imagine any middle ground emerging .", "The argument over how to try the 14 terror suspects recently transferred from Central Intelligence Agency prisons to military custody at Guant\u00e1namo Bay seems to be one of them .", "On one side is the Bush administration , which last week proposed that these suspects , whom it called the most dangerous in the war on terror , should be tried in military commissions under procedures that the White House asked Congress to endorse .", "Under the Bush proposal , the trials would not resemble any civilian trials or courts-martial held in the United States .", "Hearsay evidence and evidence obtained under coercion or duress could be admitted .", "And suspects could be denied access to classified evidence , although it would be disclosed to their military defense lawyers .", "On the other side of this debate are civil libertarians who insist that suspected terrorists ca n't receive fair trials unless they are given the protections of an ordinary court-martial -- including the right to exclude hearsay and coerced evidence and the right to see evidence against them .", "So , what 's a fair trial and how much due process does it require .", "Can the suspected terrorists be tried by a tribunal that lacks some of the protections that military defendants ordinarily demand .", "Surprisingly enough , this is not a debate about what the law requires .", "Short of endorsing cruel and unusual punishments for terrorists , Congress can set up whatever military commissions it likes .", "`` The administration 's proposals , if adopted by Congress , would almost surely pass muster with the Supreme Court , `` said Peter Spiro , who teaches international law at Temple University .", "`` Congress may not have complete carte blanche , but in the Hamdan case last June , four of the justices who voted against the commissions stressed that the president could always go back to Congress to get the authority he wanted . ''", "The issue , then , is more about the court of public opinion : how a trial , without the customary procedural rights , would be perceived in the United States and abroad .", "Inevitably , the military commissions , whatever form they take , will be compared to the Nuremberg trials in which Nazi war crime defendants were given due process -- up to a point .", "`` At Nuremberg , there was no secret evidence and no closed proceedings that the defendants ' counsel was excluded from , although it 's not clear how much physical access to the evidence the individual defendants had , `` said John Barrett , who teaches at St . John 's University law school .", "`` Hearsay and evidence produced by coercive methods were n't formally excluded by the Nuremberg charter , but the American interrogators did n't do rough stuff like water boarding -- although there might have been a corner or two cut that the coercion standard would find troubling . 
``", "Many want the United States to meet this Nuremberg standard , which they believe gave it authority and credibility over the next half century to serve as a global model of due process .", "`` The trials should n't differ fundamentally from the fair trial provisions in any trial , especially when you have the potential death penalty or life in prison , `` said Richard Goldstone , the South African chief prosecutor of the United Nations International Criminal Tribunals for the former Yugoslavia and Rwanda .", "`` To exclude a defendant from any part of the trial , or to withhold evidence from the defendant , seems to me , by any civilized standard , outside what is acceptable . ''", "He added , `` The other objectionable provision is that evidence would be admitted even if obtained under duress or torture . ''", "But other legal scholars say that this is a different time .", "For one thing , they point out , the war is not over .", "`` In a war situation , international commissions often use hearsay as long as it 's reliable , `` said Jack Goldsmith of Harvard Law School , who led the Office of Legal Counsel under President Bush .", "Other scholars who defend the administration argue that national security can not be compromised .", "`` The issue of access to classified information is pivotal , '' says John Yoo , who helped the Bush administration draft an earlier version of the proposed commissions and who now teaches law at University of California , Berkeley .", "In his new book , `` War By Other Means , '' Professor Yoo argues that in the first World Trade Center bombing case in 1993 , prosecutors had to give the defense a list of 200 unindicted co-conspirators .", "The list , he writes , was `` delivered to bin Laden '' `` and '' was later found during the investigation of the African embassy bombings . ``", "Republicans in Congress are now negotiating with the Bush administration on a series of compromises .", "The military commissions may operate closer to the Nuremberg standard , but give the administration some of the authority it seeks .", "For example , one proposal would allow the use of hearsay and evidence obtained through coercion short of torture , while refusing to allow the use of evidence not disclosed to the defendant .", "Could this satisfy critics .", "Some legal scholars suggest that a trial conducted along these lines could have international legitimacy as well as meeting basic standards of due process .", "Even Mr. Goldstone , the chief prosecutor for the U.N. tribunals , says that jettisoning the use of secret evidence would remove the `` most objectionable '' part of the proposal .", "Professor Spiro of Temple University agrees .", "`` The Republican moderates ' proposal would come a lot closer to satisfying international opinion , and in part because it 's coming from the Senate and not the administration , `` he said .", "Still , the atmospherics of the trial , and the fact that the defendants face the death penalty , may make it harder for the verdict to be internationally accepted .", "`` Guant\u00e1namo is now a rallying cry in Europe , so any trials that take place there will be tainted by that fact alone , '' Professor Spiro said .", "`` The administration would be well advised to allow the proceedings to be televised to avoid the image of a show trial . 
''", "But televised proceedings might give the defendants a political platform to hijack the proceedings .", "`` There 's always a danger that this will degenerate into a political circus , `` says Professor Goldsmith .", "`` Slobodan Milosevic used the trial against him as a political platform , and Saddam Hussein is doing the same thing .", "The more procedural rights you give the defendant , the more you allow him to continue the war by other means . ``", "Professor Goldsmith says the debate about what a fair trial requires will continue throughout every stage of the terrorist trials -- from the initial indictments to the actual trials to the Supreme Court appeals that would follow any death sentence .", "`` There 's a trade-off , `` he said .", "`` You can clamp down on access , but you do so at the cost of the perception of fairness , and finding the right balance of fairness and control is very hard . ''", "IDEAS & TRENDS Jeffrey Rosen 's latest book is `` The Most Democratic Branch : How the Courts Serve America . ''", "Editors ' Note : September 17 , 2006 , Sunday An article last Sunday reported on the debate over how to try 14 terror suspects recently transferred to United States military custody .", "The Bush administration has proposed that the suspects be tried in military commissions under procedures the White House has presented to Congress , including rules that would allow the admission of evidence obtained under coercion or duress .", "Civil libertarians , on the other hand , say the suspects should get the stronger due-process protections of an ordinary court-martial .", "The article included comment from Richard Goldstone , the South African chief prosecutor of the United Nations International Criminal Tribunals for the former Yugoslavia and Rwanda , who objected to the provision `` that evidence would be admitted even if obtained under duress or torture . 
''", "The administration disputes this characterization of the proposed rules , saying they do prohibit the introduction of evidence obtained through torture .", "The article should have included this viewpoint , and should have reflected the fact that part of the debate is about how the term `` torture '' is interpreted ."], "summary": ["Polarizing debate over how to try 14 terror suspects who have been transferred to military custody at Guantanamo Bay is not only argument between Bush administration and civil libertarians , but public opinion debate over how trial would be perceived in US and abroad .", "Many people want US to meet standard of Nuremberg trials , which they believe gave it authority and credibility to serve as global model of due process .", "Some legal scholars say trials should include usual fair trial provisions .", "Others say this is different time and war is not over .", "Photos ."], "publication": "nyt50", "label": [17, 1, 21, 13, 22], "tag": ["Week in Review"]} -{"id": "1789047", "text": ["In a directive whose logic is not always apparent , the Transportation Security Administration has spelled out what airline passengers can carry on board with them , what must be placed in checked luggage , and what ca n't go on the plane at all .", "Knives must be checked but knitting needles and corkscrews are allowed in the cabin .", "Up to four ounces of eye drops can be carried aboard , with fingers crossed that multiple terrorists wo n't combine their allotments to exceed the limit .", "Laptops , digital cameras , mobile phones and other electronic devices are permitted , so never mind any warnings you 've heard that they could be used to trigger a bomb .", "The bomb ingredients themselves , notably liquid explosives , will be kept out of the cabin by a ban on liquids , gels and lotions , except for small amounts of baby formula and medications .", "The ban on liquids surely makes sense given the lack of a reliable , efficient way to detect liquid explosives on the passenger screening line .", "But the other fine distinctions in this directive make us think the best approach would be a ban on virtually all carry -on items , or at least a limit of one small personal bag per passenger to tote travel documents , keys , vital medications , reading materials and any other minimal items that are allowed .", "There 's a lot to be said for a drastic reduction in what can be carried aboard .", "Passenger security lines would move faster if there were little or nothing for the screeners to screen .", "Passengers could be boarded faster and more comfortably if they were n't clogging the aisles while stuffing bags in the overhead bins .", "Most important , security would probably be enhanced .", "If a terrorist somehow slipped onto your flight , he would n't have bomb materials with him , or much of anything else for that matter .", "And his bags would get tougher scrutiny because the machines that screen checked luggage are said to be better at detecting explosives and other dangerous materials than the metal detectors and X-ray machines used for screening passengers and their carry -on bags .", "The chief downside , from a security standpoint , is that a greater burden would be placed on the lines that screen checked baggage , which in some airports are already overstretched .", "That raises the risk that screeners will rush checked bags through with inadequate scrutiny of the images of their contents , or that bags will back up and flights will be delayed to wait for them to be 
loaded .", "Still , that should not be a problem beyond the ingenuity of aviation planners .", "The handful of airports that already have big explosive-detection machines integrated into their baggage conveyor systems ought to be able to handle the load easily .", "When we raised the possibility of a ban on most carry -on items a month ago , there was a chorus of complaints from travelers who count on using their laptops during the flight .", "Or fear that valuable electronic devices might be lost , broken or stolen if checked .", "Or resent long waits after a flight to get their checked bags .", "Some travelers have already shifted to trains or automobiles for short trips and more will do so if the inconvenience mounts .", "These are not trivial issues .", "Airlines , already financially strapped , depend on business fliers who are the most likely to object to a change in the rules .", "Airlines could head off some of these problems by , for example , storing valuable electronic devices in locked overhead bins where they ca n't easily be stolen , and hiring more baggage handlers to unload planes rapidly .", "Separating people from their laptops during flights would be painful , although some people could surely use the time to go over reading material , or even revert to pen and paper .", "A ban on most carry -on items need not be permanent .", "Technologies that could screen passengers and their carry -on bags rapidly to detect known dangerous materials are under development , but it is uncertain when they might be ready .", "Even then , sophisticated terrorists will always look for new tactics to evade detection .", "For now , the surest way to keep dangerous materials out of the cabin is to keep virtually all materials out of the cabin .", "Editorial ."], "summary": ["Editorial calls for ban on most airline carry -on items until technologies are developed that can screen passengers and their carry -on bags rapidly to detect dangerous materials .", "Holds that new Transportation Security Adm directive regarding carry -on is confusing and that virtual ban of carry -on items would speed passenger screening and enhance security ."], "publication": "nyt50", "label": [26, 25, 29], "tag": ["Opinion"]} diff --git a/reproduction/Summarization/Baseline/test/testdata/vocab b/reproduction/Summarization/Baseline/test/testdata/vocab deleted file mode 100644 index 0e8e5cfa..00000000 --- a/reproduction/Summarization/Baseline/test/testdata/vocab +++ /dev/null @@ -1,200000 +0,0 @@ -. 
12172211
-the 11896296
-, 9609022
-to 5751102
-a 5100569
-and 4892246
-of 4867879
-in 4431149
-'s 2202754
-was 2086001
-for 1995054
-that 1944328
[... several thousand further deleted vocabulary entries in the same "word count" format, sorted by descending frequency (news-corpus tokens such as "cnn", "mailonline", "-lrb-" and "-rrb-"), continuing down to "recognised 3873" ...]
-harrison
3872 -reducing 3871 -slipped 3870 -accusing 3869 -swift 3866 -psychological 3865 -conservation 3864 -68 3863 -selfie 3862 -falcao 3862 -don 3861 -leon 3860 -lists 3860 -miracle 3859 -electrical 3859 -producers 3859 -spirits 3854 -freezing 3853 -full-time 3853 -sunshine 3852 -qualifying 3851 -29-year-old 3850 -coaches 3848 -cure 3848 -bullets 3847 -orlando 3846 -arthur 3843 -eligible 3843 -correspondent 3842 -snap 3842 -pellegrini 3840 -120 3839 -furniture 3839 -recognise 3837 -biological 3836 -valencia 3835 -missiles 3835 -drones 3834 -eighth 3834 -beaches 3834 -harvard 3834 -avoided 3834 -ivory 3833 -stabbing 3833 -menu 3831 -item 3831 -benghazi 3830 -dementia 3830 -guidance 3829 -self 3828 -belgian 3826 -highlighted 3826 -respected 3825 -chemotherapy 3822 -raw 3822 -dollar 3821 -qualified 3821 -confused 3820 -extremist 3819 -relevant 3819 -ripped 3818 -tropical 3816 -academic 3815 -fierce 3815 -undergoing 3814 -subsequently 3813 -yacht 3812 -phase 3810 -jeans 3810 -coma 3809 -submitted 3809 -converted 3809 -28-year-old 3809 -attractive 3809 -weighing 3808 -urgent 3807 -appreciate 3805 -poster 3802 -hostages 3799 -corps 3799 -unexpected 3797 -suing 3796 -legitimate 3795 -disability 3795 -casey 3795 -guinea 3794 -examination 3793 -assaulting 3791 -carpet 3791 -¿ 3790 -odd 3790 -solve 3790 -reunited 3787 -tumour 3787 -petrol 3786 -surviving 3785 -consumption 3784 -hailed 3782 -formally 3781 -stability 3780 -15,000 3780 -robertson 3778 -tornado 3775 -embarrassing 3774 -fever 3772 -harsh 3772 -bloomberg 3771 -murdoch 3771 -vegetables 3771 -attackers 3770 -desk 3770 -toronto 3769 -supporter 3769 -grandchildren 3768 -iii 3767 -tattoo 3766 -t-shirt 3766 -lampard 3766 -todd 3766 -3-1 3765 -associate 3765 -retailers 3763 -d.c. 3762 -ex-wife 3761 -capitol 3760 -moral 3760 -offender 3759 -narrow 3758 -strategic 3758 -participate 3757 -bradford 3757 -fabregas 3754 -f1 3754 -equality 3754 -basement 3753 -transcript 3752 -kinds 3752 -inc. 3752 -existence 3752 -pound 3751 -dennis 3749 -mortgage 3749 -legendary 3749 -universities 3747 -delhi 3746 -forum 3746 -rumours 3745 -hammer 3744 -albert 3742 -voices 3739 -vessels 3739 -julian 3738 -absolute 3737 -unnamed 3736 -judicial 3735 -partly 3734 -rafael 3733 -removing 3733 -subjected 3733 -soul 3733 -well-known 3731 -recognized 3731 -joshua 3729 -silent 3728 -update 3728 -ingredients 3726 -travellers 3726 -emotions 3726 -devoted 3725 -suv 3725 -swedish 3724 -demonstration 3723 -leather 3723 -lancaster 3720 -involve 3719 -missions 3718 -rely 3717 -tehran 3714 -boris 3713 -lesson 3713 -fiscal 3713 -trigger 3711 -mess 3711 -professionals 3711 -flee 3711 -ally 3710 -jewellery 3707 -flash 3706 -connect 3705 -liberia 3704 -redknapp 3704 -merkel 3703 -shooter 3702 -relation 3701 -fastest 3700 -stem 3700 -loyal 3700 -cathedral 3699 -resign 3698 -chavez 3698 -handled 3698 -25,000 3697 -gen. 
3697 -aguero 3694 -urge 3694 -inner 3693 -halt 3693 -donate 3692 -hunger 3690 -tobacco 3689 -chemicals 3689 -contracts 3688 -stamford 3687 -diving 3684 -unrest 3680 -subsequent 3678 -loans 3675 -stripped 3675 -battles 3674 -visa 3673 -robot 3673 -consent 3669 -reduction 3669 -arctic 3669 -stake 3669 -unaware 3669 -helicopters 3666 -raises 3664 -cooperation 3664 -kicking 3664 -nathan 3663 -landmark 3662 -colin 3662 -barrier 3661 -embrace 3661 -comeback 3659 -regarded 3658 -arranged 3658 -uploaded 3657 -palestinians 3657 -residential 3656 -succeed 3656 -spreading 3654 -shake 3653 -herald 3652 -cargo 3651 -announcing 3650 -excessive 3650 -xi 3650 -sealed 3650 -northwest 3650 -nomination 3650 -shouted 3650 -violated 3649 -introduce 3649 -lock 3648 -elephant 3647 -suits 3646 -fleeing 3645 -floating 3645 -networking 3645 -australians 3644 -700 3640 -appealed 3640 -insist 3638 -guarantee 3636 -serves 3635 -1million 3635 -refugee 3635 -whale 3634 -spacecraft 3633 -rapid 3632 -tributes 3626 -underwear 3626 -adventure 3624 -tone 3623 -bieber 3623 -removal 3622 -proven 3622 -politically 3622 -depending 3622 -communicate 3621 -spare 3619 -portuguese 3616 -nypd 3616 -aerial 3613 -aunt 3613 -mask 3612 -classified 3612 -tons 3609 -automatically 3608 -scrutiny 3608 -preparation 3607 -canceled 3606 -photography 3605 -61 3604 -creatures 3602 -berry 3602 -tag 3602 -undercover 3600 -adrian 3599 -palm 3598 -risen 3595 -african-american 3595 -survivor 3595 -organs 3593 -qualify 3593 -prestigious 3592 -consecutive 3589 -ferry 3588 -69 3588 -excess 3588 -struggles 3588 -christine 3587 -a&e 3586 -resistance 3586 -stance 3585 -glory 3584 -sara 3583 -thus 3582 -solo 3581 -pastor 3581 -aide 3581 -jokes 3580 -minds 3580 -pensions 3580 -hazard 3577 -thai 3577 -calendar 3576 -transformed 3574 -insisting 3570 -customs 3570 -mubarak 3569 -charging 3567 -indication 3566 -two-year-old 3565 -lottery 3565 -frequent 3564 -unhappy 3561 -tours 3560 -tracked 3559 -infections 3559 -indecent 3558 -billions 3557 -sued 3555 -craft 3555 -researcher 3554 -improvement 3554 -reagan 3553 -nicholas 3553 -surely 3552 -peterson 3552 -portion 3552 -sophisticated 3551 -slept 3551 -cruel 3551 -abusing 3549 -6,000 3548 -instructions 3545 -delivering 3545 -overweight 3541 -barnes 3541 -whenever 3541 -header 3540 -fights 3540 -accurate 3539 -sgt. 
3539 -doubled 3539 -prosecuting 3538 -hugely 3537 -disciplinary 3536 -apologized 3535 -publicity 3534 -latin 3534 -casualties 3534 -ceiling 3533 -1986 3533 -promotion 3531 -superior 3530 -satisfied 3529 -singh 3528 -stranded 3528 -pants 3525 -marines 3522 -endured 3522 -patterns 3522 -focusing 3521 -prescription 3521 -stream 3520 -rogers 3520 -boom 3518 -appealing 3518 -maine 3517 -buckingham 3517 -marc 3516 -violations 3515 -icon 3513 -jill 3510 -mate 3510 -somewhat 3510 -dated 3509 -bennett 3508 -argentine 3507 -underneath 3507 -lined 3505 -kiev 3504 -19th 3504 -affidavit 3504 -wolf 3503 -accidents 3500 -tipped 3496 -ryder 3495 -saints 3495 -preventing 3493 -warner 3493 -hungry 3493 -orbit 3492 -universal 3491 -designers 3490 -raping 3489 -jong 3488 -villages 3488 -governing 3488 -countryside 3488 -1988 3486 -draft 3486 -mason 3486 -pupil 3485 -leone 3485 -battled 3482 -cohen 3481 -foul 3479 -deputies 3478 -bashar 3478 -brad 3478 -thousand 3478 -amateur 3475 -fantasy 3474 -speeds 3474 -warming 3472 -l 3472 -ate 3471 -1982 3470 -boot 3469 -context 3468 -glamorous 3468 -pledge 3468 -difficulty 3467 -engines 3467 -trucks 3467 -constable 3466 -henderson 3463 -norman 3463 -surgeons 3462 -two-year 3462 -innocence 3460 -gunmen 3459 -happiness 3458 -friendship 3456 -richardson 3455 -random 3455 -tyler 3454 -manufacturers 3454 -toxic 3453 -pen 3453 -discussing 3450 -elaborate 3449 -lt. 3448 -blonde 3447 -creates 3447 -alice 3444 -commonly 3444 -comic 3443 -recalls 3442 -sturridge 3442 -goodbye 3441 -signals 3441 -beside 3439 -beef 3439 -euros 3438 -first-degree 3438 -classroom 3438 -1-1 3437 -gesture 3435 -pyongyang 3434 -victor 3432 -uncomfortable 3432 -abusive 3431 -infant 3429 -newborn 3427 -spa 3426 -opener 3426 -collecting 3426 -liam 3425 -developers 3425 -achievement 3424 -humanity 3424 -immune 3423 -ammunition 3423 -predict 3420 -distraught 3419 -unfortunate 3415 -worrying 3414 -samuel 3413 -texts 3411 -precious 3411 -generous 3410 -checking 3409 -rubbish 3409 -nominated 3407 -greeted 3406 -fatally 3406 -thames 3405 -gangs 3405 -ownership 3405 -sharks 3404 -attraction 3404 -deciding 3403 -superintendent 3403 -wire 3403 -rings 3401 -palmer 3401 -conceded 3400 -andrea 3400 -sunni 3399 -longest 3398 -copies 3398 -fines 3398 -jerusalem 3397 -restored 3396 -ac 3395 -subway 3394 -relating 3394 -presidents 3394 -pit 3391 -spends 3391 -1984 3390 -printed 3389 -scary 3389 -infamous 3388 -caps 3387 -julia 3386 -moderate 3386 -comprehensive 3386 -wheels 3386 -displays 3385 -screens 3384 -linda 3383 -membership 3382 -southeast 3381 -lucas 3380 -inspire 3377 -abdullah 3377 -loaded 3377 -climbed 3377 -excitement 3376 -starred 3375 -pornography 3375 -wells 3375 -sum 3375 -stanley 3374 -gene 3372 -acceptable 3372 -coaching 3371 -brotherhood 3370 -aids 3370 -reckless 3370 -essentially 3369 -prayer 3368 -fundraising 3368 -da 3367 -refuse 3366 -blake 3365 -deserves 3365 -taxpayer 3363 -advocates 3363 -purposes 3362 -torres 3361 -useful 3358 -airstrikes 3358 -arkansas 3357 -latter 3355 -sheet 3354 -manning 3353 -excuse 3349 -sample 3348 -stepping 3348 -toure 3347 -smartphones 3347 -bet 3346 -fulham 3345 -alzheimer 3345 -18th 3344 -heated 3343 -suggestion 3342 -flower 3341 -speeding 3340 -motive 3340 -attendance 3340 -netanyahu 3339 -thrilled 3338 -obtain 3337 -commissioned 3334 -pray 3333 -obese 3332 -filing 3332 -shoulders 3331 -costing 3331 -marie 3330 -60,000 3330 -investigator 3329 -jeffrey 3329 -cared 3329 -households 3329 -300,000 3328 -tail 3327 -neighboring 3327 -carroll 3326 -versions 3324 
-passionate 3324 -keane 3321 -demonstrate 3320 -norfolk 3319 -reed 3316 -viewing 3316 -christians 3315 -advocate 3315 -audio 3314 -melissa 3313 -lightning 3313 -creature 3311 -farmer 3310 -temporarily 3309 -broadcaster 3309 -pro 3309 -chronic 3309 -slip 3308 -durham 3306 -dialogue 3302 -monster 3302 -stephanie 3301 -lorry 3299 -respectively 3298 -receives 3297 -mysterious 3297 -czech 3296 -21st 3295 -lavish 3294 -examine 3294 -tsa 3292 -structures 3291 -hometown 3290 -dorset 3290 -reviews 3289 -artificial 3289 -abducted 3289 -meets 3288 -rehabilitation 3288 -potter 3286 -europa 3286 -noting 3284 -© 3282 -donors 3282 -index 3281 -hacked 3280 -cups 3279 -regard 3279 -en 3278 -adoption 3278 -cuban 3277 -damascus 3276 -contribute 3276 -happier 3275 -punch 3275 -thanksgiving 3274 -description 3273 -hip 3273 -convince 3273 -habits 3272 -conducting 3269 -burial 3269 -wears 3269 -contribution 3267 -mayweather 3266 -supportive 3265 -requirements 3265 -burger 3264 -makers 3264 -allegation 3261 -determination 3261 -muscles 3260 -pre-season 3259 -safer 3258 -phenomenon 3258 -breathe 3257 -extension 3257 -jackie 3256 -swing 3256 -cigarettes 3255 -carol 3254 -burden 3254 -ken 3252 -horrified 3252 -stranger 3251 -pills 3248 -react 3248 -denmark 3247 -expression 3247 -haram 3246 -tanks 3243 -wings 3243 -instantly 3243 -sharon 3240 -accommodation 3239 -lap 3237 -rapper 3236 -periods 3235 -hire 3234 -choosing 3230 -30-year-old 3229 -enjoys 3229 -walsh 3228 -paintings 3227 -1980 3225 -13-year-old 3225 -boarding 3225 -disputed 3224 -t 3222 -costume 3221 -confrontation 3221 -12-year-old 3220 -dylan 3219 -styles 3216 -emissions 3215 -nigerian 3215 -timing 3213 -hosting 3211 -maker 3210 -marshall 3210 -trace 3209 -beliefs 3209 -eddie 3208 -centuries 3207 -fury 3207 -siege 3207 -cigarette 3205 -hudson 3205 -hospitalized 3204 -snake 3204 -subjects 3203 -tent 3203 -outdoor 3202 -beds 3199 -10th 3198 -comet 3197 -alonso 3197 -belonging 3197 -trailer 3196 -observers 3196 -dock 3194 -directors 3194 -releasing 3193 -detected 3193 -1979 3193 -gunshot 3192 -dem 3192 -lanka 3189 -boko 3188 -bedrooms 3188 -testify 3188 -merely 3187 -roots 3187 -hugo 3185 -approaching 3184 -influential 3184 -integrity 3183 -examples 3181 -stored 3181 -decent 3179 -competitions 3176 -intimate 3176 -blew 3175 -weighs 3175 -regulation 3175 -laboratory 3174 -relieved 3173 -mills 3173 -washed 3172 -observed 3171 -withdraw 3169 -maintenance 3169 -plain 3167 -topped 3167 -baltimore 3167 -casino 3166 -monthly 3165 -demonstrated 3165 -gunners 3163 -austria 3161 -ranging 3160 -tension 3158 -anchor 3157 -addressing 3155 -moss 3155 -enable 3154 -opted 3154 -thanked 3153 -li 3153 -donation 3152 -passage 3147 -rescuers 3146 -strangers 3144 -breasts 3144 -blackpool 3143 -leak 3142 -transported 3141 -staffordshire 3141 -catching 3140 -bang 3139 -semi-final 3139 -impose 3138 -citizenship 3137 -traditionally 3136 -harvey 3136 -coup 3136 -welbeck 3134 -grandparents 3133 -backs 3132 -pollution 3132 -venezuela 3131 -delta 3130 -95 3129 -manufacturing 3129 -norwich 3128 -ebay 3127 -organ 3127 -crushed 3125 -expanded 3125 -alleges 3125 -der 3124 -pensioner 3123 -grandson 3123 -hague 3123 -disgusting 3123 -ramsey 3122 -generated 3120 -mud 3119 -complications 3119 -establishment 3119 -wigan 3117 -inspectors 3115 -fundamental 3113 -shoe 3113 -embarrassed 3113 -bernard 3113 -sing 3112 -71 3111 -complain 3111 -reverse 3110 -1.2 3110 -formation 3109 -councillor 3109 -fda 3109 -belonged 3106 -folks 3106 -stark 3105 -secretly 3104 -solutions 3104 -estranged 3101 
-councils 3100 -wives 3100 -inspection 3099 -ears 3097 -fred 3095 -consideration 3095 -three-year-old 3094 -nude 3093 -nobel 3092 -compromise 3092 -wash 3092 -inch 3092 -morrison 3090 -springs 3090 -helmet 3089 -hung 3088 -distribution 3088 -stormed 3086 -gown 3086 -spill 3085 -connections 3083 -raids 3081 -hayes 3081 -promoted 3081 -harper 3080 -richards 3080 -staged 3077 -confusion 3075 -considerable 3075 -blown 3075 -admission 3073 -holly 3073 -neville 3073 -cox 3073 -pat 3072 -lieutenant 3071 -romance 3069 -preston 3068 -complaining 3064 -bp 3064 -cruelty 3061 -drives 3061 -thieves 3061 -column 3060 -lit 3059 -ignore 3058 -unnecessary 3057 -propaganda 3056 -defenders 3056 -titled 3056 -punished 3056 -rocky 3054 -sandusky 3053 -franchise 3053 -lungs 3052 -secrets 3052 -sochi 3052 -garner 3049 -6-3 3049 -authors 3048 -ugly 3047 -nicknamed 3046 -differently 3043 -experiencing 3042 -km 3040 -priest 3039 -spray 3039 -dj 3038 -rage 3038 -shaking 3037 -discharged 3037 -cinema 3036 -trusted 3035 -detect 3035 -pleading 3033 -suite 3032 -nicolas 3032 -emotion 3032 -medics 3031 -recommendations 3031 -modest 3030 -shipping 3030 -switched 3030 -pure 3029 -slim 3029 -stairs 3028 -cage 3025 -endangered 3025 -franklin 3024 -katherine 3024 -rory 3023 -assumed 3023 -shanghai 3022 -peers 3022 -addresses 3021 -lasting 3021 -deck 3020 -examiner 3019 -killers 3018 -suburban 3017 -hackers 3016 -interim 3015 -co-founder 3015 -eurozone 3014 -competitors 3013 -inflation 3012 -osama 3012 -venture 3011 -ensuring 3011 -policeman 3011 -unemployed 3009 -trump 3009 -33-year-old 3008 -aspects 3008 -campaigning 3008 -dame 3007 -backlash 3006 -marco 3006 -underway 3003 -valued 3003 -protein 3002 -scenario 3002 -spectators 3002 -measured 3000 -re-election 2999 -rockets 2999 -bold 2999 -shy 2996 -clouds 2996 -1950s 2994 -blacks 2989 -serial 2989 -ambitious 2989 -caution 2987 -bunch 2987 -chapter 2987 -trousers 2986 -senators 2986 -sends 2986 -lighting 2985 -feedback 2985 -half-time 2985 -shield 2985 -renowned 2984 -contracted 2984 -boxer 2984 -similarly 2982 -appalling 2982 -j. 
2981 -marriages 2979 -ghana 2979 -ballot 2975 -photographers 2975 -fc 2974 -irs 2974 -routes 2973 -farms 2972 -tale 2970 -preferred 2970 -committing 2969 -dakota 2967 -kane 2966 -mccarthy 2966 -heather 2965 -purple 2964 -150,000 2963 -musician 2962 -enemies 2962 -outlets 2962 -insurgents 2962 -jenkins 2962 -elephants 2961 -fixture 2959 -eager 2958 -nephew 2957 -astonishing 2957 -educational 2956 -clues 2954 -kabul 2954 -teammates 2952 -teammate 2949 -matching 2948 -instant 2946 -understands 2946 -autism 2945 -five-year 2945 -cave 2945 -duck 2944 -intelligent 2942 -penn 2941 -occupy 2941 -sally 2940 -discipline 2938 -believing 2938 -bonus 2938 -bucket 2937 -epidemic 2937 -restricted 2937 -resume 2937 -dealer 2936 -ashes 2936 -completing 2934 -chips 2934 -commented 2934 -automatic 2933 -theresa 2933 -detainees 2933 -hood 2932 -washing 2932 -laptop 2931 -monitored 2931 -tampa 2931 -joan 2930 -lips 2928 -portland 2928 -coleman 2927 -adopt 2927 -inmate 2926 -pirates 2926 -overturned 2924 -cried 2923 -sic 2923 -deserved 2923 -eaten 2923 -32-year-old 2922 -1987 2920 -assaults 2920 -departments 2920 -shirts 2919 -rented 2918 -sole 2917 -malaysian 2916 -beard 2914 -creek 2913 -preserve 2913 -nerve 2912 -benedict 2910 -principle 2910 -element 2909 -scare 2909 -pochettino 2908 -canal 2908 -bible 2907 -centres 2906 -reminder 2906 -trash 2905 -harbour 2905 -perth 2904 -doubts 2904 -developments 2904 -handing 2903 -serie 2901 -retreat 2900 -lindsay 2900 -crashing 2899 -gardner 2899 -immigrant 2898 -pleasure 2897 -privately 2897 -rehab 2896 -nominee 2896 -prepares 2895 -revolutionary 2893 -yeah 2893 -overwhelmed 2892 -chasing 2891 -tribal 2891 -arrangements 2891 -architect 2891 -bodily 2889 -programmes 2889 -towers 2889 -okay 2889 -root 2888 -disappointment 2888 -volume 2887 -affecting 2885 -puppy 2885 -sullivan 2882 -unbelievable 2881 -breakthrough 2881 -wallace 2881 -victorian 2880 -8,000 2880 -istanbul 2880 -equipped 2880 -decorated 2879 -psychiatric 2879 -carney 2878 -polar 2878 -raided 2877 -easter 2877 -outraged 2876 -gon 2875 -travels 2875 -proportion 2873 -dolphins 2872 -balcony 2872 -ninth 2872 -isolation 2872 -31-year-old 2871 -andre 2870 -rosie 2870 -practical 2869 -prosecuted 2869 -confidential 2869 -concentration 2868 -butler 2868 -occasionally 2867 -acid 2866 -cottage 2864 -bolt 2863 -natalie 2861 -shorts 2861 -tougher 2861 -mounted 2859 -torn 2859 -pursuit 2859 -renewed 2859 -hussein 2858 -manufacturer 2857 -tsunami 2857 -planets 2856 -sailing 2855 -buses 2855 -2,500 2854 -copyright 2854 -expansion 2853 -bullied 2853 -technologies 2853 -guantanamo 2853 -ruined 2852 -mother-of-two 2852 -innovation 2851 -banning 2849 -shutdown 2848 -kardashian 2847 -invest 2847 -no-one 2846 -pressed 2846 -sexy 2846 -insight 2845 -expense 2843 -suggestions 2843 -earnings 2842 -indicted 2841 -condolences 2841 -identification 2841 -tigers 2841 -rica 2841 -twist 2841 -quest 2841 -gloves 2840 -glenn 2840 -laser 2840 -scam 2840 -sufficient 2839 -weird 2838 -6-4 2838 -jo 2836 -1985 2835 -strengthen 2835 -faa 2834 -bryan 2834 -principles 2834 -assassination 2833 -knock 2833 -posters 2833 -prostitution 2833 -crimea 2832 -engaging 2830 -spin 2827 -coal 2826 -20s 2826 -reviewed 2825 -steady 2824 -haul 2824 -deeper 2823 -bergdahl 2823 -imprisonment 2821 -cop 2821 -va 2821 -croatia 2820 -administrative 2820 -belong 2818 -emerge 2818 -strongest 2818 -countless 2817 -careers 2817 -updates 2816 -argues 2816 -mainstream 2815 -dig 2814 -assisted 2813 -blasted 2812 -array 2812 -skies 2812 -77 2811 -karl 2810 -vicious 2809 -73 
2809 -organisations 2808 -wilshere 2807 -retailer 2806 -amber 2806 -extradition 2806 -graves 2806 -displaced 2805 -chapman 2805 -tmz 2803 -blanket 2803 -fireworks 2802 -bali 2802 -coffin 2802 -glimpse 2801 -outfits 2801 -blackburn 2800 -lied 2800 -74 2800 -wrongdoing 2798 -bat 2797 -sells 2795 -poured 2794 -strictly 2789 -spiritual 2788 -jake 2788 -reflected 2787 -placing 2786 -counsel 2786 -sarkozy 2785 -gambling 2785 -drought 2785 -poisoning 2784 -assess 2782 -sheikh 2781 -donetsk 2781 -floods 2779 -phillip 2778 -lifting 2778 -laughed 2778 -four-year-old 2778 -gradually 2777 -peru 2776 -credited 2775 -revelation 2775 -hug 2774 -sheer 2773 -dignity 2772 -archbishop 2772 -retire 2772 -pig 2771 -prisons 2771 -graduated 2770 -unarmed 2769 -gove 2769 -paula 2769 -collective 2768 -sweeping 2767 -sensation 2767 -tremendous 2766 -vintage 2766 -apologize 2766 -secondary 2765 -negotiate 2765 -exercises 2764 -origin 2764 -suffolk 2762 -sebastian 2760 -cyber 2759 -perceived 2758 -ruth 2756 -haven 2755 -consistently 2755 -rider 2754 -distributed 2754 -generate 2754 -reacted 2753 -astronauts 2753 -lovers 2753 -heights 2753 -inquiries 2752 -chip 2752 -floors 2752 -barca 2751 -tortured 2751 -occupied 2751 -dear 2750 -traumatic 2750 -bangkok 2749 -depth 2749 -johnny 2749 -11th 2749 -ramos 2748 -1981 2745 -drag 2745 -spaniard 2744 -millionaire 2744 -permit 2744 -allowance 2741 -rubble 2740 -diversity 2740 -fancy 2740 -jr 2739 -realistic 2739 -quake 2739 -lawson 2738 -kensington 2737 -yoga 2736 -andrews 2736 -exceptional 2735 -debts 2734 -volcano 2733 -writers 2733 -errors 2733 -reflects 2732 -destinations 2732 -threaten 2732 -kenneth 2732 -proving 2730 -anonymity 2729 -reaches 2729 -assume 2729 -g 2728 -heartbroken 2726 -ellis 2726 -suitable 2726 -unpaid 2726 -workplace 2726 -pile 2725 -developer 2725 -deer 2725 -makeshift 2725 -optimistic 2724 -nixon 2722 -trademark 2722 -plunged 2721 -remembers 2721 -partially 2720 -primarily 2720 -explicit 2720 -assured 2719 -operators 2719 -paedophile 2719 -thief 2717 -phrase 2716 -grieving 2716 -pays 2715 -sensors 2715 -habit 2715 -respects 2714 -chased 2714 -vet 2714 -cyclist 2714 -publishing 2714 -sympathy 2713 -juvenile 2713 -improvements 2713 -pursuing 2710 -id 2709 -parish 2708 -bmw 2707 -seeks 2705 -pearson 2705 -resolved 2704 -norwegian 2703 -dictator 2702 -delight 2702 -clay 2700 -advances 2700 -organizers 2700 -ash 2700 -wang 2698 -rihanna 2697 -peer 2695 -runner 2695 -spaces 2693 -reuters 2692 -reactions 2692 -jan 2691 -aides 2691 -audiences 2691 -whereabouts 2690 -flies 2690 -hockey 2690 -deceased 2689 -matched 2689 -romania 2689 -francois 2689 -filling 2688 -balloon 2688 -trends 2688 -lesbian 2686 -gaining 2686 -seoul 2686 -treaty 2686 -penny 2684 -montana 2684 -firearm 2683 -dancer 2683 -topic 2683 -sorts 2682 -opera 2682 -valentine 2680 -reluctant 2679 -joel 2678 -nursery 2677 -tripoli 2676 -surprisingly 2676 -dive 2675 -visitor 2674 -lone 2673 -grip 2673 -chuck 2672 -kings 2672 -triple 2672 -germans 2672 -tommy 2670 -ex 2669 -episodes 2668 -transit 2667 -stamp 2667 -exists 2666 -p 2666 -shattered 2664 -five-year-old 2664 -life-threatening 2664 -slide 2663 -shelves 2662 -sustainable 2662 -premiere 2662 -courthouse 2661 -neglect 2661 -contractor 2660 -breakdown 2660 -rspca 2660 -channels 2655 -introduction 2655 -hardest 2655 -organic 2653 -uprising 2653 -whoever 2652 -felipe 2650 -bournemouth 2650 -drowned 2650 -chilling 2650 -mandatory 2649 -knees 2646 -99 2645 -riders 2644 -juice 2644 -congressman 2643 -polling 2641 -madison 2641 -walter 2641 -rang 
2640 -saint 2639 -sizes 2639 -ethics 2638 -danish 2638 -identical 2638 -lance 2638 -trick 2637 -employer 2636 -gibson 2635 -bare 2634 -bulger 2633 -gunfire 2632 -briefing 2632 -mclaren 2632 -nonprofit 2632 -recommend 2631 -requiring 2631 -permanently 2631 -riots 2630 -gonzalez 2629 -fur 2629 -candy 2628 -jenny 2628 -quarters 2627 -guilt 2627 -indonesian 2626 -martha 2625 -agriculture 2623 -blocking 2623 -maintains 2623 -cartel 2621 -1,200 2621 -mourners 2621 -worries 2621 -travis 2621 -halloween 2619 -actively 2618 -comply 2618 -hispanic 2618 -insider 2616 -reynolds 2614 -lucrative 2614 -bo 2613 -bands 2612 -harmful 2612 -banner 2611 -7,000 2610 -retain 2610 -singles 2609 -luckily 2609 -acquitted 2609 -apartments 2609 -ashton 2607 -myanmar 2607 -credits 2607 -pippa 2607 -churchill 2606 -contaminated 2606 -cheer 2606 -populations 2606 -expanding 2605 -oral 2602 -defined 2601 -plates 2601 -lodge 2601 -borough 2600 -diverse 2600 -draws 2599 -shane 2598 -oppose 2598 -migration 2598 -rebuild 2596 -amongst 2595 -architecture 2595 -battered 2594 -relax 2594 -notified 2594 -cardiac 2594 -bearing 2594 -momentum 2592 -omar 2592 -o'brien 2591 -sufferers 2589 -greatly 2589 -richest 2589 -soap 2589 -conscious 2588 -visual 2588 -database 2588 -unlawful 2588 -indicates 2588 -congo 2586 -whales 2586 -sheep 2586 -divers 2585 -upstairs 2584 -1983 2583 -olivia 2582 -studios 2582 -hammond 2581 -foley 2581 -clever 2581 -caption 2580 -lennon 2580 -throne 2578 -999 2578 -finances 2577 -electoral 2577 -brush 2576 -anxious 2576 -heartbreaking 2575 -advisers 2575 -broader 2574 -certificate 2573 -aleppo 2573 -occurs 2573 -treats 2572 -cheshire 2569 -jesse 2568 -aspect 2568 -pipe 2568 -rubber 2567 -conventional 2567 -schoolgirl 2567 -5.5 2566 -shades 2565 -windsor 2565 -lobby 2565 -escorted 2564 -sounded 2564 -portsmouth 2563 -raheem 2563 -replacing 2562 -gains 2562 -hey 2562 -modelling 2560 -happily 2560 -quietly 2560 -cheating 2559 -supermarkets 2559 -hid 2559 -curiosity 2559 -logo 2557 -compare 2556 -wreck 2556 -seas 2555 -mediterranean 2555 -courses 2554 -evacuation 2551 -famed 2550 -outrageous 2550 -regiment 2550 -publish 2550 -hiring 2549 -colourful 2548 -airplane 2547 -persuade 2546 -spree 2546 -psg 2545 -tense 2545 -fails 2544 -powder 2543 -firmly 2543 -stays 2542 -sandra 2542 -anticipated 2542 -industries 2541 -successive 2540 -dems 2540 -mali 2540 -drunken 2539 -cute 2539 -mining 2538 -contents 2536 -brains 2536 -zimbabwe 2535 -proceeds 2535 -janet 2535 -76 2534 -1978 2532 -invested 2532 -pill 2531 -cheryl 2531 -joins 2531 -paulo 2531 -nasty 2530 -crowded 2530 -observatory 2529 -cosmetic 2529 -skiing 2529 -fitting 2525 -winston 2525 -timothy 2524 -accountable 2524 -uncertainty 2522 -contemporary 2521 -fletcher 2521 -persons 2519 -wherever 2518 -controlling 2518 -withdrawn 2517 -depressed 2516 -fathers 2516 -tap 2516 -tide 2515 -zuckerberg 2514 -jacob 2513 -etihad 2512 -iceland 2512 -creator 2512 -berlusconi 2511 -fluid 2511 -christ 2511 -kenyan 2510 -sooner 2510 -78 2510 -knives 2508 -handgun 2508 -smash 2508 -successor 2506 -freeze 2506 -1969 2504 -distinctive 2503 -liz 2503 -derek 2502 -eden 2502 -stylish 2501 -nationals 2500 -mob 2500 -breed 2500 -luggage 2499 -hugh 2499 -males 2499 -monica 2499 -'em 2499 -sunny 2498 -counterparts 2498 -formerly 2498 -mutual 2498 -treasure 2498 -earl 2497 -saddened 2497 -17th 2496 -mid 2496 -documented 2494 -finest 2494 -churches 2493 -explosions 2493 -weigh 2493 -superb 2492 -ashamed 2492 -colombian 2491 -fascinating 2491 -providers 2489 -operates 2489 -e 2489 
-recruitment 2489 -curriculum 2489 -deported 2488 -beast 2487 -acknowledge 2487 -allardyce 2486 -chains 2486 -powered 2485 -exception 2483 -hearings 2482 -prey 2481 -layer 2481 -pistol 2480 -12,000 2480 -raymond 2480 -buyer 2479 -injection 2479 -aisle 2479 -sticking 2479 -miranda 2479 -vigil 2478 -withdrawal 2478 -russians 2477 -superstar 2477 -eagle 2476 -identifying 2473 -patriots 2473 -instructor 2473 -berkshire 2472 -crop 2471 -carnival 2471 -tables 2470 -frightened 2470 -pga 2470 -limbs 2470 -somali 2469 -novak 2469 -colonel 2468 -cocktail 2468 -stab 2468 -granddaughter 2468 -rumors 2466 -entrepreneur 2465 -intervene 2465 -stuffed 2463 -sticks 2463 -robbed 2463 -patch 2463 -bow 2462 -exchanged 2461 -assigned 2459 -mick 2458 -wise 2458 -bra 2458 -130 2458 -curtis 2457 -efficient 2457 -showers 2456 -vocal 2455 -2020 2453 -mode 2452 -surfaced 2451 -allege 2451 -labelled 2450 -karzai 2450 -brandon 2449 -frenchman 2449 -gaming 2449 -remind 2449 -shuttle 2448 -dishes 2448 -11-year-old 2447 -1976 2447 -interactive 2445 -stakes 2445 -oversight 2445 -epic 2443 -newtown 2443 -logan 2442 -asteroid 2442 -stating 2441 -auto 2441 -austerity 2441 -victories 2441 -unite 2440 -murderer 2440 -forecasters 2440 -fractured 2439 -pipeline 2439 -16th 2439 -authorized 2439 -approaches 2438 -vogue 2437 -scans 2437 -cab 2436 -boyle 2435 -mourning 2435 -six-year-old 2434 -trek 2434 -economics 2434 -transparency 2434 -gravity 2434 -salmond 2433 -unity 2432 -portrayed 2432 -reviewing 2432 -reminded 2431 -diplomats 2430 -o'neill 2430 -implications 2430 -embarrassment 2430 -educated 2430 -waist 2429 -cockpit 2428 -depends 2428 -foreigners 2428 -flats 2428 -christina 2428 -sheets 2428 -barred 2427 -solicitor 2425 -routinely 2425 -accidental 2424 -fiction 2422 -nicola 2422 -6-2 2422 -reign 2421 -villagers 2420 -bases 2419 -tongue 2419 -motorcycle 2419 -drops 2418 -metro 2418 -bacon 2417 -tan 2416 -toyota 2416 -bundesliga 2414 -ap 2414 -dale 2414 -levy 2414 -legislative 2414 -butter 2414 -shotgun 2414 -beverly 2414 -counties 2413 -wardrobe 2412 -400,000 2412 -diane 2412 -2018 2411 -skill 2411 -premium 2411 -dhabi 2411 -heaven 2411 -royals 2410 -shiite 2409 -sink 2409 -shook 2408 -doll 2408 -petraeus 2407 -monkey 2407 -dental 2406 -dawson 2405 -capabilities 2405 -ibrahim 2405 -mcconnell 2405 -bt 2405 -steenkamp 2404 -expressing 2404 -carlo 2402 -gregory 2401 -ecuador 2400 -accessible 2400 -consumed 2400 -sanctuary 2400 -bidding 2399 -two-thirds 2399 -cara 2397 -tattoos 2397 -grim 2397 -packages 2396 -responses 2396 -exploring 2395 -belongings 2395 -three-year 2395 -slave 2395 -quarterback 2394 -pressing 2394 -inn 2393 -pete 2393 -madeleine 2392 -concerning 2392 -obsessed 2392 -electronics 2391 -thigh 2390 -shaken 2390 -healthier 2389 -3.5 2389 -maintaining 2389 -lohan 2388 -anti-government 2388 -transgender 2387 -magazines 2387 -memorable 2387 -disorders 2386 -participating 2386 -rhetoric 2386 -artwork 2385 -wiltshire 2385 -contrary 2385 -females 2384 -amsterdam 2384 -casual 2384 -forth 2383 -robust 2383 -shortage 2383 -innovative 2382 -diary 2382 -griffin 2381 -produces 2381 -ozil 2380 -dirt 2380 -jihad 2380 -rated 2379 -rip 2379 -1963 2378 -acquired 2378 -assange 2378 -brigade 2376 -rampage 2374 -pump 2374 -costly 2374 -al-shabaab 2374 -approve 2372 -chapel 2372 -tasks 2371 -elegant 2370 -lawn 2369 -exam 2369 -salon 2369 -rolls 2368 -rides 2368 -1970 2367 -merseyside 2367 -hub 2367 -altogether 2366 -hilton 2365 -psychologist 2365 -schumacher 2365 -separately 2364 -tackling 2363 -evolution 2363 -ivan 2363 -eldest 2362 
-mia 2362 -honey 2362 -free-kick 2361 -bury 2361 -broadcasting 2361 -imprisoned 2361 -abduction 2360 -blatter 2360 -patricia 2360 -richmond 2360 -2017 2359 -shall 2359 -fortunate 2358 -exclusively 2357 -branches 2357 -@@ 2356 -bridges 2356 -cancel 2355 -booking 2355 -wished 2354 -preparations 2353 -jungle 2352 -ranking 2350 -motivation 2350 -chen 2348 -pockets 2346 -donna 2345 -circus 2344 -unbeaten 2343 -discovering 2342 -telescope 2342 -mild 2342 -barrister 2341 -prescribed 2340 -holocaust 2339 -chloe 2338 -uncertain 2338 -hello 2338 -kenny 2336 -sacrifice 2336 -chairs 2335 -deborah 2335 -lords 2335 -oprah 2335 -nico 2335 -maritime 2334 -organisers 2332 -confession 2332 -possessing 2332 -securing 2331 -geneva 2331 -pan 2330 -smuggling 2330 -smooth 2330 -wade 2329 -vitamin 2329 -34-year-old 2328 -79 2327 -narrowly 2327 -brits 2326 -1968 2326 -implement 2326 -particles 2326 -blogger 2325 -purse 2324 -trayvon 2324 -spine 2323 -sharapova 2323 -charter 2323 -cord 2320 -cartoon 2320 -premises 2320 -whereas 2320 -panels 2319 -ambitions 2319 -policing 2319 -aiming 2318 -illnesses 2318 -12th 2318 -marking 2318 -overdose 2317 -bryant 2316 -350 2315 -disclosed 2315 -poorly 2314 -alison 2314 -lounge 2314 -carriers 2313 -disruption 2313 -inevitable 2313 -laying 2313 -reds 2312 -refer 2311 -griffiths 2311 -humble 2311 -invented 2310 -borussia 2310 -e. 2310 -surgeries 2309 -rupert 2309 -megan 2309 -participation 2309 -healing 2309 -sadness 2308 -von 2308 -82 2308 -angle 2308 -1974 2308 -ex-husband 2308 -dunn 2307 -skirt 2307 -download 2306 -sandwich 2306 -implemented 2306 -compiled 2305 -tune 2305 -mainland 2305 -boarded 2305 -surveyed 2304 -lily 2304 -bankruptcy 2304 -coins 2303 -costumes 2303 -ambition 2302 -curious 2302 -gloucestershire 2302 -silk 2301 -avoiding 2301 -subs 2300 -resting 2300 -nanny 2300 -lens 2299 -lonely 2298 -mata 2298 -second-degree 2298 -spared 2297 -helpful 2297 -cattle 2297 -antibiotics 2296 -recruited 2296 -camilla 2296 -convoy 2296 -f. 
2295 -alexandra 2293 -besides 2293 -dick 2292 -suburbs 2292 -voluntary 2292 -lynch 2291 -lighter 2291 -recruit 2290 -rifles 2290 -captive 2290 -dublin 2289 -youths 2289 -exploration 2288 -operational 2288 -forbes 2287 -perception 2287 -wrongly 2287 -undergone 2286 -utterly 2286 -companion 2285 -hostile 2285 -monarch 2284 -catastrophic 2282 -bruises 2282 -violating 2282 -halfway 2281 -executions 2281 -blunt 2280 -contractors 2280 -jaw 2279 -fortunately 2278 -credibility 2278 -processing 2277 -robots 2277 -boasted 2277 -imminent 2276 -riley 2276 -frustrating 2275 -justify 2275 -latino 2274 -denying 2274 -uniforms 2274 -disgraced 2273 -3-2 2272 -mumbai 2272 -nebraska 2272 -introducing 2271 -critic 2271 -slight 2271 -disclose 2271 -financially 2271 -sutton 2270 -dc 2270 -sauce 2269 -intend 2269 -sofa 2268 -1972 2268 -burton 2267 -launches 2267 -1977 2266 -voter 2266 -confirmation 2265 -praying 2264 -13th 2264 -ancelotti 2263 -4-0 2263 -agrees 2262 -ghost 2260 -u.s 2260 -cult 2260 -destroying 2258 -u 2258 -morsy 2258 -hopkins 2257 -silly 2256 -permitted 2256 -critically 2255 -enterprise 2255 -blasio 2255 -declaration 2255 -n 2254 -tops 2254 -rope 2254 -wrist 2253 -magnificent 2253 -bans 2252 -strangled 2252 -arnold 2252 -idol 2252 -luxurious 2251 -greene 2251 -barriers 2250 -convert 2250 -external 2250 -deportation 2250 -applying 2250 -expedition 2249 -injuring 2249 -scrap 2249 -reject 2249 -hassan 2248 -sang 2248 -isle 2248 -contributions 2247 -exotic 2247 -15th 2247 -premature 2246 -brutally 2246 -ranch 2246 -pepper 2246 -crashes 2245 -masks 2245 -administrator 2245 -knocking 2245 -credible 2243 -breeding 2242 -vettel 2242 -10-year-old 2241 -brick 2240 -gruesome 2239 -outspoken 2239 -interstate 2239 -load 2238 -inspiring 2238 -gentle 2237 -supplied 2237 -guided 2236 -transform 2236 -canyon 2235 -wikileaks 2234 -distant 2233 -mounting 2233 -mac 2233 -katrina 2232 -grid 2232 -dose 2232 -gunned 2231 -puerto 2231 -troubles 2229 -cowell 2229 -bombers 2229 -tracy 2229 -asda 2229 -lynn 2228 -drank 2228 -typhoon 2228 -declare 2227 -ios 2227 -awesome 2226 -ahmadinejad 2226 -parkinson 2226 -favourites 2225 -ordering 2225 -independently 2225 -privilege 2224 -vomiting 2224 -weiner 2224 -mohammad 2224 -bangladesh 2223 -fiona 2222 -leap 2222 -yahoo 2222 -regrets 2222 -taiwan 2221 -submit 2221 -neighborhoods 2221 -collections 2220 -7.5 2220 -deployment 2220 -katy 2219 -beats 2219 -lakes 2218 -listened 2218 -enrique 2217 -designated 2217 -corrupt 2216 -examining 2216 -predecessor 2215 -jihadist 2215 -beyonce 2215 -deleted 2215 -specially 2215 -journalism 2215 -giggs 2214 -tweeting 2214 -bonuses 2214 -consulate 2213 -importantly 2213 -qualifier 2212 -line-up 2212 -fare 2211 -bicycle 2211 -spinal 2211 -heath 2211 -plymouth 2211 -captivity 2211 -collaboration 2210 -all-time 2209 -jubilee 2209 -crowned 2207 -gm 2207 -instructed 2206 -plight 2206 -cotton 2205 -flagship 2205 -fabric 2204 -contacts 2204 -xbox 2204 -escort 2203 -dish 2203 -firefighter 2202 -medicare 2201 -pageant 2201 -remorse 2200 -backyard 2199 -owed 2199 -cow 2199 -hms 2199 -1964 2198 -exhausted 2198 -racially 2197 -sao 2197 -labels 2197 -embraced 2197 -transparent 2196 -awkward 2195 -140 2195 -reliable 2195 -floyd 2194 -layers 2194 -outskirts 2193 -masked 2193 -84 2193 -overhaul 2193 -sections 2193 -bahrain 2193 -confront 2192 -consciousness 2191 -emotionally 2191 -acute 2191 -bravery 2191 -lands 2190 -lingerie 2190 -socialist 2189 -frankly 2189 -declaring 2188 -sciences 2188 -betting 2188 -80,000 2188 -container 2187 -theories 2187 -lip 2187 
-ireport 2186 -spider 2186 -kills 2186 -wrap 2186 -slain 2186 -alien 2186 -hagel 2185 -purchases 2185 -skipper 2184 -directions 2184 -classmates 2184 -tunisia 2184 -allied 2183 -monument 2183 -advisory 2183 -daddy 2183 -zones 2183 -justices 2182 -mouse 2182 -replica 2182 -81 2182 -resist 2182 -crops 2182 -implants 2181 -neighbouring 2180 -supposedly 2180 -fraser 2180 -neighbourhood 2180 -cameroon 2179 -trillion 2179 -chan 2178 -rank 2178 -relegation 2178 -glamour 2178 -tooth 2177 -nickname 2177 -thankfully 2177 -oakland 2176 -kicks 2175 -tycoon 2174 -wendy 2174 -scattered 2173 -sank 2173 -punching 2173 -nutrition 2172 -hat-trick 2172 -considers 2172 -definition 2171 -debates 2171 -prospects 2171 -rats 2170 -participated 2170 -lamb 2170 -drilling 2169 -madonna 2168 -hats 2167 -elder 2166 -dismissal 2165 -pr 2164 -seals 2164 -accuse 2164 -honestly 2164 -idaho 2164 -cleaner 2163 -adelaide 2163 -sketch 2163 -1.3 2162 -priorities 2161 -processes 2161 -wiped 2161 -speeches 2161 -1st 2159 -rigby 2159 -minorities 2158 -footballers 2158 -influenced 2157 -fearing 2157 -feat 2156 -batteries 2155 -denial 2155 -santos 2155 -earliest 2155 -fertility 2154 -instruments 2154 -algeria 2153 -insects 2153 -rankings 2152 -transformation 2151 -sponsors 2150 -darkness 2150 -archaeologists 2150 -bent 2150 -lining 2149 -attributed 2148 -fiancee 2148 -83 2147 -patent 2145 -medium 2145 -gingrich 2145 -retiring 2145 -guitar 2145 -curb 2144 -protesting 2144 -responsibilities 2144 -risky 2142 -malcolm 2142 -soared 2141 -beatles 2141 -shepherd 2141 -urine 2139 -distressed 2139 -collided 2139 -hanged 2138 -newton 2138 -corporal 2137 -drill 2137 -coventry 2137 -genes 2137 -buffalo 2137 -daley 2137 -bug 2136 -breached 2135 -bargain 2135 -javier 2135 -poison 2135 -census 2134 -contestants 2134 -airbus 2134 -attitudes 2133 -thorough 2132 -screamed 2132 -kissing 2132 -tonnes 2131 -mercy 2130 -investments 2130 -e-mails 2130 -divide 2130 -deliberate 2130 -luiz 2129 -nolan 2129 -justified 2128 -pietersen 2128 -roadside 2128 -blaming 2127 -annually 2127 -northampton 2126 -14th 2126 -refuses 2126 -commuters 2125 -spark 2125 -espn 2124 -weekends 2124 -steam 2123 -wondering 2123 -lanza 2123 -pittsburgh 2123 -cyprus 2122 -horizon 2122 -shorter 2121 -abandon 2120 -fisher 2120 -recordings 2120 -gabriel 2119 -grocery 2119 -outer 2119 -poppy 2119 -walmart 2118 -180 2117 -televised 2117 -athens 2116 -dies 2116 -salmon 2116 -harbor 2116 -surrender 2115 -locate 2115 -raced 2115 -would-be 2115 -shannon 2114 -opposing 2114 -grows 2114 -evolved 2113 -elvis 2111 -exhibit 2110 -economies 2110 -encountered 2110 -mere 2110 -guaranteed 2110 -prostitutes 2109 -warehouse 2109 -1975 2109 -economist 2109 -cahill 2108 -physician 2108 -starbucks 2108 -ousted 2108 -900 2108 -serbia 2106 -wasted 2104 -adapt 2103 -mice 2103 -persuaded 2103 -altercation 2102 -amazed 2102 -drogba 2101 -1967 2101 -surf 2101 -log 2101 -part-time 2100 -parenting 2099 -trainers 2098 -governors 2097 -locally 2097 -illustrated 2096 -runners 2096 -disastrous 2095 -specialists 2095 -needing 2094 -persistent 2093 -nevertheless 2093 -significance 2093 -reflection 2092 -hertfordshire 2092 -digging 2092 -contributing 2092 -marcus 2092 -floral 2091 -fortnight 2091 -blessed 2090 -recipe 2090 -noble 2090 -exchanges 2089 -languages 2089 -reply 2088 -philosophy 2088 -consultation 2087 -clarkson 2087 -tragically 2086 -kieran 2086 -abuses 2085 -substances 2085 -prototype 2085 -scorer 2084 -short-term 2084 -astronaut 2083 -concentrate 2081 -slashed 2080 -notion 2080 -serena 2079 -prank 2079 
-1973 2079 -waving 2078 -capability 2078 -nuts 2077 -battalion 2077 -mandate 2077 -fetch 2077 -doubles 2076 -sparking 2076 -o 2076 -agony 2075 -zara 2075 -sgt 2075 -notably 2075 -provision 2074 -diplomat 2073 -angered 2073 -sake 2073 -performers 2073 -boycott 2073 -investigative 2073 -enthusiasm 2073 -marched 2072 -dolls 2072 -picks 2072 -measuring 2071 -arabic 2071 -inform 2071 -requirement 2071 -refers 2071 -porter 2070 -artillery 2069 -four-year 2068 -ivf 2068 -bitten 2068 -hezbollah 2068 -failures 2067 -goodman 2067 -impress 2066 -undermine 2066 -achievements 2066 -commanders 2065 -withdrew 2065 -playground 2064 -sniper 2064 -salad 2064 -fragile 2064 -mccartney 2063 -crude 2063 -advise 2063 -pigs 2062 -biting 2062 -devastation 2062 -uganda 2061 -devil 2061 -mixture 2061 -muhammad 2061 -streaming 2061 -delicate 2060 -scouts 2060 -1.6 2060 -attracting 2059 -guardiola 2057 -tribe 2056 -bulls 2056 -lunar 2055 -musicians 2055 -hatred 2055 -locks 2054 -jihadists 2054 -pavement 2054 -beth 2054 -headline 2052 -circles 2052 -identities 2052 -categories 2052 -denise 2051 -driveway 2051 -dominant 2051 -gaddafi 2049 -netflix 2049 -graffiti 2049 -icy 2049 -pedro 2047 -crocodile 2046 -honored 2045 -constructed 2044 -memo 2044 -refuge 2044 -judged 2043 -militia 2043 -editorial 2043 -ralph 2043 -bailout 2042 -cesc 2042 -sperm 2042 -lego 2041 -lyrics 2041 -middlesbrough 2039 -ex-girlfriend 2039 -couch 2038 -sailors 2037 -exeter 2037 -robbie 2037 -al-qaeda 2037 -revive 2037 -bits 2034 -shapes 2034 -70,000 2034 -brewer 2033 -robben 2033 -yaya 2033 -paperwork 2032 -glen 2032 -misdemeanor 2032 -nerves 2032 -bloom 2031 -wireless 2031 -honda 2031 -script 2030 -whistle 2030 -offshore 2029 -boards 2029 -speakers 2028 -janeiro 2028 -jolie 2028 -belongs 2028 -herrera 2027 -walters 2027 -eliminate 2027 -literature 2027 -farming 2026 -sums 2026 -debbie 2026 -plotting 2025 -busiest 2024 -nail 2024 -sting 2023 -genocide 2023 -profession 2022 -exams 2022 -alike 2022 -motorway 2022 -hashtag 2022 -clashed 2022 -hasan 2022 -crane 2021 -planted 2021 -intensity 2021 -netted 2021 -guinness 2021 -negotiating 2020 -prohibited 2019 -cubs 2019 -wolves 2019 -brooke 2019 -bentley 2018 -coral 2018 -fifty 2017 -fits 2017 -montgomery 2017 -flexible 2017 -bout 2016 -separation 2016 -indicating 2016 -malala 2015 -newark 2015 -groves 2014 -newman 2014 -disabilities 2014 -robson 2014 -ellen 2014 -35-year-old 2013 -blasts 2013 -correctly 2013 -boyd 2013 -lincolnshire 2013 -sights 2012 -abdul 2012 -associates 2012 -soaring 2011 -shaped 2011 -pie 2011 -mechanical 2011 -rod 2010 -pro-russian 2010 -schemes 2009 -processed 2009 -t-shirts 2008 -releases 2007 -bump 2007 -imagined 2006 -chart 2006 -expose 2006 -inherited 2006 -aberdeen 2006 -presenting 2005 -instrument 2005 -blackberry 2005 -makeup 2004 -ribs 2004 -supervision 2004 -pin 2004 -historian 2003 -stern 2003 -provoked 2003 -appointments 2003 -darling 2002 -rental 2002 -unsuccessful 2001 -marina 2000 -components 2000 -clips 2000 -calf 1999 -arguably 1999 -suppliers 1998 -barton 1998 -advocacy 1998 -delaware 1997 -wow 1997 -offense 1996 -swelling 1996 -brink 1996 -whitehall 1995 -cub 1995 -venues 1994 -dug 1994 -wi-fi 1994 -onlookers 1993 -freely 1993 -screams 1992 -1945 1992 -laughter 1992 -genuinely 1992 -applause 1992 -conflicts 1992 -manages 1991 -thoroughly 1990 -charts 1990 -baroness 1990 -broadway 1990 -hated 1989 -intends 1989 -fossil 1989 -refusal 1989 -leo 1988 -podium 1987 -encourages 1986 -pearl 1986 -gorgeous 1986 -scout 1986 -ditch 1986 -joyce 1985 -ellie 1984 
-convenience 1984 -descended 1984 -seeds 1983 -fictional 1983 -banker 1983 -gilbert 1983 -aggression 1982 -pacquiao 1982 -smoked 1981 -bubble 1981 -turf 1981 -accent 1981 -blade 1980 -paradise 1979 -dragon 1978 -relate 1978 -lanes 1977 -nearest 1977 -sunset 1976 -lindsey 1976 -88 1976 -â 1976 -fiance 1975 -sail 1974 -existed 1974 -payne 1974 -opt 1973 -stint 1973 -sainsbury 1973 -habitat 1972 -submarine 1972 -shootout 1972 -worthy 1972 -references 1972 -decides 1971 -hussain 1970 -360 1969 -repairs 1969 -echoed 1968 -animated 1968 -underage 1968 -gibbs 1967 -invitation 1967 -cracked 1966 -altitude 1966 -clearing 1966 -j 1966 -asthma 1966 -savage 1966 -pains 1966 -provider 1965 -buzz 1965 -spike 1965 -assessed 1965 -steep 1964 -jade 1964 -intentions 1964 -reunion 1964 -stretched 1963 -gemma 1963 -lebanese 1963 -160 1962 -lallana 1961 -naming 1960 -adverts 1960 -magical 1960 -ivanovic 1959 -sprawling 1959 -briton 1959 -salaries 1958 -seven-year-old 1958 -memoir 1958 -accomplished 1958 -pouring 1957 -jealous 1956 -seaside 1956 -plaza 1955 -experiments 1955 -prosthetic 1955 -counting 1955 -honeymoon 1954 -monk 1954 -hardy 1954 -mahmoud 1954 -prosecute 1954 -hottest 1954 -equaliser 1954 -sunglasses 1953 -clinics 1953 -hamstring 1953 -miners 1953 -dynamic 1953 -junk 1951 -cheek 1951 -accommodate 1951 -unwanted 1951 -bust 1950 -= 1950 -reef 1950 -depend 1950 -surgical 1950 -mobility 1950 -dependent 1949 -publisher 1949 -leaks 1949 -1971 1949 -spying 1949 -butt 1949 -scope 1948 -cooked 1948 -tribune 1948 -commerce 1948 -registration 1948 -2-2 1948 -maternity 1947 -pickup 1947 -pursued 1947 -86 1947 -par 1947 -hoffman 1947 -flesh 1946 -disputes 1946 -matthews 1946 -1966 1945 -ballet 1945 -bikini 1945 -liu 1945 -margin 1944 -36-year-old 1944 -nazis 1944 -fundraiser 1944 -daisy 1944 -downton 1944 -functions 1944 -polo 1943 -wallet 1943 -monitors 1943 -mates 1943 -respiratory 1942 -martial 1942 -skeleton 1942 -lin 1942 -tricky 1941 -leisure 1941 -hilarious 1940 -signings 1939 -endless 1939 -nike 1939 -booth 1938 -sinking 1938 -erin 1938 -manhunt 1938 -misleading 1937 -tracey 1937 -linking 1937 -criteria 1936 -versus 1936 -monetary 1936 -luther 1936 -imagination 1935 -halted 1935 -boundaries 1935 -tournaments 1935 -botched 1934 -articles 1934 -sculpture 1933 -humor 1933 -narrative 1933 -a. 
- [deleted data file: a word-frequency vocabulary list, one whitespace-separated "word count" pair per diff line, sorted by descending count; this span runs from count 1932 ("tents", "accuses", ...) down to 935 ("manufacture"). The several thousand deleted entries, including punctuation-like tokens such as "!!", "**" and "™", are omitted here.]
-undertaking 935 -assumption 935 -interpreted 935 -ioc 935 -defences 934 -smear 934 -broadwell 934 -batting 933 -basle 933 -paralysis 933 -councillors 933 -gusts 932 -inciting 932 -perished 932 -hawaiian 932 -tanya 932 -desperation 932 -unmarked 932 -mega 932 -back-to-back 932 -goalless 932 -fuss 931 -monte 931 -bosnian 931 -dragons 931 -4-year-old 931 -robyn 931 -chants 931 -counterfeit 931 -clinch 931 -mouths 931 -profitable 931 -scanner 931 -g4s 931 -detector 931 -nova 930 -burglars 930 -practiced 930 -north-east 930 -chopped 930 -crumbling 930 -slayings 930 -collectively 930 -sanitation 930 -aclu 930 -magnate 929 -mauled 929 -millionaires 929 -volumes 929 -callous 928 -fearless 928 -electorate 928 -hints 928 -inconvenience 928 -szczesny 928 -samir 928 -judith 928 -sikh 927 -relocated 927 -hikes 927 -ravaged 927 -susceptible 927 -prescriptions 927 -waterloo 927 -epilepsy 927 -reconsider 927 -mighty 927 -nightly 927 -genetically 926 -vaz 926 -hurry 926 -possessed 926 -brenda 926 -perks 926 -gowns 926 -lifeless 926 -defends 926 -ignorance 926 -patriot 925 -lays 925 -zach 925 -kylie 925 -ons 925 -elton 925 -californian 925 -co-operation 925 -dumb 925 -groundbreaking 925 -bedfordshire 925 -tia 925 -liar 924 -alec 924 -automated 924 -harrods 924 -freezer 924 -glove 923 -keegan 923 -influences 923 -wicked 923 -newt 923 -paltrow 923 -repaired 923 -occurrence 923 -1956 923 -6th 923 -sub 923 -evenings 922 -sister-in-law 922 -60-year-old 922 -brightly 922 -rests 922 -ovation 922 -laurie 922 -iniesta 922 -jen 922 -idiot 921 -culprit 921 -peshawar 921 -britannia 921 -twenties 921 -gcse 921 -volkswagen 921 -vein 921 -dude 920 -jar 920 -irrelevant 920 -centre-back 920 -psychologists 920 -maynard 920 -consolation 920 -al-awlaki 920 -toddlers 920 -1943 919 -americas 919 -revered 919 -nationalist 919 -zuma 918 -jurgen 918 -directive 918 -tostee 918 -froome 917 -spun 917 -parenthood 917 -withdrawing 917 -lent 917 -prescott 917 -rosemary 917 -monks 917 -filmmakers 917 -dickens 916 -forster 916 -emblazoned 916 -collects 916 -ligament 916 -cosy 916 -slid 916 -quo 916 -muscular 916 -khamenei 916 -111 916 -vigorously 915 -sodium 915 -mcmahon 915 -algerian 915 -byron 915 -scalp 915 -satirical 915 -paedophiles 915 -primaries 914 -concessions 914 -randall 914 -battersea 914 -tampering 914 -ethiopian 914 -heist 914 -cereal 913 -unanimous 913 -naive 913 -restart 913 -three-time 913 -sheridan 913 -sukumaran 913 -doherty 913 -nathaniel 913 -upload 913 -classics 913 -deterrent 912 -bowe 912 -generals 912 -rabbits 912 -volleyball 912 -placement 912 -°c 912 -beacon 912 -pints 912 -billionaires 912 -documenting 912 -lowering 911 -cleaners 911 -actresses 911 -pies 911 -misunderstanding 911 -peshmerga 911 -pandas 911 -denim 911 -vinci 910 -jennings 910 -cynical 910 -spontaneous 910 -pontiff 910 -175 910 -sorted 909 -taller 909 -labs 909 -bleed 909 -counselor 909 -usb 909 -scuffle 909 -hence 909 -broncos 909 -winding 909 -distract 908 -ruiz 908 -bets 908 -rams 908 -midweek 908 -consult 908 -ravi 908 -orion 907 -discounts 907 -drastically 907 -stash 907 -sprinter 907 -becker 907 -slender 907 -buttocks 907 -onion 906 -perceptions 906 -chevrolet 906 -parody 906 -connolly 906 -booze 906 -swans 906 -resilient 906 -edgar 906 -alright 905 -cleanup 905 -belarus 905 -doubling 904 -disruptive 904 -understandable 904 -sexism 904 -cecil 904 -mimic 904 -snapping 904 -gardener 904 -routh 904 -greets 904 -emergence 903 -evolving 903 -negotiation 903 -crammed 903 -vow 903 -attributes 903 -statutory 903 -rewarding 903 -consortium 903 -8.5 
903 -shelly 903 -handbags 902 -panorama 902 -usain 902 -steele 902 -separating 902 -anita 902 -jnr 902 -anti-social 901 -reindeer 901 -quebec 901 -marcelo 901 -dads 901 -paints 901 -snyder 901 -bred 901 -cane 901 -meghan 901 -fibre 901 -winters 901 -vargas 900 -mineral 900 -regimes 900 -angles 900 -marr 900 -cardiovascular 900 -1918 900 -wellbeing 900 -mi6 899 -expire 899 -adhd 899 -cho 899 -tags 899 -perverting 899 -anchorage 899 -hi 899 -haunt 899 -pitched 899 -massively 898 -reassured 898 -knowles 898 -prematurely 898 -testifying 898 -beatings 898 -eleanor 898 -reeling 898 -longstanding 898 -fathered 898 -bunny 897 -sixties 897 -razor 897 -debuchy 897 -huntsman 897 -week-long 897 -ripping 896 -stripping 896 -haunting 896 -insanity 896 -trolley 896 -bastion 896 -weinstein 896 -pelvis 896 -azarenka 896 -tanning 896 -transferring 895 -hurdles 895 -kfc 895 -tighten 895 -siberian 895 -dent 895 -mend 894 -stacy 894 -mclaughlin 894 -arrow 894 -enrichment 894 -tasty 894 -crescent 894 -dolan 894 -overshadowed 894 -edged 894 -curled 894 -angus 894 -haircut 894 -shave 893 -robbing 893 -announcements 893 -illustrious 893 -mcdowell 893 -contests 893 -disguised 893 -howe 893 -netting 893 -winchester 893 -mat 892 -emanuel 892 -antiques 892 -sinkhole 892 -tighter 892 -cafes 892 -carragher 892 -profoundly 892 -sergei 892 -qatari 891 -panoramic 891 -flanagan 891 -cairns 891 -ultrasound 891 -dominique 891 -scouting 891 -accelerate 891 -ejected 891 -pham 891 -evolve 891 -stride 891 -interval 891 -perimeter 891 -rusty 891 -105 890 -andres 890 -stand-off 889 -eastwood 889 -candidacy 889 -emergencies 889 -propofol 889 -3.2 889 -sox 889 -randomly 889 -velvet 889 -staffer 889 -sportsman 889 -mandy 888 -contingent 888 -replay 888 -kai 888 -mentions 888 -marred 888 -much-needed 888 -beverage 888 -securities 888 -ernest 888 -iq 888 -eduardo 888 -vague 888 -pod 888 -devout 888 -shoved 888 -grande 888 -dull 887 -substituted 887 -slate 887 -burnham 887 -forensics 887 -improves 887 -cristina 887 -oasis 886 -plaintiff 886 -jails 886 -punishments 886 -tuna 886 -barbaric 886 -arranging 886 -distinguish 886 -compact 885 -auburn 885 -paces 885 -croatian 885 -trott 885 -constructive 885 -schoolgirls 885 -internally 885 -scooped 885 -brides 885 -bloggers 884 -ribbon 884 -vieira 884 -mignolet 884 -showcased 884 -charismatic 884 -eliminating 884 -treasurer 884 -observing 884 -platinum 883 -disperse 883 -bondi 883 -molestation 883 -appliances 883 -waugh 883 -5am 883 -sleeps 883 -easyjet 883 -evicted 882 -cooperative 882 -ambushed 882 -provoke 882 -embryos 882 -cupboard 882 -weston 882 -arose 882 -manipulated 882 -hollow 882 -three-bedroom 882 -jovetic 881 -deflected 881 -naughty 881 -shia 881 -geography 881 -dusty 881 -trespassing 881 -dietary 881 -e-cigarettes 881 -bursts 881 -hs2 881 -jarvis 880 -jointly 880 -emory 880 -medic 880 -crippled 880 -dvds 880 -roaming 880 -eye-catching 880 -taxis 880 -siri 879 -fulfilling 879 -hepatitis 879 -criticising 879 -reinforced 879 -orchestra 879 -entertained 879 -beaming 879 -unused 879 -flint 878 -arc 878 -hutton 878 -finalist 878 -demons 878 -davey 878 -locking 878 -unlawfully 878 -henning 878 -tricked 877 -methodist 877 -goldsmith 877 -sobbed 877 -caliphate 877 -bermuda 877 -x-rays 877 -savvy 877 -identifies 877 -lynne 877 -idyllic 876 -mangala 876 -dashed 876 -guiding 876 -liaison 876 -tammy 876 -surged 876 -leukaemia 876 -morally 876 -tulsa 876 -welcomes 875 -maloney 875 -anni 875 -gripped 875 -coincide 875 -edmonds 875 -freeway 875 -folded 875 -humidity 875 -bursting 875 -isla 875 
-skeletons 875 -stirred 874 -bribes 874 -charlene 874 -prevalent 874 -pele 874 -rendered 874 -unchanged 874 -ched 874 -innes 874 -deeds 874 -retrieved 874 -alligator 874 -professionalism 874 -candid 873 -self-inflicted 873 -masterpiece 873 -powerless 873 -conceding 873 -extraordinarily 873 -volunteering 873 -amusing 873 -adm. 873 -samoa 873 -1.9 873 -absorb 873 -glitter 873 -oscar-winning 872 -farc 872 -overseen 872 -valle 872 -fanatics 872 -stockport 872 -sas 872 -bono 872 -fumes 872 -stimulate 872 -shrink 872 -diaries 872 -warden 872 -missionary 871 -56-year-old 871 -low-cost 871 -jayden 871 -internationals 871 -lifestyles 871 -windscreen 871 -carriageway 871 -pa 870 -garrido 870 -commercials 870 -ander 870 -rubbing 870 -stoppage 870 -wu 870 -viii 870 -sported 870 -server 869 -tissues 869 -modeling 869 -shrapnel 869 -monuments 869 -rulings 869 -adjusted 869 -extensions 869 -ensued 869 -tiles 869 -york-based 868 -brainchild 868 -230 868 -bravely 868 -7.30 868 -stemmed 868 -adorned 868 -pitches 868 -januzaj 868 -awe 868 -countdown 868 -takeoff 867 -downfall 867 -colon 867 -dynamics 867 -dictatorship 867 -dossier 867 -kidnappers 867 -bowie 867 -traps 867 -thibaut 867 -vastly 866 -lenses 866 -lankan 866 -romeo 866 -marin 866 -fulfilled 866 -armour 866 -duffy 866 -bowls 866 -cooke 866 -advantages 865 -rosetta 865 -23rd 865 -candle 865 -surpassed 865 -lingering 865 -fronts 865 -elect 865 -celsius 864 -granting 864 -crocodiles 864 -trolls 864 -skrtel 864 -freight 864 -unnoticed 864 -subscribe 864 -relates 864 -ironic 864 -timetable 863 -installing 863 -renault 863 -mastectomy 863 -olympian 863 -byrne 862 -claw 862 -authorised 862 -yosemite 862 -promotions 862 -succumbed 862 -knowingly 862 -abby 861 -cheque 861 -650 861 -hackney 861 -galactic 861 -cholera 861 -deng 861 -brunette 860 -brazen 860 -vendors 860 -inland 860 -low-income 860 -exclusion 860 -waterfront 860 -consistency 860 -mold 860 -high-risk 860 -shareholder 860 -dessert 859 -pricey 859 -aesthetic 859 -exhibited 859 -glue 859 -alexandria 859 -naples 859 -abide 859 -wake-up 859 -treasures 859 -handouts 858 -stormy 858 -resolutions 858 -dejan 858 -upstate 858 -diagnose 858 -confidentiality 858 -sobbing 857 -fusion 857 -7-year-old 857 -0.5 857 -9-year-old 857 -saad 857 -esther 856 -ho 856 -laurence 856 -dicaprio 856 -gateway 856 -cm 856 -''' 856 -ferrer 856 -adrenaline 855 -criticize 855 -omaha 855 -2pm 855 -renovated 855 -napolitano 855 -22,000 855 -josie 855 -drip 855 -perfection 855 -schizophrenia 855 -skyscraper 855 -timber 855 -sushi 855 -third-party 854 -wong 854 -swung 854 -slamming 854 -variations 854 -10m 854 -pristine 854 -dunham 854 -sleeves 854 -navas 854 -aviva 854 -derailed 854 -selecting 853 -knicks 853 -spiked 853 -dispatch 853 -juncker 853 -mammal 853 -sized 853 -treacherous 853 -ella 852 -arise 852 -fences 852 -scramble 852 -offset 852 -draped 852 -50million 852 -keynes 852 -1936 852 -terraced 852 -concentrating 852 -honoring 852 -cuddle 851 -erratic 851 -fascination 851 -endeavour 851 -stratford 851 -convey 851 -analyzed 851 -bridget 851 -parcel 850 -progression 850 -decay 850 -skinner 850 -bathing 850 -gospel 850 -reservation 850 -endorse 850 -poachers 849 -bonnie 849 -inappropriately 849 -poaching 849 -forums 849 -coe 849 -hanson 849 -sufficiently 848 -consoles 848 -pits 848 -redundant 848 -abruptly 848 -ecstatic 848 -chewing 848 -shearer 848 -grimes 848 -debating 848 -cages 848 -bridger 848 -serb 847 -persona 847 -sucked 847 -turnaround 847 -mackenzie 847 -khedira 847 -mep 847 -salisbury 847 -stonehenge 847 
-motoring 847 -pirlo 847 -continents 847 -farmhouse 847 -pro-democracy 847 -gymnastics 846 -govern 846 -sanctioned 846 -gregg 846 -couture 846 -phd 846 -descendants 846 -logged 846 -zabaleta 846 -levine 846 -favorable 846 -ankles 846 -detainee 845 -floss 845 -ava 845 -hostility 845 -lifeline 845 -purportedly 845 -standby 845 -refrain 845 -dejesus 845 -rub 845 -gleneagles 845 -biker 845 -62-year-old 844 -interface 844 -indies 844 -flattering 844 -implanted 844 -letizia 844 -dejected 844 -holed 844 -conceive 844 -bouncer 843 -branislav 843 -edible 843 -publications 843 -homecoming 843 -vehemently 843 -uncover 843 -silverman 843 -sprung 843 -afforded 843 -falcons 843 -doe 843 -vinson 842 -preservation 842 -extracted 842 -terminally 842 -stamped 842 -custodial 842 -forecaster 842 -footing 842 -brewing 842 -thighs 842 -artworks 841 -banter 841 -loaned 841 -loser 841 -break-in 841 -regretted 841 -ricciardo 841 -bumped 841 -tuned 841 -noticeable 841 -goodness 840 -misled 840 -crawling 840 -inflated 840 -vicar 840 -smarter 840 -loophole 840 -weaken 840 -paolo 840 -withheld 840 -pike 840 -vii 840 -newlyweds 840 -recognizes 840 -hype 839 -bordeaux 839 -unbearable 839 -ploughed 839 -naacp 839 -spacious 839 -chelmsford 839 -close-up 838 -substitutes 838 -managerial 838 -someday 838 -knightsbridge 838 -poultry 838 -coconut 838 -kashmir 838 -sleepy 838 -8th 837 -dreaming 837 -proportions 837 -schwartz 837 -nov. 837 -cruising 837 -taunted 837 -derived 837 -downward 837 -lithuania 837 -sings 836 -swore 836 -right-back 836 -adultery 836 -outages 836 -modelled 836 -towels 836 -plush 836 -salesman 836 -mother-of-four 836 -objectives 836 -provocation 835 -anti-gay 835 -hurricanes 835 -construct 835 -flared 835 -shipments 835 -soldado 835 -3.6 835 -payroll 835 -margins 835 -a-list 835 -leaping 835 -midfielders 835 -dyche 835 -monsters 835 -peaches 834 -defamation 834 -nexus 834 -disgruntled 834 -conjunction 834 -bulletin 834 -far-right 834 -roofs 833 -castillo 833 -guarding 833 -jules 833 -newer 833 -lamela 833 -son-in-law 833 -surrounds 833 -shoplifting 833 -mindset 833 -think-tank 833 -poisonous 832 -quantum 832 -bumps 832 -overjoyed 832 -eriksen 832 -middlesex 832 -alarms 832 -flashed 832 -roar 832 -amanpour 832 -proteins 831 -thrashed 831 -birthplace 831 -entitlement 831 -priceless 831 -ants 831 -hubble 831 -depict 831 -quran 831 -furry 830 -sickened 830 -atkins 830 -20-year 830 -3.3 830 -allocated 830 -declares 830 -fulfil 830 -safest 829 -claudio 829 -ellison 829 -unsettled 829 -genital 829 -pest 829 -purported 829 -curves 829 -howell 829 -co2 829 -vampire 829 -linkedin 829 -awoke 829 -bustling 829 -championed 828 -thwarted 828 -jonas 828 -predatory 828 -brilliantly 828 -chung 828 -curtains 828 -centenary 828 -oman 828 -hans 828 -orchestrated 827 -stringent 827 -carver 827 -barbour 827 -pac 827 -sanction 827 -descend 827 -co-worker 827 -ensures 827 -java 827 -falkland 827 -premiums 827 -exchanging 826 -totalling 826 -shin 826 -blistering 826 -dimaggio 826 -tab 826 -scrambling 826 -texture 826 -unreasonable 826 -incorporated 826 -discourage 825 -mikhail 825 -kaufman 825 -dilemma 825 -medallist 825 -reminding 825 -peaked 825 -conway 825 -microwave 824 -imitation 824 -rosenberg 824 -motto 824 -attic 824 -silicone 824 -hazel 824 -uniformed 824 -year-long 823 -neanderthals 823 -retro 823 -prohibit 823 -nautical 823 -exhaustion 823 -dec. 
823 -intimidate 823 -ew 823 -dipped 823 -samaritan 823 -examinations 823 -elsa 822 -misty 822 -bonnet 822 -orphans 822 -exploding 822 -housekeeper 821 -1am 821 -tummy 821 -sacrificed 821 -inflammatory 821 -beginnings 821 -mosquito 821 -manaus 821 -homage 820 -necessity 820 -malibu 820 -ernst 820 -scenic 820 -ufo 820 -barnsley 820 -tirelessly 820 -footprint 820 -crystals 820 -semi 820 -intel 820 -chunks 820 -wax 820 -ego 819 -cancellations 819 -broadcasts 819 -replacements 819 -kemp 819 -pelle 819 -lesbians 819 -weaponry 819 -completes 819 -constitute 819 -lows 818 -amendments 818 -diocese 818 -macy 818 -highland 818 -abdel 818 -o'reilly 817 -fidel 817 -vouchers 817 -anti-doping 817 -kobani 817 -kidnappings 817 -mitigation 817 -decree 817 -marvin 817 -gu 817 -onset 817 -petr 817 -brandishing 816 -mechanics 816 -globes 816 -propelled 816 -vineyard 816 -al-nusra 816 -pooch 816 -loughner 816 -gorillas 816 -frieden 815 -2.8 815 -ventures 815 -hanna 815 -16million 815 -aloft 815 -rasmussen 815 -agitated 815 -shaping 814 -dorner 814 -dogged 814 -tick 814 -long-awaited 814 -reno 814 -embark 813 -vicente 813 -leverage 813 -harming 813 -sweater 813 -1937 813 -railways 813 -solomon 813 -outage 813 -malawi 813 -obscure 813 -evolutionary 812 -insights 812 -recess 812 -punishing 812 -reinforce 812 -chant 812 -mahmood 812 -selhurst 811 -climbs 811 -monoxide 811 -religions 811 -eastenders 811 -fabian 811 -head-on 811 -docked 811 -trilogy 811 -basics 811 -1915 811 -dickinson 811 -bianchi 811 -overcame 811 -ceilings 811 -lunches 811 -135 811 -archie 810 -wide-ranging 810 -starvation 810 -maze 810 -packer 810 -cowardly 810 -scarborough 810 -variation 810 -vidic 810 -lidl 810 -dismay 810 -joachim 810 -sophomore 809 -ticking 809 -bikers 809 -posture 809 -takeaways 809 -feline 809 -mould 809 -dos 809 -probing 808 -bureaucracy 808 -graphics 808 -quoting 808 -weibo 808 -slippery 808 -nguyen 808 -murderous 808 -vaccinated 808 -welby 808 -differ 808 -replaces 808 -rituals 808 -biblical 807 -angola 807 -daredevil 807 -constabulary 807 -participant 807 -lagos 807 -much-loved 807 -swathes 807 -confessions 806 -cite 806 -hovering 806 -behavioural 806 -evangelical 806 -poppies 806 -kitchens 806 -sawyer 806 -devotion 806 -right-hand 806 -first-class 806 -infidelity 806 -fielding 806 -5.30 806 -outpost 805 -personalised 805 -backlog 805 -judd 805 -crawley 805 -corcoran 805 -faint 805 -listens 805 -waived 805 -60th 805 -sotloff 805 -pathetic 805 -tunisian 805 -keystone 805 -jinping 805 -cheerful 804 -criticisms 804 -ikea 804 -untouched 804 -fanatic 804 -downey 804 -er 804 -lloris 804 -moroccan 804 -wii 804 -diarrhea 804 -staffing 804 -hooper 803 -hangover 803 -interpreter 803 -arteries 803 -htc 803 -indicator 803 -3.7 803 -crosby 802 -julio 802 -boateng 802 -sympathies 802 -intern 802 -salvation 802 -lush 802 -self-proclaimed 802 -edit 802 -unlocked 802 -enjoyable 802 -practising 801 -mccoy 801 -jelly 801 -explicitly 801 -redskins 801 -triumphed 801 -hikers 801 -telecommunications 801 -skulls 801 -all-star 800 -unseen 800 -astonished 800 -stumbling 800 -divine 800 -ventilator 800 -binding 800 -paso 800 -thiago 800 -towie 800 -connie 800 -stand-up 800 -gypsy 800 -souls 800 -high-ranking 800 -haines 799 -slew 799 -drifted 799 -proceeding 799 -fragrance 799 -businesswoman 799 -cod 799 -deportivo 799 -valdes 799 -sandringham 799 -sim 799 -remedy 799 -condemns 799 -kittens 799 -temptation 799 -o'clock 798 -mayhem 798 -complexity 798 -companions 798 -6.30 798 -lahore 798 -top-flight 798 -barring 798 -communal 797 -ideals 797 
-accuser 797 -majestic 797 -libraries 797 -barbados 797 -bitterly 797 -accomplices 797 -burglaries 797 -fend 797 -donaldson 797 -paralympics 797 -physique 797 -stevie 796 -stoke-on-trent 796 -mushrooms 796 -limelight 796 -wessex 796 -indefinite 796 -granite 796 -vent 796 -blurred 796 -glaciers 796 -artefacts 796 -jan. 796 -noses 796 -jimenez 796 -dimitrov 795 -senses 795 -vocabulary 795 -absorbed 795 -rational 795 -selective 794 -mechanisms 794 -mcguire 794 -napoleon 794 -nasser 794 -als 794 -misguided 794 -kandahar 794 -forcibly 794 -logical 794 -swarm 794 -sedan 794 -prigg 794 -manipulation 794 -reliant 793 -ridiculed 793 -blockade 793 -president-elect 793 -clipped 793 -translator 793 -prowess 792 -seizing 792 -novelty 792 -star-studded 792 -shortlist 792 -exited 792 -ambassadors 792 -tenant 792 -fernandes 792 -handguns 792 -dalton 792 -researched 792 -hiv/aids 792 -earners 792 -royce 791 -adored 791 -cavani 791 -trenches 791 -ballroom 791 -receipts 791 -desktop 791 -1pm 791 -four-time 791 -influenza 791 -barefoot 791 -density 791 -equestrian 791 -enforcing 790 -jogging 790 -habitable 790 -strive 790 -cleverley 790 -resuscitate 790 -pendleton 790 -advertisers 790 -belle 790 -zambia 790 -reza 790 -tasmania 790 -dobson 790 -70-year-old 790 -racer 790 -swapping 790 -paddington 790 -flawless 789 -tirade 789 -asserted 789 -ruptured 789 -morphine 788 -2.1 788 -103 788 -practise 788 -cisse 788 -gaze 788 -obamas 788 -dwight 788 -blatant 788 -chop 788 -damp 788 -excruciating 788 -novelist 787 -striped 787 -spawned 787 -boiled 787 -mortem 787 -loading 786 -flour 786 -putt 786 -presided 786 -7,500 786 -diarrhoea 786 -chang 786 -woollaston 786 -vowing 786 -corridors 786 -postings 786 -drift 786 -springfield 786 -friedman 785 -nugent 785 -preserving 785 -eagerly 785 -owl 785 -disadvantaged 785 -cheerleader 785 -crest 785 -thereby 785 -58-year-old 785 -surcharge 785 -faux 785 -peacekeepers 785 -knots 785 -breeds 785 -paparazzi 785 -unfamiliar 784 -pascal 784 -vermaelen 784 -battleground 784 -mckenna 784 -manipulate 784 -unthinkable 784 -second-largest 784 -fireball 784 -ribery 784 -clemency 784 -slurs 784 -surrogacy 784 -tuck 784 -schweinsteiger 783 -blackwater 783 -lewinsky 783 -24th 783 -wiping 783 -harmony 783 -microscope 783 -esa 783 -huckabee 783 -gcses 783 -ucla 783 -hogan 783 -meditation 783 -vicinity 782 -offend 782 -reese 782 -wanderers 782 -anderlecht 782 -3.8 782 -h.w. 
782 -kayla 782 -molesting 782 -pyramid 782 -attach 782 -kyrgios 782 -idf 781 -klitschko 781 -smoothly 781 -non 781 -nishikori 781 -first-ever 781 -tudor 781 -lyons 781 -conor 781 -removes 781 -turks 781 -lucia 781 -tones 781 -limp 780 -1946 780 -wielding 780 -phantom 780 -stevenson 780 -buckley 780 -pitcher 780 -rematch 780 -albuquerque 779 -moisture 779 -triggers 779 -progressing 779 -rhinos 779 -strasbourg 779 -kindergarten 779 -qualifiers 779 -bullock 779 -resentment 779 -pilgrimage 778 -landrieu 778 -schneiderlin 778 -lang 778 -specialized 778 -propulsion 778 -arteta 778 -hm 778 -26,000 778 -versatile 778 -toulon 778 -65-year-old 778 -paternity 778 -190 778 -retweeted 778 -holdings 777 -cipriani 777 -triangle 777 -ludicrous 777 -wallis 777 -charger 777 -assailant 777 -1938 777 -silverstone 777 -rolf 777 -predictable 777 -fedex 777 -specialises 777 -iker 777 -snipers 777 -futures 777 -greenwood 777 -arturo 777 -edin 777 -59-year-old 776 -childbirth 776 -fireplace 776 -alexa 776 -mara 776 -crossbar 776 -applaud 776 -fahrenheit 776 -hotline 775 -overtake 775 -strangling 775 -scanners 775 -cyclone 775 -matteo 775 -detectors 775 -dow 775 -jab 774 -merry 774 -bottoms 774 -klinsmann 774 -dishonest 774 -weiss 774 -co-owner 774 -ronny 774 -l-r 774 -6million 774 -galloway 774 -gauge 774 -mommy 774 -coaster 774 -cork 774 -eyewitnesses 773 -fliers 773 -paige 773 -readiness 773 -alba 773 -willow 773 -safeguards 773 -clough 773 -explorers 773 -bundle 772 -birdies 772 -3g 772 -limbaugh 772 -carrington 772 -poking 772 -prehistoric 772 -sentiments 772 -miraculous 772 -cavendish 772 -pick-up 771 -christchurch 771 -partnered 771 -copied 771 -deport 771 -monopoly 771 -veins 771 -atlas 770 -rib 770 -63-year-old 770 -touchscreen 770 -predecessors 770 -gated 770 -physicist 770 -loic 770 -polished 769 -fills 769 -strings 769 -lg 769 -kutcher 769 -agonising 769 -unsolved 769 -controversially 769 -viking 769 -drums 768 -swings 768 -schneider 768 -cellino 768 -jokingly 768 -turnover 768 -bowed 768 -romanians 768 -gye 768 -elders 768 -g. 
768 -57-year-old 768 -saturated 768 -onslaught 768 -frustrations 768 -dudley 768 -rotting 767 -mcginley 767 -waterfall 767 -sheds 767 -dismissing 767 -apparel 767 -housewives 767 -berries 767 -eighties 767 -arrows 766 -kirchner 766 -whatsapp 766 -merits 766 -jagielka 766 -condo 766 -orbits 766 -institutional 766 -mins 766 -dignitaries 765 -carriages 765 -tripadvisor 765 -bananas 765 -shale 765 -impromptu 765 -malware 765 -mcnamara 765 -hector 765 -slashing 765 -particle 765 -alternate 764 -lester 764 -accomplishments 764 -picasso 764 -valentino 764 -statewide 764 -beg 764 -commonplace 764 -tagged 764 -bouts 764 -tesla 764 -10.30 764 -re-elected 764 -hypocrisy 763 -hooker 763 -contends 763 -retains 763 -hammered 763 -warships 763 -buffett 763 -lizard 763 -audrey 763 -cochran 763 -wolfe 763 -menus 763 -lakers 763 -sleeve 763 -module 762 -liberian 762 -administer 762 -daryl 762 -grin 762 -simone 762 -nadia 762 -intoxication 762 -mcloughlin 761 -stresses 761 -bearded 761 -autographs 761 -ibm 761 -descriptions 761 -patrice 761 -kangaroo 761 -booed 761 -nielsen 761 -jumpers 760 -grievances 760 -270 760 -maher 760 -pity 760 -landfill 760 -blond 760 -kagan 760 -homegrown 760 -inflict 760 -co-pilot 760 -looted 760 -weaknesses 759 -abusers 759 -realities 759 -elise 759 -mcnair 759 -incarcerated 759 -taj 759 -2013-14 759 -fast-food 759 -overcrowded 759 -kosovo 759 -22nd 759 -hoodie 758 -groceries 758 -planetary 758 -dances 758 -interfering 758 -precautionary 758 -vick 758 -wander 758 -tamil 758 -retribution 757 -xinjiang 757 -surname 757 -rethink 757 -flush 757 -infuriated 757 -consultancy 757 -acquittal 757 -entities 757 -showcasing 757 -intercept 757 -jay-z 757 -ounces 757 -bubba 757 -dotted 757 -sclerosis 757 -kurdistan 757 -jetblue 757 -suppress 757 -scissors 757 -segregation 756 -addictive 756 -glee 756 -taboo 756 -dove 756 -simpler 756 -mansfield 756 -clocked 756 -repercussions 756 -hypothermia 756 -cater 755 -greaves 755 -donning 755 -ottawa 755 -1949 755 -graveyard 755 -cd 755 -grossly 755 -evaluated 755 -unconventional 755 -morgue 755 -silvio 755 -flashes 755 -racy 755 -orphaned 755 -subsidiary 755 -dangling 755 -130,000 755 -illustrate 754 -cleverly 754 -lamar 754 -multi-millionaire 754 -bowman 754 -drifting 754 -loft 754 -markovic 754 -bottled 754 -arming 754 -exhibits 754 -unfolding 754 -recognisable 753 -loch 753 -wipes 753 -anglia 753 -populous 753 -insistence 753 -sexting 753 -1912 753 -fade 753 -wwii 753 -sherlock 753 -wolff 753 -props 753 -headmaster 752 -olson 752 -salmonella 752 -nicotine 752 -upward 752 -nieto 752 -divert 752 -grandma 752 -spitting 752 -searchers 752 -three-and-a-half 752 -scrum 751 -uninsured 751 -cornish 751 -overdue 751 -08457 751 -easiest 751 -mosquitoes 751 -wizard 751 -volcanoes 751 -operative 751 -ince 751 -mist 751 -decapitated 750 -chamberlain 750 -8.30 750 -storing 750 -deploying 750 -burnett 750 -five-day 750 -rolls-royce 750 -remarked 750 -behaviors 750 -smithsonian 750 -seventies 750 -dives 750 -pratt 750 -tightened 750 -hobbit 750 -dictate 749 -resorted 749 -rein 749 -vendor 749 -saeed 749 -capsized 749 -unimaginable 749 -ensuing 749 -bundy 749 -disposable 749 -beau 749 -season-long 749 -queuing 749 -digestive 749 -injecting 749 -basildon 749 -drained 749 -eradicate 749 -kramer 749 -cove 749 -scanned 748 -hardline 748 -take-off 748 -annan 748 -discounted 748 -gods 748 -49ers 748 -medalist 748 -thrashing 748 -mobbed 748 -jihadis 748 -gandhi 748 -prep 747 -excavation 747 -powerhouse 747 -mayoral 747 -analysing 747 -millwall 747 -fiji 747 -lineup 747 
-footballing 747 -co-founded 747 -outlawed 747 -jumpsuit 746 -soundtrack 746 -short-lived 746 -irving 746 -champ 746 -blighted 746 -hierarchy 746 -aol 746 -mcgrath 746 -best-known 746 -signaled 745 -hates 745 -recreated 745 -professors 745 -spotify 745 -authoritarian 745 -cruiser 745 -stuttgart 745 -depressing 745 -zelaya 744 -colleen 744 -vegetation 744 -dislike 744 -26th 744 -sway 744 -murky 744 -vomit 744 -julien 744 -generator 744 -23,000 744 -dismantled 744 -phoebe 744 -bowled 743 -undermining 743 -fateful 743 -hummels 743 -shelley 743 -coffins 743 -ecosystem 743 -generates 743 -michaela 743 -rocking 743 -integrate 743 -gentlemen 743 -darts 742 -deliberations 742 -notification 742 -aluminium 742 -vegetarian 742 -beale 742 -12million 742 -tyne 742 -analyze 742 -reluctance 742 -muse 742 -stared 742 -jermaine 742 -nearing 742 -meteorite 742 -incorporate 742 -shocks 741 -underwood 741 -oxfam 741 -faked 741 -stefano 741 -composer 741 -duct 741 -technicians 741 -bodyguards 741 -breeze 741 -cot 741 -clara 741 -sutherland 741 -isabel 741 -osman 741 -alumni 741 -cbd 741 -shunned 741 -eruptions 740 -incorrectly 740 -institutes 740 -o'neal 740 -healthcare.gov 740 -strengths 740 -filner 739 -creditors 739 -scratches 739 -arbitrary 739 -richer 739 -guerrero 739 -pairing 739 -reus 739 -rammed 739 -trafalgar 739 -leaflets 739 -coincided 739 -carcass 738 -providence 738 -yewtree 738 -jindal 738 -creams 738 -tasting 738 -foiled 738 -spoof 738 -shipman 738 -sec 738 -seismic 738 -bookmakers 738 -kraft 738 -quarterfinal 738 -politico 738 -malm 738 -kepler 737 -hour-long 737 -capello 737 -subdued 737 -bundled 737 -gin 737 -communicated 737 -mona 737 -goose 737 -undated 737 -hartlepool 737 -pandemic 737 -pediatric 737 -forty 737 -dyson 737 -slit 737 -high-quality 737 -vegan 737 -g8 737 -anaesthetic 736 -darrell 736 -proclaimed 736 -65,000 736 -lauderdale 736 -magpies 736 -dec 736 -ignorant 736 -deferred 736 -southend 736 -skipped 735 -dummy 735 -terri 735 -fashioned 735 -reprieve 735 -openness 735 -prevail 735 -archaeologist 735 -exodus 735 -peppers 735 -chilli 735 -degrading 735 -chrome 735 -timed 735 -raleigh 735 -width 735 -leaps 735 -grueling 734 -lenient 734 -unscathed 734 -o'hare 734 -submarines 734 -zakaria 734 -hoover 734 -truman 734 -inject 734 -webcam 734 -chained 734 -recognizing 734 -subscription 734 -paypal 734 -rack 734 -discontent 734 -palermo 734 -waziristan 733 -buggy 733 -doused 733 -8million 733 -recovers 733 -grapes 733 -exceptions 733 -unmarried 733 -tangled 733 -boyhood 733 -coldest 733 -bbc2 733 -payouts 733 -zachary 733 -simulator 732 -mosley 732 -rioting 732 -immensely 732 -gotze 732 -minimise 732 -preventable 732 -interviewer 732 -'n' 732 -dived 732 -praises 732 -paved 732 -defects 732 -fia 731 -caldwell 731 -cancerous 731 -motherhood 731 -derogatory 731 -aligned 731 -standstill 731 -schumer 731 -georgina 731 -amused 731 -oculus 731 -khalifa 731 -carswell 731 -father-of-one 730 -tripped 730 -borini 730 -ny 730 -specializes 730 -violin 730 -chopper 730 -jailing 730 -explores 730 -wharf 730 -auctioneers 730 -utd 730 -casts 730 -claws 729 -legalization 729 -initials 729 -onstage 729 -pigeon 729 -graph 729 -2050 729 -jazeera 729 -vault 729 -captained 729 -gourmet 729 -self-defence 729 -advocating 729 -chess 729 -interventions 729 -rum 729 -botswana 729 -interestingly 728 -shaky 728 -scuba 728 -downgraded 728 -ankara 728 -ablaze 728 -inhalation 728 -160,000 728 -chairwoman 728 -spielberg 728 -cadbury 728 -detain 728 -yachts 728 -bargaining 728 -summed 728 -sandals 728 -vuitton 728 
-mane 728 -trajectory 727 -gigantic 727 -minimize 727 -columns 727 -yearly 727 -biologist 727 -soaking 727 -practitioners 727 -calculations 727 -mecca 727 -garments 727 -1951 727 -flyers 727 -slur 727 -colored 727 -o'mara 727 -restricting 727 -curling 726 -au 726 -golfers 726 -educating 726 -kvitova 726 -latvia 726 -hpv 726 -yvonne 726 -shipment 726 -tsonga 726 -pledging 726 -organizer 726 -bras 726 -18-month 725 -advertisements 725 -installations 725 -vagina 725 -leukemia 725 -adulthood 725 -ethnicity 725 -rex 725 -heap 725 -jang 725 -conditional 725 -lager 725 -ollie 725 -blazing 725 -shrewsbury 725 -sol 725 -handlers 725 -1.30 724 -browsing 724 -ware 724 -jewel 724 -dots 724 -flung 724 -commended 724 -colts 724 -dine 723 -anorexia 723 -femail 723 -armitage 723 -slack 723 -rachael 723 -dunes 723 -67-year-old 723 -gabrielle 723 -fraudster 723 -tian 723 -sadie 723 -marcel 723 -flavours 723 -hind 723 -sonar 722 -ayatollah 722 -ridden 722 -spear 722 -9.30 722 -erosion 722 -genome 722 -firemen 722 -jodi 722 -humorous 722 -horne 722 -state-owned 722 -detrimental 722 -darkest 722 -apache 722 -sesame 721 -airasia 721 -euthanasia 721 -outlining 721 -rees 721 -bystander 721 -shone 721 -pounced 721 -ornate 721 -104 721 -scouring 721 -malnutrition 721 -keller 721 -trades 721 -raikkonen 721 -shelby 721 -deadlock 720 -experimenting 720 -carving 720 -cqc 720 -aqap 720 -father-in-law 720 -gallon 720 -frenzied 720 -compounded 720 -seven-year 720 -gaffe 720 -workouts 719 -gough 719 -turbine 719 -ugandan 719 -shrimp 719 -roundabout 719 -marches 719 -wrinkles 719 -odyssey 719 -turbulence 719 -al-baghdadi 719 -lamp 719 -unfounded 719 -bamboo 719 -lois 719 -concluding 718 -improperly 718 -algae 718 -starter 718 -burmese 718 -stables 718 -comprised 718 -singleton 718 -einstein 718 -myths 718 -lahm 717 -stickers 717 -genetics 717 -1917 717 -four-bedroom 717 -beverley 717 -coulibaly 717 -birdie 717 -four-month 716 -fly-half 716 -federico 716 -inherit 716 -penchant 716 -sheltered 716 -lindt 716 -bounds 716 -schedules 716 -roam 716 -mendes 716 -conventions 716 -rowan 716 -bridal 715 -sunnis 715 -visually 715 -consisting 715 -rot 715 -lauded 715 -3.4 715 -goddess 715 -toulouse 715 -vaughan 715 -mustard 715 -raonic 715 -ultra 715 -cull 715 -heyday 715 -belize 714 -cinemas 714 -silverware 714 -presbyterian 714 -santi 714 -director-general 714 -incognito 714 -paxman 714 -presiding 714 -ings 714 -no-fly 714 -hazards 714 -malky 714 -halal 714 -rainy 714 -28th 713 -back-up 713 -jolly 713 -amputee 713 -27th 713 -probability 713 -roster 713 -afc 713 -nani 713 -slices 713 -brentford 713 -gaping 713 -levin 713 -baez 712 -condom 712 -alleviate 712 -baths 712 -stature 712 -chaired 712 -hit-and-run 712 -sneakers 712 -restriction 712 -goggles 712 -dexter 712 -pearls 712 -collier 712 -pavilion 712 -contingency 711 -louder 711 -schwarzenegger 711 -lu 711 -racecourse 711 -vista 711 -catalyst 711 -elimination 711 -lapse 711 -defines 711 -rubin 711 -grains 711 -o'leary 711 -preferences 711 -efficiently 711 -dodd 711 -weeping 711 -wonderland 711 -therapies 711 -dominating 711 -cordon 710 -chihuahua 710 -cologne 710 -cocoa 710 -beverages 710 -olsen 710 -dunne 710 -disproportionate 710 -comedians 710 -overs 710 -flavor 710 -maracana 710 -wit 710 -regent 710 -ministerial 710 -poked 710 -mexicans 709 -peel 709 -aspen 709 -chi 709 -mao 709 -machete 709 -notre 709 -hampstead 709 -khaled 709 -clicking 709 -2030 708 -videotaped 708 -arabs 708 -dashboard 708 -retaining 708 -hartford 708 -resembled 708 -shorten 708 -flourish 708 -downloading 
708 -wheeled 708 -autonomy 708 -fisheries 708 -hysterical 708 -hanks 708 -embraces 708 -logs 708 -coughing 708 -deficits 708 -tindall 707 -empower 707 -pedigree 707 -buzzing 707 -sphere 707 -recognises 707 -stocked 707 -symptom 707 -zac 707 -golds 707 -pillar 707 -acre 707 -peacock 707 -isles 707 -clinched 707 -audition 707 -faye 707 -reliance 707 -tasted 706 -cpl. 706 -obe 706 -caleb 706 -crowe 706 -fatality 706 -captains 706 -rumored 706 -hardcore 706 -vests 706 -rehearsal 706 -untreated 706 -fading 706 -revolver 705 -dysfunction 705 -deprivation 705 -resurgence 705 -ethic 705 -rulers 705 -astronomical 705 -skiers 705 -chrysler 705 -nuptials 705 -defy 705 -bosque 705 -favors 705 -myriad 704 -reunite 704 -sinatra 704 -55,000 704 -burying 704 -libel 704 -strangely 704 -stealth 704 -plaster 704 -24/7 704 -beamed 704 -bain 704 -'80s 704 -eternal 704 -ruining 704 -townhouse 704 -taxpayer-funded 704 -amended 704 -hulk 704 -commandos 703 -certificates 703 -semi-automatic 703 -mauresmo 703 -butterflies 703 -billie 703 -sustainability 703 -riddled 703 -schaefer 703 -flamboyant 703 -uphill 702 -sharper 702 -working-class 702 -spoiled 702 -varies 702 -rebound 702 -luca 702 -taco 702 -tori 702 -64-year-old 702 -bowen 702 -ten-year-old 702 -fooled 702 -campuses 701 -menopause 701 -hardworking 701 -winehouse 701 -greeks 701 -70th 701 -innovations 701 -perjury 701 -pakistanis 701 -salah 701 -unaccompanied 701 -wilkins 701 -24,000 701 -roaring 701 -haley 701 -maurice 701 -rutgers 701 -syrup 700 -systematically 700 -ill-fated 700 -homosexuals 700 -stocking 700 -flattened 700 -ritchie 700 -fantasies 700 -commando 700 -winnings 700 -imperative 700 -sammy 700 -obey 700 -leafy 700 -dole 700 -kaka 700 -renee 700 -circling 699 -gonzalo 699 -captaincy 699 -shaft 699 -worsening 699 -oppression 699 -numb 699 -stump 699 -anti-semitism 699 -correction 699 -healed 699 -menace 699 -swooped 699 -workshops 699 -violet 699 -jensen 698 -boobs 698 -smelled 698 -hurley 698 -midtown 698 -warhol 698 -indicators 698 -pads 698 -talbot 698 -bradshaw 698 -ample 698 -pens 698 -bark 698 -pcs 698 -archer 698 -adnan 698 -hurtful 698 -jess 697 -minivan 697 -koscielny 697 -labelling 697 -thirteen 697 -140,000 697 -kimberley 697 -softer 697 -indulge 697 -abuser 697 -rescuing 697 -dubious 697 -tuberculosis 697 -jasper 697 -grinning 697 -landfall 697 -philipp 697 -extra-time 697 -privileges 697 -61-year-old 697 -intrigued 696 -accumulated 696 -us$ 696 -escalate 696 -bliss 696 -guardians 696 -high-powered 696 -huts 696 -barricades 696 -noaa 696 -toss 696 -spans 696 -spraying 695 -rubbed 695 -papua 695 -inferno 695 -gradual 695 -metals 695 -planners 695 -snatch 694 -sims 694 -usda 694 -waiter 694 -selfless 694 -geldof 694 -rotten 694 -strachan 694 -savers 694 -submission 694 -paramilitary 694 -sienna 694 -sounding 693 -socket 693 -mutilated 693 -hesitate 693 -gbagbo 693 -apparatus 693 -skyscrapers 693 -trailed 693 -delaney 693 -thereafter 693 -captives 693 -coordinate 693 -assassin 693 -browns 692 -fats 692 -anastasia 692 -punitive 692 -reasoning 692 -third-degree 692 -yielded 692 -physiotherapy 692 -scoop 692 -fargo 691 -50m 691 -donkey 691 -igor 691 -biased 691 -plus-size 691 -relocate 691 -unrealistic 691 -klan 691 -strap 690 -hathaway 690 -endanger 690 -strides 690 -yu 690 -topple 690 -longevity 690 -soak 690 -4.2 690 -wen 689 -blumenthal 689 -1916 689 -darlington 689 -hinckley 689 -monastery 689 -rattled 689 -hindsight 689 -oust 689 -beleaguered 689 -aden 689 -blasting 688 -outsiders 688 -deposed 688 -disrespect 688 -1930 688 
-swimsuit 688 -friction 688 -corrected 688 -mutation 687 -fluffy 687 -garlic 687 -grappling 687 -lola 687 -ha 687 -27,000 687 -brantly 687 -overboard 687 -outset 687 -stained 687 -nuns 686 -plucked 686 -enriched 686 -lander 686 -zoos 686 -mantle 686 -cubic 686 -stirring 686 -bojan 686 -pic 686 -enormously 686 -demi 686 -adhere 686 -mural 686 -550 686 -leaned 686 -punishable 685 -groped 685 -incomplete 685 -gateshead 685 -peggy 685 -setbacks 685 -sabotage 685 -georgetown 685 -couric 685 -robshaw 684 -wreath 684 -pollen 684 -departures 684 -canon 684 -splashing 684 -activism 684 -jonah 684 -advertise 684 -gatherings 684 -stardom 684 -crucially 684 -switches 684 -deepwater 684 -probes 684 -quarantined 683 -chateau 683 -motorcade 683 -consequently 683 -moeen 683 -stag 683 -recorders 683 -eight-year 683 -hostess 683 -projections 683 -oct. 683 -organisms 683 -on-board 683 -lilly 683 -ushered 682 -bud 682 -wes 682 -linebacker 682 -complainant 682 -paddle 682 -gmail 682 -farmland 682 -shedding 682 -deterioration 682 -ledge 681 -tumbling 681 -alberta 681 -merger 681 -contributes 681 -sweating 681 -ominous 681 -zidane 681 -overcoming 681 -patio 681 -1933 681 -hairstyle 681 -altar 681 -chongqing 681 -hopefuls 681 -lil 681 -slum 681 -cremated 680 -averaged 680 -mustafa 680 -ridicule 680 -tidal 680 -compliment 680 -halo 680 -mascherano 680 -equalised 680 -cube 679 -blinded 679 -nicely 679 -oceanic 679 -telescopes 679 -positioning 679 -draper 679 -nudity 679 -2012-13 679 -commenter 679 -stewards 679 -intending 679 -crab 679 -spiegel 679 -glitch 679 -willy 679 -4.30 679 -pointless 679 -unintended 679 -menacing 679 -diner 679 -unaccounted 679 -powerball 678 -referral 678 -sirens 678 -semi-detached 678 -scratched 678 -libyans 678 -cherished 678 -mulberry 678 -expenditure 678 -flushing 678 -poke 678 -snapshot 678 -commissioners 678 -dysfunctional 678 -cumberbatch 677 -80-year-old 677 -incarceration 677 -freshly 677 -negredo 677 -steroid 677 -+1 677 -hurling 677 -vying 677 -pave 677 -greats 677 -yougov 677 -obituary 677 -dior 677 -homer 677 -commercially 677 -rails 677 -negotiators 676 -on-screen 676 -caracas 676 -fairytale 676 -colt 676 -nate 676 -realm 676 -stubborn 676 -blackout 676 -spit 676 -det 675 -baltic 675 -feldman 675 -gridlock 675 -levelled 675 -melinda 675 -patents 675 -budding 675 -colonies 675 -composure 675 -caviar 675 -envy 675 -glastonbury 675 -desmond 675 -milly 675 -dell 675 -doubted 675 -prestige 674 -gallup 674 -madden 674 -suck 674 -halved 674 -giraffe 674 -lime 674 -persist 674 -lewthwaite 674 -2000s 674 -calcium 674 -lagoon 674 -lewandowski 674 -155 674 -engineered 674 -simulation 674 -janice 674 -remission 674 -fin 673 -whiskey 673 -staunch 673 -coleen 673 -schofield 673 -deed 673 -elisabeth 673 -hails 673 -calculate 673 -uv 673 -resigning 673 -amp 673 -cyril 673 -yellowstone 672 -reptiles 672 -cue 672 -kassig 672 -mysteriously 672 -albany 672 -columbine 672 -motorsport 672 -southgate 672 -keating 672 -obsessive 672 -amenities 672 -lena 672 -klopp 672 -huddled 672 -culprits 672 -oecd 672 -brothel 671 -taps 671 -tumultuous 671 -hotter 671 -maidstone 671 -slade 671 -bait 671 -dispose 671 -implied 671 -declines 671 -warmed 671 -comforting 671 -freya 670 -harlequins 670 -loyalists 670 -clean-up 670 -daughter-in-law 670 -sourced 670 -wifi 670 -prognosis 670 -filibuster 670 -libor 670 -tides 670 -2.30 670 -needless 670 -dictionary 670 -rutherford 670 -e! 
670 -expectant 670 -cooks 670 -cling 669 -subcommittee 669 -insulted 669 -confederate 669 -bratton 669 -o'shea 669 -timberlake 669 -inclined 669 -swallowing 669 -entity 669 -sought-after 669 -culminated 669 -o'sullivan 669 -cuadrado 669 -eton 669 -worshippers 669 -claude 669 -reclusive 668 -len 668 -instincts 668 -rigged 668 -responsive 668 -screenplay 668 -airmen 668 -ark 668 -motorcycles 668 -evelyn 668 -fernandinho 668 -asperger 668 -satisfying 668 -perceive 668 -bulbs 668 -quell 668 -blazer 668 -wonga 668 -mid-air 668 -kaymer 667 -organiser 667 -blitz 667 -unimpressed 667 -aniston 667 -giuliani 667 -stein 667 -disco 667 -clancy 667 -hillside 667 -bellevue 667 -102 667 -xabi 667 -transmit 666 -dart 666 -faults 666 -pups 666 -ak-47 666 -squirrels 666 -navarrette 666 -shinseki 666 -3.30 666 -resembling 666 -anbar 666 -heartache 666 -dialysis 666 -cherie 666 -rocker 666 -cash-strapped 666 -two-bedroom 666 -reliability 665 -cache 665 -chisora 665 -awaited 665 -braved 665 -confess 665 -evacuations 665 -competes 665 -hose 665 -coordinating 665 -overtaken 665 -safeguarding 665 -slips 665 -shockingly 665 -sherry 665 -clamp 665 -systemic 665 -danczuk 665 -yazidi 665 -cpl 665 -testosterone 665 -greedy 665 -countered 665 -westerners 665 -grayson 664 -tangible 664 -craven 664 -oblivious 664 -logging 664 -edmund 664 -fuelling 664 -feces 664 -kimmel 664 -invade 664 -complied 664 -newborns 663 -tidy 663 -mischief 663 -plasma 663 -aluminum 663 -eileen 663 -dial 663 -quentin 663 -besieged 663 -algorithm 663 -behavioral 663 -aloud 663 -desks 662 -marquee 662 -cloudy 662 -kouachi 662 -notebook 662 -clattenburg 662 -scratching 662 -synonymous 662 -warplanes 662 -collisions 662 -nestled 662 -incapable 662 -tumbled 662 -enquirer 662 -guildford 662 -discusses 661 -naismith 661 -slots 661 -wrestled 661 -limbo 661 -ipo 661 -rapists 661 -stove 661 -everett 661 -blindness 661 -aboriginal 661 -overrun 661 -froze 660 -stung 660 -crimean 660 -celebratory 660 -sorting 660 -outlaw 660 -trove 660 -hen 660 -saido 660 -quipped 660 -spider-man 660 -choke 660 -triathlon 660 -supercar 660 -tenerife 660 -gammy 659 -underestimated 659 -welshman 659 -achilles 659 -humbled 659 -lectures 659 -gucci 659 -supervisors 659 -annabel 659 -pancreatic 659 -'90s 659 -painstakingly 659 -exits 659 -4.7 659 -receptionist 658 -explanations 658 -start-up 658 -thornhill 658 -suzannah 658 -three-week 658 -sheldon 658 -hoy 658 -atrocity 658 -colback 658 -enterprises 658 -guthrie 658 -freud 658 -epicenter 658 -inherent 657 -crossings 657 -portman 657 -insomnia 657 -amal 657 -iqbal 657 -startup 657 -madagascar 657 -lurking 657 -shipwreck 657 -perpetrator 657 -platini 657 -ideally 656 -stalls 656 -zelizer 656 -15million 656 -irregular 656 -spoon 656 -riches 656 -gabby 656 -condone 656 -amos 656 -segments 656 -dearly 656 -camped 656 -restrictive 656 -magnet 655 -petroleum 655 -11.30 655 -automotive 655 -oprah.com 655 -gillespie 655 -golfing 655 -marussia 655 -worthwhile 655 -etiquette 655 -tsvangirai 655 -oversized 655 -graft 655 -seasoned 655 -chipped 655 -badges 654 -hotspots 654 -mansour 654 -jealousy 654 -bloke 654 -a-level 654 -tiananmen 654 -warmest 654 -busch 654 -administering 654 -burgeoning 654 -botelho 654 -waged 654 -wedged 654 -developmental 654 -essentials 653 -balances 653 -triplets 653 -polanski 653 -showcases 653 -pinto 653 -weaver 653 -higgs 653 -constitutes 653 -licences 653 -kidd 653 -focal 653 -ferrell 653 -toffees 652 -filings 652 -three-hour 652 -oils 652 -turin 652 -farberov 652 -undecided 652 -croft 652 -traction 652 
-dimensions 652 -sticker 652 -combating 652 -logistical 652 -depiction 652 -in-flight 652 -fetus 652 -paw 652 -birthdays 652 -avery 651 -impunity 651 -binky 651 -enfield 651 -stalemate 651 -lb 651 -amtrak 651 -dolly 651 -pigeons 651 -faulkner 651 -stuffing 651 -maturity 651 -reset 651 -exclude 651 -5-3 651 -puppet 651 -half-hour 651 -storey 651 -buchanan 651 -swimwear 650 -expresses 650 -prosperous 650 -worm 650 -commissioning 650 -stationary 650 -rafa 650 -barney 650 -ole 650 -litvinenko 650 -escapes 650 -chalk 650 -28,000 650 -clots 650 -plantation 650 -4x4 650 -slightest 650 -buzzfeed 650 -hoops 650 -convertible 650 -tights 650 -recurring 650 -asbo 650 -eisenhower 650 -clifton 649 -deposition 649 -inseparable 649 -liberated 649 -tracker 649 -teachings 649 -jackman 649 -fcc 649 -haiyan 649 -climber 649 -mindful 649 -mellon 649 -fizzy 649 -inhumane 649 -stashed 648 -pistols 648 -katz 648 -eurostar 648 -beta 648 -tulisa 648 -robotics 648 -downloads 648 -albania 648 -zombies 648 -limousine 648 -peacekeeping 648 -burrell 648 -mound 648 -last-16 648 -nitrogen 648 -lighthouse 648 -casinos 648 -crust 647 -prevalence 647 -doctrine 647 -koran 647 -storming 647 -mandarin 647 -al-hilli 647 -murder-suicide 647 -dissolved 647 -painstaking 647 -parma 647 -106 647 -jahi 647 -clyde 647 -layout 647 -zoom 647 -islanders 647 -congolese 647 -classy 647 -snejana 646 -voicemail 646 -notifications 646 -televisions 646 -extras 646 -terminals 646 -scheduling 646 -venom 646 -diabetic 646 -derelict 646 -gmc 646 -restrain 646 -chadwick 646 -iris 646 -fists 646 -caitlin 646 -bombshell 646 -teased 646 -pena 646 -mckinnon 646 -nationalists 646 -five-bedroom 646 -strand 646 -bethany 645 -entwistle 645 -scaffolding 645 -ukrainians 645 -utilities 645 -intruders 645 -embarking 645 -flyer 645 -dissidents 645 -marty 645 -4.4 645 -paraphernalia 645 -lasers 645 -consisted 644 -editor-in-chief 644 -steered 644 -bragged 644 -gopro 644 -reformed 644 -gag 644 -u.s.-based 644 -weight-loss 644 -belgrade 644 -homicides 644 -offline 644 -hijacking 644 -suffocated 644 -cooperated 644 -grimm 644 -one-way 644 -refreshing 643 -eid 643 -settlers 643 -whirlwind 643 -fearsome 643 -melanoma 643 -favours 643 -cleavage 643 -california-based 643 -sausages 642 -debacle 642 -saif 642 -2019 642 -circumstance 642 -450,000 642 -alias 642 -assailants 642 -unwittingly 642 -bouquet 642 -blagojevich 642 -paraded 642 -environmentally 642 -scarce 642 -valve 642 -vibe 641 -drown 641 -coward 641 -racket 641 -bicycles 641 -harrow 641 -sykes 641 -chores 641 -strauss 641 -bizarrely 641 -wojciech 641 -precinct 641 -6,500 641 -molecular 641 -morality 641 -excluding 640 -ghosts 640 -vertonghen 640 -vivienne 640 -waterproof 640 -undergraduate 640 -tray 640 -counselors 640 -simpsons 640 -writings 640 -pervert 640 -bedding 640 -christening 640 -terminate 640 -pop-up 640 -baxter 640 -bingo 640 -bleach 640 -illustrates 640 -stereotype 640 -hotspot 639 -crutches 639 -bidder 639 -baird 639 -hands-on 639 -spanned 639 -idle 639 -ailments 639 -hairs 639 -davos 639 -juve 639 -tails 638 -routines 638 -metallic 638 -kirsten 638 -skepticism 638 -kerri 638 -4.3 638 -paving 638 -franck 638 -docks 637 -flaw 637 -kayak 637 -dianne 637 -strawberry 637 -reassuring 637 -trustee 637 -sian 637 -rigid 637 -compatible 637 -aziz 637 -incompetent 637 -anti-corruption 637 -invaluable 637 -dieting 637 -dynamo 637 -certification 637 -crump 637 -occupying 637 -dim 637 -treasured 637 -installment 636 -runways 636 -capt 636 -one-on-one 636 -daytona 636 -mesa 636 -mirren 636 -villas 636 -sauna 
[Deleted data file content, collapsed for readability: a word-frequency vocabulary with one "word count" entry per line (e.g. "kosher 636", "additions 636", "68-year-old 636"), ordered by descending count; this hunk spans counts from 636 down through 388.]
-prolong 388 -floodwater 388 -padded 388 -malice 388 -acevedo 388 -pedrosa 387 -combed 387 -optimal 387 -constructing 387 -throttle 387 -1921 387 -noor 387 -gypsies 387 -che 387 -luxuries 387 -unspeakable 387 -scalise 387 -bitch 387 -unleashing 387 -torpedo 387 -grilling 387 -migrate 387 -annexed 387 -ecology 387 -stumps 387 -drier 387 -diminishing 387 -1:30 387 -aamer 387 -devotees 387 -huston 387 -awkwardly 387 -consolidate 387 -girly 387 -extraction 387 -canceling 387 -goodluck 387 -blazes 387 -10-minute 387 -boulders 387 -m.d. 387 -sky-high 387 -ballerina 387 -mediation 387 -jerreat 387 -cleary 387 -lochte 387 -insurer 386 -walkout 386 -nurseries 386 -tango 386 -headscarf 386 -foothold 386 -patented 386 -second-round 386 -guitars 386 -isleworth 386 -mulligan 386 -sundance 386 -decomposing 386 -droves 386 -susanna 386 -cul-de-sac 386 -shawcross 386 -rosso 386 -foliage 386 -bulging 386 -eulogy 386 -hypertension 386 -hard-pressed 386 -authorizing 386 -power-sharing 386 -30-day 386 -nato-led 386 -prime-time 386 -kenyans 386 -preparedness 386 -16gb 386 -15m 386 -vinyl 386 -baseline 386 -childless 385 -pests 385 -vented 385 -agbonlahor 385 -julius 385 -casings 385 -tuning 385 -chu 385 -spurned 385 -councilman 385 -hogg 385 -bani 385 -phased 385 -librarian 385 -coordinates 385 -military-style 385 -battlefields 385 -sins 385 -samurai 385 -drive-by 385 -tempt 385 -jewellers 385 -eyeliner 385 -siren 385 -43,000 385 -chute 385 -batsmen 385 -pings 385 -legitimately 385 -zennie 385 -forbids 385 -bugatti 385 -leaker 385 -emerson 385 -resides 385 -boisterous 385 -dwell 385 -coherent 385 -crumble 385 -palatial 385 -irons 385 -great-grandchildren 385 -sandler 385 -circa 385 -unorthodox 385 -voyeurism 385 -kourtney 384 -six-bedroom 384 -noonan 384 -90-year-old 384 -nappy 384 -droughts 384 -greyhound 384 -speculating 384 -infinite 384 -teaming 384 -instituted 384 -awry 384 -censored 384 -nervously 384 -u21 384 -haringey 384 -tasered 384 -randolph 384 -burj 384 -skimpy 384 -cheats 384 -macbook 384 -6m 384 -accrington 384 -compressed 384 -7.3 384 -jobseekers 384 -alvarenga 384 -tyrant 384 -miroslav 384 -relapse 384 -toner 384 -sprang 384 -co-ordinated 383 -salzburg 383 -partied 383 -retracted 383 -copycat 383 -squatters 383 -9.99 383 -grafts 383 -grape 383 -startups 383 -disliked 383 -crete 383 -slab 383 -oranges 383 -marylebone 383 -jeter 383 -experimented 383 -softly 383 -callahan 383 -embroidered 383 -grit 383 -vito 383 -dispatchers 383 -filth 383 -cromwell 383 -infestation 383 -top-level 383 -admirable 383 -caters 383 -viscount 383 -family-friendly 383 -frock 383 -reeve 383 -ives 383 -correctness 383 -swanson 383 -infect 383 -legislatures 382 -racking 382 -armenian 382 -headmistress 382 -cnet 382 -renewing 382 -redmayne 382 -nan 382 -coercion 382 -sumptuous 382 -flesh-eating 382 -applicable 382 -two-minute 382 -juicy 382 -monfils 382 -milligrams 382 -hereditary 382 -cmdr. 
382 -wrongfully 382 -emphasised 382 -unc 382 -bosworth 382 -rana 381 -trident 381 -wealthier 381 -telly 381 -honourable 381 -revolving 381 -getafe 381 -grosvenor 381 -disdain 381 -obi 381 -electrodes 381 -recluse 381 -counters 381 -kyoto 381 -grassley 381 -bends 381 -destabilize 381 -sugars 381 -rucksack 381 -kaur 381 -sylvain 381 -lambeth 381 -potters 381 -bulky 381 -ketamine 381 -blanco 381 -searing 381 -abi 381 -dion 381 -livermore 381 -light-years 381 -farrah 381 -poundland 381 -augustine 381 -coded 381 -recreating 381 -unilaterally 381 -usada 381 -hammering 381 -berth 381 -expats 381 -enrich 381 -simmering 381 -ramon 381 -delusional 380 -brinsley 380 -cellphones 380 -hordes 380 -commodities 380 -ripper 380 -oakley 380 -thaw 380 -aspiration 380 -isner 380 -versa 380 -supremo 380 -mortal 380 -markedly 380 -tasers 380 -infested 380 -arches 380 -micah 380 -asbestos 380 -taxing 380 -138 380 -comedies 379 -mimicking 379 -sensed 379 -occupant 379 -sensations 379 -pharmaceuticals 379 -gasping 379 -instructing 379 -mandelson 379 -bulge 379 -excrement 379 -customized 379 -flammable 379 -vic 379 -y' 379 -chaney 379 -nadir 379 -widen 379 -corinthians 379 -g-20 379 -depictions 378 -fancied 378 -nipple 378 -burley 378 -cagliari 378 -todashev 378 -sabine 378 -ari 378 -swaths 378 -alvarado 378 -dar 378 -kinda 378 -analogy 378 -ko 378 -ringo 378 -restless 378 -headstone 378 -undone 378 -bethlehem 378 -rhonda 378 -lafayette 378 -allegri 378 -dwarfed 378 -restive 378 -double-decker 378 -ten-year 378 -fashions 378 -gastrointestinal 378 -seaman 378 -influencing 378 -loot 378 -dusan 378 -blackwell 378 -pranks 378 -morals 378 -75th 378 -tread 378 -bandit 377 -sumatra 377 -8.3 377 -conjoined 377 -personalized 377 -suleiman 377 -jabs 377 -mcleod 377 -taxed 377 -stimulant 377 -lanarkshire 377 -kellie 377 -neuman 377 -tusk 377 -breeders 377 -batty 377 -stereo 377 -skewed 377 -curran 377 -conservatism 377 -plank 377 -treaties 377 -flatly 377 -pixels 377 -new-found 377 -newsquiz 377 -mta 377 -traore 377 -twerking 377 -cavalier 377 -grange 377 -eponymous 377 -75million 377 -grass-roots 377 -resurfaced 377 -deleting 377 -unnatural 377 -sag 377 -assassinations 377 -scraped 377 -allure 377 -grad 377 -waterhouse 377 -deployments 377 -minded 377 -tanned 377 -hatfield 377 -commencement 377 -horsepower 377 -220,000 377 -superheroes 377 -manageable 376 -ache 376 -cost-effective 376 -ike 376 -commander-in-chief 376 -interns 376 -plaudits 376 -rousing 376 -yohan 376 -vines 376 -800m 376 -low-lying 376 -ned 376 -tight-lipped 376 -swells 376 -frigate 376 -rundown 376 -dressage 376 -showering 376 -wrangling 376 -suede 376 -scant 376 -corvette 376 -spacey 376 -lindo 376 -tiara 376 -snatching 376 -modules 376 -verses 376 -lorna 376 -convent 376 -fonda 376 -3ft 376 -throngs 376 -canteen 376 -self-confidence 376 -brianna 376 -fuentes 375 -swayed 375 -stoner 375 -wahlberg 375 -hoop 375 -lithuanian 375 -morecambe 375 -glam 375 -rescuer 375 -144 375 -mears 375 -intervals 375 -freaked 375 -huma 375 -revoke 375 -8m 375 -terrorized 375 -milford 375 -sprays 375 -centrist 375 -surgically 375 -bereavement 375 -sarcastic 375 -heavyweights 375 -straits 375 -flakes 375 -salvatore 374 -notifying 374 -complicity 374 -micky 374 -215 374 -mudslides 374 -davy 374 -ape 374 -conservatory 374 -depended 374 -iplayer 374 -deem 374 -backpacks 374 -privatisation 374 -spewing 374 -defunct 374 -incite 374 -exporting 374 -lofty 374 -levant 374 -hazell 374 -procurement 374 -jun 374 -creme 374 -entrepreneurship 374 -quakes 374 -smack 374 -shellie 374 
-locomotive 374 -fluorescent 374 -breathed 374 -georges 374 -dice 374 -smyth 374 -dominguez 374 -stosur 374 -8,500 374 -yuri 374 -garfield 374 -resounding 374 -newham 373 -top-secret 373 -compromises 373 -mans 373 -totaled 373 -taxman 373 -theatres 373 -inaccessible 373 -burlesque 373 -underweight 373 -kofi 373 -hazmat 373 -stoning 373 -shopped 373 -pontiac 373 -disallowed 373 -2,800 373 -class-action 373 -self-harm 373 -chaplin 373 -panned 373 -teamwork 373 -menzies 373 -millennials 373 -kilo 373 -mcenroe 373 -hal 373 -10-man 373 -tell-all 373 -hues 373 -jacobson 373 -poached 373 -ethel 373 -amputate 373 -131 373 -flex 373 -strangulation 372 -nunn 372 -bumpy 372 -bletchley 372 -aroused 372 -philanthropy 372 -nests 372 -goldfish 372 -jo-wilfried 372 -tahmooressi 372 -nemesis 372 -mandeville 372 -paz 372 -vardy 372 -squared 372 -basra 372 -creamy 372 -jk 372 -fer 372 -1913 372 -conscientious 372 -longer-term 372 -comprise 372 -eyed 372 -pellet 372 -healey 372 -microchip 372 -mathews 372 -unfaithful 372 -atheists 372 -240,000 372 -jetliner 372 -dresser 372 -enhancement 372 -one-hour 372 -komisarjevsky 372 -suki 372 -explodes 372 -smoky 371 -abandonment 371 -half-century 371 -adept 371 -mic 371 -sportswear 371 -boos 371 -plethora 371 -gillingham 371 -infused 371 -charcoal 371 -o.j. 371 -jigsaw 371 -blunkett 371 -world-renowned 371 -bile 371 -mitochondrial 371 -virtues 371 -displacement 371 -gangnam 371 -cristian 371 -vinegar 371 -broaden 371 -altitudes 371 -mcvey 371 -ridiculously 371 -irresistible 371 -chandelier 371 -giveaway 371 -ph.d. 371 -inventive 371 -exemptions 371 -slabs 371 -negro 371 -ftc 371 -cassandra 370 -figurines 370 -brigadier 370 -manuscripts 370 -sermons 370 -watery 370 -revel 370 -clapham 370 -purposefully 370 -kang 370 -phnom 370 -nickel 370 -nirvana 370 -borno 370 -diligence 370 -cornelius 370 -defection 370 -over-the-top 370 -agnieszka 370 -microphones 370 -choreographed 370 -warms 370 -milder 370 -masterpieces 370 -cashed 370 -downpour 370 -nasdaq 370 -barron 370 -strickland 369 -clapping 369 -tyranny 369 -circuits 369 -now-defunct 369 -simplest 369 -greener 369 -shroud 369 -alienated 369 -uninhabited 369 -terra 369 -nolen 369 -zhejiang 369 -dirk 369 -suffocating 369 -levied 369 -disciplines 369 -biking 369 -sac 369 -frederik 369 -fullest 369 -bluff 369 -informants 369 -tj 369 -woodhouse 369 -nominating 369 -abuja 369 -latter-day 369 -fright 369 -able-bodied 369 -steubenville 368 -shahid 368 -kohli 368 -permitting 368 -imagining 368 -1lb 368 -stow 368 -payback 368 -ainslie 368 -skateboard 368 -fireplaces 368 -congested 368 -rancho 368 -ticks 368 -syringes 368 -teaspoons 368 -disappearances 368 -invoices 368 -cuddles 368 -aussies 368 -motivations 368 -discrepancy 368 -jong-il 368 -deserts 368 -downstream 368 -mateo 368 -careered 368 -concorde 368 -respectfully 368 -mastered 368 -molten 368 -plugs 367 -belmont 367 -bullard 367 -nursed 367 -e-commerce 367 -tracksuit 367 -amazement 367 -cracker 367 -clijsters 367 -447 367 -brig. 
367 -hospitalization 367 -baylor 367 -hoskins 367 -airbnb 367 -idols 367 -supremacy 367 -oxide 367 -exhaustive 367 -conform 367 -semi-official 367 -castles 367 -peripheral 367 -erick 367 -hinting 367 -parc 367 -racehorse 367 -whittaker 367 -seawater 367 -littlefield 366 -thickness 366 -six-hour 366 -enigma 366 -acrimonious 366 -marlene 366 -zainab 366 -mummified 366 -undiscovered 366 -tagging 366 -vigilance 366 -speedboat 366 -nurture 366 -calmer 366 -mercia 366 -himalayas 366 -comey 366 -queries 366 -hines 366 -trampoline 366 -spire 366 -gatsby 366 -renegotiate 366 -uyghur 366 -flu-like 366 -deflated 366 -predictably 366 -els 366 -plowed 366 -underscored 366 -osaka 366 -pensacola 366 -craving 366 -nabbed 366 -gravy 366 -4.9 366 -invent 366 -pardons 366 -asserting 365 -mobilized 365 -oops 365 -rompuy 365 -centimeters 365 -roswell 365 -horse-drawn 365 -meade 365 -missionaries 365 -3,600 365 -lick 365 -serenity 365 -lehman 365 -ids 365 -bolasie 365 -unwavering 365 -deepen 365 -hoffenheim 365 -kali 365 -cybersecurity 365 -outward 365 -itching 365 -4:30 365 -perennial 365 -raza 365 -monique 365 -195 365 -amr 365 -vacationing 365 -nicer 365 -infiltrate 365 -34th 365 -co-ordinator 365 -anti-islam 365 -rationing 365 -grenoble 365 -persistence 365 -cutler 365 -sepsis 364 -expel 364 -40p 364 -pastime 364 -cucumber 364 -hatem 364 -ideye 364 -applauds 364 -tarp 364 -orangutans 364 -mckinley 364 -seminar 364 -prioritise 364 -ghostly 364 -supervise 364 -dartford 364 -headlined 364 -clicks 364 -pantaleo 364 -reassignment 364 -7-0 364 -disable 364 -unresolved 364 -huntington-whiteley 364 -firefighting 364 -radaronline 364 -phyllis 364 -l'oreal 363 -hard-fought 363 -possesses 363 -fuzzy 363 -edna 363 -memorandum 363 -rabies 363 -ask.fm 363 -demeaning 363 -bynes 363 -dawkins 363 -deliberation 363 -tuscany 363 -aggressor 363 -clientele 363 -gluten 363 -underpants 363 -unprepared 363 -babysitting 363 -sos 363 -processors 363 -7.7 363 -brentwood 363 -tania 363 -scorsese 363 -springer 363 -screeners 363 -boredom 363 -anti-war 363 -stephan 362 -criticizes 362 -displeasure 362 -fay 362 -opportunistic 362 -undersea 362 -judi 362 -cumberland 362 -adriana 362 -gabon 362 -shinji 362 -heater 362 -sexton 362 -identifiable 362 -eyeing 362 -jetted 362 -vulnerabilities 362 -lsd 362 -notts 362 -bauman 362 -prompts 362 -rebellious 362 -2013/14 362 -wading 362 -memos 362 -sleeper 362 -mila 362 -exasperated 362 -unavoidable 362 -nuevo 361 -britt 361 -dungeon 361 -ezequiel 361 -mcconaughey 361 -gisele 361 -herb 361 -step-by-step 361 -desserts 361 -stimulating 361 -freaking 361 -chronicled 361 -conveyed 361 -flicked 361 -two-story 361 -pelted 361 -orchid 361 -pressuring 361 -50ft 361 -hr 361 -reservoirs 361 -masse 361 -aftershocks 361 -spacewalk 361 -contradicted 361 -inventions 361 -thrash 361 -felled 361 -139 361 -airway 360 -eco 360 -79-year-old 360 -truthful 360 -uddin 360 -dented 360 -adlington 360 -glendale 360 -uncles 360 -bevan 360 -420 360 -ozone 360 -unrepentant 360 -housemate 360 -penitentiary 360 -spaceshiptwo 360 -kilometer 360 -binoculars 360 -life-size 360 -jurisdictions 360 -prairie 360 -centrepiece 360 -carlin 360 -partnering 360 -negativity 360 -motherwell 360 -distributors 360 -bowles 360 -mcgowan 360 -nurturing 360 -durban 360 -premieres 360 -o'neil 360 -slut 360 -pemberton 360 -irate 359 -mcfadden 359 -myleene 359 -hedges 359 -shrewd 359 -37,000 359 -barak 359 -undisputed 359 -meddling 359 -siegel 359 -12,500 359 -blends 359 -sociology 359 -glider 359 -porous 359 -proportionate 359 -ponytail 359 -anal 359 
-temperament 359 -snooping 359 -presentations 359 -harf 359 -holistic 359 -differentiate 359 -sled 359 -brat 359 -divulge 359 -strenuously 359 -innocuous 359 -yourselves 359 -distasteful 359 -cutbacks 359 -hariri 359 -blatantly 359 -unjustified 359 -syriza 359 -cotswolds 359 -sandstone 359 -parameters 359 -entangled 358 -realization 358 -1.50 358 -gorbachev 358 -caved 358 -pawn 358 -alli 358 -agonizing 358 -weakest 358 -jacuzzi 358 -door-to-door 358 -on-loan 358 -resuming 358 -anti-depressants 358 -villiers 358 -ravel 358 -reviving 358 -orchestrating 358 -pryor 358 -fresh-faced 358 -noriega 358 -stockpiles 358 -floored 358 -seduced 358 -originate 358 -gilles 358 -fatigues 358 -deanna 358 -murals 358 -avonte 358 -brothels 358 -improbable 358 -scrape 358 -cashman 357 -scoresheet 357 -vomited 357 -mathew 357 -mantel 357 -degradation 357 -drink-drive 357 -clutched 357 -dismisses 357 -catch-up 357 -swartz 357 -emilia 357 -suzuki 357 -wirelessly 357 -ida 357 -busquets 357 -ibe 357 -aberystwyth 357 -footballs 357 -at-risk 357 -mcgill 357 -6in 357 -zion 357 -defrauded 357 -o'keefe 357 -audible 357 -amicable 357 -shekau 357 -jadeja 357 -undergoes 357 -kitted 357 -pretext 357 -wafer 357 -casper 357 -versailles 357 -hornet 357 -superbly 357 -sequestration 357 -0800 555 111 356 -surface-to-air 356 -t20 356 -effortlessly 356 -zumba 356 -spontaneously 356 -powdered 356 -reaffirmed 356 -cushions 356 -uttar 356 -redding 356 -changer 356 -dishwasher 356 -marta 356 -rectify 356 -eczema 356 -klausner 356 -congressmen 356 -esteem 356 -buns 356 -viability 356 -cte 356 -imogen 356 -virgil 356 -kkk 356 -markus 356 -flaming 356 -faisal 356 -tremor 356 -rockies 356 -profusely 356 -gervais 356 -rarest 356 -brandished 356 -valor 356 -maddox 356 -137 356 -gameplay 355 -stout 355 -rehabilitate 355 -nesting 355 -all-rounder 355 -carta 355 -sectioned 355 -counsellor 355 -vacancies 355 -studded 355 -invariably 355 -groundwater 355 -upgrading 355 -squat 355 -jocelyn 355 -otis 355 -restraints 355 -chlorine 355 -lifesaving 355 -commuting 355 -illusions 355 -7.4 355 -cartridges 355 -woeful 355 -norma 355 -matriarch 355 -incorporates 355 -yelp 355 -sociable 355 -trenton 355 -lampedusa 355 -beak 355 -udall 355 -restricts 355 -shi'ite 355 -wentworth 354 -meteorites 354 -shotguns 354 -mailed 354 -8.6 354 -tease 354 -nidal 354 -gazing 354 -immersive 354 -paddling 354 -bunk 354 -minsk 354 -gushed 354 -metabolic 354 -up-and-coming 354 -philanthropic 354 -avlon 354 -bedrock 354 -yeates 354 -big-name 354 -mobilize 354 -manpower 354 -blending 354 -bottas 354 -spin-off 354 -emphasise 354 -admires 354 -quits 354 -five-month-old 354 -disk 354 -136 354 -blooming 354 -sunbeds 353 -banish 353 -3:30 353 -blackouts 353 -tepco 353 -clogged 353 -storeys 353 -gettysburg 353 -ospreys 353 -irrespective 353 -pembrokeshire 353 -pipelines 353 -pancakes 353 -conveyor 353 -six-day 353 -rescheduled 353 -spectacles 353 -erickson 353 -bomb-making 353 -fingertips 353 -unsealed 353 -sven 353 -compliant 353 -horman 353 -alvin 353 -combs 353 -balaclava 353 -self-imposed 353 -extramarital 353 -glands 353 -skeptics 353 -peeling 353 -layoffs 353 -aguilera 353 -unduly 353 -penh 353 -rutland 353 -parr 353 -narrowing 353 -lanterns 353 -gainesville 353 -absorbing 353 -quotas 353 -clerical 352 -az 352 -stills 352 -ipods 352 -pattinson 352 -post-election 352 -splendid 352 -lantern 352 -muir 352 -rappers 352 -sniffing 352 -centerpiece 352 -kinnock 352 -payers 352 -chilton 352 -fareed 352 -cultivated 352 -handout 352 -escorting 352 -moth 352 -momentarily 352 -uplifting 
352 -hormonal 352 -laidlaw 352 -acapulco 352 -rebate 352 -jeanette 352 -yarmouth 352 -commemorations 352 -gardiner 352 -observes 352 -vividly 352 -christened 352 -matchday 352 -ducked 352 -bodybuilder 352 -ag 351 -uva 351 -all-round 351 -self-made 351 -catcher 351 -balfour 351 -enticing 351 -tasmanian 351 -gigi 351 -60m 351 -coronado 351 -hakim 351 -bandaged 351 -broadmoor 351 -well-placed 351 -somme 351 -tribesmen 351 -consul 351 -mobster 351 -definitively 351 -esquire 351 -remote-controlled 351 -worded 351 -mccanns 351 -reckon 351 -garnett 351 -penniless 351 -crusader 351 -naughton 351 -wwf 351 -recurrence 351 -8ft 351 -neural 351 -eubank 351 -dictators 351 -molecule 351 -amputations 351 -lewisham 351 -cartwright 350 -wayward 350 -oyston 350 -cones 350 -tees 350 -patsy 350 -ferreira 350 -kangaroos 350 -neared 350 -grief-stricken 350 -izzy 350 -circumstantial 350 -wally 350 -appreciative 350 -examiners 350 -single-handedly 350 -insp 350 -nuremberg 350 -time.com 350 -tote 350 -166 350 -slimmed 350 -wbo 350 -jennie 350 -camacho 350 -euphoria 350 -gervinho 350 -cranston 350 -labeouf 350 -cruyff 350 -rake 350 -comfy 350 -88-year-old 350 -threads 350 -bohn 350 -riddle 350 -blinds 350 -blyth 350 -graceful 350 -bavarian 350 -skelton 350 -moist 350 -felton 350 -sidewalks 350 -evacuating 350 -enlargement 350 -salsa 349 -brasilia 349 -busby 349 -fountains 349 -four-month-old 349 -tolokonnikova 349 -huntelaar 349 -blackened 349 -immortalised 349 -addictions 349 -yamaha 349 -sobering 349 -tongues 349 -glide 349 -adolescence 349 -litany 349 -multimillion-dollar 349 -brisk 349 -480 349 -lobbyist 349 -perpetual 349 -munster 349 -physicists 349 -instigated 349 -qureshi 349 -ammonia 349 -tal 349 -hurd 349 -greenville 349 -invincible 349 -occupies 349 -agility 349 -promoters 349 -glitches 349 -svelte 349 -aristocrat 348 -vader 348 -irreplaceable 348 -resumption 348 -chinatown 348 -gang-raped 348 -viewpoint 348 -baja 348 -4in 348 -roulette 348 -christoph 348 -countryman 348 -washes 348 -facilitating 348 -ballard 348 -maroon 348 -nods 348 -errands 348 -strikingly 348 -greenberg 348 -jd 348 -coloccini 348 -undercut 348 -trusty 348 -ripley 348 -excursions 348 -contraption 348 -hearty 348 -healy 348 -augmented 348 -knowledgeable 348 -simons 348 -breezy 348 -soggy 348 -resorting 348 -mcgeady 348 -vacate 348 -rung 347 -buoyed 347 -brewster 347 -sparkly 347 -uncontrollable 347 -charmed 347 -sanity 347 -inquisitive 347 -mmr 347 -garth 347 -dreamt 347 -enlist 347 -5.1 347 -rayo 347 -fayed 347 -commits 347 -matrix 347 -metadata 347 -sopranos 347 -koenig 347 -150million 347 -anarchy 347 -fungal 347 -securely 347 -plaques 347 -mainz 347 -defrauding 347 -sequester 347 -electrocuted 347 -rumble 347 -monochrome 347 -helpers 347 -residual 347 -sofas 347 -whitby 347 -throwback 347 -rami 347 -simulations 347 -carina 347 -ur 347 -eaters 347 -seven-day 347 -run-down 347 -punctuated 347 -borger 347 -oahu 347 -woolf 347 -snowboarding 347 -jelena 347 -tiring 347 -gambler 347 -connector 347 -combatants 346 -steeped 346 -bulldogs 346 -locke 346 -irrigation 346 -nordic 346 -stumped 346 -raking 346 -mont 346 -wristband 346 -cost-cutting 346 -forecasting 346 -defra 346 -doggy 346 -141 346 -candidly 346 -erroneous 346 -ranting 346 -deafening 346 -sina 346 -hideous 346 -cambiasso 346 -constables 346 -bailouts 346 -newsreader 346 -decisively 346 -centuries-old 346 -gram 346 -conspicuous 346 -avocado 346 -endoscopy 346 -spector 345 -smelly 345 -rained 345 -autopsies 345 -gylfi 345 -gazprom 345 -psi 345 -7/7 345 -fodder 345 -madam 345 
-effortless 345 -outs 345 -14-year 345 -thinning 345 -cupboards 345 -6.9 345 -touts 345 -berbatov 345 -pharaoh 345 -brittney 345 -solar-powered 345 -tapper 345 -thc 345 -doma 345 -pasty 345 -bendtner 345 -declarations 345 -low-fat 345 -textbook 345 -alligators 345 -flashbacks 345 -perverse 345 -selections 345 -pavel 345 -timid 345 -xiao 345 -expat 345 -forties 345 -discontinued 345 -reiterate 344 -savoy 344 -memes 344 -sponsoring 344 -chests 344 -malloy 344 -legions 344 -impetus 344 -bouquets 344 -lavezzi 344 -ison 344 -unscrupulous 344 -chantelle 344 -co-wrote 344 -routledge 344 -nibali 344 -confrontational 344 -bskyb 344 -emboldened 344 -hijackers 344 -court-ordered 344 -unwarranted 344 -13-year 344 -masterchef 344 -dampen 344 -hooliganism 344 -time-lapse 344 -cardio 344 -interpretations 344 -scottsdale 344 -clone 344 -turk 344 -seamlessly 344 -halliburton 344 -prankster 344 -shaker 344 -lcc 344 -reputations 344 -barra 344 -collars 344 -seacrest 344 -mclelland 344 -allocation 343 -10st 343 -necessities 343 -flagging 343 -sherpa 343 -karma 343 -yeo 343 -sculpted 343 -honed 343 -ono 343 -jj 343 -unregulated 343 -chinook 343 -vela 343 -trolleys 343 -dormitory 343 -plastics 343 -sarajevo 343 -robins 343 -obeidallah 343 -spade 343 -cid 343 -textbooks 343 -spca 343 -overhauled 343 -franks 343 -pcc 343 -rupees 343 -fossilised 343 -orthopaedic 343 -demoted 343 -kerobokan 343 -aliases 342 -montgomerie 342 -8.1 342 -firehouse 342 -dismissive 342 -recaptured 342 -complying 342 -kennels 342 -santo 342 -tuxedo 342 -bellamy 342 -obligated 342 -vertically 342 -peppered 342 -enterovirus 342 -conclave 342 -big-money 342 -marmite 342 -tripping 342 -carrera 342 -oleg 342 -two-storey 342 -pooley 342 -darryl 342 -evidenced 342 -154 342 -picket 342 -commenced 342 -superiority 342 -infographic 342 -three-storey 342 -ri 342 -cobham 342 -bloodiest 342 -al-islam 341 -darth 341 -stagnant 341 -lew 341 -zagreb 341 -grapple 341 -transforms 341 -insignificant 341 -impersonating 341 -buddhists 341 -red-faced 341 -currencies 341 -in-store 341 -emery 341 -melvin 341 -masipa 341 -retriever 341 -cascade 341 -geeks 341 -unfolds 341 -x-rated 341 -falluja 341 -yao 341 -mcmanaman 341 -good-looking 341 -wardrobes 341 -capping 341 -fabled 341 -prodigy 341 -oily 341 -salons 341 -macquarie 341 -petitioned 341 -shuttered 341 -inoperable 341 -roper 341 -preached 341 -arwa 341 -recruiters 341 -holidaymaker 341 -constructors 341 -defamatory 341 -caged 341 -gaulle 341 -stifling 341 -incubation 340 -ab 340 -mythology 340 -reconnect 340 -modifications 340 -envisioned 340 -promenade 340 -moaning 340 -nonstop 340 -11st 340 -wirral 340 -basingstoke 340 -richter 340 -andorra 340 -antioxidants 340 -usc 340 -tobias 340 -uprooted 340 -karina 340 -foreigner 340 -jeopardize 340 -apocalyptic 340 -espresso 340 -herds 340 -juries 340 -hand-held 340 -generational 340 -quick-thinking 340 -dobbs 340 -scotsman 340 -humiliate 340 -cartagena 340 -feathered 340 -monet 340 -assumes 340 -142 340 -interrogators 340 -wetlands 340 -high-definition 339 -airliners 339 -snowball 339 -snapshots 339 -pardo 339 -freedman 339 -natalee 339 -manicured 339 -inventing 339 -tax-free 339 -stitch 339 -lowers 339 -latvian 339 -re-open 339 -keenan 339 -freddy 339 -8-0 339 -telephoned 339 -huawei 339 -niki 339 -anthropology 339 -rations 339 -monterey 339 -torino 339 -pomp 339 -230,000 339 -newfound 339 -stabilise 339 -jintao 338 -cleanliness 338 -d-california 338 -backfire 338 -advisories 338 -goran 338 -ladders 338 -doomsday 338 -elites 338 -erupts 338 -lioness 338 -shockwaves 
338 -douglass 338 -simultaneous 338 -toothbrush 338 -accredited 338 -monarchs 338 -yatsenyuk 338 -incapacitated 338 -cabs 338 -align 338 -defies 338 -unbroken 338 -whitmore 338 -hound 338 -frivolous 338 -mater 338 -blossomed 338 -skit 338 -crave 338 -wolverine 338 -fogle 338 -fiddle 338 -divorcee 337 -flushed 337 -milligan 337 -!!!! 337 -equip 337 -belfort 337 -hovered 337 -dinamo 337 -tricia 337 -unintentional 337 -one-two 337 -arlene 337 -conflicted 337 -recycle 337 -u.s.-mexico 337 -grandeur 337 -devin 337 -tubs 337 -kahn 337 -forcible 337 -censor 337 -unreservedly 337 -fetal 337 -gambia 337 -anti-apartheid 337 -burqa 337 -summon 337 -discrepancies 337 -orr 337 -ore 337 -everglades 337 -neurology 337 -sebastien 337 -howarth 337 -mone 337 -closeness 337 -cylinders 337 -gandy 336 -11million 336 -rewritten 336 -heskey 336 -cowards 336 -speculative 336 -eyelids 336 -duvet 336 -woodrow 336 -whispered 336 -democracies 336 -coombs 336 -amounting 336 -cuffed 336 -interracial 336 -diagram 336 -debutant 336 -delgado 336 -100mph 336 -compel 336 -aquino 336 -maximize 336 -breeder 336 -cass 336 -raven 336 -brewers 336 -dartmoor 336 -walled 336 -affront 336 -geordie 336 -scoreboard 336 -tamir 336 -fightback 336 -constituted 336 -11-month-old 336 -shimmering 336 -pear 336 -bowing 336 -canton 336 -subsided 336 -si 336 -petals 336 -gingerbread 336 -corsa 335 -olly 335 -lei 335 -ghanaian 335 -claret 335 -incubator 335 -stamping 335 -25m 335 -perk 335 -tuc 335 -solstice 335 -squalor 335 -episcopal 335 -best-seller 335 -164 335 -stewardship 335 -jody 335 -symbolism 335 -mugs 335 -alito 335 -herring 335 -annex 335 -constituent 335 -swanky 335 -revert 335 -rainwater 335 -onshore 335 -facelift 335 -stroud 335 -whitley 335 -lewin 335 -prejudices 335 -n.y. 335 -reconstructed 335 -pennies 335 -surpassing 335 -weathered 335 -stand-in 335 -cheekbones 335 -galliano 335 -voodoo 335 -decommissioned 335 -cunning 335 -judaism 335 -lesotho 334 -trickle 334 -kieron 334 -specializing 334 -non-muslims 334 -lineman 334 -sweethearts 334 -bolshoi 334 -guo 334 -mei 334 -smacked 334 -childish 334 -paternal 334 -finsbury 334 -piloting 334 -encampment 334 -poo 334 -confines 334 -vasquez 334 -minnie 334 -shahzad 334 -coronary 334 -soubry 334 -5-2 334 -gimmick 334 -natives 334 -preach 334 -perplexed 334 -tsipras 334 -lakeside 334 -court-martial 334 -mensch 334 -replicas 334 -enzyme 334 -pastries 334 -shrug 334 -gymnasium 334 -breaker 334 -tempers 333 -five-hour 333 -louisa 333 -massages 333 -wicker 333 -pisa 333 -illustrator 333 -148 333 -schindler 333 -payload 333 -unassuming 333 -soon-to-be 333 -venturing 333 -esparza 333 -worst-case 333 -discriminating 333 -ladbrokes 333 -pcso 333 -harwood 333 -whore 333 -ostrich 333 -b.c. 
333 -elm 333 -khyber 333 -gabbana 333 -152 333 -terrorised 333 -ada 333 -cesare 333 -missy 333 -gergen 333 -co-authored 333 -impressively 333 -pte 332 -clinching 332 -perseverance 332 -leach 332 -israeli-palestinian 332 -rendezvous 332 -houthi 332 -ussr 332 -massacred 332 -wags 332 -lugo 332 -dreamworks 332 -abercrombie 332 -gurley 332 -hardman 332 -corona 332 -gilmour 332 -mimi 332 -homepage 332 -accumulate 332 -aptly 332 -consented 332 -mains 332 -strung 332 -settles 332 -peeled 332 -patti 332 -extracting 332 -once-in-a-lifetime 332 -cultivation 332 -sparse 332 -tiller 332 -synod 332 -mba 332 -payton 332 -agnes 332 -ops 332 -hinges 332 -embryonic 332 -yeast 332 -12ft 332 -fringes 332 -studs 332 -deformed 332 -aristocratic 331 -untenable 331 -gasquet 331 -filler 331 -auxiliary 331 -insemination 331 -corriere 331 -ordnance 331 -nucleus 331 -commuted 331 -curt 331 -crooked 331 -hops 331 -betsy 331 -long-lost 331 -chichester 331 -ritzer 331 -damned 331 -detaining 331 -breathless 331 -skidded 331 -masterminding 331 -gabriella 331 -gagging 331 -afterlife 331 -lesions 331 -verona 331 -protector 331 -curbs 331 -pursuits 331 -christi 331 -0.4 331 -non-governmental 330 -laurel 330 -chops 330 -scunthorpe 330 -westboro 330 -inbox 330 -all-american 330 -87-year-old 330 -relocating 330 -preschool 330 -mainstay 330 -nyu 330 -tripp 330 -dunk 330 -hibs 330 -jeweller 330 -daniele 330 -talia 330 -32million 330 -robb 330 -soiled 330 -flourishing 330 -motivating 330 -fenway 330 -heisman 330 -compile 330 -raj 330 -palma 330 -incarnation 330 -non-violent 330 -proclaiming 330 -luciano 330 -adversely 330 -addenbrooke 330 -inexplicably 330 -mujahid 330 -vita 330 -hubert 330 -botanical 330 -pro-life 330 -mchugh 330 -arrears 330 -conserve 330 -sailboat 330 -katharine 330 -guzan 330 -zola 330 -halliwell 330 -grader 330 -sse 330 -exec 330 -eastmond 330 -antigua 329 -soros 329 -lakeland 329 -tatters 329 -8.2 329 -tampered 329 -leaderboard 329 -butch 329 -wildstein 329 -terminator 329 -brooch 329 -olsson 329 -pixel 329 -puppets 329 -keenly 329 -wrought 329 -sikhs 329 -shay 329 -minnows 329 -five-month 329 -sauces 329 -crass 329 -hefner 329 -fungi 329 -equate 329 -waterlow 329 -underside 329 -dixie 329 -inactive 329 -plied 329 -emile 329 -cup-winning 329 -bethesda 329 -alcoholics 329 -christophe 329 -declassified 328 -mummies 328 -panhandle 328 -hog 328 -o'hara 328 -discovers 328 -provocations 328 -emits 328 -acquisitions 328 -cauldron 328 -recounting 328 -paralympian 328 -terriers 328 -scorn 328 -pixar 328 -outfitted 328 -lizzy 328 -horowitz 328 -skyrocketed 328 -ngos 328 -presumptive 328 -hiddink 328 -deadlines 328 -stanislas 328 -eminem 328 -pasco 328 -coldplay 328 -unclaimed 328 -reinforces 328 -charms 328 -grievance 328 -grandpa 328 -lounges 328 -tuscaloosa 328 -scaring 328 -burdens 328 -ice-cream 328 -kazakh 328 -keene 328 -season-ending 328 -martel 328 -jedinak 328 -waning 327 -cradle 327 -ruud 327 -fathom 327 -cumulative 327 -dutton 327 -fund-raising 327 -chandeliers 327 -highest-paid 327 -cyst 327 -smokes 327 -seagulls 327 -easton 327 -eyelid 327 -undeniable 327 -wreaths 327 -ipsa 327 -indiscriminately 327 -deliberating 327 -alphabet 327 -trey 327 -laughable 327 -cashmere 327 -persists 327 -collider 327 -reginald 327 -kilogram 327 -eavesdropping 327 -saviour 327 -reboot 327 -spring/summer 327 -fruition 327 -shielding 327 -andrey 327 -uptick 327 -elf 327 -collie 327 -backer 327 -exploratory 327 -whispering 327 -bary 327 -inserting 327 -abetting 327 -mazzaglia 327 -seamless 327 -sprints 327 -tfl 327 -furnished 
327 -partizan 327 -crate 327 -mccready 326 -condoleezza 326 -kell 326 -aga 326 -re-enactment 326 -hiker 326 -siem 326 -milo 326 -parveen 326 -meteors 326 -unknowingly 326 -podcast 326 -147 326 -on-off 326 -osvaldo 326 -woolley 326 -bronte 326 -sequences 326 -65th 326 -kearney 326 -retirees 326 -highbury 326 -evasive 326 -liberate 326 -underdogs 326 -mass. 326 -kinder 326 -smartly 326 -chromosome 326 -almeria 326 -breakthroughs 326 -bunga 326 -waltham 326 -deprive 326 -molester 326 -veils 326 -suitors 326 -soothing 326 -doj 326 -descendant 326 -neeson 325 -jogger 325 -guerra 325 -habitual 325 -waltz 325 -wired.com 325 -wholesome 325 -authored 325 -rinehart 325 -affinity 325 -rediscovered 325 -treatable 325 -steelers 325 -monchengladbach 325 -madeline 325 -hipster 325 -nylon 325 -bagram 325 -repatriation 325 -4-3-3 325 -overlap 325 -gums 325 -neglecting 325 -wheelchair-bound 325 -souness 325 -dumps 325 -sling 325 -graceland 325 -benoit 325 -mistaking 325 -ramped 325 -smitten 325 -neurone 325 -kellogg 325 -aces 325 -fairway 325 -repayment 325 -bunkers 325 -v8 325 -julianne 325 -periodic 325 -savaged 325 -puff 325 -lever 325 -eminent 325 -magee 325 -siobhan 325 -skydive 325 -soca 325 -alexei 324 -rushes 324 -montero 324 -in-out 324 -iodine 324 -gallantry 324 -spruce 324 -snapper 324 -self-help 324 -next-door 324 -centre-half 324 -cas 324 -ransoms 324 -31,000 324 -helsinki 324 -pollutants 324 -2,100 324 -brawn 324 -arid 324 -bathe 324 -reclining 324 -melton 324 -beckett 324 -embezzlement 324 -harmon 324 -50-50 324 -slovenian 324 -minecraft 324 -perfected 324 -yoko 324 -thicke 324 -erectile 324 -moped 324 -seaweed 324 -arousal 324 -rosario 324 -folly 324 -cures 324 -bumping 324 -swerving 324 -lateral 324 -cutlery 324 -pirelli 324 -eclipsed 324 -40ft 324 -vorm 324 -unrecognisable 324 -gasp 324 -amassing 324 -zaragoza 324 -apprehend 324 -diffuse 323 -hajj 323 -finley 323 -magma 323 -collarbone 323 -sparring 323 -whitehouse 323 -limping 323 -maggots 323 -american-born 323 -rivalries 323 -kerb 323 -caregiver 323 -huddlestone 323 -chequers 323 -decades-long 323 -gobsmacked 323 -channing 323 -dredging 323 -jetty 323 -rustic 323 -vocals 323 -workplaces 323 -sidekick 323 -nca 323 -storied 323 -fabius 323 -disposing 323 -brinkley 323 -tot 323 -front-line 323 -off-limits 323 -dazzled 322 -borland 322 -bellingham 322 -messaged 322 -furor 322 -oscar-nominated 322 -martyrdom 322 -griezmann 322 -burlington 322 -tethered 322 -floppy 322 -contraceptives 322 -osteoporosis 322 -affordability 322 -e-book 322 -solider 322 -tinker 322 -churning 322 -piero 322 -shafilea 322 -sharpe 322 -primetime 322 -gsa 322 -gilded 322 -gilberto 322 -dissatisfied 322 -septicaemia 322 -quins 322 -communists 322 -tilly 322 -unreported 322 -soaps 322 -optimum 322 -153 322 -pertaining 322 -134 322 -groundwork 322 -headteachers 322 -syndicated 322 -expanse 322 -blush 322 -godin 322 -blurry 321 -eight-month-old 321 -kouyate 321 -tabled 321 -drags 321 -newmarket 321 -alesha 321 -stereotypical 321 -hiv-positive 321 -currie 321 -asserts 321 -bernadette 321 -consciously 321 -cylinder 321 -gyms 321 -gianni 321 -finely 321 -zulu 321 -kristi 321 -lawfully 321 -kavanagh 321 -bach 321 -ardent 321 -filin 321 -showroom 321 -brighten 321 -pines 321 -pillay 321 -boxed 321 -misinterpreted 321 -backbencher 321 -flogging 321 -tiote 321 -one-of-a-kind 321 -jens 321 -underlines 321 -anthropologist 321 -golan 321 -loosen 321 -aj 320 -2.50 320 -psycho 320 -depriving 320 -atom 320 -mai 320 -atrocious 320 -inhaled 320 -fab 320 -supervising 320 -klass 320 
-flimsy 320 -achievable 320 -kampala 320 -decades-old 320 -smuggler 320 -powys 320 -calamity 320 -werner 320 -fanning 320 -poodle 320 -9.3 320 -steamed 320 -abidjan 320 -blanchett 320 -magnum 320 -burr 320 -deceive 320 -clermont 320 -puyol 320 -nov 320 -cadaver 320 -sonya 320 -down-to-earth 320 -ostensibly 320 -howes 320 -ethanol 320 -misused 320 -plutonium 320 -mayday 320 -.45 320 -mudslide 320 -romo 320 -hershey 320 -wag 320 -contradiction 320 -shards 320 -plebgate 319 -farmed 319 -harshest 319 -narrator 319 -3-5-2 319 -promo 319 -renegotiation 319 -pajamas 319 -23-man 319 -naseer 319 -earner 319 -pervez 319 -hoarding 319 -intermittent 319 -hekmati 319 -regulates 319 -whoopi 319 -cgi 319 -svetlana 319 -palpable 319 -stew 319 -nanjing 319 -80million 319 -gaunt 319 -celeste 319 -j.k. 319 -pl 319 -inquirer 319 -contesting 319 -vandenburg 319 -curbing 319 -guevara 319 -celestial 319 -munir 319 -latham 319 -odour 319 -f**k 319 -cattermole 319 -barrie 319 -pilates 319 -bate 319 -oligarch 319 -lollipop 318 -ossetia 318 -unsubstantiated 318 -nasir 318 -courtship 318 -court-appointed 318 -wink 318 -preachers 318 -hannibal 318 -jaycee 318 -kingpin 318 -el-sisi 318 -pre-order 318 -supercars 318 -bengals 318 -oppressed 318 -isolating 318 -apprehension 318 -indulged 318 -mets 318 -megrahi 318 -mt. 318 -disobedience 318 -gurlitt 318 -kaczynski 318 -arises 318 -nomadic 318 -edmunds 318 -tempered 318 -md 318 -junctions 318 -warped 318 -butchered 318 -encased 318 -facilitated 318 -playfully 318 -slovakian 318 -kareem 318 -fingernails 317 -newsletter 317 -rewrite 317 -wracked 317 -pug 317 -skid 317 -muammar 317 -mahoney 317 -afflicted 317 -krim 317 -impulsive 317 -chong 317 -zack 317 -begovic 317 -landowners 317 -lead-up 317 -dips 317 -dai 317 -glanfield 317 -winery 317 -suns 317 -cubes 317 -polygamy 317 -cougar 317 -django 317 -mannequins 317 -cursing 317 -giggles 317 -imprint 317 -brumfield 317 -medway 317 -emwazi 317 -booms 317 -reprimand 317 -299 317 -sewol 317 -kingsley 317 -sever 317 -playa 317 -plentiful 317 -supernova 316 -cheesy 316 -close-range 316 -infertile 316 -zinc 316 -goody 316 -bryn 316 -presently 316 -shuts 316 -quan 316 -misconceptions 316 -cleese 316 -retaliated 316 -pauses 316 -monde 316 -heart-warming 316 -levs 316 -redundancies 316 -mehmet 316 -ypg 316 -sprinted 316 -off-campus 316 -edgbaston 316 -icrc 316 -bardsley 316 -grazed 316 -wreaked 316 -alamuddin 316 -gabriele 316 -javi 316 -probate 316 -marrakech 316 -weave 316 -foxx 316 -textiles 316 -photoshopped 316 -lagging 316 -counterproductive 316 -bohemian 316 -coincidentally 316 -paltry 316 -cohesion 316 -kyron 316 -jacintha 316 -stomachs 316 -mitsubishi 315 -asphalt 315 -rigs 315 -juno 315 -ousting 315 -transmitting 315 -scarcely 315 -recharge 315 -moderator 315 -accreditation 315 -unmasked 315 -sheltering 315 -mute 315 -never-ending 315 -distillery 315 -bookstore 315 -unattractive 315 -carat 315 -andes 315 -thistle 315 -dermot 315 -dershowitz 315 -koreas 315 -socialism 315 -harden 315 -slurred 315 -counter-attack 315 -waistline 315 -18million 315 -croc 315 -worthington 315 -riviere 315 -shandong 315 -bandits 315 -stewardess 315 -unsightly 315 -buster 315 -elastic 315 -renovate 315 -hairstyles 315 -kagame 315 -zeus 315 -handbook 315 -repealed 315 -principals 315 -neolithic 315 -chamakh 315 -cnnmoney 315 -tongo 315 -eleventh 314 -alasdair 314 -mcgraw 314 -malpractice 314 -sajid 314 -evaluations 314 -parnell 314 -falmouth 314 -advertiser 314 -carton 314 -jelavic 314 -ariana 314 -mamadou 314 -martini 314 -tomic 314 -eritrea 314 
-racists 314 -frederic 314 -goa 314 -shimon 314 -napier 314 -strapless 314 -mutant 314 -marxist 314 -stifle 314 -tack 314 -pumpkins 313 -affirmed 313 -seductive 313 -defibrillator 313 -rahm 313 -resolute 313 -etc 313 -sls 313 -complicate 313 -fanny 313 -waiters 313 -sewers 313 -buckled 313 -flemmi 313 -belligerent 313 -devolution 313 -plos 313 -tikrit 313 -retails 313 -phelan 313 -metaphor 313 -edf 313 -commence 313 -nerd 313 -excused 313 -solange 313 -giraffes 313 -bigelow 313 -tentatively 313 -nears 313 -pinpointed 313 -geologist 313 -knifepoint 313 -gagged 313 -fluctuations 313 -sigma 313 -cornyn 313 -urn 313 -petersen 313 -johansen 312 -upkeep 312 -teixeira 312 -mildly 312 -telegram 312 -ding 312 -dre 312 -ac360 312 -raccoon 312 -intestinal 312 -woakes 312 -incomprehensible 312 -terrence 312 -cannibal 312 -10-month-old 312 -hrw 312 -margate 312 -flds 312 -versatility 312 -knock-on 312 -programmer 312 -heckled 312 -rentals 312 -kendra 312 -absconded 312 -karla 312 -mugged 312 -rector 312 -socialising 312 -bearer 312 -forfeit 312 -pastoral 312 -mammogram 312 -alina 312 -ultra-orthodox 311 -revolves 311 -citroen 311 -mash 311 -yells 311 -speedway 311 -highly-rated 311 -inaccuracies 311 -chao 311 -buds 311 -overdoses 311 -consoled 311 -plundered 311 -snuck 311 -rmt 311 -capriles 311 -basking 311 -strengthens 311 -alluded 311 -mediocre 311 -shred 311 -kolkata 311 -jeddah 311 -jabhat 311 -migrating 311 -lurid 311 -ramadi 311 -woodlands 311 -impulses 311 -gunnar 311 -adelson 311 -evacuees 311 -awakened 311 -zuniga 311 -reared 311 -dime 311 -alterations 311 -decomposition 311 -closed-door 311 -citigroup 311 -tam 311 -dembele 311 -smoother 311 -surfboard 311 -chainsaw 311 -altman 311 -avila 311 -thorny 310 -psyche 310 -tweaked 310 -well-respected 310 -alarmingly 310 -knack 310 -denpasar 310 -stadio 310 -mansell 310 -yearning 310 -roxy 310 -payoff 310 -kayaking 310 -decadent 310 -zealander 310 -sherri 310 -buckles 310 -bao 310 -ansari 310 -machetes 310 -baumgartner 310 -mistrial 310 -canio 310 -parton 310 -undergraduates 310 -alas 310 -uncovering 310 -abou 310 -scrum-half 310 -overheating 310 -pegged 310 -milke 310 -taker 310 -co-director 310 -foyer 310 -propane 310 -benton 310 -steaming 310 -myler 310 -gillette 310 -third-place 310 -houthis 310 -icu 310 -carmel 310 -decorator 310 -electrons 310 -frequencies 310 -guardianship 310 -devoid 310 -tonga 310 -blockage 310 -gunn 310 -fenerbahce 310 -evoke 310 -will.i.am 310 -elgin 310 -radicalization 310 -sfa 310 -diverting 310 -ds 310 -revisited 310 -divorces 309 -automakers 309 -mariupol 309 -corinna 309 -geyser 309 -flatter 309 -ieds 309 -l/cpl 309 -monterrey 309 -costas 309 -bungling 309 -pleasures 309 -cradling 309 -ave 309 -faltering 309 -oft 309 -14million 309 -flickr 309 -codenamed 309 -redgrave 309 -gmp 309 -dora 309 -tantrum 309 -breaths 309 -hindering 309 -310 309 -bandwagon 309 -tabby 309 -wasteland 309 -defector 309 -andersen 309 -flops 309 -brahimi 309 -overheated 309 -mollie 309 -vips 309 -manhole 309 -35th 309 -eater 309 -plough 309 -marius 309 -extravaganza 309 -graf 309 -guernsey 309 -reflections 309 -hives 309 -takers 309 -defiantly 308 -backhand 308 -adorn 308 -tait 308 -single-engine 308 -feral 308 -255 308 -flicks 308 -warheads 308 -carmichael 308 -bestowed 308 -recruiter 308 -left-footed 308 -:-rrb- 308 -theorists 308 -bettencourt 308 -hamish 308 -dwellers 308 -palate 308 -loyalist 308 -superpower 308 -aubrey 308 -screwdriver 308 -condominium 308 -resonance 308 -pagan 308 -walliams 308 -duet 308 -beatrix 308 -swerve 308 -clutter 
308 -stiles 308 -shovels 308 -bologna 308 -superyacht 308 -theron 308 -converged 308 -aig 308 -magnesium 308 -implication 308 -clocking 308 -chipotle 308 -cellulite 307 -bakers 307 -unpunished 307 -godmother 307 -flak 307 -attorney-general 307 -unseasonably 307 -kobayashi 307 -lyric 307 -efficacy 307 -vice-captain 307 -cowering 307 -moritz 307 -manchin 307 -thrift 307 -endowment 307 -loops 307 -medinah 307 -alienating 307 -lumia 307 -montoya 307 -championing 307 -younes 307 -rausing 307 -exchequer 307 -encompasses 307 -******* 307 -two-state 307 -euromillions 307 -penning 307 -collis 307 -bernice 307 -senegalese 307 -all-important 307 -disoriented 307 -f-16 307 -16-year 307 -snoring 307 -huth 307 -high-energy 307 -headley 307 -choo 307 -srebrenica 307 -censors 307 -theology 307 -roberta 307 -batista 307 -all-inclusive 307 -kaitlyn 307 -gotham 307 -spiraling 306 -laces 306 -benny 306 -interrupt 306 -fastest-growing 306 -asphyxiation 306 -vents 306 -two-way 306 -1890 306 -equalized 306 -ebony 306 -transsexual 306 -brailsford 306 -embodies 306 -minimalist 306 -exhilarating 306 -radicalisation 306 -heart-wrenching 306 -hand-in-hand 306 -bruise 306 -dracula 306 -wiser 306 -al-libi 306 -flirt 306 -mountainside 306 -manquillo 306 -flips 306 -latte 306 -foggy 306 -wildest 306 -generously 306 -haigh 306 -hodges 306 -crusaders 306 -puzzles 306 -break-ins 306 -botha 306 -inconceivable 306 -abbot 306 -gaston 306 -rudi 306 -onetime 306 -self-taught 306 -beattie 306 -lancet 306 -nisman 305 -hotbed 305 -pothole 305 -tactically 305 -disbanded 305 -maneuvers 305 -vapor 305 -8.4 305 -15ft 305 -cabaret 305 -edwardian 305 -woefully 305 -tunis 305 -mcauliffe 305 -luncheon 305 -unintentionally 305 -acton 305 -saville 305 -4,800 305 -deacon 305 -duane 305 -bonham 305 -goma 305 -sachin 305 -legalise 305 -elisa 305 -onassis 305 -repatriated 305 -cockroaches 305 -highness 305 -doorman 305 -time-consuming 305 -7m 305 -off-season 305 -kharkiv 305 -whim 305 -228 305 -dunga 305 -tonic 305 -vu 305 -badawi 305 -long-serving 305 -dench 305 -climates 305 -navigator 305 -platt 305 -hersman 305 -jamil 305 -centennial 305 -apprenticeship 305 -3,400 305 -saucy 304 -hundley 304 -littering 304 -captivating 304 -smokey 304 -knoxville 304 -altidore 304 -pastel 304 -brittle 304 -shrines 304 -consolidated 304 -mt 304 -registers 304 -well-off 304 -spoilt 304 -latitude 304 -reviewers 304 -argo 304 -shuffle 304 -4,200 304 -priebus 304 -baryalei 304 -gliding 304 -hysterectomy 304 -ge 304 -gi 304 -spalding 304 -stalling 304 -es 304 -statutes 304 -camelot 304 -oats 304 -injury-time 304 -jobseeker 304 -averted 304 -msf 304 -appliance 304 -appleton 304 -sagging 304 -yannick 303 -coney 303 -kick-start 303 -tolls 303 -tailoring 303 -sevastopol 303 -hayman 303 -curtailed 303 -overruled 303 -cannibalism 303 -laird 303 -foie 303 -gabor 303 -guerrillas 303 -quintessential 303 -nunez 303 -snacking 303 -trumpet 303 -pacemaker 303 -vanish 303 -fruitless 303 -corset 303 -configuration 303 -originals 303 -hone 303 -aiken 303 -complainants 303 -airbase 303 -abnormally 303 -gillibrand 303 -genres 303 -seau 302 -chai 302 -seminars 302 -manger 302 -springboks 302 -reps 302 -landers 302 -gamer 302 -coax 302 -realism 302 -perfecting 302 -strugglers 302 -sayah 302 -mooney 302 -pollsters 302 -four-legged 302 -dwindled 302 -posthumous 302 -countering 302 -albright 302 -transocean 302 -brazile 302 -firebrand 302 -softball 302 -nodding 302 -mayes 302 -iguala 302 -maddison 302 -talal 302 -preparatory 302 -blood-stained 302 -outfield 302 -withers 302 -fast-growing 
302
[remaining entries of the deleted word-frequency vocabulary file, originally one "-word count" line per deleted diff line, with counts descending from 302 through 212; the run begins "-guesthouse 302 -shortness 302 -redfern 302 ..." and ends "... -emancipation 212 -va."]
212 -vindication 212 -yoon 212 -circumference 212 -gelman 212 -punter 212 -hologram 212 -evokes 212 -exporters 212 -seagull 212 -fawkes 212 -dooley 212 -strootman 212 -arroyo 212 -bopara 212 -warriena 212 -unforeseen 212 -2d 212 -osbon 212 -outdone 212 -harnesses 212 -sockets 212 -ridgeway 212 -uplift 211 -paragliding 211 -hanlon 211 -dumbfounded 211 -transports 211 -rielle 211 -newly-released 211 -seething 211 -golgowski 211 -sabina 211 -wavy 211 -binder 211 -oatmeal 211 -salinger 211 -transitions 211 -64th 211 -clambering 211 -sucker 211 -misadventure 211 -firefox 211 -fewest 211 -raring 211 -chopra 211 -mcg 211 -soya 211 -khat 211 -gulbis 211 -multimillion 211 -never-before-seen 211 -muck 211 -encyclopedia 211 -five-week 211 -kerner 211 -palacio 211 -snodgrass 211 -mid-july 211 -eternally 211 -exoplanets 211 -parenting.com 211 -beggar 211 -galore 211 -booby 211 -entitlements 211 -390 211 -multi-national 211 -92-year-old 211 -saddled 211 -felines 211 -cautionary 211 -figure-hugging 211 -cabo 211 -spanish-language 211 -jameson 211 -maddy 211 -transmissions 211 -dissolution 211 -tingling 211 -velasquez 211 -coulter 211 -bhutan 211 -aldershot 211 -rejoice 211 -nistelrooy 211 -woe 211 -flirted 211 -haddad 211 -como 211 -ellington 211 -evocative 211 -inspectorate 211 -la. 211 -hauser 210 -· 210 -nominal 210 -39,000 210 -hammar 210 -trawler 210 -13million 210 -balked 210 -nicklas 210 -prodded 210 -annabelle 210 -layton 210 -toth 210 -sit-down 210 -sleazy 210 -allay 210 -learner 210 -granddaughters 210 -glamorgan 210 -1899 210 -catheter 210 -peking 210 -windermere 210 -scolded 210 -refining 210 -bridlington 210 -lowery 210 -subside 210 -drawbacks 210 -crank 210 -moderated 210 -mhra 210 -astrophysics 210 -jansen 210 -coups 210 -undesirable 210 -sledge 210 -165,000 210 -placate 210 -benin 210 -penney 210 -aitken 210 -hennessey 210 -unconcerned 210 -run-off 210 -chloroform 210 -embody 210 -pointe 210 -caron 210 -botanic 210 -excludes 210 -keylor 210 -kitchener 210 -starkey 210 -shevchenko 210 -gynaecologist 210 -dreamers 210 -nonexistent 210 -complies 210 -mink 210 -inhibit 210 -glaad 210 -dubuisson 210 -steiner 210 -forgave 210 -foxnews.com 210 -mercurial 210 -ofqual 210 -overland 210 -olives 210 -mid-march 210 -skirmish 209 -funk 209 -scuppered 209 -zoopla 209 -loomed 209 -glynn 209 -chairmen 209 -pointer 209 -thrifty 209 -launchbury 209 -courtside 209 -elude 209 -fenced 209 -sweats 209 -operas 209 -degeneration 209 -stranglehold 209 -elation 209 -rebirth 209 -definitions 209 -heart-stopping 209 -meier 209 -bai 209 -hindocha 209 -dominick 209 -dali 209 -520 209 -imperious 209 -katniss 209 -hooves 209 -execution-style 209 -holiest 209 -285 209 -lawnmower 209 -agassi 209 -middle-income 209 -nikolai 209 -millennia 209 -sixes 209 -ever-growing 209 -correspond 209 -preserves 209 -sympathizers 209 -nader 209 -persson 209 -sparing 209 -scrappy 209 -centrica 209 -vertigo 209 -implements 209 -masculinity 209 -loren 209 -blaise 209 -unfavorable 209 -buttock 209 -stallion 209 -compartments 209 -uni 209 -brca1 209 -pyrotechnics 209 -atoll 209 -48th 209 -mcgarry 209 -disclaimer 209 -horrifically 209 -worshipped 209 -diversify 209 -captaining 209 -cut-off 209 -libido 209 -orthopedic 209 -snooki 209 -seams 209 -frowned 208 -drexel 208 -peralta 208 -hollingsworth 208 -unloading 208 -incense 208 -charade 208 -subtitles 208 -sedate 208 -free-kicks 208 -abdo 208 -disappointments 208 -principally 208 -kmart 208 -bannatyne 208 -spurious 208 -dube 208 -nectar 208 -dispatching 208 -horizontally 208 -eurasia 208 
-ragged 208 -reassigned 208 -provocatively 208 -forte 208 -travelodge 208 -handlebars 208 -butte 208 -ni 208 -620 208 -11:45 208 -busting 208 -trucking 208 -docklands 208 -uncooperative 208 -86th 208 -slovyansk 208 -hendrick 208 -consenting 208 -motta 208 -gleason 208 -clones 208 -767 208 -nipped 208 -krystal 208 -amok 208 -denham 208 -repelled 208 -crumbs 208 -moan 208 -unites 208 -russel 208 -58,000 208 -1860 208 -schmitt 208 -beachgoers 208 -abdallah 208 -hockney 208 -entails 208 -kristine 208 -vogel 208 -fagan 208 -summery 208 -uninvited 208 -perverted 208 -perpetuate 208 -anichebe 208 -malvern 207 -caro 207 -mishandled 207 -89th 207 -glanced 207 -tutsis 207 -olympiakos 207 -prickly 207 -69th 207 -venison 207 -bane 207 -southsea 207 -springing 207 -lay-off 207 -panties 207 -univision 207 -veer 207 -interred 207 -infiltrating 207 -mme 207 -shorthand 207 -dukes 207 -bagging 207 -almasy 207 -deluged 207 -clenched 207 -drubbing 207 -pointedly 207 -supplemental 207 -12.45 207 -earle 207 -vocalist 207 -blue-eyed 207 -abstain 207 -trimming 207 -ballad 207 -creatively 207 -mumps 207 -expatriate 207 -sant 207 -ger 207 -painless 207 -rcmp 207 -maharaj 207 -kicker 207 -blister 207 -cascading 207 -rosamund 207 -hochsprung 207 -death-defying 207 -elie 207 -categorised 207 -twisters 207 -nur 207 -pia 207 -confiscate 207 -crunching 207 -maneuvering 207 -basing 207 -recife 207 -gastroenteritis 207 -alluring 207 -robustly 207 -blueberries 207 -amd 207 -garzon 207 -disenfranchised 207 -closets 207 -derbies 207 -isaacson 207 -looser 207 -materialise 206 -protagonists 206 -skirmishes 206 -overworked 206 -sprout 206 -custom-built 206 -blige 206 -safaris 206 -fundamentalists 206 -nine-day 206 -wrenching 206 -luminaries 206 -upturned 206 -strood 206 -dentures 206 -ayrton 206 -crossrail 206 -hawthorne 206 -incessant 206 -57,000 206 -jungles 206 -jada 206 -bulldozer 206 -herve 206 -recklessness 206 -insurmountable 206 -objecting 206 -breaststroke 206 -balcombe 206 -suzuka 206 -forza 206 -purchasers 206 -eel 206 -bask 206 -slush 206 -deducted 206 -keyhole 206 -eight-week 206 -waldo 206 -mcmullen 206 -fairbanks 206 -yukawa 206 -vergara 206 -clergyman 206 -11.3 206 -screeching 206 -mouthed 206 -e-readers 206 -abuzz 206 -bayou 206 -mobilizing 206 -achieves 206 -zen 206 -flatmate 206 -wanders 206 -87th 206 -catapult 206 -rumbling 206 -nineveh 206 -copley 206 -paddles 206 -calculus 206 -triomphe 206 -geelong 206 -celibacy 206 -baring 206 -rowers 206 -winnipeg 206 -maize 206 -mckinney 205 -mystified 205 -hesitated 205 -1880s 205 -rugs 205 -lockwood 205 -fiennes 205 -barter 205 -approachable 205 -hollyoaks 205 -congregations 205 -nuttall 205 -life-support 205 -silhouettes 205 -officiated 205 -windmill 205 -picnics 205 -starfish 205 -pirated 205 -dope 205 -simferopol 205 -sketchy 205 -garnering 205 -dung 205 -deviate 205 -oblivion 205 -kinetic 205 -ratchet 205 -ill-gotten 205 -wrappers 205 -mullin 205 -adonis 205 -meteoric 205 -dann 205 -prost 205 -biofuels 205 -gulliver 205 -lucian 205 -bustle 205 -eloise 205 -iman 205 -nam 205 -berating 205 -fizz 205 -4.7-inch 205 -dagger 205 -brosnan 205 -contradicting 205 -fittingly 205 -follicles 205 -1.45 205 -booklet 205 -globalization 205 -pwc 205 -72,000 205 -kilauea 205 -meeks 205 -appleby 205 -trivia 205 -gang-rape 205 -infiltration 205 -connell 205 -parched 205 -gent 205 -mules 205 -lux 205 -symbolize 205 -198 205 -stiffness 205 -gujarat 205 -countryfile 205 -long-suffering 205 -dimitri 205 -rips 205 -occurrences 205 -starry 205 -disapproved 205 -dingo 205 -ashby 205 -pahoa 
205 -nightfall 205 -lobbed 205 -chaudhry 205 -cato 205 -goering 205 -violators 204 -seeping 204 -carp 204 -dribble 204 -coughs 204 -fillet 204 -impressionist 204 -wont 204 -record-setting 204 -greta 204 -potomac 204 -flux 204 -popularly 204 -nickelodeon 204 -11:23 204 -reopens 204 -yeti 204 -11:05 204 -bloomsbury 204 -unbreakable 204 -indigo 204 -baa 204 -attainment 204 -mcc 204 -goffin 204 -solicited 204 -avalos 204 -rafters 204 -sanctity 204 -10k 204 -earls 204 -leal 204 -yasser 204 -greensboro 204 -activation 204 -ill-treatment 204 -siphoned 204 -lobe 204 -indulgence 204 -enthused 204 -churchyard 204 -til 204 -inactivity 204 -abyan 204 -tobago 204 -bremner 204 -kidderminster 204 -infinitely 204 -tuscan 204 -squires 204 -oosthuizen 204 -dimmed 204 -2021 204 -fending 204 -theological 204 -anti-immigrant 204 -tanzanian 204 -zvonareva 204 -sharpened 204 -asha 204 -una 204 -shafts 204 -tuttosport 204 -sayers 204 -17,500 204 -dotson 204 -rehearsed 204 -knuckle 204 -swabs 204 -deep-sea 204 -crocs 204 -vistas 204 -bloating 204 -klay 204 -undersecretary 203 -425 203 -cassano 203 -llambias 203 -vacca 203 -penzance 203 -veyron 203 -tie-break 203 -fisa 203 -mather 203 -phishing 203 -smallpox 203 -dmv 203 -ellicott 203 -mediate 203 -trucker 203 -haris 203 -hamill 203 -neathway 203 -uden 203 -fertiliser 203 -befitting 203 -disingenuous 203 -excise 203 -godolphin 203 -emilie 203 -untapped 203 -shabazz 203 -crypt 203 -seafloor 203 -hough 203 -paperback 203 -emulating 203 -mabel 203 -seb 203 -wheeling 203 -sabbatical 203 -envious 203 -mutated 203 -news.com.au 203 -inscriptions 203 -scheming 203 -igniting 203 -chua 203 -10.6 203 -polygamist 203 -patronage 203 -yoshida 203 -holm 203 -adopters 203 -hoyt 203 -mursi 203 -pilkington 203 -valiant 203 -projectiles 203 -fiorina 203 -pandering 203 -chiefly 203 -unjustly 203 -6lb 203 -memento 203 -signatories 203 -most-wanted 203 -conti 203 -amplify 203 -sunburn 203 -underbelly 202 -furnace 202 -ratios 202 -conquering 202 -embarrassingly 202 -devi 202 -aquariums 202 -baseman 202 -distin 202 -virat 202 -10-month 202 -dekalb 202 -buzzed 202 -beech 202 -1892 202 -bulletins 202 -helle 202 -ectopic 202 -mesmerising 202 -precedence 202 -carell 202 -gang-related 202 -snug 202 -dedicating 202 -epidemics 202 -reformer 202 -symptomatic 202 -10km 202 -jiangsu 202 -gazza 202 -grooms 202 -chetham 202 -bpa 202 -geiger 202 -chaser 202 -pitiful 202 -bakri 202 -assures 202 -volts 202 -webs 202 -paulista 202 -p5 202 -envisions 202 -volleyed 202 -scuffles 202 -spurring 202 -redirect 202 -playlist 202 -daubed 202 -whistler 202 -scammed 202 -huguette 202 -unravelled 202 -whittled 202 -top-selling 202 -annuity 202 -milburn 202 -femen 202 -gestapo 202 -itch 202 -geisha 202 -braintree 202 -10:40 202 -fulfillment 202 -guideline 202 -angelou 202 -teret 202 -distancing 202 -lleyton 202 -caitlyn 202 -joko 202 -staircases 202 -baptised 202 -yo-yo 202 -furnish 202 -centrally 202 -montevideo 202 -rennie 201 -enticed 201 -nasr 201 -200th 201 -bernardo 201 -tatler 201 -volusia 201 -quip 201 -bib 201 -cementing 201 -78,000 201 -voluminous 201 -bushland 201 -jed 201 -phipps 201 -on-trend 201 -alf 201 -nj 201 -newsstand 201 -nikica 201 -gleeson 201 -shattuck 201 -nell 201 -loyalties 201 -salinas 201 -chemists 201 -retailing 201 -bourdain 201 -pre-election 201 -coworkers 201 -ill-health 201 -remi 201 -see-through 201 -one-fifth 201 -percentages 201 -behring 201 -drawdown 201 -swooping 201 -doorbell 201 -susannah 201 -r&a 201 -lapping 201 -koskinen 201 -pickle 201 -ss15 201 -mainline 201 -tasteful 201 
-family-owned 201 -occured 201 -immersion 201 -intuition 201 -hird 201 -unfathomable 201 -frolicking 201 -disks 201 -blissfully 201 -hippie 201 -dispensing 201 -rudin 201 -airtime 201 -sautner 201 -all-night 201 -muzzle 201 -fanciful 201 -millimeters 201 -ve 201 -interconnected 201 -exclusivity 201 -zalkalns 201 -competitively 201 -tether 201 -orb 201 -10.3 201 -shipley 201 -paychecks 201 -lma 201 -headbutt 201 -enveloped 201 -oakes 201 -toffee 201 -servings 201 -intercom 201 -ducati 201 -wiles 201 -pasture 201 -townsville 201 -olimpico 201 -timeframe 200 -non-life-threatening 200 -expedited 200 -hilly 200 -looped 200 -ahmadi 200 -marquis 200 -sensibly 200 -burdened 200 -bogged 200 -lore 200 -frocks 200 -recede 200 -barometer 200 -sequels 200 -faith-based 200 -hartsfield-jackson 200 -quantify 200 -friedrich 200 -bristow 200 -patston 200 -bravest 200 -prawn 200 -academia 200 -maidan 200 -wrinkle 200 -llp 200 -umarov 200 -mozdir 200 -hating 200 -loeb 200 -ziegler 200 -zoomed 200 -nikita 200 -mourner 200 -bruins 200 -favouring 200 -cheery 200 -magnus 200 -ripples 200 -fishy 200 -4billion 200 -t.i. 200 -copious 200 -202 200 -reviled 200 -alkmaar 200 -bryon 200 -224 200 -brackets 200 -pinellas 200 -belgians 200 -10in 200 -ut 200 -assemblyman 200 -inward 200 -sardines 200 -waikiki 200 -purdue 200 -obstructive 200 -tillman 200 -ills 200 -fuji 200 -lobsters 200 -amorous 200 -anglo-saxon 200 -niko 200 -karrubi 200 -psychotherapist 200 -loosened 200 -periphery 199 -realtor 199 -pro-moscow 199 -dnainfo 199 -gusty 199 -albatross 199 -soderling 199 -willetts 199 -pampering 199 -moonlight 199 -lourdes 199 -zynga 199 -moser 199 -unopened 199 -donates 199 -unspoken 199 -polluting 199 -exoskeleton 199 -inlet 199 -spores 199 -pitchers 199 -methanol 199 -raton 199 -whittingdale 199 -55th 199 -repellent 199 -43rd 199 -allahu 199 -stegen 199 -jemma 199 -arredondo 199 -longest-running 199 -inge 199 -twente 199 -courteous 199 -keita 199 -absorbs 199 -open-source 199 -plunges 199 -budgetary 199 -breast-feeding 199 -banked 199 -lyft 199 -azalea 199 -unearth 199 -nik 199 -tahiti 199 -distort 199 -enormity 199 -eng 199 -osasuna 199 -forlan 199 -then-boyfriend 199 -640 199 -18th-century 199 -collage 199 -loos 199 -orkney 199 -neknominate 199 -mid-afternoon 199 -mind-blowing 199 -syndicates 199 -graders 199 -10-15 199 -allman 199 -kirkpatrick 199 -cartons 199 -caerphilly 199 -sept 199 -bering 199 -voyages 199 -favorably 199 -elmore 199 -shaffer 199 -tofu 199 -aorta 198 -spades 198 -cardiomyopathy 198 -lp 198 -anaphylactic 198 -carols 198 -a.j. 
198 -encroaching 198 -20mph 198 -paradigm 198 -corals 198 -mammograms 198 -pall 198 -slumping 198 -lafforgue 198 -splashes 198 -disheartening 198 -meager 198 -widnes 198 -dougall 198 -virgins 198 -soups 198 -shacknai 198 -sweetness 198 -meagher 198 -lulzsec 198 -affirm 198 -flavia 198 -ll 198 -fervor 198 -chernoff 198 -benito 198 -purports 198 -315 198 -skaters 198 -breastfed 198 -anchorman 198 -f/a 198 -gouffran 198 -grosjean 198 -matuidi 198 -industrialist 198 -blueberry 198 -fryer 198 -huber 198 -celery 198 -fallopian 198 -quintero 198 -ashman 198 -butland 198 -linn 198 -anguished 198 -steers 198 -lastly 198 -chime 198 -antidepressant 198 -nagasaki 198 -spartan 198 -solent 198 -creighton 198 -orgies 198 -1889 198 -1881 198 -7.0 198 -garda 198 -bret 198 -selfridge 198 -motorised 198 -baden 198 -ashlee 198 -rafting 198 -adherence 198 -utilizing 198 -frustrate 198 -intermittently 198 -spawn 198 -glazed 198 -vitality 198 -left-foot 198 -byproduct 198 -stepanek 198 -bagel 198 -shand 198 -legoland 198 -dookhan 198 -for-profit 198 -tutorial 198 -mcdonagh 198 -bypassed 198 -enthralled 197 -faithfully 197 -a3 197 -willard 197 -r-arizona 197 -chromosomes 197 -notw 197 -proms 197 -flexing 197 -attest 197 -corroborate 197 -zucker 197 -cyberattack 197 -e.on 197 -shayk 197 -illustrating 197 -11:00 197 -18s 197 -pushy 197 -r-texas 197 -mid-april 197 -party-goers 197 -leesa 197 -galileo 197 -dappy 197 -eurosceptics 197 -quintana 197 -sadr 197 -forfeited 197 -pound-for-pound 197 -sleepwalking 197 -lowestoft 197 -goop 197 -norquist 197 -4,300 197 -hunch 197 -slime 197 -naps 197 -seddon 197 -brancheau 197 -heighten 197 -civility 197 -lilian 197 -josep 197 -chiriches 197 -sidwell 197 -jcb 197 -wiggle 197 -stances 197 -blower 197 -skilful 197 -formality 197 -bartley 197 -cortez 197 -stockpiled 197 -hulme 197 -ebook 197 -roeder 197 -cano 197 -selves 197 -gullit 197 -must-see 197 -gaffney 197 -strays 197 -unquestionably 197 -complimented 197 -mugging 197 -peake 197 -sandi 197 -trimmings 197 -rekindle 197 -pollack 197 -blakely 197 -wcvb 196 -musicals 196 -tiniest 196 -ration 196 -marmaris 196 -camry 196 -maliki 196 -vimeo 196 -disrespected 196 -mishandling 196 -boaters 196 -augsburg 196 -cheadle 196 -fax 196 -fofana 196 -oriented 196 -nz 196 -obstructed 196 -retweet 196 -276 196 -conning 196 -high-school 196 -maisie 196 -diaoyu 196 -mam 196 -favelas 196 -falco 196 -macgregor 196 -prowl 196 -seven-bedroom 196 -exaggerate 196 -recreates 196 -splendour 196 -palazzo 196 -investigatory 196 -calabasas 196 -dislikes 196 -biotechnology 196 -300m 196 -wrenn 196 -pelicans 196 -b&q 196 -questionnaires 196 -lashings 196 -discern 196 -sniffed 196 -idiotic 196 -puffing 196 -lovato 196 -71st 196 -toning 196 -gabriela 196 -bha 196 -hostels 196 -sporadically 196 -16:00 196 -six-yard 196 -newsagents 196 -certify 196 -memorably 196 -olazabal 196 -honing 196 -eh 196 -overblown 196 -crunchy 196 -stipulates 196 -32m 196 -eltham 196 -rhea 196 -530 196 -al.com 196 -hadron 196 -qing 196 -inpatient 196 -liaisons 196 -url 196 -raving 196 -2015-16 196 -nahla 196 -out-of-state 196 -mephedrone 196 -knee-length 196 -spiritually 196 -horatio 196 -misbehaving 196 -escalade 195 -petting 195 -nerdy 195 -ranted 195 -jacksons 195 -repository 195 -walkways 195 -kesha 195 -blizzards 195 -motif 195 -75m 195 -fra 195 -associating 195 -pesos 195 -15-month 195 -suzy 195 -allergens 195 -seven-hour 195 -brice 195 -toaster 195 -superstitious 195 -181 195 -serpentine 195 -susana 195 -pietz 195 -belton 195 -sloop 195 -propensity 195 -stop-and-frisk 195 
-kudos 195 -bombarding 195 -loughton 195 -clockwork 195 -gosselin 195 -stand-alone 195 -prophecy 195 -weakens 195 -hafez 195 -harris-moore 195 -15.5 195 -calcutta 195 -stoker 195 -ginsberg 195 -minders 195 -2lbs 195 -helipad 195 -south-western 195 -holster 195 -ymca 195 -■ 195 -redirected 195 -humankind 195 -paloma 195 -tree-lined 195 -misgivings 195 -migrationwatch 195 -salia 195 -sisi 195 -heeded 195 -perfumes 195 -grids 195 -tappin 195 -diaby 195 -gratifying 195 -bigoted 195 -disembarked 195 -awfully 195 -deterring 195 -deeney 195 -fullback 195 -consternation 195 -despised 195 -infatuated 195 -voiceover 195 -jamming 195 -wilhelm 195 -roundtable 195 -garter 195 -pro-choice 195 -manitoba 195 -enquired 195 -feinberg 195 -imminently 195 -aristocracy 195 -rubs 195 -dropbox 195 -proclaim 195 -haha 195 -femme 195 -eyre 195 -boaden 195 -roped 195 -cotter 195 -wobble 194 -corinne 194 -otherworldly 194 -peeking 194 -roscoe 194 -anti-drug 194 -fei 194 -acrylic 194 -mcdougall 194 -galley 194 -assorted 194 -pane 194 -nantes 194 -spherical 194 -myung-bak 194 -empires 194 -flt 194 -slow-motion 194 -aversion 194 -sassy 194 -hipsters 194 -yolk 194 -tycoons 194 -unconscionable 194 -brokerage 194 -eyeshadow 194 -rossiter 194 -patted 194 -norse 194 -ode 194 -thermomix 194 -12.4 194 -livers 194 -front-page 194 -tailor-made 194 -anz 194 -pay-per-view 194 -50-over 194 -seatbelts 194 -well-meaning 194 -cavalli 194 -chesapeake 194 -reverses 194 -unkempt 194 -etonian 194 -biogenesis 194 -kluivert 194 -fantastically 194 -narrower 194 -cay 194 -encephalitis 194 -kindest 194 -mockingbird 194 -pesky 194 -win-win 194 -modular 194 -camerons 194 -novartis 194 -convulsions 194 -hada 194 -timepiece 194 -joyner 194 -lesser-known 194 -messia 194 -lovejoy 194 -aubameyang 194 -pyle 194 -lob 194 -detonating 194 -droid 194 -sceptics 194 -wright-phillips 194 -deep-seated 194 -londonderry 194 -zest 194 -boogie 194 -polkinghorne 194 -angeles-based 194 -proficient 194 -shayanna 194 -high-capacity 194 -battalions 194 -canvassing 194 -condoned 193 -tenuous 193 -bushfire 193 -caked 193 -landis 193 -mekong 193 -woodley 193 -cockerill 193 -2007-08 193 -postmen 193 -videoed 193 -veal 193 -symbolically 193 -enhancements 193 -elegantly 193 -fabricating 193 -rodrigues 193 -ol 193 -cuthbert 193 -peered 193 -ralf 193 -lansing 193 -cebu 193 -bonaparte 193 -2.20 193 -nesbitt 193 -accommodated 193 -bartomeu 193 -belvedere 193 -movember 193 -resupply 193 -attachments 193 -senatorial 193 -two-month-old 193 -dyslexia 193 -marksmen 193 -zionist 193 -capitan 193 -pained 193 -nostrils 193 -all-around 193 -fazed 193 -chalked 193 -hate-filled 193 -vaart 193 -handcrafted 193 -a350 193 -florist 193 -cheick 193 -repulsive 193 -11.6 193 -carats 193 -exerted 193 -ashoka 193 -givens 193 -12:10 193 -high-class 193 -truckers 193 -plug-in 193 -porte 193 -injures 193 -vivien 193 -deathly 193 -tupelo 193 -laments 193 -minefield 193 -cortisol 193 -superhuman 193 -gut-wrenching 193 -censure 193 -vmas 193 -infamy 193 -forerunner 193 -letterbox 193 -dont 193 -cameroonian 193 -nicks 193 -harmonious 193 -hemorrhagic 193 -tt 193 -ringleaders 193 -leasing 193 -blacklisted 193 -brownie 193 -attendances 193 -macshane 193 -leases 193 -12:45 193 -deakin 193 -helmer 192 -mussels 192 -jama 192 -peep 192 -fraternal 192 -masturbating 192 -dribbling 192 -whack 192 -coursework 192 -necrotizing 192 -laurean 192 -republican-led 192 -postponing 192 -232 192 -to-do 192 -odom 192 -10lb 192 -17-year-olds 192 -mackerel 192 -tussauds 192 -kloss 192 -indulgent 192 -leopold 192 -plagiarism 
192 -cheikhou 192 -anti-isis 192 -sphinx 192 -660 192 -humphrys 192 -landau 192 -dolby 192 -round-the-world 192 -rafinha 192 -16ft 192 -slaps 192 -oni 192 -aretha 192 -polaroid 192 -dependable 192 -regimental 192 -common-sense 192 -horace 192 -sinus 192 -goldfinger 192 -proverbial 192 -babylon 192 -dermatology 192 -flopped 192 -relativity 192 -all-black 192 -dormitories 192 -disheveled 192 -colette 192 -tussles 192 -unifying 192 -schoolboys 192 -mccallum 192 -swimsuits 192 -clogging 192 -elicit 192 -zooming 192 -non-essential 192 -whiskers 192 -impossibly 192 -pawson 192 -209 192 -nusa 192 -broadening 192 -daschle 192 -commune 192 -gothenburg 192 -cognac 192 -invasions 192 -flagstaff 192 -aluko 192 -bulgari 192 -hardwick 192 -haney 192 -600million 192 -gait 192 -unguarded 192 -dede 192 -upham 192 -geddes 192 -lasagne 192 -burned-out 192 -janelle 192 -roadster 192 -crawls 192 -propping 192 -synagogues 192 -subcontinent 192 -genie 192 -bagley 192 -german-born 192 -corrupted 192 -lorena 192 -unpredictability 192 -charted 192 -tiled 192 -compatibility 192 -quigg 191 -pows 191 -lomax 191 -angkor 191 -three-course 191 -pg 191 -40-minute 191 -deane 191 -schrenker 191 -56th 191 -testers 191 -furloughs 191 -emblematic 191 -farthing 191 -sherborne 191 -haifa 191 -holyfield 191 -rufus 191 -one-to-one 191 -clamour 191 -medically-induced 191 -tractors 191 -emt 191 -blob 191 -mouthpiece 191 -speck 191 -4.99 191 -cunneen 191 -marooned 191 -electrified 191 -auroras 191 -goth 191 -brethren 191 -3.15 191 -howie 191 -hadrian 191 -handpicked 191 -begg 191 -reflux 191 -10cm 191 -mari 191 -bush-era 191 -benatia 191 -ticketed 191 -inebriated 191 -creasy 191 -vial 191 -radioactivity 191 -vinnie 191 -gassed 191 -bertha 191 -poplar 191 -hishammuddin 191 -kong-based 191 -wingsuit 191 -genomes 191 -marek 191 -gottlieb 191 -zoning 191 -smacking 191 -record-keeping 191 -unclassified 191 -dab 191 -chamonix 191 -servicing 191 -retiree 191 -trans-atlantic 191 -crafty 191 -multiculturalism 191 -rhinoceros 191 -yadav 191 -abhisit 191 -proprietary 191 -unturned 191 -trillions 191 -elongated 191 -transnational 191 -luscious 191 -match-winning 191 -trailblazer 191 -cbo 191 -witt 190 -scoops 190 -wry 190 -streatham 190 -delinquency 190 -hemorrhage 190 -hernando 190 -pro-gun 190 -isps 190 -metcalfe 190 -jibes 190 -vegetarians 190 -wisniewski 190 -feeble 190 -vaults 190 -'50s 190 -j.d. 
190 -anorexic 190 -253 190 -playtime 190 -keighley 190 -184 190 -deep-fried 190 -epiphany 190 -stunted 190 -pdsa 190 -maj 190 -wheldon 190 -mcevoy 190 -best-loved 190 -anti-defamation 190 -farnham 190 -coogan 190 -kuo 190 -taekwondo 190 -fibers 190 -720 190 -walthamstow 190 -trimingham 190 -eloquent 190 -amendola 190 -bookstores 190 -reconnected 190 -2gb 190 -22.5 190 -ma'am 190 -11.1 190 -a330 190 -kiwis 190 -flippers 190 -conspirators 190 -snarling 190 -misogynistic 190 -99.9 190 -authorise 190 -renewables 190 -17-month-old 190 -sponges 190 -masking 190 -supervisory 190 -morph 190 -scaremongering 190 -garratt 190 -proficiency 190 -osprey 190 -1885 190 -pigmentation 190 -ababa 190 -houseboat 190 -20billion 190 -jarring 190 -plumped 190 -omen 190 -knits 190 -194 190 -cowardice 190 -chords 190 -immunization 190 -o'rourke 190 -henman 190 -intensifies 190 -zambrano-montes 190 -ammonium 190 -overarching 190 -henrique 190 -kittel 190 -kristian 190 -anthems 190 -sadio 190 -combatant 190 -patek 190 -cruickshank 190 -cosmonaut 190 -asean 190 -piste 190 -bbc3 190 -four-storey 190 -originates 190 -terrorism-related 189 -self-serving 189 -eastward 189 -suis 189 -caterpillars 189 -posse 189 -cutie 189 -680 189 -11,500 189 -fordham 189 -fantastical 189 -comprehensively 189 -chirac 189 -tapas 189 -pendulum 189 -merck 189 -vallverdu 189 -000 189 -hannon 189 -bma 189 -filippo 189 -psychotherapy 189 -cocky 189 -jefferies 189 -embed 189 -athleticism 189 -komen 189 -shreds 189 -al-sisi 189 -tyrannosaurus 189 -14.99 189 -helms 189 -cronin 189 -xmas 189 -shan 189 -stagnation 189 -2,900 189 -gyan 189 -marketplaces 189 -orca 189 -killeen 189 -polygamous 189 -deduction 189 -transmits 189 -thump 189 -cronies 189 -vaulted 189 -stony 189 -split-second 189 -dunlop 189 -bandwidth 189 -lags 189 -stratosphere 189 -transcend 189 -reinvigorate 189 -wass 189 -entail 189 -internships 189 -lonnie 189 -profitability 189 -unfriendly 189 -scully 189 -lingers 189 -twitching 189 -rosales 189 -fawlty 189 -hasbro 189 -khomeini 189 -florian 189 -17million 189 -vesta 189 -al-megrahi 189 -lubbock 189 -hants 189 -barbeque 189 -abu-jamal 189 -darted 189 -approvals 189 -slug 189 -air-conditioned 189 -tenor 189 -bst 189 -qin 189 -egged 189 -avastin 189 -relished 189 -footwork 189 -tyra 188 -co-hosted 188 -kerosene 188 -evils 188 -five-point 188 -leavenworth 188 -jeers 188 -fadel 188 -cooley 188 -discharging 188 -holbrook 188 -secession 188 -ceramics 188 -bandmate 188 -deans 188 -hammami 188 -crested 188 -kinshasa 188 -grate 188 -mozilla 188 -cortege 188 -cronulla 188 -ng 188 -contrived 188 -well-connected 188 -pais 188 -adamantly 188 -shilton 188 -aria 188 -1/4 188 -batten 188 -rotor 188 -10:00 188 -cut-out 188 -kinney 188 -foursome 188 -pre-sentence 188 -fragility 188 -viewership 188 -sexualised 188 -euphoric 188 -philbin 188 -anadolu 188 -stagecoach 188 -bleeds 188 -kitsch 188 -reassess 188 -terrorizing 188 -gk 188 -dodi 188 -headsets 188 -sidner 188 -218 188 -adhering 188 -harnessing 188 -thee 188 -stiller 188 -giuliano 188 -amicably 188 -set-pieces 188 -admonished 188 -mouthful 188 -mctague 188 -collated 188 -faroe 188 -bridging 188 -victimised 188 -deeming 188 -heinze 188 -wildcat 188 -rancadore 188 -affections 188 -n.c. 
188 -mena 187 -well-paid 187 -adrien 187 -receded 187 -zintan 187 -pat-down 187 -vinas 187 -monologue 187 -hearted 187 -contreras 187 -evin 187 -9news 187 -grealish 187 -miserables 187 -undoing 187 -winch 187 -tenancy 187 -rolfe 187 -cliche 187 -loathe 187 -jargon 187 -proportional 187 -inclement 187 -gehrig 187 -birdied 187 -cuadrilla 187 -intercepts 187 -monaghan 187 -blushes 187 -screenshots 187 -growths 187 -maliciously 187 -peels 187 -barked 187 -poly 187 -oneself 187 -mons 187 -observance 187 -biram 187 -catamaran 187 -68th 187 -hari 187 -diddy 187 -agitation 187 -ged 187 -brannan 187 -languished 187 -madly 187 -gurkha 187 -immortalized 187 -motorized 187 -gaia 187 -sipped 187 -out-of-work 187 -warcraft 187 -footbridge 187 -uncapped 187 -festival-goers 187 -hymn 187 -woodall 187 -revere 187 -realms 187 -fortnum 187 -menezes 187 -clipper 187 -tomboy 187 -slovak 187 -half-staff 187 -sutil 187 -climatic 187 -hibernation 187 -cavernous 187 -fizzled 187 -gsk 187 -ceri 187 -ec 187 -hulkenberg 187 -al-britani 187 -marauding 187 -cancer-free 187 -lytham 187 -leonid 187 -terminology 187 -guadalajara 187 -graded 187 -makarova 187 -cramp 187 -latakia 187 -cortese 187 -masts 187 -instyle.com 187 -shourd 187 -modifying 187 -verbier 186 -thanet 186 -smothering 186 -methodical 186 -defenseless 186 -participates 186 -teed 186 -newsstands 186 -ulcer 186 -scallops 186 -mites 186 -treading 186 -flaps 186 -mid-morning 186 -apnea 186 -256 186 -recited 186 -obedience 186 -hewlett 186 -baz 186 -hemmings 186 -ss14 186 -waitresses 186 -abstinence 186 -thinnest 186 -kody 186 -warplane 186 -four-man 186 -shacks 186 -11:59 186 -pro-gadhafi 186 -hushed 186 -nutter 186 -accords 186 -ballack 186 -redhead 186 -grazia 186 -tutors 186 -snorkelling 186 -fairchild 186 -coffees 186 -streaking 186 -rashad 186 -rearing 186 -cpac 186 -ascending 186 -echelons 186 -huffman 186 -stilts 186 -cohesive 186 -antibacterial 186 -ferrie 186 -acquainted 186 -lifetimes 186 -donohue 186 -hardening 186 -marston 186 -superbug 186 -chuffed 186 -darden 186 -highgrove 186 -hendon 186 -figurine 186 -rowett 186 -burwell 186 -ak-47s 186 -moray 186 -gerwen 186 -webpage 186 -lachlan 186 -repaying 186 -salvo 186 -floundering 186 -lopsided 186 -accelerometer 186 -340,000 186 -organizes 186 -vokes 186 -headbutting 186 -quinton 186 -aspired 186 -huh 186 -self-determination 186 -under-18s 186 -make-a-wish 186 -rehomed 186 -marlow 186 -armenians 186 -lapel 186 -zahau 186 -crowding 186 -gresham 186 -spengler 186 -150m 185 -alston 185 -belated 185 -impeach 185 -modernize 185 -procure 185 -lupo 185 -lipsy 185 -bypassing 185 -vulcan 185 -interspersed 185 -fairways 185 -sedwick 185 -quadrupled 185 -work-life 185 -round-trip 185 -ludlow 185 -enid 185 -undamaged 185 -postseason 185 -nervousness 185 -roh 185 -cyclones 185 -47th 185 -trekked 185 -med 185 -first-leg 185 -jerez 185 -tamiflu 185 -rahim 185 -sediments 185 -zeal 185 -ambien 185 -photons 185 -superbowl 185 -lv 185 -consecutively 185 -bleakley 185 -suspecting 185 -grammer 185 -crème 185 -commendation 185 -work-related 185 -lawrenson 185 -shephard 185 -preaches 185 -gatorade 185 -bute 185 -harp 185 -8.45 185 -dita 185 -fhm 185 -nikon 185 -filmmaking 185 -pre-orders 185 -discus 185 -sumner 185 -atwood 185 -landlocked 185 -spray-painted 185 -babes 185 -mixer 185 -artifact 185 -denzel 185 -10.7 185 -pre-christmas 185 -gregor 185 -allsopp 185 -59th 185 -26million 185 -mollier 185 -grasses 185 -geographically 185 -petkovic 185 -15-year-olds 185 -friedel 185 -11:10 185 -11:17 185 -ferrero 185 -reiterating 
185 -arum 185 -grouped 185 -coverings 185 -murrieta 185 -arched 185 -inking 185 -middletown 185 -hangzhou 185 -heseltine 185 -discerning 185 -piercings 185 -rickety 185 -leprosy 185 -campground 185 -neutron 185 -baftas 185 -clarifying 185 -typo 185 -elizabethan 185 -roll-out 185 -eras 185 -javad 185 -suspense 184 -steed 184 -protons 184 -grower 184 -locusts 184 -incheon 184 -gulfstream 184 -tri-series 184 -att 184 -infractions 184 -slurring 184 -constand 184 -griggs 184 -colgan 184 -doolittle 184 -ideologically 184 -prudential 184 -grunge 184 -non-white 184 -match-winner 184 -11:25 184 -geragos 184 -face-off 184 -63,000 184 -social-media 184 -fattal 184 -winkleman 184 -11:08 184 -bradlee 184 -wiesenthal 184 -banbury 184 -luo 184 -utilise 184 -straight-sets 184 -gannon 184 -father-of-five 184 -scooby 184 -shuffled 184 -insolvency 184 -k9 184 -scoffed 184 -capoue 184 -leandro 184 -film-maker 184 -smears 184 -parisien 184 -tourniquet 184 -feted 184 -re-arrested 184 -gentz 184 -over-50s 184 -woodcock 184 -importation 184 -high-pitched 184 -cartridge 184 -fastened 184 -sap 184 -henchmen 184 -cucumbers 184 -dystrophy 184 -churned 184 -212 184 -meribel 184 -blackadder 184 -il-sung 184 -94-year-old 184 -unraveling 184 -pelt 184 -newly-promoted 184 -cesarean 184 -tulle 184 -dangled 184 -divulged 184 -headscarves 184 -midair 184 -lia 184 -cyber-bullying 184 -acrobatics 184 -dispersants 184 -randi 184 -astana 184 -wetter 184 -trinkets 184 -snowflakes 184 -hud 184 -compositions 184 -ain 184 -victors 184 -yawning 184 -translucent 184 -rouble 184 -img 184 -caste 184 -drafts 184 -umm 184 -reebok 184 -slotting 184 -spinoff 184 -crowd-funding 184 -schlupp 184 -redfearn 183 -argentinians 183 -hyenas 183 -rotary 183 -innovator 183 -newsagent 183 -arseniy 183 -mystic 183 -scripture 183 -ticker 183 -nonsensical 183 -willett 183 -plant-based 183 -wilton 183 -protestants 183 -implicit 183 -mil 183 -harpo 183 -counterinsurgency 183 -stambouli 183 -deference 183 -murat 183 -lockyer 183 -minshull 183 -17st 183 -knee-jerk 183 -shih 183 -co-anchor 183 -mangan 183 -kean 183 -baroque 183 -nikolay 183 -anime 183 -bullion 183 -juncture 183 -childline 183 -near-earth 183 -accession 183 -susanne 183 -reels 183 -co-ordinate 183 -pimping 183 -9in 183 -blemishes 183 -wellcome 183 -antiretroviral 183 -wfp 183 -inkling 183 -sodastream 183 -squabbling 183 -attributable 183 -pvc 183 -comres 183 -hickory 183 -wojcicki 183 -florissant 183 -equalising 183 -chastain 183 -1886 183 -subordinates 183 -fiore 183 -rear-ended 183 -tobin 183 -ivor 183 -197 183 -curnow 183 -interruptions 183 -turley 183 -kpmg 183 -mindfulness 183 -morten 183 -viens 183 -biofuel 183 -deft 183 -gilad 183 -loosening 183 -foodies 183 -armageddon 183 -hotshot 183 -mcginn 183 -exclaims 183 -flurries 183 -sh 183 -nippon 183 -rudder 183 -spiky 183 -fosters 183 -nope 183 -toothless 183 -27.5 183 -castration 183 -activating 182 -commandant 182 -boosters 182 -over-65s 182 -temperley 182 -empress 182 -enforcers 182 -corwin 182 -walrus 182 -cruden 182 -mourdock 182 -pedestal 182 -7.99 182 -atv 182 -unconnected 182 -kimball 182 -apnoea 182 -left-handed 182 -dazzle 182 -gestation 182 -cronkite 182 -subsidiaries 182 -incinerated 182 -o'keeffe 182 -5lbs 182 -marquess 182 -willem-alexander 182 -keel 182 -12:07 182 -4,700 182 -atta 182 -juggernaut 182 -textured 182 -lanzarote 182 -handiwork 182 -harpercollins 182 -impeccably 182 -rebranded 182 -inhospitable 182 -witch-hunt 182 -well-to-do 182 -perrin 182 -frown 182 -reclaiming 182 -m'bala 182 -obstetricians 182 -lament 182 
-cropper 182 -10.2 182 -compensatory 182 -bjp 182 -predictive 182 -heynckes 182 -bridcutt 182 -skimmed 182 -2023 182 -coercive 182 -bikie 182 -garde 182 -dilute 182 -solves 182 -self-contained 182 -perforated 182 -pooled 182 -lilac 182 -photogenic 182 -huntingdon 182 -martorano 182 -rename 182 -netizens 182 -darcey 182 -soames 182 -envisage 182 -hamburgers 182 -north-western 182 -mina 182 -kilt 182 -peer-to-peer 182 -hearsay 182 -rotates 182 -cath 182 -bismarck 181 -aristotle 181 -capacities 181 -showman 181 -revolutionize 181 -enchanting 181 -samira 181 -substitutions 181 -muntari 181 -numerical 181 -pretends 181 -overtures 181 -26-year 181 -intersections 181 -raine 181 -fished 181 -clover 181 -solidly 181 -trough 181 -suspends 181 -murkowski 181 -7.50 181 -waiving 181 -dispenser 181 -anti-piracy 181 -congrats 181 -spooner 181 -escapees 181 -ratcheted 181 -megaphone 181 -waverley 181 -11:40 181 -inquire 181 -cibulkova 181 -binds 181 -macaque 181 -legalisation 181 -carte 181 -revisions 181 -invoice 181 -begich 181 -uzi 181 -corker 181 -vahey 181 -six-foot 181 -bleus 181 -apex 181 -riccardo 181 -firmer 181 -proactively 181 -schenecker 181 -togetherness 181 -1.20 181 -grouse 181 -leeway 181 -synchronized 181 -hem 181 -saban 181 -cos 181 -roosters 181 -krystle 181 -richly 181 -braille 181 -wronged 181 -v&a 181 -progressives 181 -conjured 181 -great-grandson 181 -kubrick 181 -abolishing 181 -blenheim 181 -ginny 181 -neurosurgery 181 -long-sleeved 181 -ghoulish 181 -savea 181 -stardust 181 -85th 181 -serengeti 181 -spacesuit 181 -excalibur 181 -grasped 181 -dottie 181 -dia 181 -intrinsic 181 -demotion 181 -ake 181 -afro 181 -whitening 181 -wat 181 -piglets 181 -substantiate 181 -thoroughfare 181 -buccaneers 180 -disinfectant 180 -11:21 180 -granddad 180 -dallas-fort 180 -meagre 180 -wriggle 180 -3,100 180 -enlightenment 180 -bleaching 180 -robach 180 -clans 180 -doritos 180 -67p 180 -gattuso 180 -fondled 180 -armand 180 -elaborated 180 -ox 180 -crooner 180 -pro-western 180 -overstepped 180 -firecrackers 180 -morcombe 180 -manatees 180 -infiniti 180 -zsa 180 -postcodes 180 -bois 180 -boundless 180 -mot 180 -repossessed 180 -arcadia 180 -desecration 180 -5.45 180 -liberalism 180 -10:58 180 -stahl 180 -accra 180 -enzo 180 -secondhand 180 -nobu 180 -pathological 180 -authenticated 180 -salted 180 -specialties 180 -dhl 180 -cleft 180 -astrazeneca 180 -unto 180 -naeem 180 -al-abadi 180 -ie 180 -icardi 180 -pepsico 180 -chowdhury 180 -redcar 180 -survation 180 -zain 180 -leyva 180 -durand 180 -jian 180 -interferes 180 -al-kasasbeh 180 -muppet 180 -ions 180 -quarrel 180 -bolognese 180 -heichel 180 -andrej 180 -peacetime 180 -pry 180 -decima 180 -fascists 180 -fielder 180 -question-and-answer 180 -drawn-out 180 -thornberry 180 -armpit 180 -abc7 180 -specification 180 -symposium 180 -saver 180 -packard 180 -thresholds 180 -nimoy 180 -arfield 180 -multibillion-dollar 180 -removable 180 -stifled 180 -assombalonga 180 -aplomb 180 -edis 180 -aquatics 180 -cdr 180 -clwyd 180 -raps 180 -transporter 180 -yum 180 -mid-1970s 180 -rylan 180 -13,500 180 -puffs 179 -buk 179 -wean 179 -sapp 179 -implausible 179 -citi 179 -nelly 179 -miu 179 -armbands 179 -extremities 179 -stis 179 -lockout 179 -satnav 179 -mid-june 179 -kerala 179 -kath 179 -prevails 179 -shona 179 -redefined 179 -macksville 179 -alopecia 179 -dockery 179 -canvases 179 -quinoa 179 -infects 179 -saginaw 179 -patties 179 -recollections 179 -folsom 179 -raoul 179 -implicate 179 -maidenhead 179 -warts 179 -foreseen 179 -rockaway 179 -harrelson 179 -medel 
179 -supermoon 179 -moaned 179 -atrophy 179 -435 179 -michu 179 -reddy 179 -stowed 179 -tempest 179 -abating 179 -bupa 179 -poppins 179 -lina 179 -billiards 179 -wiese 179 -tweeters 179 -pantheon 179 -cheekily 179 -imperfections 179 -unremarkable 179 -angelic 179 -conservator 179 -durango 179 -tinned 179 -harnessed 179 -brazier 179 -peabody 179 -coloring 179 -anglo 179 -lewis-mcchord 179 -complemented 179 -symbolizes 179 -re-evaluate 179 -dulwich 179 -undetectable 179 -gargantuan 179 -dishing 179 -2.15 179 -knitwear 179 -11:50 179 -preside 179 -misha 179 -milkshake 179 -2009/10 179 -lynsey 179 -reintegration 179 -tinsel 179 -perfectionist 179 -acpo 179 -deseret 179 -troublemakers 179 -compost 179 -anniversaries 179 -convincingly 179 -contra 179 -snickers 179 -nasrallah 179 -concentrates 179 -appointee 179 -reparations 179 -goldstone 179 -bikini-clad 179 -velez-mitchell 179 -banded 179 -padstow 179 -almagro 179 -adaptable 178 -playlists 178 -revolve 178 -rayner 178 -milliseconds 178 -15st 178 -gusto 178 -gestured 178 -quad-core 178 -hosepipe 178 -12:20 178 -curing 178 -marketers 178 -guildhall 178 -tickled 178 -suffrage 178 -birkenhead 178 -mcguigan 178 -concealment 178 -compressions 178 -konrad 178 -translations 178 -anmer 178 -scantily-clad 178 -grizzlies 178 -devoting 178 -quips 178 -donatella 178 -toads 178 -trawl 178 -tots 178 -partisans 178 -electrifying 178 -picky 178 -origami 178 -dufner 178 -reaffirm 178 -sickle 178 -tortilla 178 -sympathize 178 -concealer 178 -rainbows 178 -domains 178 -delirious 178 -blogged 178 -backline 178 -haddin 178 -ringed 178 -taya 178 -hayek 178 -blanchard 178 -verifying 178 -grindr 178 -11.8 178 -inconsistency 178 -wran 178 -workable 178 -whitwell 178 -conduit 178 -silo 178 -nitrous 178 -fanbase 178 -757 178 -nasheed 178 -brokaw 178 -shuttleworth 178 -subsidise 178 -mountaineer 178 -krentcil 178 -zavala 178 -reoffending 178 -depots 178 -dewey 178 -maurer 178 -gladiators 178 -andersson 178 -fawn 178 -kearns 178 -biceps 178 -201 178 -sorties 178 -rudolf 178 -contactless 178 -anarchists 178 -piggin 178 -schwarz 178 -firewood 178 -wrangle 178 -awlaki 178 -mexican-american 178 -cheung 178 -dribbles 178 -lansbury 178 -crucifix 178 -lakshmi 178 -frayed 178 -priesthood 178 -plows 178 -buoy 178 -overdrive 178 -psychoactive 178 -all-party 178 -awol 178 -sevenoaks 178 -flute 178 -cassette 178 -nada 178 -chiara 178 -palpitations 178 -joggers 178 -chronological 178 -millerberg 178 -co-creator 178 -61st 178 -boycotts 178 -lal 178 -reddish 178 -prod 178 -uncharacteristically 177 -wooed 177 -midriff 177 -quail 177 -hulking 177 -sociologist 177 -inbound 177 -moulded 177 -rockstar 177 -coakley 177 -gilliam 177 -testimonial 177 -217 177 -hitmen 177 -first-year 177 -gadd 177 -dashcam 177 -rocketing 177 -flare-up 177 -six-point 177 -heil 177 -chiswick 177 -floodlights 177 -panesar 177 -resplendent 177 -decking 177 -mal 177 -48-hour 177 -kitterman 177 -teri 177 -matson 177 -tajikistan 177 -decreed 177 -campos 177 -sentamu 177 -al-asiri 177 -structurally 177 -eradicating 177 -anatomical 177 -vicarage 177 -cohn 177 -grandiose 177 -socialize 177 -overpowering 177 -fermented 177 -inexperience 177 -farzana 177 -wilmslow 177 -blanks 177 -counter-terror 177 -gizmodo 177 -nero 177 -insides 177 -asghar 177 -realty 177 -matisse 177 -51st 177 -darnell 177 -faulted 177 -individuality 177 -scurrying 177 -masonry 177 -chastised 177 -françois 177 -auditor 177 -receptor 177 -teapot 177 -purification 177 -shawl 177 -terracotta 177 -endures 177 -jozy 177 -kawasaki 177 -carjacked 177 -gels 177 
-tully 177 -cazeneuve 177 -prue 177 -1.99 177 -becca 177 -ak47 177 -greenaway 177 -troyer 177 -mayhew 177 -swarbrick 177 -grandparent 177 -poop 177 -barratt 177 -c4 177 -loaves 177 -ch 177 -lascelles 177 -deulofeu 177 -specialise 177 -pro-independence 177 -bartenders 177 -westmead 177 -objectionable 177 -stephanopoulos 177 -54th 177 -allie 176 -chrissie 176 -gipsy 176 -janssen 176 -camber 176 -fannie 176 -pasquale 176 -tailbacks 176 -trimester 176 -oesophagus 176 -hooking 176 -weirdest 176 -loudspeaker 176 -gennady 176 -zine 176 -flocks 176 -mcclean 176 -hedgehogs 176 -upping 176 -tardis 176 -nadezhda 176 -numeracy 176 -cavities 176 -238 176 -1300 176 -geffen 176 -fleur 176 -belmar 176 -11:26 176 -ps3 176 -pieters 176 -apatow 176 -buzzer 176 -scrubbing 176 -nail-biting 176 -in-person 176 -weather-related 176 -morell 176 -deformities 176 -pay-offs 176 -6lbs 176 -depravity 176 -paleo 176 -executor 176 -nutshell 176 -sedgwick 176 -informative 176 -sena 176 -orozco 176 -javed 176 -bello 176 -14-month-old 176 -sybrina 176 -venting 176 -barnum 176 -simulating 176 -quartz 176 -chikungunya 176 -navigated 176 -unfairness 176 -laude 176 -mieses 176 -oreo 176 -tramp 176 -shiraz 176 -5,800 176 -odemwingie 176 -11m 176 -mikey 176 -most-watched 176 -mulled 176 -20p 176 -207 176 -amie 176 -patagonia 176 -disguises 176 -durante 176 -wiretaps 176 -glenda 176 -suso 176 -liv 176 -denomination 176 -richman 176 -tattooing 176 -regev 176 -cy 176 -paxton 176 -loudspeakers 176 -carnarvon 176 -artefact 176 -requisite 176 -polycystic 176 -humanities 176 -dodds 176 -9.15 175 -self-immolation 175 -arrowhead 175 -veggies 175 -12:08 175 -caen 175 -paves 175 -abundantly 175 -cabbie 175 -mousse 175 -cisco 175 -thereof 175 -monika 175 -dearth 175 -cock 175 -ready-made 175 -bewildering 175 -counsellors 175 -nutritionists 175 -centre-right 175 -appraisal 175 -heirloom 175 -excavate 175 -7-4 175 -pyrenees 175 -bordered 175 -dhawan 175 -cottle 175 -arrington 175 -veritable 175 -275,000 175 -sulaiman 175 -munching 175 -nec 175 -lk 175 -panetti 175 -parliaments 175 -50billion 175 -maul 175 -magnifying 175 -fouling 175 -navi 175 -noda 175 -10:11 175 -sickly 175 -blindly 175 -militancy 175 -relaunched 175 -gaultier 175 -bambi 175 -othman 175 -aircrafts 175 -carrow 175 -cornick 175 -sweeteners 175 -olbermann 175 -infante 175 -re-examine 175 -moths 175 -motorhome 175 -nia 175 -stunner 175 -barzee 175 -jorgensen 175 -'til 175 -oxytocin 175 -coalitions 175 -provocateur 175 -socceroos 175 -porters 175 -1882 175 -msnbc.com 175 -sufi 175 -hunched 175 -drab 175 -parkour 175 -serviced 175 -warzone 175 -gifs 175 -millard 175 -wrest 175 -whoa 175 -racehorses 175 -almeida 175 -vivacious 175 -outen 175 -codeine 175 -immeasurable 175 -utopia 175 -hingis 175 -cellars 175 -burnt-out 175 -practises 175 -rtl 175 -crolla 175 -14st 175 -haji 174 -foreclosures 174 -saakashvili 174 -yoda 174 -snuggle 174 -haughton 174 -troublemaker 174 -12:25 174 -magnolia 174 -meulensteen 174 -pla 174 -beit 174 -vertebra 174 -gudjohnsen 174 -nunes 174 -self-interest 174 -indistinguishable 174 -zahir 174 -interacts 174 -bumbling 174 -saylor 174 -two-term 174 -cockerel 174 -quango 174 -ne 174 -11:09 174 -fortnightly 174 -worsens 174 -grading 174 -cosmonauts 174 -distinguishing 174 -wriggling 174 -16st 174 -clio 174 -roost 174 -washer 174 -geri 174 -11:35 174 -arellano 174 -bcs 174 -modernist 174 -idealistic 174 -magdalena 174 -transgressions 174 -salas 174 -52nd 174 -absurdity 174 -ponce 174 -ecologist 174 -vaseline 174 -purposeful 174 -arnaud 174 -reserved.this 174 
-pores 174 -brotherly 174 -measurable 174 -well-received 174 -erecting 174 -well-loved 174 -scorpions 174 -ambrosio 174 -immerse 174 -jessop 174 -wagons 174 -cohorts 174 -12:35 174 -handshakes 174 -stereotyping 174 -oftentimes 174 -ww2 174 -hbos 174 -dissimilar 174 -dambusters 174 -five-set 174 -derives 174 -minuscule 174 -midi 174 -priti 174 -hatches 174 -16-month-old 174 -tans 174 -rabid 174 -feasts 174 -canaria 174 -wavelength 174 -underdeveloped 174 -fabricant 174 -renders 174 -sell-off 174 -spelt 174 -mcgowen 174 -mid-term 174 -godzilla 174 -10:25 174 -sera 174 -automation 174 -multilateral 174 -demos 174 -big-screen 174 -gun-toting 174 -judgements 174 -seleka 174 -madman 174 -rhymes 174 -silvestre 174 -mellow 174 -utilised 174 -dimon 174 -undoubted 174 -tendered 174 -replenish 174 -nerve-wracking 174 -miserably 174 -castor 174 -disseminated 174 -dens 174 -breadwinner 173 -squeaky 173 -anglian 173 -undervalued 173 -12:00 173 -sprinkling 173 -carling 173 -suffocate 173 -shami 173 -betts 173 -tbilisi 173 -cluttered 173 -keeler 173 -aubry 173 -fixation 173 -fundamentals 173 -naïve 173 -aeronautical 173 -husband-to-be 173 -sanctuaries 173 -slugger 173 -gezi 173 -low-budget 173 -drifter 173 -cockroach 173 -eisenberg 173 -homeopathy 173 -berserk 173 -noelle 173 -shaanxi 173 -housebound 173 -vorderman 173 -mimicked 173 -blot 173 -mutv 173 -itineraries 173 -lhc 173 -selectively 173 -gaughan 173 -ayre 173 -fret 173 -ibn 173 -aha 173 -macular 173 -stasi 173 -roadworks 173 -cochlear 173 -romped 173 -impediment 173 -kenji 173 -lbj 173 -incidences 173 -flees 173 -alters 173 -babar 173 -infringing 173 -mahogany 173 -u.s.-backed 173 -esposito 173 -tibia 173 -schaffner 173 -mid-august 173 -ever-changing 173 -fsu 173 -overstretched 173 -reaping 173 -metallica 173 -pats 173 -dietitian 173 -mccullum 173 -spar 173 -20-month-old 173 -reaped 173 -zebras 173 -seven-and-a-half 173 -ayrault 173 -couches 173 -uncomfortably 173 -movers 173 -barrassed 173 -crayfish 173 -torbay 173 -manifestation 173 -benefactor 173 -kirkham 173 -popularized 173 -mezvinsky 173 -horst 173 -pinehurst 173 -banjo 173 -freighter 173 -chucked 173 -marisa 173 -rinse 173 -melon 173 -mejia 173 -narrated 173 -chicago-based 173 -maroney 173 -10:08 173 -13ft 173 -showings 173 -collared 173 -tipsarevic 173 -complexes 173 -pvt. 
-[... deleted word-frequency vocabulary data, flattened here by extraction: in the original diff each removed line is one "word count" pair (e.g. "-asap 173", "-exes 173", "-ominously 173", ... "-jabbed 131"), with counts descending from 173 to 131 across this span ...]
-left-hander 131 -fraizer 131 -1850s 131 -bucked 131 -earthly 131 -shanahan 131 -sephora 131 -13.7 131 -06:20 131 -abatement 131 -montrose 131 -stooges 131 -moi 131 -sordell 131 -10-point 131 -in-state 131 -qaeda-affiliated 131 -whedon 131 -posey 131 -cammisano 131 -overrule 131 -debauchery 131 -solyndra 131 -gianluca 131 -third-generation 131 -malek 131 -alex. 131 -lehrer 131 -grigorieva 131 -mclennan 131 -tutelage 131 -ahn 131 -paktika 131 -90million 131 -problem-solving 131 -ferrier 131 -empowers 131 -jamaal 131 -nas 131 -dogma 131 -lehmberg 131 -dumplings 131 -whopper 131 -jovan 131 -pinger 131 -blemish 131 -cutoff 131 -broadbent 131 -07:08 131 -24.99 131 -trotting 131 -pre-race 131 -aloha 131 -daze 131 -modesto 131 -dowager 131 -reattach 131 -dainty 131 -discourages 131 -uneventful 131 -nonfiction 131 -j.r. 131 -optimist 131 -snuff 131 -refill 131 -gallas 131 -lynchburg 131 -anti-capitalist 131 -caffeinated 131 -07:43 131 -deity 131 -pooling 131 -49,000 131 -redistricting 131 -kirilenko 131 -halts 131 -immaculately 131 -atypical 131 -evers 131 -moaz 131 -gurus 131 -pta 131 -hoe 131 -high-income 131 -marge 131 -ill-fitting 131 -sydney-based 131 -10.15 131 -remarking 131 -cortes 131 -gravidarum 131 -maren 131 -tumult 131 -pinnock 131 -leeward 131 -prepping 131 -waterstones 131 -mind-set 131 -refrigeration 131 -conserving 131 -chuckling 131 -2012-2013 131 -pinky 131 -rna 131 -helt 131 -niamh 131 -janette 131 -compostela 131 -craziness 131 -lengthen 131 -welker 131 -fast-forward 131 -m40 131 -unbiased 131 -shipwrecks 131 -grimace 131 -301 131 -chippenham 131 -10-12 131 -11-day 131 -terror-related 131 -olaf 131 -dmitrichenko 131 -dunfermline 131 -92nd 131 -renoir 131 -3.20 131 -syfy 131 -miscommunication 131 -capote 131 -wald 131 -silos 131 -09:56 131 -north-central 131 -delusion 131 -desai 131 -nieves 131 -duigan 131 -sapiens 131 -reputedly 131 -assigning 131 -turchynov 131 -prioritising 131 -wiese-mack 131 -500th 131 -599 130 -bandstand 130 -x-37b 130 -snaking 130 -romario 130 -homesick 130 -waxed 130 -hurdler 130 -cyclical 130 -butlins 130 -odds-on 130 -seamstress 130 -steaua 130 -slitting 130 -categorized 130 -frumpy 130 -kurtley 130 -imperialism 130 -unsigned 130 -benched 130 -30,000-a-year 130 -musically 130 -beehive 130 -19st 130 -haverhill 130 -flemington 130 -massaged 130 -juju 130 -fashion-forward 130 -aanholt 130 -f-22 130 -undaunted 130 -bonney 130 -skyrocket 130 -comer 130 -lyrical 130 -kayakers 130 -p.s. 
130 -non-proliferation 130 -impatience 130 -capobiancos 130 -specialize 130 -luzon 130 -brevard 130 -ml 130 -kilburn 130 -pituitary 130 -100-meter 130 -feckless 130 -casanova 130 -ensuite 130 -brews 130 -postwar 130 -usman 130 -333 130 -sylvie 130 -leek 130 -portia 130 -plural 130 -self-appointed 130 -obsessions 130 -bradenton 130 -weaned 130 -cronyism 130 -09:42 130 -acetaminophen 130 -09:49 130 -cerys 130 -vos 130 -blared 130 -epithets 130 -piccard 130 -docket 130 -bodes 130 -10s 130 -r-new 130 -12:52 130 -sharman 130 -commendable 130 -mcinnes 130 -adorns 130 -collides 130 -chiwetel 130 -arbitrator 130 -pekerman 130 -afzal 130 -mathematicians 130 -lili 130 -router 130 -irresponsibility 130 -kauffman 130 -rehtaeh 130 -hartnett 130 -ostracized 130 -pullout 130 -marshawn 130 -6.15 130 -2011-2012 130 -no-no 130 -375,000 130 -hagman 130 -combustible 130 -paget 130 -jilly 130 -12:31 130 -12.99 130 -prozac 130 -1500m 130 -06:56 130 -06:55 130 -teenaged 130 -6c 130 -hairdryer 130 -courtiers 130 -jean-louis 130 -shaughnessy 130 -drawback 130 -boho 130 -usurped 130 -bounded 130 -conklin 130 -bailiff 130 -490 130 -doubtfire 130 -quirks 130 -first-graders 130 -1869 130 -tiede 130 -lafave 130 -appoints 130 -terrance 130 -pretentious 130 -kerviel 130 -juniper 130 -6billion 130 -lis 130 -emphasises 130 -moutinho 130 -zipper 130 -christo 130 -lame-duck 130 -creech 130 -hanif 130 -veneer 130 -toyboy 130 -neuberger 130 -880 130 -jutkiewicz 130 -chart-topping 130 -langham 130 -coon 130 -confluence 130 -eldorado 130 -spiking 130 -grandstanding 130 -littlewoods 130 -snitch 130 -06:53 130 -matrimonial 130 -hangouts 130 -exceptionalism 130 -accelerant 130 -veron 130 -outstripping 130 -30billion 130 -taxable 130 -bayonet 130 -trichotillomania 130 -seven-minute 129 -nord 129 -in-built 129 -harte 129 -kneels 129 -fevers 129 -hermitage 129 -mid-2000s 129 -jogged 129 -miura 129 -grahame 129 -anti-gadhafi 129 -8.15 129 -wurst 129 -tylenol 129 -yongbyon 129 -lighters 129 -tierra 129 -17:01 129 -ramin 129 -coriander 129 -hecklers 129 -annexe 129 -peer-reviewed 129 -reining 129 -f**king 129 -lefty 129 -anti-police 129 -osiris 129 -taveras 129 -08:04 129 -trooping 129 -fifth-round 129 -bumble 129 -anheuser-busch 129 -251 129 -25c 129 -13.6 129 -renown 129 -aromas 129 -deerfield 129 -o'gorman 129 -baldness 129 -gaelic 129 -08:44 129 -rommel 129 -rimsha 129 -ground-floor 129 -condor 129 -lough 129 -lakey 129 -trappe 129 -runaways 129 -qaida 129 -cheika 129 -wallin 129 -erbil 129 -napoleonic 129 -yee 129 -impeached 129 -bexleyheath 129 -realtors 129 -nightspot 129 -seitz 129 -rewind 129 -creamer 129 -berkowitz 129 -spectrometer 129 -scaffold 129 -civilizations 129 -pickers 129 -precedents 129 -ineffectual 129 -doorsteps 129 -hanford 129 -liquefied 129 -delilah 129 -diazepam 129 -marmont 129 -flat-out 129 -northolt 129 -laxatives 129 -objectivity 129 -hornby 129 -kamel 129 -mosman 129 -ars 129 -sills 129 -materially 129 -branca 129 -presides 129 -by-product 129 -houten 129 -cobbles 129 -jodhi 129 -populate 129 -hasselhoff 129 -padres 129 -sweetened 129 -breads 129 -stockton-on-tees 129 -popemobile 129 -troika 129 -anil 129 -reeds 129 -judgmental 129 -post-season 129 -farooq 129 -efe 129 -1.65 129 -irresponsibly 129 -07:49 129 -amphitheatre 129 -infatuation 129 -measly 129 -low-wage 129 -yamamoto 129 -growling 129 -06:52 129 -dystopian 129 -fissures 129 -crime-fighting 129 -seared 129 -wiretap 129 -kaitlin 129 -glaswegian 129 -seneca 129 -unbridled 129 -jeweler 129 -geniuses 129 -n'zogbia 129 -marmara 129 -nats 129 -freitas 
129 -eb 129 -forward-thinking 129 -unisex 129 -snowmen 129 -sprinklers 129 -tarantula 129 -ruislip 129 -suzie 129 -anointed 129 -consolidating 129 -88,000 129 -10:29 129 -wrestles 129 -selflessness 129 -bidve 129 -kidston 129 -low-flying 129 -fearlessly 129 -fareham 129 -shiite-led 129 -ying 129 -10:04 129 -vilks 129 -bathers 129 -twinkies 129 -buckling 129 -mulumbu 129 -shrimpton 129 -polanco 129 -6,200 129 -introductory 129 -observant 129 -mismatched 128 -yangtze 128 -hantavirus 128 -confessional 128 -bhatti 128 -a$ 128 -weidenfeller 128 -krispy 128 -appallingly 128 -thunderbolt 128 -balenciaga 128 -wobbling 128 -adoboli 128 -cantonese 128 -mid-level 128 -ponders 128 -09:30 128 -unwritten 128 -rightmove 128 -sizing 128 -binders 128 -marlboro 128 -sisterhood 128 -shareholding 128 -o'loughlin 128 -ferociously 128 -corrosion 128 -brca2 128 -shakeup 128 -aerodynamics 128 -precipice 128 -romneys 128 -inderdeep 128 -culver 128 -microgravity 128 -nonviolence 128 -goins 128 -luge 128 -one-stop 128 -08:27 128 -malin 128 -unlit 128 -seabra 128 -dispersal 128 -278 128 -langer 128 -graphically 128 -09:20 128 -rebrand 128 -1872 128 -sha 128 -sherrie 128 -menzel 128 -bonobos 128 -maier 128 -discernible 128 -squeamish 128 -sheepish 128 -herts 128 -14m 128 -pccs 128 -allotments 128 -shoplifters 128 -langton 128 -exmouth 128 -incandescent 128 -haleigh 128 -rushkoff 128 -liken 128 -iranian-american 128 -endowed 128 -steny 128 -saws 128 -09:48 128 -korean-american 128 -middleman 128 -throttling 128 -wyndham 128 -rustling 128 -probert 128 -daw 128 -vallarta 128 -sanctioning 128 -12:57 128 -retardant 128 -liverpudlian 128 -emmons 128 -2day 128 -24-carat 128 -gridlocked 128 -refrigerators 128 -hindenburg 128 -zubaydah 128 -quaker 128 -inched 128 -leyte 128 -heeled 128 -temps 128 -barakat 128 -azamat 128 -giaccherini 128 -stacie 128 -ringer 128 -modus 128 -highest-profile 128 -xe 128 -congregated 128 -marianna 128 -mercifully 128 -hai 128 -crockery 128 -atsb 128 -pozo 128 -unkind 128 -three-drug 128 -selflessly 128 -panathinaikos 128 -sturm 128 -insanely 128 -bram 128 -fragrant 128 -shootouts 128 -alkaline 128 -11-hour 128 -triumphing 128 -rsa 128 -mustered 128 -arsonists 128 -danvers 128 -abta 128 -recoveries 128 -mostyn 128 -clotting 128 -undemocratic 128 -teeing 128 -computerized 128 -wil 128 -swum 128 -convicting 128 -freda 128 -bludgeoning 128 -lepage 128 -departmental 128 -gutting 128 -ami 128 -lorenz 128 -2bn 128 -intriguingly 128 -chastity 128 -arm-in-arm 128 -spaniels 128 -sedona 128 -lavoie 128 -consumerism 128 -tantalizing 128 -2g 128 -inuit 128 -barzani 128 -09:53 128 -fluffed 128 -16.7 128 -foi 128 -r8 128 -clavell 128 -sit-ups 128 -rosanna 128 -grimaces 128 -underpin 128 -halibut 128 -androgynous 128 -retrieval 128 -amritsar 128 -huxtable 128 -theroux 127 -heralds 127 -reformation 127 -lorient 127 -weighty 127 -american-islamic 127 -d'souza 127 -citizenry 127 -pepperoni 127 -philosophies 127 -demarco 127 -blissful 127 -thetford 127 -xi'an 127 -powdery 127 -embalmed 127 -beefing 127 -whisperer 127 -brede 127 -07:50 127 -arty 127 -peddle 127 -masterminds 127 -catchment 127 -repainted 127 -budgeting 127 -impregnated 127 -lionsgate 127 -osteen 127 -sprite 127 -distilled 127 -emojis 127 -cardwell 127 -sriracha 127 -serra 127 -crayons 127 -horticulture 127 -allyson 127 -mouth-to-mouth 127 -brincidofovir 127 -outgunned 127 -mon 127 -seashore 127 -05:59 127 -photon 127 -ratko 127 -force-feeding 127 -murs 127 -dupree 127 -unplug 127 -shola 127 -kites 127 -furnishing 127 -airtight 127 -watters 127 
-relishes 127 -hairstylist 127 -haidara 127 -superstore 127 -fullness 127 -macklin 127 -microorganisms 127 -indiscretion 127 -morpurgo 127 -prerequisite 127 -selector 127 -linklater 127 -jackal 127 -anneclaire 127 -gah 127 -gonorrhea 127 -sako 127 -nah 127 -at-home 127 -sarandon 127 -sledgehammers 127 -07:21 127 -maitland 127 -hashish 127 -laverne 127 -stabilization 127 -rain-soaked 127 -difficile 127 -lila 127 -csiro 127 -rerouted 127 -thieving 127 -attleboro 127 -neely 127 -swampy 127 -albrighton 127 -dept. 127 -then-secretary 127 -ignatius 127 -brightening 127 -rowsell 127 -retrospectively 127 -graphs 127 -contravention 127 -springtime 127 -pretence 127 -bongiorno 127 -accc 127 -haryana 127 -socrates 127 -cowes 127 -simba 127 -seaport 127 -odell 127 -two-lane 127 -myerson 127 -hatcher 127 -10.10 127 -electra 127 -naysayers 127 -tyrannical 127 -mamma 127 -werewolf 127 -arlen 127 -abkhazia 127 -yarnell 127 -gators 127 -gothamist 127 -american-made 127 -self-incrimination 127 -dour 127 -star-spangled 127 -0.08 127 -peppermint 127 -workmates 127 -scuffed 127 -spierer 127 -first-born 127 -bedded 127 -temblor 127 -moynihan 127 -pro-business 127 -chupacabra 127 -coerce 127 -chorlton 127 -kimono 127 -fendi 127 -aon 127 -reiterates 127 -uninspiring 127 -locale 127 -spilt 127 -hurricane-force 127 -domineering 127 -re-run 127 -igloo 127 -barbarism 127 -cheerfully 127 -motherf 127 -christa 127 -brochures 127 -msc 127 -willcox 127 -vertebrates 127 -sunbathe 127 -sun-sentinel 127 -whiston 127 -sunbury 127 -shined 127 -1870s 127 -relays 126 -testimonials 126 -plumbers 126 -caterer 126 -ravaging 126 -maguindanao 126 -postgraduate 126 -rumbles 126 -07:13 126 -annihilation 126 -uav 126 -quicken 126 -ballman 126 -engle 126 -cinco 126 -institutionalized 126 -tinkler 126 -analogue 126 -19million 126 -guerilla 126 -rik 126 -hoards 126 -pylon 126 -stadia 126 -buy-to-let 126 -erakat 126 -rimmel 126 -landmine 126 -diller 126 -rubies 126 -three-set 126 -capri 126 -infidel 126 -palau 126 -asada 126 -mow 126 -leia 126 -parishioner 126 -supermax 126 -fender 126 -defamed 126 -nafissatou 126 -full-backs 126 -rock-bottom 126 -naga 126 -fangio 126 -jaundice 126 -hammerhead 126 -satchel 126 -ovenden 126 -kaylee 126 -lift-off 126 -impeded 126 -mims 126 -empathetic 126 -b-52 126 -winona 126 -jailbreak 126 -garnish 126 -reinvention 126 -09:47 126 -yak 126 -criado-perez 126 -paintwork 126 -clump 126 -brownback 126 -ksl 126 -rhimes 126 -bandana 126 -thy 126 -melancholy 126 -hectare 126 -undignified 126 -lavandera 126 -rifkind 126 -lures 126 -10-hour 126 -posterity 126 -bakewell 126 -carley 126 -enforces 126 -sediuk 126 -locust 126 -semesa 126 -incessantly 126 -imitated 126 -temporal 126 -chesney 126 -guacamole 126 -mid-90s 126 -removals 126 -wilkie 126 -aurier 126 -scaly 126 -affirming 126 -gibbon 126 -elio 126 -12:37 126 -aristide 126 -off-the-cuff 126 -scholarly 126 -93,000 126 -aircrew 126 -pled 126 -olio 126 -scruff 126 -13:07 126 -luc 126 -tightens 126 -loafers 126 -mccauley 126 -dubai-based 126 -livia 126 -jawline 126 -platonic 126 -countenance 126 -1868 126 -tweddle 126 -conquests 126 -monsieur 126 -aesthetically 126 -anti-depressant 126 -own-brand 126 -mitrovic 126 -confiscating 126 -autonomously 126 -rothkopf 126 -gambian 126 -breedlove 126 -statuses 126 -gelsenkirchen 126 -savages 126 -drage 126 -18:01 126 -western-style 126 -houdini 126 -parodied 126 -oddity 126 -16,500 126 -wkmg 126 -inquisition 126 -airships 126 -opposites 126 -ferrigno 126 -hares 126 -operandi 126 -09:55 126 -09:58 126 -sat-nav 126 -expelling 
126 -smirking 126 -harmeet 126 -mopeds 126 -salehi 126 -pacifist 126 -huddling 126 -11lb 126 -brooker 126 -hebei 125 -partick 125 -paraglider 125 -green-on-blue 125 -african-born 125 -dictatorial 125 -kevlar 125 -omani 125 -hasina 125 -huangs 125 -savor 125 -well-worn 125 -riveted 125 -electrode 125 -overheat 125 -flatten 125 -pre-war 125 -radiology 125 -shams 125 -30cm 125 -flaky 125 -oshie 125 -samutsevich 125 -12:24 125 -equinox 125 -rainey 125 -ghent 125 -artemis 125 -formulation 125 -massachusetts-based 125 -harewood 125 -hakimullah 125 -07:59 125 -kitching 125 -crepe 125 -08:01 125 -introverted 125 -disrespecting 125 -belies 125 -rani 125 -20th-century 125 -blackberries 125 -buoys 125 -outsized 125 -forstall 125 -rhine 125 -zeena 125 -13:37 125 -doggie 125 -buchenwald 125 -smallwood 125 -non-native 125 -abbasi 125 -05:57 125 -bernd 125 -armory 125 -ayahuasca 125 -spender 125 -caricatures 125 -07:25 125 -unsold 125 -regularity 125 -scrooge 125 -overpower 125 -disobeying 125 -malfunctions 125 -312 125 -leveraging 125 -aggravate 125 -wristwatch 125 -revels 125 -waldron 125 -mesmerizing 125 -advantageous 125 -dieback 125 -burkhart 125 -08:48 125 -grafton 125 -ina 125 -anthology 125 -somaliland 125 -bernal 125 -eagerness 125 -valour 125 -kruis 125 -spaceships 125 -carelessly 125 -jugular 125 -adderall 125 -straws 125 -caseworker 125 -nouveau 125 -wilding 125 -aborting 125 -wesleyan 125 -perumal 125 -janes 125 -hamsters 125 -usernames 125 -naturalization 125 -hygienic 125 -tipsy 125 -denali 125 -harald 125 -much-maligned 125 -suspenders 125 -jaffa 125 -interceptor 125 -8-1 125 -wardle 125 -underestimating 125 -bhp 125 -bonn 125 -safeway 125 -comanche 125 -cowed 125 -nondescript 125 -chomping 125 -tightness 125 -tice 125 -06:30 125 -prest 125 -2.35 125 -tyne-wear 125 -henshaw 125 -rama 125 -flinch 125 -terse 125 -nobility 125 -schaffer 125 -blooded 125 -xiaoping 125 -odours 125 -262 125 -timelines 125 -enmity 125 -ailes 125 -god-given 125 -ruff 125 -second-year 125 -ransacking 125 -grander 125 -10:23 125 -1080p 125 -corfu 125 -conformity 125 -hollinghurst 125 -kaspersky 125 -ado 125 -harries 125 -pickups 125 -iowans 125 -eritrean 125 -bergman 125 -disseminating 125 -ljungberg 125 -catalunya 125 -maharashtra 125 -kiln 125 -shortcuts 125 -dynasties 125 -cushing 125 -hitchhiker 125 -wasilewski 125 -maha 125 -stags 125 -eldridge 125 -out-of-pocket 125 -tulips 125 -07:31 125 -weybridge 125 -atwater 124 -slimline 124 -grasslands 124 -snappy 124 -decontamination 124 -off-piste 124 -dispelled 124 -abdulla 124 -chuckled 124 -braithwaite 124 -surnames 124 -kirkwood 124 -hayfever 124 -siphon 124 -2016-17 124 -09:36 124 -obedient 124 -self-immolations 124 -leland 124 -dara 124 -3gs 124 -huskies 124 -touch-screen 124 -longmont 124 -caspian 124 -gravestones 124 -guillaume 124 -prem 124 -oceania 124 -08:06 124 -08:05 124 -llodra 124 -blackmore 124 -clitoris 124 -06:47 124 -outlived 124 -blow-dry 124 -rawlinson 124 -offensives 124 -odeon 124 -lta 124 -showgirl 124 -queiroz 124 -2 1/2 124 -bou 124 -chiapas 124 -m8 124 -preservatives 124 -wiggles 124 -lundergan 124 -lovett 124 -phenomenally 124 -chums 124 -reflexes 124 -motes 124 -amen 124 -displeased 124 -targetted 124 -bevin 124 -triton 124 -in-game 124 -avenger 124 -16.99 124 -bey 124 -grisham 124 -gallic 124 -danville 124 -adair 124 -carmelo 124 -mattia 124 -appetites 124 -injectable 124 -noone 124 -overbearing 124 -curvaceous 124 -cross-examined 124 -special-needs 124 -greenbelt 124 -dietz 124 -colville 124 -internment 124 -corsica 124 -midsummer 124 
-mullan 124 -ayr 124 -edt 124 -perish 124 -pensive 124 -jalil 124 -noun 124 -sweetly 124 -gynaecological 124 -laila 124 -baghdatis 124 -sodden 124 -roku 124 -viacom 124 -chesley 124 -07:42 124 -knysz 124 -austell 124 -08:10 124 -pomeranian 124 -baikonur 124 -hornchurch 124 -reposted 124 -near-miss 124 -transcanada 124 -windslowe 124 -smu 124 -ladylike 124 -lun 124 -sentry 124 -fontana 124 -trekker 124 -v6 124 -autry 124 -243 124 -giuliana 124 -eusebio 124 -jump-start 124 -anthea 124 -winton 124 -06:12 124 -viewings 124 -clarins 124 -unnerved 124 -anja 124 -sd 124 -legroom 124 -lamu 124 -dissipate 124 -wood-burning 124 -22st 124 -pcp 124 -prancing 124 -moreton 124 -urologist 124 -harbored 124 -ballast 124 -baluchi 124 -g-8 124 -vickie 124 -suttles 124 -repent 124 -shoal 124 -awkwardness 124 -delano 124 -worrall 124 -contingencies 124 -alertness 124 -bandar 124 -circulatory 124 -lethargy 124 -mettle 124 -perceives 124 -karolina 124 -murali 124 -09:52 124 -amphitheater 124 -lexicon 124 -gunships 124 -elixir 124 -carpark 124 -cutsem 124 -howl 124 -red-brick 124 -doo 124 -jogs 124 -tyrol 124 -gravitate 124 -dorrell 124 -watertight 124 -rebelled 123 -ceases 123 -madine 123 -mcraven 123 -dharmasena 123 -08:08 123 -loew 123 -suresh 123 -escapade 123 -arsenals 123 -manolo 123 -teary-eyed 123 -bluster 123 -outperformed 123 -09:32 123 -verifiable 123 -epo 123 -pettit 123 -havering 123 -kroenke 123 -panto 123 -frenchmen 123 -redevelop 123 -trotted 123 -236 123 -hyperbole 123 -johnsons 123 -one-piece 123 -all-new 123 -kirkman 123 -janowicz 123 -hyland 123 -half-mast 123 -fibreglass 123 -emulated 123 -shaneah 123 -tiered 123 -capes 123 -1874 123 -blacksmith 123 -06:05 123 -renditions 123 -craves 123 -concoctions 123 -kreme 123 -semaan 123 -reliever 123 -11:43 123 -culminates 123 -godparents 123 -rapprochement 123 -goal-scoring 123 -lite 123 -hennepin 123 -merry-go-round 123 -perversion 123 -dinah 123 -mohr 123 -flowery 123 -tempah 123 -gainsborough 123 -binge-drinking 123 -charl 123 -gentrification 123 -6oz 123 -harboured 123 -tracie 123 -kiddie 123 -flogged 123 -96,000 123 -mirza 123 -samburu 123 -subsurface 123 -fourth-degree 123 -stinson 123 -yad 123 -twenty-two 123 -bakkal 123 -checkup 123 -toenails 123 -reshaped 123 -d68 123 -two-step 123 -espoused 123 -reclassified 123 -unambiguous 123 -willey 123 -clubbers 123 -citywide 123 -decrees 123 -12:55 123 -ccg 123 -baiting 123 -09:06 123 -attributing 123 -overspending 123 -millisieverts 123 -skateboarder 123 -pelham 123 -beachy 123 -surcharges 123 -50-foot 123 -instil 123 -songstress 123 -5:2 123 -bodmin 123 -09:27 123 -9.20 123 -laszlo 123 -spinks 123 -four-inch 123 -dysplasia 123 -stretchy 123 -women-only 123 -barbershop 123 -biochemistry 123 -itf 123 -12:34 123 -muhamed 123 -mees 123 -squandering 123 -dumbarton 123 -reims 123 -blanton 123 -whitehurst 123 -flannel 123 -hashi 123 -espaà 123 -genk 123 -gehry 123 -matos 123 -ribble 123 -bayeux 123 -effusive 123 -caffrey 123 -upstart 123 -cramping 123 -'60 123 -falcone 123 -06:13 123 -sidmouth 123 -267 123 -seamer 123 -three-mile 123 -versed 123 -dimly 123 -lik 123 -doled 123 -teese 123 -llamas 123 -gotcha 123 -iberian 123 -olarn 123 -alternately 123 -delved 123 -hoteliers 123 -1p 123 -selfishness 123 -crux 123 -86,000 123 -instigator 123 -closed-circuit 123 -vandoorne 123 -coincidental 123 -12:56 123 -09:51 123 -postnatal 123 -reprinted 123 -mayall 123 -dominica 123 -guinean 123 -matamoros 123 -coals 123 -revolts 123 -keselowski 123 -07:35 123 -alexey 122 -mccracken 122 -schulte 122 -700million 122 
-whisker 122 -fructose 122 -jacoby 122 -maura 122 -nee 122 -staking 122 -dillinger 122 -radioshack 122 -dozing 122 -09:34 122 -head-first 122 -alavi 122 -alessio 122 -scrutinise 122 -collison 122 -disintegration 122 -ejiofor 122 -assyrian 122 -takata 122 -unhygienic 122 -barahona 122 -interchangeable 122 -arron 122 -hangers 122 -naivety 122 -outstripped 122 -hays 122 -r-florida 122 -valuations 122 -battlegrounds 122 -08:02 122 -ratcheting 122 -grissom 122 -handel 122 -mash-up 122 -kingswood 122 -wily 122 -chromecast 122 -terminally-ill 122 -dunstable 122 -hourglass 122 -tarps 122 -orally 122 -o'dwyer 122 -rehabilitating 122 -château 122 -272 122 -surpasses 122 -pathfinder 122 -rialto 122 -landsberry 122 -barnaby 122 -06:06 122 -sterilised 122 -blackjack 122 -mid-life 122 -pharaohs 122 -fearnley-whittingstall 122 -refurbish 122 -archeological 122 -maples 122 -padlocked 122 -aligning 122 -bankstown 122 -mortals 122 -inflation-busting 122 -raisman 122 -keri 122 -xlviii 122 -aggressors 122 -10:06 122 -wigglesworth 122 -2013-2014 122 -worldview 122 -mouth-watering 122 -aerobic 122 -burritos 122 -improvise 122 -omelette 122 -subsistence 122 -kozak 122 -onyango 122 -beatification 122 -edouard 122 -prohibitions 122 -herod 122 -earphones 122 -drax 122 -foote 122 -convening 122 -mid-way 122 -kocha 122 -adl 122 -exertion 122 -societe 122 -carnivore 122 -gurion 122 -censoring 122 -rapporteur 122 -cockfighting 122 -holger 122 -06:15 122 -quarter-mile 122 -manama 122 -honking 122 -talons 122 -talked-about 122 -09:26 122 -coetzee 122 -steepest 122 -schieffer 122 -halpern 122 -murcia 122 -guandique 122 -snow-capped 122 -aldean 122 -breathable 122 -mingora 122 -p.m 122 -croissants 122 -exuberance 122 -dickie 122 -xp 122 -pattaya 122 -83,000 122 -pulver 122 -hemorrhaging 122 -almunia 122 -evgeny 122 -gravesite 122 -garcetti 122 -civilisations 122 -06:54 122 -06:58 122 -low-grade 122 -canfield 122 -kamui 122 -bottling 122 -hob 122 -whiz 122 -restarting 122 -belarusian 122 -coolness 122 -cutout 122 -nourishment 122 -hunky 122 -lecce 122 -08:50 122 -karting 122 -tacopina 122 -jibril 122 -kristoff 122 -lucero 122 -guterres 122 -lovebirds 122 -ppl 122 -gurkhas 122 -riyad 122 -snoozing 122 -momma 122 -quizzes 122 -screech 122 -lamm 122 -churn 122 -horde 122 -makenzie 122 -ulysses 122 -oldman 122 -11-month 122 -summarily 122 -liqueur 122 -full-page 122 -hell-bent 122 -gauck 122 -beauchamp 122 -10:09 122 -culpo 122 -after-hours 122 -tandy 122 -lower-income 122 -shrub 122 -infliction 122 -banff 122 -inwards 122 -errand 122 -anti-western 122 -bacup 122 -bastia 122 -91st 122 -teeny 122 -offseason 122 -tracts 122 -chivalry 122 -fabricate 122 -sangin 121 -voter-approved 121 -rurik 121 -jese 121 -sark 121 -cerci 121 -breathalyzer 121 -ushering 121 -flings 121 -kiribati 121 -flag-draped 121 -woledge 121 -overlay 121 -najibullah 121 -turboprop 121 -outwardly 121 -simoncelli 121 -reburied 121 -highlanders 121 -incestuous 121 -saskatchewan 121 -rippled 121 -encroachment 121 -blinked 121 -elisha 121 -bronco 121 -silhouetted 121 -smearing 121 -dugher 121 -06:43 121 -13:15 121 -frontage 121 -framingham 121 -cellmate 121 -dilshan 121 -cliven 121 -lionesses 121 -06:32 121 -devotes 121 -05:54 121 -rima 121 -317 121 -bendy 121 -roller-coaster 121 -aerosmith 121 -reverting 121 -whitewashed 121 -355 121 -kyl 121 -jakub 121 -readmitted 121 -brickwork 121 -self-deprecating 121 -soderbergh 121 -curses 121 -beardsley 121 -colostomy 121 -defused 121 -sketching 121 -accentuate 121 -smudge 121 -launer 121 -mean-spirited 121 -impart 121 
-braxton 121 -bev 121 -mid-19th 121 -enthralling 121 -sequenced 121 -deplored 121 -privatization 121 -100-year 121 -tint 121 -tradesmen 121 -morehouse 121 -dryness 121 -telomeres 121 -appropriated 121 -bricklayer 121 -rylance 121 -rifi 121 -sub-standard 121 -newsworthy 121 -seniority 121 -aliza 121 -topham 121 -cao 121 -fleeced 121 -bunbury 121 -laziness 121 -gracing 121 -mcadams 121 -luciana 121 -wombs 121 -spurr 121 -roseanne 121 -offends 121 -justgiving 121 -12:36 121 -08:12 121 -hedley 121 -polaris 121 -threaded 121 -flirtation 121 -protestations 121 -haile 121 -cubicles 121 -berets 121 -zanetti 121 -craziest 121 -07:56 121 -bulford 121 -06:35 121 -hauls 121 -voluptuous 121 -eichmann 121 -farsi 121 -lucasfilm 121 -merida 121 -portly 121 -mouthing 121 -replicates 121 -distinctions 121 -06:16 121 -trick-or-treating 121 -e4 121 -co-starred 121 -spasm 121 -muddle 121 -cairngorms 121 -uninterested 121 -lisi 121 -snuffed 121 -broome 121 -brig 121 -308 121 -simulators 121 -takahashi 121 -frontrunners 121 -tousled 121 -orban 121 -urination 121 -leggy 121 -neutralise 121 -upholstery 121 -sidestep 121 -onside 121 -tosh 121 -weightloss 121 -recoil 121 -microbiology 121 -then-prime 121 -energize 121 -gulp 121 -geologic 121 -alteration 121 -holcomb 121 -dov 121 -today.com 121 -squealing 121 -swindling 120 -snapdragon 120 -amato 120 -09:15 120 -suruc 120 -4st 120 -'92 120 -candor 120 -unsavory 120 -07:19 120 -beep 120 -homeopathic 120 -clean-shaven 120 -trier 120 -betfair 120 -pye 120 -10:12 120 -poznan 120 -hurrah 120 -bluegrass 120 -hand-drawn 120 -10-week 120 -07:52 120 -givers 120 -nichola 120 -bein 120 -kamchatka 120 -dnipro 120 -nazi-occupied 120 -melanin 120 -reshaping 120 -stigmatized 120 -zahid 120 -emoticons 120 -quilliam 120 -97.3 120 -goodger 120 -triangles 120 -groundsman 120 -diageo 120 -08:25 120 -benaud 120 -rpg 120 -under-20 120 -2oz 120 -onsite 120 -paderborn 120 -chileans 120 --4 120 -dreamy 120 -daimler 120 -asymmetrical 120 -eluding 120 -refereed 120 -caped 120 -goldeneye 120 -ruskin 120 -fenn 120 -gurung 120 -frenchwoman 120 -cello 120 -hokkaido 120 -cassim 120 -candelaria 120 -amex 120 -pneumatic 120 -feasted 120 -six-minute 120 -straightaway 120 -starboard 120 -starlets 120 -kigali 120 -o'carroll 120 -pondered 120 -slaven 120 -entrapment 120 -maidens 120 -spitz 120 -antivirus 120 -jaber 120 -elbagir 120 -three-page 120 -slag 120 -truckloads 120 -necc 120 -snoopy 120 -exclaiming 120 -12:58 120 -teton 120 -09:00 120 -sunseeker 120 -triathlete 120 -07:07 120 -tarzan 120 -brittain 120 -mis-sold 120 -junko 120 -ledbetter 120 -09:24 120 -juicing 120 -doumbia 120 -cathcart 120 -740 120 -disorientation 120 -conceivably 120 -redlands 120 -ceop 120 -saxby 120 -avocados 120 -decapitation 120 -cma 120 -hold-up 120 -18ft 120 -sidestepped 120 -honeybees 120 -tavares 120 -traumas 120 -9to5mac 120 -month-old 120 -hachette 120 -pragmatism 120 -cruzeiro 120 -tweezers 120 -low-tech 120 -scoreless 120 -alta 120 -j.c. 
120 -pinewood 120 -macbeth 120 -space-age 120 -fermentation 120 -drowsy 120 -ev-d68 120 -hyperactive 120 -08:55 120 -makayla 120 -typhoid 120 -2026 120 -2015/16 120 -13:24 120 -13lb 120 -seamen 120 -furthering 120 -asbury 120 -terrains 120 -deepdale 120 -entombed 120 -kongers 120 -acidity 120 -acrimony 120 -furze 120 -homily 120 -torpedoed 120 -maniac 120 -kurd 120 -cathartic 120 -1805 120 -horsham 120 -lawley 120 -marciano 120 -mineirao 120 -lymphoblastic 120 -full-face 120 -blount 120 -sommer 120 -strep 120 -justifiably 120 -assassinating 120 -countermeasures 120 -ra 120 -mahdi 120 -rfc 120 -attribution 120 -kayden 120 -lac 120 -cabot 120 -predawn 119 -17:41 119 -brownlow 119 -sussman 119 -qataris 119 -caledonia 119 -nudes 119 -manipulator 119 -trup 119 -385 119 -aunty 119 -andré 119 -sweatshirts 119 -chappell 119 -hyperloop 119 -burnell 119 -virtuoso 119 -preoccupation 119 -barcode 119 -09:35 119 -disseminate 119 -crewman 119 -mannerisms 119 -elmer 119 -straight-a 119 -elbowed 119 -microchips 119 -spfl 119 -arak 119 -statehouse 119 -grope 119 -dorsett 119 -amenity 119 -clitheroe 119 -shankly 119 -right-leaning 119 -moriarty 119 -eugenia 119 -hexagon 119 -tami 119 -08:28 119 -construed 119 -in-demand 119 -brightly-coloured 119 -up-front 119 -stepped-up 119 -hartley-parkinson 119 -sustainably 119 -06:27 119 -13:30 119 -denser 119 -leeches 119 -laney 119 -08:40 119 -transformations 119 -simmer 119 -restores 119 -incisive 119 -hanwell 119 -hanningfield 119 -alp 119 -6st 119 -intrinsically 119 -taiji 119 -al-ahram 119 -29million 119 -focussing 119 -near-perfect 119 -dyslexic 119 -valedictorian 119 -lutfi 119 -biographical 119 -tuttle 119 -orbiters 119 -mersey 119 -anais 119 -kubica 119 -mongrel 119 -conjecture 119 -09:44 119 -sherriff 119 -mittens 119 -post-christmas 119 -wyn 119 -overreacted 119 -astbury 119 -basked 119 -288 119 -286 119 -connotation 119 -5.25 119 -afl-cio 119 -emanuele 119 -eloquently 119 -seven-week 119 -marlins 119 -corbisiero 119 -voice-activated 119 -coons 119 -dungeons 119 -145,000 119 -mahinda 119 -07:05 119 -unpalatable 119 -14.7 119 -jessen 119 -frosts 119 -cumbrian 119 -daddies 119 -extortionate 119 -fitton 119 -axle 119 -cesena 119 -façade 119 -hyannis 119 -tabernacle 119 -tornados 119 -dragonfly 119 -cervantes 119 -piotr 119 -daniil 119 -word-of-mouth 119 -cast-iron 119 -thurston 119 -71,000 119 -salva 119 -chauffeured 119 -foulkes 119 -atleti 119 -segolene 119 -evert 119 -redneck 119 -08:35 119 -terminus 119 -mergers 119 -charmer 119 -culminate 119 -unreserved 119 -hexham 119 -lafreniere 119 -mcpartland 119 -mingo 119 -gobi 119 -outta 119 -implanting 119 -derivative 119 -nicu 119 -dellinger 119 -piecemeal 119 -14-year-olds 119 -interplanetary 119 -328 119 -linguistics 119 -plaskon 119 -bums 119 -groggy 119 -renata 119 -clifftop 119 -ostracised 119 -heaney 119 -edgington 119 -observatories 119 -olfactory 119 -roddy 119 -finishers 119 -adan 119 -perdomo 119 -pontoon 119 -naftali 119 -benham 119 -torpedoes 119 -tuvalu 119 -veronika 119 -khadija 119 -mayans 119 -catalogued 119 -simulates 119 -jalalabad 119 -mediocrity 119 -anti-drugs 119 -09:59 119 -sheri 119 -mermaids 119 -symbolises 119 -prudham 119 -jing 119 -kentish 119 -ooh 119 -great-uncle 119 -well-publicized 119 -undercooked 119 -2003-04 119 -gainsbourg 118 -bharati 118 -nuance 118 -mother-of-six 118 -minetti 118 -bcci 118 -wgn 118 -closings 118 -buckeyes 118 -atiya 118 -backyards 118 -barrassing 118 -yekaterina 118 -thundered 118 -ruckus 118 -brantley 118 -mally 118 -munby 118 -transcended 118 -shetty 118 
-harriers 118 -bardwell 118 -aborigines 118 -5.50 118 -overflowed 118 -belaid 118 -10-foot 118 -gbi 118 -expletive-laden 118 -plotts 118 -navies 118 -gynecologist 118 -situ 118 -dependents 118 -ege 118 -groningen 118 -jantjie 118 -militarization 118 -233 118 -plummets 118 -spotter 118 -manuka 118 -groans 118 -vigor 118 -nontraditional 118 -08:24 118 -portfolios 118 -schmid 118 -heirlooms 118 -delving 118 -dredge 118 -06:28 118 -strip-searched 118 -chested 118 -wnba 118 -galacticos 118 -matterhorn 118 -charters 118 -leant 118 -joystick 118 -hyman 118 -pre-game 118 -1858 118 -permissions 118 -hillingdon 118 -granville 118 -ex-convict 118 -probiotic 118 -blackheath 118 -taxiway 118 -gad 118 -d'angelo 118 -housekeepers 118 -jaipur 118 -paleontologists 118 -paprika 118 -snowed 118 -hand-crafted 118 -funke 118 -284 118 -mallett 118 -c-17 118 -sprightly 118 -pay-as-you-go 118 -schwab 118 -125th 118 -heigl 118 -dominika 118 -ariane 118 -07:03 118 -equities 118 -pestering 118 -uncensored 118 -likud 118 -non-believers 118 -same-day 118 -breezes 118 -jetpack 118 -eds 118 -rebook 118 -pocognoli 118 -obeying 118 -badu 118 -gazes 118 -425,000 118 -ulterior 118 -non-payment 118 -tuskegee 118 -backheel 118 -contorted 118 -bluebell 118 -sprouted 118 -08:16 118 -gooey 118 -pietro 118 -remington 118 -footnote 118 -veitch 118 -enslavement 118 -moisturising 118 -8.20 118 -necropolis 118 -brees 118 -uighurs 118 -barbera 118 -esp 118 -unspoilt 118 -subsidize 118 -neutrinos 118 -thorson 118 -kick-started 118 -blockers 118 -sayreville 118 -moans 118 -281 118 -hagupit 118 -a.k.a. 118 -deblase 118 -juggles 118 -ezell 118 -nazia 118 -abyei 118 -kuhn 118 -mahiki 118 -mayne 118 -gender-neutral 118 -long-established 118 -irritant 118 -mohammadi 118 -minuteman 118 -marvels 118 -nomads 118 -alassane 118 -nusakambangan 118 -halen 118 -coverup 118 -wraparound 118 -fecal 118 -08:26 118 -lubricant 118 -leonie 118 -persevered 118 -lon 118 -agreeable 118 -prams 118 -hoda 118 -wali 118 -500-year-old 118 -osage 118 -contaminating 118 -balearic 118 -multi-agency 118 -meridian 118 -philanthropists 118 -killian 118 -on-the-spot 118 -veuve 118 -granola 118 -exerting 118 -natacha 118 -indio 118 -rodallega 118 -catastrophes 118 -swire 118 -blacktown 118 -12:48 118 -finger-pointing 118 -wai 118 -silks 118 -gilks 118 -nonchalantly 118 -attrition 117 -marni 117 -budgeted 117 -pious 117 -rigg 117 -handcuffing 117 -leotard 117 -sexualisation 117 -muses 117 -myeloid 117 -hiccup 117 -midlife 117 -dumber 117 -imprison 117 -bail-out 117 -13:19 117 -kitt 117 -rahr 117 -bushell 117 -yoselyn 117 -canzani 117 -undercarriage 117 -sharknado 117 -larose 117 -puckett 117 -lvmh 117 -hawes 117 -fete 117 -gale-force 117 -649 117 -06:41 117 -georgians 117 -jean-paul 117 -curitiba 117 -13:16 117 -chainsaws 117 -courtrooms 117 -2.40 117 -stimulants 117 -lemmon 117 -substituting 117 -rms 117 -debt-ridden 117 -scrutinize 117 -quads 117 -early-season 117 -gullible 117 -tangerine 117 -rippling 117 -05:53 117 -stowaways 117 -mola 117 -drowsiness 117 -cardosa 117 -buss 117 -upper-class 117 -ajar 117 -inputs 117 -lugano 117 -lockley 117 -pre-arranged 117 -beaconsfield 117 -two-shot 117 -rabbani 117 -sandstorm 117 -hynde 117 -yorkshireman 117 -a-league 117 -wark 117 -anzor 117 -organist 117 -lumbar 117 -rackauckas 117 -wainstein 117 -workless 117 -best-dressed 117 -tawdry 117 -goldilocks 117 -1878 117 -sarwar 117 -gaynor 117 -angina 117 -30-foot 117 -rubella 117 -61,000 117 -well-wisher 117 -282 117 -regretting 117 -congregants 117 -steamboat 117 -macon 117 -9m 117 
-boston-based 117 -lodgings 117 -animator 117 -heave 117 -19c 117 -iteration 117 -gripes 117 -palladium 117 -hypersonic 117 -grangemouth 117 -nilsson 117 -touchscreens 117 -disintegrating 117 -noe 117 -grinch 117 -khdeir 117 -speechwriter 117 -foot-long 117 -keener 117 -hochman 117 -gaylord 117 -04:35 117 -high-visibility 117 -illiteracy 117 -prenuptial 117 -catchphrases 117 -prerogative 117 -23.5 117 -shahmalak 117 -perplexing 117 -abed 117 -hindmarch 117 -zee 117 -dredged 117 -tsarni 117 -waffles 117 -bouazizi 117 -forts 117 -horseracing 117 -08:34 117 -bardem 117 -05:28 117 -verges 117 -obscuring 117 -etherington 117 -compensating 117 -miami-based 117 -masia 117 -05:41 117 -fluency 117 -graveside 117 -unappealing 117 -vladivostok 117 -nonlethal 117 -yeltsin 117 -displace 117 -festooned 117 -hessler 117 -canseco 117 -kukucova 117 -petal 117 -mahony 117 -linning 117 -non-european 117 -schulman 117 -gavel 117 -debby 117 -mothercare 117 -e-fit 117 -bowery 117 -bellfield 117 -zhuang 117 -gloriously 117 -dewine 117 -underscoring 117 -staph 117 -aiello 117 -joslin 117 -palmdale 117 -moldovan 117 -bi 117 -jain 117 -towered 117 -beaker 117 -o'dowd 117 -hijackings 117 -cabos 117 -distinguishes 117 -inverdale 117 -20-foot 117 -readied 117 -heber 116 -firecracker 116 -push-up 116 -beaulieu 116 -lewington 116 -a5 116 -graciously 116 -07:16 116 -melzer 116 -atos 116 -darrin 116 -gelding 116 -poignantly 116 -empirical 116 -giancarlo 116 -guerre 116 -lev 116 -wing-back 116 -deceptively 116 -lepore 116 -cabral 116 -tyree 116 -birthright 116 -pinocchio 116 -tannehill 116 -rotunda 116 -spyware 116 -urooj 116 -all-powerful 116 -guilfoyle 116 -machine-gun 116 -haves 116 -99.99 116 -slr 116 -stocker 116 -06:49 116 -elms 116 -big-budget 116 -shoulder-length 116 -08:29 116 -blossoms 116 -13.1 116 -critiques 116 -therein 116 -relatable 116 -wael 116 -db5 116 -wondrous 116 -mohler 116 -aegean 116 -emeralds 116 -kroger 116 -maoists 116 -gol 116 -7.15 116 -disfigurement 116 -abominable 116 -acorns 116 -joyride 116 -antihistamines 116 -mothering 116 -mandla 116 -genovese 116 -southerly 116 -atika 116 -nanette 116 -twirling 116 -maxime 116 -1600s 116 -meatball 116 -wizardry 116 -workday 116 -larijani 116 -09:41 116 -trickier 116 -pre-world 116 -hopman 116 -2030s 116 -ivanisevic 116 -konye 116 -tyrese 116 -amending 116 -scriptures 116 -bellusci 116 -heatwaves 116 -resettled 116 -well-prepared 116 -ewood 116 -mork 116 -tauranga 116 -frayne 116 -workaholic 116 -radiohead 116 -ferns 116 -flattening 116 -burgling 116 -abid 116 -r.i.p. 
116 -narita 116 -morriston 116 -ellsworth 116 -skylight 116 -gazed 116 -lind 116 -banal 116 -footfall 116 -babel 116 -spew 116 -idling 116 -expeditiously 116 -rigour 116 -bratislava 116 -sabourin 116 -liptak 116 -banquets 116 -cacophony 116 -cunliffe 116 -amis 116 -saha 116 -croats 116 -ditto 116 -two-match 116 -welford 116 -briana 116 -recline 116 -weightlifter 116 -par-five 116 -non-traditional 116 -overreacting 116 -quilted 116 -pursuant 116 -revelry 116 -13:08 116 -quds 116 -cleethorpes 116 -liaise 116 -mnd 116 -frankland 116 -firmness 116 -baby-faced 116 -05:40 116 -transcribed 116 -thrush 116 -o'farrell 116 -caveman 116 -fabrizio 116 -cross-channel 116 -1776 116 -accorded 116 -post-apocalyptic 116 -watcher 116 -consoling 116 -jaaskelainen 116 -aso 116 -sandown 116 -knotted 116 -cellist 116 -bouvier 116 -1841 116 -anti-fracking 116 -molins 116 -presto 116 -bots 116 -litmus 116 -soham 116 -10:43 116 -milano 116 -pcsos 116 -scupper 116 -oliva 116 -beeb 116 -hourlong 116 -minimized 116 -cilla 116 -hayne 116 -relinquishing 116 -unravelling 116 -mcgurk 116 -retried 116 -untitled 116 -krebs 116 -neurodegenerative 116 -appropriation 116 -nabi 116 -jam-packed 116 -fumbling 116 -bollards 116 -stockdale 116 -embellishment 116 -crossword 116 -pompous 116 -taryn 116 -khoo 116 -beal 116 -fazlullah 116 -mcdonnells 116 -19:05 116 -730 116 -foo 116 -horned 116 -augment 116 -crayon 116 -kaia 116 -rousey 116 -esperance 116 -messier 115 -dimartino 115 -09:16 115 -09:18 115 -kindles 115 -unenviable 115 -howson 115 -ziggy 115 -hierro 115 -07:12 115 -07:14 115 -auspicious 115 -extricate 115 -carma 115 -e-type 115 -lombardo 115 -attache 115 -reassert 115 -andrus 115 -deplore 115 -hitachi 115 -anti-assad 115 -p1 115 -ullah 115 -invisibility 115 -barth 115 -8oz 115 -lookalikes 115 -destro 115 -tamayo 115 -wallow 115 -aerobatic 115 -sinitta 115 -probiotics 115 -08:00 115 -harley-davidson 115 -om 115 -frailties 115 -manmade 115 -superbugs 115 -miscarry 115 -05:15 115 -05:12 115 -lakhdar 115 -twa 115 -bobsleigh 115 -bowser 115 -erotica 115 -ducklings 115 -13:18 115 -veep 115 -self-doubt 115 -08:20 115 -robby 115 -257 115 -jeopardising 115 -friel 115 -bubonic 115 -donegal 115 -levelling 115 -creole 115 -smyrna 115 -orwellian 115 -unloved 115 -07:10 115 -exudes 115 -yachtsman 115 -instantaneously 115 -centro 115 -rumblings 115 -13:01 115 -humberto 115 -05:38 115 -mumtaz 115 -folic 115 -bucklew 115 -classically 115 -gush 115 -government-controlled 115 -95-year-old 115 -xiv 115 -jammu 115 -paler 115 -safi 115 -rookies 115 -baquba 115 -percussion 115 -puget 115 -constructions 115 -boathouse 115 -small-business 115 -castigated 115 -rolled-up 115 -slacks 115 -side-effect 115 -jumble 115 -kantor 115 -nahyan 115 -motels 115 -townspeople 115 -nutcracker 115 -kindred 115 -1814 115 -samimokbel81_dm 115 -conmen 115 -sirigu 115 -buffeted 115 -yearn 115 -foetuses 115 -sexualized 115 -verne 115 -visualisation 115 -contouring 115 -seasoning 115 -49.99 115 -07:22 115 -eastwards 115 -09:07 115 -pacey 115 -rathbun 115 -19:58 115 -huron 115 -flaunted 115 -gacy 115 -antonov 115 -matchmaking 115 -clarita 115 -mitigated 115 -tatchell 115 -respectability 115 -jeeps 115 -09:22 115 -gennaro 115 -bridgwater 115 -1.40 115 -trapeze 115 -jeering 115 -bellows 115 -tits 115 -tenfold 115 -bronstein 115 -anglicans 115 -plunder 115 -jean-michel 115 -uniformly 115 -hatteras 115 -07:48 115 -sixth-form 115 -ksdk 115 -zips 115 -party-backed 115 -dislocating 115 -emigration 115 -i-95 115 -thongs 115 -immanuel 115 -drian 115 -pelting 115 -retirements 
115 -hoxton 115 -maximus 115 -06:50 115 -discrete 115 -juppe 115 -seven-match 115 -haggis 115 -dasha 115 -13:03 115 -13:02 115 -stipulation 115 -gusting 115 -65million 115 -pre-tournament 115 -oap 115 -gallardo 115 -mismatch 115 -excavator 115 -furlough 115 -migaloo 115 -ostreicher 115 -dha 115 -disquiet 115 -cwmbran 115 -aficionado 115 -mer 115 -1845 115 -knee-deep 115 -dreamer 115 -dead-end 115 -non-hispanic 115 -3st 115 -russert 115 -rebates 115 -beaks 115 -8lbs 115 -predisposition 115 -omarjan 115 -amaze 115 -borges 115 -teamsters 115 -wynter 115 -avatars 115 -mischa 115 -sheaffer 115 -sp 115 -albanians 115 -non-hodgkin 115 -recharged 115 -petn 115 -mccandless 115 -flannery 115 -career-high 115 -gnomes 115 -waldeck 115 -07:38 115 -under-16s 115 -retard 115 -carranza 115 -vercammen 115 -ribcage 115 -oratory 115 -07:34 115 -moderators 115 -misconstrued 114 -alfa 114 -jacobi 114 -rafik 114 -broussard 114 -09:17 114 -merle 114 -dirrell 114 -derriford 114 -1400 114 -mastracchio 114 -muqtada 114 -dwarfing 114 -two-person 114 -foolhardy 114 -paediatrics 114 -pageantry 114 -40c 114 -ravenel 114 -boxy 114 -d'isere 114 -vibrancy 114 -sloping 114 -2010-2011 114 -glazers 114 -9.50 114 -payloads 114 -ballads 114 -raisin 114 -work-rate 114 -co-payment 114 -d'etat 114 -eight-minute 114 -killough 114 -hartwell 114 -samui 114 -kingdoms 114 -singed 114 -wambach 114 -ivanka 114 -bognor 114 -communicator 114 -bastrop 114 -tareq 114 -tissier 114 -docs 114 -penile 114 -moustafa 114 -bantamweight 114 -outsource 114 -magdalen 114 -gardos 114 -own-goal 114 -sensitively 114 -implosion 114 -sugg 114 -holborn 114 -geist 114 -12.1 114 -yea 114 -teddies 114 -roethlisberger 114 -sun-kissed 114 -ensue 114 -riser 114 -crime-ridden 114 -smurfs 114 -cyr 114 -shallows 114 -sculptors 114 -catalogues 114 -equaled 114 -debutants 114 -14-day 114 -zipping 114 -sledding 114 -shiffrin 114 -marsupial 114 -5,600 114 -popeye 114 -intangible 114 -goetz 114 -interchange 114 -0-60mph 114 -aspca 114 -journeyman 114 -mange 114 -nazarbayev 114 -gentile 114 -broken-down 114 -vibrator 114 -aberration 114 -discounting 114 -auditing 114 -dug-out 114 -arouse 114 -fluctuate 114 -on-line 114 -07:20 114 -dimensional 114 -2c 114 -mikaela 114 -mackey 114 -tetley 114 -infirm 114 -phablet 114 -rudisha 114 -1789 114 -io 114 -specifies 114 -laborer 114 -pox 114 -karlovic 114 -nellie 114 -opioids 114 -phobos 114 -arcane 114 -trampling 114 -slimani 114 -siebold 114 -ready-to-wear 114 -euthanised 114 -ruffle 114 -motorcycling 114 -grazes 114 -18-24 114 -manolas 114 -arch-rival 114 -mediated 114 -al-hussein 114 -acer 114 -strongly-worded 114 -regrouped 114 -d'italia 114 -under-performing 114 -beasant 114 -yeager 114 -perignon 114 -diop 114 -one-size-fits-all 114 -vero 114 -extracurricular 114 -berkley 114 -ato 114 -wagga 114 -pyjama 114 -empoli 114 -g.i. 
114 -05:23 114 -complicates 114 -superfoods 114 -twenty-one 114 -observational 114 -pinks 114 -oakwood 114 -telethon 114 -mumbled 114 -boozing 114 -shabiha 114 -awaken 114 -pat-downs 114 -skirting 114 -childminder 114 -13:25 114 -4c 114 -romany 114 -homebuyers 114 -5.15 114 -crucible 114 -repulsed 114 -colluding 114 -plummer 114 -contentment 114 -arriva 114 -326 114 -duels 114 -rosell 114 -caribou 114 -fuchs 114 -mated 114 -nordegren 114 -pierre-emerick 114 -bullet-riddled 114 -conspicuously 114 -two-page 114 -brescia 114 -crusoe 114 -heeringa 114 -unreasonably 114 -jeopardizing 114 -generale 114 -napalm 114 -retort 114 -krieger 114 -hubby 114 -friendlier 114 -double-edged 114 -changsha 114 -germantown 114 -trudges 114 -hassell 114 -vilma 114 -rees-mogg 114 -soaks 114 -diario 114 -fico 114 -preppy 114 -daca 114 -exquisitely 114 -07:30 114 -milkshakes 114 -hooligan 113 -lippert 113 -bulbous 113 -melons 113 -09:13 113 -mccollom 113 -macaulay 113 -basten 113 -grumbling 113 -07:18 113 -teo 113 -manifestly 113 -greenest 113 -221 113 -urns 113 -distaste 113 -dkny 113 -brisman 113 -crediting 113 -06:46 113 -pegg 113 -snows 113 -hatay 113 -how-to 113 -07:58 113 -cheaters 113 -eavesdrop 113 -halperin 113 -taggart 113 -raison 113 -child-friendly 113 -leeson 113 -medford 113 -re-offending 113 -provokes 113 -six-week-old 113 -salahi 113 -profess 113 --5 113 -kieu 113 -prettier 113 -born-again 113 -zuroff 113 -monsanto 113 -bonfires 113 -wholemeal 113 -jia 113 -hydrocodone 113 -fireballs 113 -girardi 113 -fossilized 113 -06:01 113 -hilariously 113 -consequential 113 -780 113 -enright 113 -boggs 113 -holdall 113 -dft 113 -mutt 113 -converging 113 -efficiencies 113 -rudyard 113 -netto 113 -ramsgate 113 -yanga-mbiwa 113 -turnovers 113 -top-end 113 -komodo 113 -auerbach 113 -nine-time 113 -adventist 113 -goodfellas 113 -wisbech 113 -cashmore 113 -blackhawks 113 -non-medical 113 -denounces 113 -pawns 113 -skyward 113 -remittances 113 -extra-terrestrial 113 -al-mabhouh 113 -wrinkled 113 -parchment 113 -bacca 113 -knowlton 113 -gallows 113 -government-owned 113 -understaffed 113 -personas 113 -weds 113 -misbehavior 113 -barça 113 -59,000 113 -2:1 113 -gabba 113 -dark-haired 113 -67p/churyumov-gerasimenko 113 -audrie 113 -kucherena 113 -whalley 113 -rom 113 -filip 113 -glendora 113 -hazare 113 -babeu 113 -zenaida 113 -dissect 113 -sighs 113 -chum 113 -salis 113 -slapstick 113 -glaser 113 -gagarin 113 -656 113 -cpt 113 -leveraged 113 -carmaker 113 -cougars 113 -victimization 113 -segovia 113 -05:22 113 -enforceable 113 -rosemarie 113 -raptors 113 -bloods 113 -doppelganger 113 -coulthard 113 -onyx 113 -grosskreutz 113 -127,000 113 -rosie-ann 113 -denuclearization 113 -portals 113 -lounger 113 -hypocrites 113 -tat 113 -injury-hit 113 -ska 113 -cynics 113 -suthep 113 -dukakis 113 -345 113 -empathize 113 -birdsong 113 -subsidising 113 -norte 113 -dw 113 -jacque 113 -ayling 113 -ephemeral 113 -waring 113 -horseman 113 -idyll 113 -siemens 113 -impresses 113 -euphrates 113 -zoology 113 -transferable 113 -excites 113 -discriminates 113 -priestland 113 -caldera 113 -deftly 113 -wednesdays 113 -19:01 113 -timms 113 -troyan 113 -dele 113 -reveled 113 -hirscher 113 -superfan 113 -unaccountable 113 -rfa 113 -tamper 113 -tacked 113 -canoes 113 -vicksburg 113 -arduino 113 -cabbies 113 -595 112 -toiling 112 -dismemberment 112 -mens 112 -09:19 112 -punting 112 -liebherr 112 -cancer-causing 112 -08 112 -mongol 112 -fahd 112 -casiraghi 112 -bmws 112 -mullany 112 -amla 112 -oozes 112 -loved-up 112 -19:50 112 -dialects 112 
[diff hunk body elided: several thousand deleted lines from a word-frequency vocabulary file, one "-<token> <count>" pair per line (e.g. "-chitwood 112", "-discharges 112"). Tokens are lower-cased news-corpus vocabulary entries sorted by descending count; this span covers counts 112 down to 89. The original one-line-per-entry diff structure was flattened in extraction and is not reproduced here.]
-17:23 89 -networked 89 -mustaches 89 -retaken 89 -golliwog 89 -housebuilding 89 -diverts 89 -foldable 89 -khrushchev 89 -strom 89 -lawrenceville 89 -maglev 89 -tomtom 89 -bloodline 89 -pockmarked 89 -8000 89 -fortuitous 89 -10-0 89 -258 89 -daylong 89 -dhar 89 -veranda 89 -lapan 89 -1.95 89 -resists 89 -13:36 89 -wyden 89 -16:37 89 -plaquemines 89 -abseiling 89 -disloyal 89 -parvin 89 -bennie 89 -whittam 89 -60p 89 -murmansk 89 -hindes 89 -girth 89 -unsophisticated 89 -malvo 89 -belhaj 89 -monotonous 89 -cotillard 89 -ifa 89 -alcock 89 -inherits 89 -sewerage 89 -hitzfeld 89 -25.5 89 -slashes 89 -motherland 89 -deadlier 89 -cramblett 89 -super-bantamweight 89 -cutty 89 -taxied 89 -1804 89 -ever-expanding 89 -welton 89 -interpersonal 89 -alamein 89 -03:30 89 -03:36 89 -jean-yves 89 -mcgann 89 -laurene 89 -42-year 89 -wallop 89 -bombastic 89 -dandelion 89 -kennesaw 89 -affluenza 89 -1837 89 -run-of-the-mill 89 -worst-affected 89 -bilzerian 89 -lartin 89 -transylvania 89 -ejaculation 89 -make-or-break 89 -m16 89 -rogozin 89 -doku 89 -irrefutable 89 -brest 89 -prof. 89 -:1 89 -secessionist 89 -mihajlovic 89 -nightline 89 -lye 89 -pagani 89 -alyson 89 -worland 89 -sherwin 89 -eminently 89 -xena 89 -birkdale 89 -stutzman 89 -rapture 89 -ieng 89 -booties 89 -hrs 89 -clarksville 89 -semi-conscious 89 -17:52 89 -17:54 89 -anara 89 -uncoupling 89 -60-day 89 -04:37 89 -ballantyne 89 -utopian 89 -delegated 89 -tuxedos 89 -coexistence 89 -intimated 89 -goodin 89 -14.2 89 -ferne 89 -raved 89 -labored 89 -relegation-threatened 89 -opel 89 -catwoman 89 -launceston 89 -levene 89 -euphemism 89 -creatives 89 -comic-book 89 -scudetto 89 -chivas 89 -alisher 89 -curzon 89 -lessening 89 -altez 89 -04:31 89 -crusty 89 -storytellers 89 -seyfried 89 -roughshod 89 -chisel 89 -reintegrate 89 -recused 89 -seeps 89 -zerilli 89 -11c 89 -blanketing 89 -hearth 89 -baluchistan 89 -refreshingly 89 -14:40 89 -olympiad 89 -headliner 89 -tumbler 89 -banderas 89 -walgren 89 -gymnastic 89 -badass 89 -fibromyalgia 89 -concannon 89 -postures 89 -shins 89 -topaz 89 -yettaw 89 -keiran 89 -canter 89 -reiter 89 -khalaf 89 -erhardt 89 -lassie 89 -knotweed 89 -cawthorne 89 -well-balanced 89 -urquhart 89 -bhopal 89 -200-mile 89 -homing 89 -keates 89 -physiques 89 -grand-daughter 89 -rebalance 89 -flagler 89 -saddening 89 -microscopy 89 -doggedly 89 -aberdare 89 -367 89 -lacing 89 -roubles 89 -demoralising 89 -grimaldi 89 -evidentiary 89 -tabletop 89 -remus 89 -18,500 89 -four-year-olds 89 -asu 89 -bellwether 89 -brain-damaged 89 -xin 89 -bittermann 89 -superdelegates 89 -donnell 89 -normalization 89 -msn 89 -samberg 89 -stucco 89 -cockpits 89 -crucifixions 89 -lunt 89 -cachet 89 -747-8 89 -ysidro 89 -chevaline 89 -levenshulme 89 -gbr 89 -32a 89 -jean-francois 89 -535 89 -feelgood 89 -buttner 89 -bryony 89 -nuys 89 -vedder 89 -mother-daughter 89 -inversion 89 -foment 89 -pearlman 89 -preddie 89 -galliani 89 -malton 89 -19:22 89 -bilton 89 -9-0 89 -777-200 89 -totton 89 -malmstrom 89 -frailty 89 -fernanda 89 -distracts 89 -horse-riding 89 -starchy 89 -glenys 89 -fennel 89 -debauched 89 -dyfed-powys 89 -hislop 89 -glowed 89 -bj 89 -fariq 89 -jinx 89 -bakary 89 -pyrotechnic 89 -600m 89 -exhale 89 -abbreviation 89 -mille 89 -waxworks 89 -cottagers 89 -cabernet 89 -pcos 89 -two-point 89 -lydiate 89 -fukuda 89 -iom 89 -stillness 88 -harwich 88 -siddle 88 -aseel 88 -alois 88 -unrwa 88 -coots 88 -piggott 88 -saunas 88 -disfiguring 88 -bonucci 88 -drath 88 -q. 
88 -fixers 88 -19:45 88 -19:44 88 -19:47 88 -cozumel 88 -20-month 88 -boas 88 -clattered 88 -linchpin 88 -chiselled 88 -cuz 88 -dubs 88 -2007-2008 88 -snoopers 88 -islets 88 -omeruo 88 -saleswoman 88 -manford 88 -bomb-maker 88 -low-carbon 88 -malnourishment 88 -seaplane 88 -prakash 88 -brynn 88 -20km 88 -thiry 88 -16:17 88 -makelele 88 -plagiarized 88 -heightens 88 -ot 88 -linens 88 -reactivated 88 -crandall 88 -swaddling 88 -tap-in 88 -mudeford 88 -clipboard 88 -sherratt 88 -naveed 88 -user-generated 88 -purify 88 -daluise 88 -unconsciously 88 -hulls 88 -bedene 88 -telemundo 88 -150-year 88 -tui 88 -pinged 88 -deformation 88 -alphabetical 88 -garridos 88 -under-19 88 -headwinds 88 -bellicose 88 -daffodil 88 -reciprocal 88 -ainsley 88 -boastful 88 -ghouta 88 -scampi 88 -rothko 88 -repentance 88 -flatmates 88 -ill-prepared 88 -renaud 88 -surmised 88 -limes 88 -adua 88 -mite 88 -osu 88 -catlin 88 -casquejo 88 -aylward 88 -1s 88 -giovani 88 -gop-controlled 88 -fabien 88 -idowu 88 -pratley 88 -flavio 88 -demonstrably 88 -18:12 88 -cavanagh 88 -redrawn 88 -kordofan 88 -aleutian 88 -doormen 88 -schneier 88 -shaftesbury 88 -19:37 88 -separatism 88 -radley 88 -wrinkly 88 -roby 88 -koryo 88 -cemortan 88 -mcardle 88 -sutra 88 -southmead 88 -northwood 88 -barneveld 88 -dji 88 -sabo 88 -azzopardi 88 -dunst 88 -slaughterhouses 88 -injects 88 -edgewater 88 -sneering 88 -hypnotist 88 -19:12 88 -iceman 88 -meddle 88 -9.0 88 -bruck 88 -businesswomen 88 -ndesandjo 88 -04:59 88 -anti-obesity 88 -clos 88 -liston 88 -chain-link 88 -childress 88 -deaton 88 -reprising 88 -artois 88 -fabrications 88 -agut 88 -dni 88 -stepien 88 -dallaglio 88 -delray 88 -tracheostomy 88 -manoa 88 -centre-backs 88 -rhinoplasty 88 -14:06 88 -honiton 88 -parka 88 -balad 88 -xiong 88 -15-second 88 -pittance 88 -sternum 88 -last-four 88 -arce 88 -pictorial 88 -totnes 88 -17:10 88 -settler 88 -technica 88 -chapur 88 -talismanic 88 -huxley 88 -dm.has 88 -amor 88 -quizzing 88 -letham 88 -04:36 88 -chaining 88 -untraceable 88 -residues 88 -clingy 88 -rq-170 88 -colonization 88 -pennine 88 -yeshiva 88 -nix 88 -space.com 88 -dunked 88 -kinship 88 -lighted 88 -malevolent 88 -giraldo 88 -qian 88 -ride-sharing 88 -awa 88 -logue 88 -gauges 88 -runcorn 88 -rattles 88 -uncomplicated 88 -uncannily 88 -1215 88 -affluence 88 -dhanak 88 -simmered 88 -holing 88 -hunks 88 -dawa 88 -thatch 88 -betrays 88 -khoury 88 -parrett 88 -murgatroyd 88 -fruitvale 88 -high-voltage 88 -vixen 88 -epinephrine 88 -kee 88 -heeding 88 -140-character 88 -touchstone 88 -metamorphosis 88 -user-friendly 88 -artistically 88 -pre-industrial 88 -looe 88 -gun-rights 88 -15:00 88 -kimathi 88 -@dailymailgames 88 -honeywell 88 -birnbaum 88 -amerli 88 -agoraphobia 88 -gay-rights 88 -accentuated 88 -kitchenette 88 -d1 88 -chon 88 -mellencamp 88 --20 88 -write-in 88 -six-under 88 -posada 88 -untaxed 88 -cartoonish 88 -sehwag 88 -busters 88 -re-used 88 -05:14 88 -9:45 88 -black-market 88 -depositors 88 -pao 88 -inacio 88 -960 88 -03:00 88 -26.8 88 -psychopaths 88 -scoot 88 -vegans 88 -suh 88 -charriez 88 -jet-setting 88 -granollers 88 -sande 88 -nanna 88 -nimmo 88 -karmel 88 -megyn 88 -reposition 88 -mano 88 -heathcote 88 -tygart 88 -daker 88 -o'hanlon 88 -cuaron 88 -podiums 88 -dvr 88 -marchioness 88 -beveridge 88 -nadler 88 -gazans 88 -facelifts 88 -northerners 88 -trebek 88 -ladybirds 88 -bromsgrove 88 -acrobat 88 -sequinned 88 -three-car 88 -qvc 88 -chez 88 -centrists 88 -itv2 88 -innermost 88 -04:44 88 -hyon 88 -ghazni 88 -lat 88 -matrimony 88 -05:17 88 -neo-natal 88 -lancer 88 
-accenture 88 -marzouki 88 -arceneaux 87 -operationally 87 -thorp 87 -terrifyingly 87 -flabby 87 -thorgan 87 -domestication 87 -quivering 87 -waverly 87 -yearned 87 -dystonia 87 -singularly 87 -shira 87 -applebee 87 -supple 87 -tms 87 -ines 87 -qb 87 -commencing 87 -386 87 -bushehr 87 -ex-pat 87 -weiland 87 -succinct 87 -lapid 87 -piccolo 87 -trajectories 87 -14:18 87 -unionized 87 -pudong 87 -reelected 87 -vocally 87 -nourished 87 -azure 87 -blacker 87 -prioritizing 87 -wades 87 -beeching 87 -earnestly 87 -marlena 87 -coretta 87 -felder 87 -megachurch 87 -astro 87 -quangos 87 -guingamp 87 -fairy-tale 87 -digesting 87 -kellerman 87 -sanitized 87 -14:56 87 -112th 87 -rasheed 87 -ashton-under-lyne 87 -saplings 87 -conical 87 -custom-designed 87 -kramatorsk 87 -basseley 87 -australasia 87 -curia 87 -branden 87 -gallman 87 -griego 87 -knockouts 87 -burnie 87 -misogynist 87 -7,800 87 -rosewood 87 -berget 87 -2035 87 -newly-formed 87 -sprinkles 87 -sissy 87 -stonyhurst 87 -'10 87 -low-earth 87 -tramadol 87 -rekindling 87 -life-altering 87 -1840s 87 -bridle 87 -yankovic 87 -beulah 87 -broached 87 -13:38 87 -cesium 87 -own-label 87 -desalvo 87 -gabbard 87 -capps 87 -hunter-gatherers 87 -portway 87 -napthine 87 -iwan 87 -accommodates 87 -berns 87 -13:57 87 -354 87 -wetsuits 87 -gortney 87 -tinsley 87 -xia 87 -sheldrick 87 -bricker 87 -punxsutawney 87 -socialites 87 -toyed 87 -1853 87 -unimportant 87 -pangs 87 -al-marri 87 -50ml 87 -adesanya 87 -mestalla 87 -18:13 87 -transits 87 -kpho 87 -03:37 87 -sloths 87 -bartter 87 -calms 87 -bedraggled 87 -seeger 87 -re-emergence 87 -devaney 87 -fortuna 87 -acerbic 87 -o'kane 87 -sigmund 87 -19:21 87 -hedrick 87 -kinsey 87 -deflecting 87 -ocalan 87 -jumpy 87 -lavinia 87 -coughlin 87 -determinedly 87 -guerlain 87 -half-empty 87 -deyanov 87 -freestanding 87 -ponchos 87 -amazes 87 -vaulting 87 -horta-osorio 87 -70m 87 -7/10 87 -lampoon 87 -zena 87 -bochum 87 -timetables 87 -eschewed 87 -lansdown 87 -campervan 87 -deferring 87 -isidro 87 -snugly 87 -jilin 87 -woodbury 87 -urbina 87 -courteney 87 -18:55 87 -krasnoyarsk 87 -unmanageable 87 -megastar 87 -rhetorically 87 -14.1 87 -hee 87 -hamlyn 87 -roxie 87 -snore 87 -sala 87 -pounder 87 -415 87 -outselling 87 -fallbrook 87 -wsj 87 -lindgren 87 -situational 87 -timmins 87 -xing 87 -ointment 87 -shabwa 87 -jacky 87 -dania 87 -tovar 87 -divorcees 87 -enke 87 -17:16 87 -highest-rated 87 -kona 87 -earmark 87 -04:34 87 -mcareavey 87 -heslin 87 -quandary 87 -carpenters 87 -17:36 87 -17:32 87 -17:33 87 -hoppen 87 -lothario 87 -995 87 -bicep2 87 -bubka 87 -8.25 87 -latterly 87 -undying 87 -caracol 87 -jakupovic 87 -stargazing 87 -listless 87 -mulan 87 -notepad 87 -loveless 87 -7.40 87 -carbonate 87 -castellano 87 -ocr 87 -sub-machine 87 -kasim 87 -on-set 87 -ex-arsenal 87 -kctv 87 -jean-christophe 87 -jacobsen 87 -krishnan 87 -oksana 87 -minimised 87 -copping 87 -retelling 87 -juxtaposition 87 -mert 87 -alsatian 87 -omer 87 -sneha 87 -mishra 87 -vreeland 87 -eyal 87 -342 87 -fuego 87 -jasmin 87 -valiantly 87 -silliness 87 -deluca 87 -katerina 87 -skated 87 -18:27 87 -clamor 87 -hornsby 87 -doctrines 87 -slouch 87 -plums 87 -parris 87 -1847 87 -arup 87 -fulfills 87 -heritage-listed 87 -earplugs 87 -abilene 87 -nominates 87 -ridding 87 -disorganized 87 -zaluska 87 -tatton 87 -defaming 87 -graz 87 -outbid 87 -candelabra 87 -accented 87 -kotb 87 -detonator 87 -airsoft 87 -thermometers 87 -trailblazing 87 -pre-recession 87 -panera 87 -ipro 87 -wintery 87 -trabzonspor 87 -pectoral 87 -staphylococcus 87 -striptease 87 -ilincic 87 
-ashwell 87 -mensah 87 -bookcase 87 -thaiday 87 -dir 87 -hermès 87 -loveliest 87 -al-allaf 87 -9:00 87 -anti-cancer 87 -diffusion 87 -dempster 87 -aki 87 -frigates 87 -cringeworthy 87 -fiancà 87 -ex-liverpool 87 -foundered 87 -shrugging 87 -mutilating 87 -kooky 87 -chea 87 -movistar 87 -lynched 87 -remoteness 87 -kieswetter 87 -aimlessly 86 -dryers 86 -bruna 86 -a7 86 -14:39 86 -m60 86 -red-haired 86 -vanquish 86 -mcadam 86 -kingsbury 86 -isolationist 86 -382 86 -implantation 86 -galifianakis 86 -chagall 86 -deliciously 86 -bernardi 86 -taxidermist 86 -chugging 86 -remodel 86 -loco 86 -redheads 86 -oozed 86 -mesmerised 86 -menorca 86 -kie1410 86 -submits 86 -poach 86 -sorrento 86 -editorials 86 -gyrating 86 -aileen 86 -essam 86 -unholy 86 -darrel 86 -stethoscope 86 -capsize 86 -iac 86 -113th 86 -l'aquila 86 -surrenders 86 -right-wingers 86 -rankled 86 -cjd 86 -two-years-old 86 -croke 86 -al-balawi 86 -avakov 86 -blitzed 86 -pakistan-based 86 -mullings 86 -marrakesh 86 -irgc 86 -haus 86 -jean-marie 86 -annika 86 -non-uk 86 -clackamas 86 -morphing 86 -piss 86 -draping 86 -sunspots 86 -hinterland 86 -travails 86 -wsmv 86 -11ft 86 -landslip 86 -36million 86 -infuse 86 -rejoicing 86 -lingo 86 -health-conscious 86 -gim 86 -pickings 86 -16:31 86 -philpotts 86 -noakes 86 -mo. 86 -bunn 86 -aru 86 -woodburn 86 -18:57 86 -oxshott 86 -marys 86 -32.5 86 -firefights 86 -anti-nuclear 86 -weberman 86 -monologues 86 -3 1/2 86 -disproved 86 -singhal 86 -zarra 86 -konta 86 -deflate 86 -rectangle 86 -funerary 86 -resolves 86 -20:04 86 -20:03 86 -calabrese 86 -fairtrade 86 -outperform 86 -eye-opener 86 -bobcat 86 -wide-open 86 -alchemy 86 -ejections 86 -mccammon 86 -mayberry 86 -harbinger 86 -allingham 86 -karol 86 -roush 86 -satisfies 86 -1832 86 -helga 86 -shamrock 86 -318 86 -morello 86 -4.75 86 -glamourous 86 -valletta 86 -lovegrove 86 -pocketbook 86 -bangers 86 -94,000 86 -reassures 86 -hoarders 86 -coursing 86 -outraging 86 -deadpan 86 -introspection 86 -bexar 86 -sw 86 -weinman 86 -level-headed 86 -289 86 -04:54 86 -deeley 86 -self-sustaining 86 -well-spoken 86 -nether 86 -unsettle 86 -stelling 86 -molar 86 -inshore 86 -19:59 86 -monmouthshire 86 -seducing 86 -thrusts 86 -uncollected 86 -sacrament 86 -indian-born 86 -two-fifths 86 -airdrop 86 -glycol 86 -anti-obama 86 -16:49 86 -bop 86 -waist-deep 86 -renton 86 -conkers 86 -trample 86 -palumbo 86 -pru 86 -turlington 86 -michaud 86 -aled 86 -self-assessment 86 -dekker 86 -chromium 86 -perlman 86 -berkman 86 -deuce 86 -palombo 86 -xo 86 -botanist 86 -wittstock 86 -stortford 86 -slushy 86 -ventrell 86 -eames 86 -elvira 86 -hae 86 -d'agostino 86 -hewett 86 -05:03 86 -rosé 86 -yung 86 -15:15 86 -half-a-million 86 -ferreyra 86 -faruk 86 -zooey 86 -campgrounds 86 -muswell 86 -newcastle-upon-tyne 86 -stoically 86 -duley 86 -tighthead 86 -aspinal 86 -garret 86 -elysium 86 -leytonstone 86 -comigel 86 -tintin 86 -mathers 86 -hummel 86 -collectable 86 -pagoda 86 -trolled 86 -off-guard 86 -weintraub 86 -intelligently 86 -cleansed 86 -finality 86 -funnels 86 -2028 86 -biennale 86 -tonsil 86 -quirke 86 -florentine 86 -e-tailer 86 -irc 86 -bree 86 -townley 86 -molyneux 86 -globalisation 86 -wonderbra 86 -caveats 86 -codex 86 -steenson 86 -potosi 86 -cze 86 -entailed 86 -mbia 86 -sis 86 -pronouncement 86 -craggy 86 -paes 86 -lamborghinis 86 -13:41 86 -mended 86 -go-go 86 -bookseller 86 -magellanic 86 -gogglebox 86 -wail 86 -wilted 86 -bubbled 86 -boulden 86 -wobbles 86 -desktops 86 -0.25 86 -75mph 86 -prim 86 -undercutting 86 -unintelligible 86 -cafés 86 -biddle 86 
-now-retired 86 -lightfoot 86 -instigation 86 -pacify 86 --10 86 -herrick 86 -sustains 86 -forefathers 86 -invalidate 86 -haddadi 86 -phallic 86 -banding 86 -landmass 86 -unlv 86 -registrars 86 -turkish-syrian 86 -wedgwood 86 -hallie 86 -snowfalls 86 -390,000 86 -cavs 86 -diversifying 86 -ritzy 86 -reined 86 -theorist 86 -turn-by-turn 86 -07:36 86 -raindrops 86 -lollipops 86 -free-speech 86 -04:49 86 -strobe 86 -biochemical 86 -3-4-3 86 -bashful 85 -anderton 85 -troughs 85 -loma 85 -frosting 85 -bahama 85 -subtlety 85 -gallbladder 85 -ay 85 -kutv 85 -● 85 -dmx 85 -hutcherson 85 -backdated 85 -after-effects 85 -winterbourne 85 -knock-down 85 -practicalities 85 -ambergris 85 -coldstream 85 -ribbed 85 -twenty-eight 85 -bundestag 85 -free-trade 85 -alleviating 85 -capricious 85 -guppy 85 -reassembled 85 -fussed 85 -swatted 85 -brie 85 -squeal 85 -tranquillity 85 -misinterpretation 85 -yams 85 -alderson 85 -hypnotised 85 -gargan 85 -ritalin 85 -gardai 85 -04:24 85 -04:26 85 -extrovert 85 -spoonful 85 -jurist 85 -regrow 85 -lexie 85 -delon 85 -maines 85 -harpoons 85 -mists 85 -atonement 85 -scavenge 85 -ebola-free 85 -jem 85 -toya 85 -fishnet 85 -fistula 85 -sectarianism 85 -kismayo 85 -history-making 85 -no-confidence 85 -rehydration 85 -oldbury 85 -pontifical 85 -05:37 85 -wiesel 85 -quarks 85 -mobley 85 -10-mile 85 -7news 85 -orla 85 -underpins 85 -newly-discovered 85 -apathetic 85 -odors 85 -karrar 85 -elwood 85 -monastic 85 -workhorse 85 -centre-left 85 -airplay 85 -13:53 85 -sankey 85 -punks 85 -finders 85 -polluters 85 -ruger 85 -walkie 85 -busily 85 -assessor 85 -supersized 85 -rangoon 85 -soluble 85 -gonorrhoea 85 -ajinkya 85 -18:14 85 -retaking 85 -aqa 85 -goodrich 85 -mcewen 85 -jagland 85 -maximo 85 -lethbridge 85 -scoble 85 -go-kart 85 -316 85 -demean 85 -bengali 85 -o'meara 85 -resonating 85 -6.0 85 -precluded 85 -raiser 85 -simester 85 -mukesh 85 -tiana 85 -19:14 85 -ferencvaros 85 -coeliac 85 -insulating 85 -arrowsmith 85 -2020s 85 -dovizioso 85 -maresca 85 -vox 85 -chester-le-street 85 -greying 85 -peaty 85 -tho 85 -js 85 -01:57 85 -dup 85 -broomstick 85 -17:50 85 -herndon 85 -dyfed 85 -hgh 85 -18:17 85 -18:10 85 -drinkable 85 -porches 85 -play-doh 85 -bfm-tv 85 -masala 85 -baikal 85 -ehsan 85 -riposte 85 -lamine 85 -ex-fiancee 85 -moonlighting 85 -19:18 85 -papaya 85 -gulag 85 -gaspar 85 -pregame 85 -kiriakou 85 -gambon 85 -rajab 85 -right-foot 85 -04:15 85 -ishikawa 85 -balti 85 -smit 85 -dianette 85 -hoppy 85 -cosa 85 -tuscon 85 -panning 85 -17:11 85 -slipstream 85 -vue 85 -wenzhou 85 -passageways 85 -trezeguet 85 -awning 85 -15th-century 85 -buchdahl 85 -edkins 85 -barreto 85 -annexing 85 -corina 85 -nagoya 85 -gamekeeper 85 -pacelle 85 -khatib 85 -01:47 85 -twice-divorced 85 -toasts 85 -precedes 85 -05:06 85 -forecourts 85 -zaza 85 -g1 85 -huish 85 -encapsulated 85 -sattler 85 -16:06 85 -freshest 85 -tingle 85 -bayless 85 -wide-angle 85 -accies 85 -refresher 85 -unshaven 85 -cossack 85 -duch 85 -money-spinning 85 -floridians 85 -mire 85 -unicycle 85 -world-leading 85 -justifications 85 -hmic 85 -westbury 85 -v2 85 -brinksmanship 85 -mondesir 85 -reimbursements 85 -plating 85 -linwood 85 -mickael 85 -farenthold 85 -laylah 85 -oppmann 85 -17-hour 85 -weirdly 85 -matfield 85 -taufiq 85 -6:45 85 -encompassed 85 -18:26 85 -vashti 85 -schwarzkopf 85 -vaginas 85 -sofer 85 -sardine 85 -boldt 85 -sajida 85 -gordy 85 -talkers 85 -ujah 85 -rhizotomy 85 -u-t 85 -kptv 85 -swiss-based 85 -shahar 85 -cements 85 -calo 85 -idiosyncratic 85 -khanna 85 -lawman 85 -jostled 85 -commutation 85 
-sanfilippo 85 -government-issued 85 -corfe 85 -martell 85 -nds 85 -singularity 85 -backwater 85 -fellas 85 -discoloured 85 -hinchliff 85 -unyielding 85 -subsides 85 -milled 85 -kakuta 85 -19:24 85 -inky 85 -9-7 85 -sanderlin 85 -rodriquez 85 -vouch 85 -obsess 85 -moffatt 85 -champs-elysees 85 -296 85 -mathematically 85 -snood 85 -18:05 85 -tit 85 -tubbs 85 -cason 85 -carew 85 -50-minute 85 -ure 85 -mccaffery 85 -annenberg 85 -critchley 85 -reminisced 85 -decompression 85 -take-up 85 -1.10 85 -pougatch 85 -adjectives 85 -berk 85 -16:04 85 -dena 85 -nighy 84 -hibiscus 84 -vanden 84 -righted 84 -boga 84 -grey-haired 84 -speedily 84 -126,000 84 -outcrops 84 -fluidity 84 -meet-and-greet 84 -dumbbells 84 -appeasement 84 -aspartame 84 -moeller 84 -hmmm 84 -vertu 84 -18:15 84 -walruses 84 -bachchan 84 -17:07 84 -17:08 84 -oil-producing 84 -endocrinologist 84 -potus 84 -carradine 84 -saima 84 -bude 84 -billi 84 -mailer 84 -kraus 84 -madcap 84 -remorseless 84 -foolishness 84 -reproductions 84 -04:25 84 -creditor 84 -dark-colored 84 -platelets 84 -singlet 84 -windowsill 84 -hemispheres 84 -facundo 84 -polizzi 84 -8/10 84 -wince 84 -geoscience 84 -poodles 84 -hobbyists 84 -antimicrobial 84 -bomb-sniffing 84 -37million 84 -mehmood 84 -firebombs 84 -exoneration 84 -casarez 84 -petzel 84 -hangars 84 -dialog 84 -carbonated 84 -sheppey 84 -multiracial 84 -wehby 84 -16:10 84 -cerro 84 -educations 84 -jeeves 84 -mornington 84 -13.9 84 -overreact 84 -moorhead 84 -feature-length 84 -hotly-anticipated 84 -sweeten 84 -transpires 84 -snakeskin 84 -mor 84 -co.uk 84 -outsold 84 -takeovers 84 -peso 84 -musketeers 84 -scalps 84 -numbing 84 -pleases 84 -grannies 84 -tur 84 -dern 84 -mu 84 -poncho 84 -anchovies 84 -fairview 84 -re-think 84 -fredericks 84 -sliders 84 -coogee 84 -agustin 84 -maloof 84 -half-inch 84 -hitchens 84 -statisticians 84 -deprill 84 -caruso 84 -five-years-old 84 -mabus 84 -check-ins 84 -outcasts 84 -marten 84 -lower-level 84 -sai 84 -redistribute 84 -kilts 84 -tradesman 84 -ippr 84 -amey 84 -1839 84 -31million 84 -lng 84 -fireflies 84 -outlier 84 -19:30 84 -boyles 84 -airfares 84 -toorak 84 -kaleb 84 -pronouncing 84 -vetoing 84 -conwy 84 -classifies 84 -jal 84 -longsight 84 -dabbed 84 -ennahda 84 -razaq 84 -panting 84 -consultative 84 -behind-closed-doors 84 -lugging 84 -lankans 84 -yuletide 84 -three-term 84 -dhow 84 -kallstrom 84 -chucky 84 -birotte 84 -mixologist 84 -thigh-high 84 -89,000 84 -single-player 84 -bakes 84 -operatic 84 -robson-kanu 84 -86million 84 -15.3 84 -constraint 84 -adi 84 -herbicide 84 -divider 84 -ancillary 84 -letwin 84 -245,000 84 -lightsaber 84 -zoltan 84 -sass 84 -henchman 84 -heaslip 84 -steinbrenner 84 -deon 84 -forgeries 84 -photobombed 84 -plotter 84 -aviators 84 -newhouse 84 -propriety 84 -04:10 84 -1.49 84 -pinal 84 -back-row 84 -puffins 84 -pancreatitis 84 -anaemic 84 -verdes 84 -somaia 84 -deidre 84 -17:19 84 -horsley 84 -jobson 84 -nangarhar 84 -cr7 84 -canteens 84 -hannan 84 -cray 84 -myron 84 -crouches 84 -boyz 84 -haskins 84 -quin 84 -hoegh 84 -gellar 84 -17:31 84 -emine 84 -interlocking 84 -neurologists 84 -defecate 84 -handstands 84 -jet-set 84 -howler 84 -blameless 84 -kirra 84 -regains 84 -01:44 84 -confers 84 -chrisdhwaugh 84 -adaption 84 -two-footed 84 -2-5 84 -andujar 84 -nurmi 84 -sanya 84 -moab 84 -larrazabal 84 -waver 84 -imbalances 84 -localities 84 -etheridge 84 -acronyms 84 -flournoy 84 -cridland 84 -naively 84 -2/5 84 -kombat 84 -minions 84 -arya 84 -blackboard 84 -irizarry 84 -mcclay 84 -363 84 -scours 84 -d-vermont 84 -warfarin 84 -weekes 
84 -mongar 84 -gander 84 -cloaks 84 -888,246 84 -sawers 84 -prussia 84 -02:54 84 -vitale 84 -mallinder 84 -k-12 84 -daleks 84 -baldacci 84 -self-reliance 84 -niemeyer 84 -03:40 84 -overture 84 -shaaban 84 -craddock 84 -pars 84 -michaele 84 -colclough 84 -gadsby 84 -abril 84 -anti-business 84 -mbta 84 -chantilly 84 -koetters 84 -0.01 84 -shiite-dominated 84 -clear-up 84 -28,500 84 -790 84 -overshadowing 84 -quiff 84 -seminoles 84 -patching 84 -dassault 84 -coconuts 84 -cranberries 84 -better-known 84 -saeb 84 -yodel 84 -grantley 84 -oldknow 84 -tiebreaker 84 -unmet 84 -visage 84 -grampian 84 -17:26 84 -pee-wee 84 -radiate 84 -puffin 84 -rejoiced 84 -loki 84 -ludgate 84 -dragnet 84 -disciple 84 -unheralded 84 -hala 84 -specsavers 84 -nfu 84 -bostick 84 -555 84 -mennonite 84 -arras 84 -elian 84 -gossiping 84 -bumgarner 84 -welled 84 -overlaid 84 -dastardly 84 -deadline-day 84 -full-term 84 -penrose 84 -potok 84 -koco 84 -worden 84 -gravity-defying 84 -silverback 84 -buchholtz 84 -gofundme.com 84 -efsa 84 -pmqs 84 -leake 84 -on-campus 84 -cosplay 84 -yeomans 84 -readies 84 -moresby 83 -tameka 83 -vintages 83 -verve 83 -carlile 83 -dizaei 83 -eca 83 -nosedive 83 -panelist 83 -hussars 83 -hoo 83 -wru 83 -take-away 83 -horwood 83 -uvb 83 -escoto 83 -hellenic 83 -de-escalation 83 -19.6 83 -da14 83 -biodiesel 83 -schuett 83 -kampusch 83 -descriptive 83 -1,550 83 -miniatures 83 -14:19 83 -4:20 83 -chau 83 -gebrselassie 83 -isis-controlled 83 -04:07 83 -04:04 83 -04:09 83 -interceptors 83 -crutchlow 83 -unplayable 83 -coincidences 83 -tokyo-based 83 -complainer 83 -grumman 83 -kayal 83 -chaudhary 83 -hayter 83 -mie 83 -tippi 83 -perisic 83 -ledges 83 -pearcy 83 -self-evident 83 -1,050 83 -moby 83 -antipathy 83 -headcount 83 -16:14 83 -bloomingdale 83 -fata 83 -90-second 83 -non-disclosure 83 -boldness 83 -balakrishnan 83 -14:51 83 -acoustics 83 -duckling 83 -seager 83 -1830s 83 -kuffar 83 -wenlock 83 -accordion 83 -fina 83 -whitworth 83 -sone 83 -rhondda 83 -make-believe 83 -grosseto 83 -wbtv 83 -lovefilm 83 -teak 83 -paschke 83 -landscaper 83 -tarnishing 83 -politicking 83 -grogan 83 -nass 83 -benefactors 83 -catford 83 -outbuilding 83 -epitomized 83 -560,000 83 -hurtle 83 -pantanal 83 -beach-goers 83 -grameen 83 -kilgore 83 -bitters 83 -ventricle 83 -glencore 83 -deere 83 -disappointingly 83 -pick-me-up 83 -counselled 83 -bassi 83 -emoticon 83 -submerging 83 -15:07 83 -lira 83 -gr4 83 -tarik 83 -20cm 83 -snider 83 -light-skinned 83 -birrell 83 -hince 83 -ls 83 -swordfish 83 -18:19 83 -03:31 83 -lackey 83 -lifesavers 83 -8-10 83 -bint 83 -rca 83 -perpetrating 83 -gospels 83 -pumas 83 -mansouri 83 -lentil 83 -4x400m 83 -near-term 83 -cirrus 83 -el-hussein 83 -numan 83 -morelli 83 -kravchenko 83 -loggers 83 -19-years-old 83 -rossli 83 -aquifer 83 -nozette 83 -petulant 83 -in-work 83 -shaq 83 -diez 83 -mchenry 83 -diehl 83 -florcruz 83 -innumerable 83 -yah 83 -durcho 83 -ong 83 -usns 83 -foreheads 83 -beaird 83 -sultana 83 -keiko 83 -merok 83 -12-years-old 83 -trilby 83 -rebel-controlled 83 -recast 83 -theologian 83 -widens 83 -withstanding 83 -walworth 83 -walk-out 83 -bloomed 83 -ormsby 83 -bodrum 83 -big-hitting 83 -hollingworth 83 -aida 83 -hailstones 83 -gliese 83 -perturbed 83 -luger 83 -off-putting 83 -hb 83 -common-law 83 -dakotas 83 -lunacy 83 -watersports 83 -romantics 83 -stroman 83 -transiting 83 -usama 83 -restivo 83 -scanlan 83 -hernanes 83 -josip 83 -jazzy 83 -shrift 83 -hat-tricks 83 -sooty 83 -lenihan 83 -whalen 83 -biggest-selling 83 -9.40 83 -suburbia 83 -bekaa 83 -tributaries 83 
-person-to-person 83 -cassel 83 -boudreau 83 -mckean 83 -nabhan 83 -homeware 83 -astrophysical 83 -cardholders 83 -sin-binned 83 -farewells 83 -14:47 83 -harrold 83 -dumbbell 83 -petco 83 -01:45 83 -airworthy 83 -reveillere 83 -dewitt 83 -borehamwood 83 -soo 83 -flame-haired 83 -levski 83 -clergymen 83 -13:52 83 -nobleman 83 -phytoplankton 83 -16-years-old 83 -shorn 83 -cece 83 -three-piece 83 -schweitzer 83 -hardwired 83 -bothuell 83 -immorality 83 -invades 83 -waukesha 83 -7000 83 -defusing 83 -falsify 83 -tae 83 -rebounding 83 -dingoes 83 -holtz 83 -18:28 83 -18:20 83 -19:29 83 -tributary 83 -unconstitutionally 83 -crips 83 -17:48 83 -bushwick 83 -shortbread 83 -vanuatu 83 -friary 83 -pdc 83 -showrooms 83 -bucolic 83 -vacuuming 83 -shazia 83 -0.05 83 -liberians 83 -grinstead 83 -institut 83 -milani 83 -criminalizing 83 -cnnmexico.com 83 -girolamo 83 -wxyz 83 -grimacing 83 -food-borne 83 -estrangement 83 -ferman 83 -treviso 83 -tp 83 -paralyze 83 -joyfully 83 -teacup 83 -de-icing 83 -5-star 83 -cfa 83 -kami 83 -codebreaker 83 -jinnah 83 -anti-democratic 83 -one-dimensional 83 -watermelons 83 -misquoted 83 -yanks 83 -sv 83 -absolve 83 -16:32 83 -shatters 83 -mid-century 83 -long-delayed 83 -hosiery 83 -schoolfriend 83 -foreskin 83 -7:15 83 -farina 83 -burnish 83 -pauli 83 -nullified 83 -kenton 83 -faslane 83 -16.6 83 -9/10 83 -fleiss 83 -wisse 83 -hyaluronic 83 -mcnuggets 83 -open-mouthed 83 -resurrecting 83 -shiels 83 -wasabi 83 -great-grandchild 83 -hyperthermia 83 -leyritz 83 -fatherland 83 -beevers 83 -winson 83 -earths 83 -assigns 82 -harmonies 82 -bemoan 82 -qidwai 82 -figuratively 82 -curler 82 -galactica 82 -§ 82 -¡ 82 -offloading 82 -prat 82 -self-sufficiency 82 -lowdown 82 -leesburg 82 -:-lrb- 82 -vhs 82 -wind-up 82 -carcinogen 82 -04:01 82 -heuer 82 -multi-millionaires 82 -topsy 82 -kgtv 82 -leggatt 82 -high-fashion 82 -sardar 82 -wakeboarding 82 -bock 82 -17:04 82 -fourth-floor 82 -physios 82 -council-run 82 -hocking 82 -recidivism 82 -mckeever 82 -309 82 -diack 82 -filament 82 -310,000 82 -kovacs 82 -pinball 82 -yawns 82 -amani 82 -teterboro 82 -bisciotti 82 -canonization 82 -sanitizer 82 -workspace 82 -overlapped 82 -stammers 82 -stuckey 82 -mckoy 82 -disinfecting 82 -photojournalists 82 -snubbing 82 -coercing 82 -devo 82 -brandish 82 -ugh 82 -knighthoods 82 -fart 82 -taupe 82 -streaker 82 -disorganised 82 -rogan 82 -swanley 82 -classifying 82 -tut 82 -nl 82 -djamel 82 -relent 82 -ghosh 82 -haribo 82 -mohsni 82 -baumann 82 -polystyrene 82 -photobomb 82 -all-women 82 -nastiness 82 -brimager 82 -adversarial 82 -chinese-american 82 -snowmobiles 82 -mian 82 -18:53 82 -villainous 82 -pavilions 82 -dampening 82 -62mph 82 -gretel 82 -didnt 82 -hard-won 82 -baccalaureate 82 -16:56 82 -stonehouse 82 -second-best 82 -customisable 82 -mah 82 -ala 82 -ricksen 82 -18:39 82 -tamera 82 -8:00 82 -tye 82 -gynaecology 82 -gogo 82 -shingle 82 -northallerton 82 -solidity 82 -optimists 82 -wollscheid 82 -aris 82 -forego 82 -unobstructed 82 -winemakers 82 -03:34 82 -matt_barlow_dm 82 -srinivasan 82 -quench 82 -gauteng 82 -intransigence 82 -montazeri 82 -lylah 82 -confining 82 -vacuums 82 -03:19 82 -installs 82 -alsace 82 -quays 82 -voila 82 -raynor 82 -sandbank 82 -opportunism 82 -chafee 82 -scolding 82 -invitation-only 82 -19:15 82 -727 82 -watney 82 -avenging 82 -sombrero 82 -speight 82 -five-game 82 -ravenous 82 -centenarians 82 -79,000 82 -defile 82 -fertilized 82 -tuba 82 -barzun 82 -04:55 82 -mavi 82 -correlate 82 -fly-on-the-wall 82 -vowels 82 -trumpeting 82 -slicked 82 -gwynne 82 -linford 
82 -impervious 82 -17:55 82 -covergirl 82 -doorknob 82 -halilovic 82 -seton 82 -antichrist 82 -barkhad 82 -hogue 82 -mapp 82 -cask 82 -bess 82 -hydrocarbon 82 -5oz 82 -agius 82 -bloemfontein 82 -qazi 82 -outlive 82 -re-homing 82 -8c 82 -puerta 82 -leiden 82 -spokespeople 82 -26ft 82 -ottaway 82 -depressingly 82 -sampaoli 82 -04:11 82 -avant 82 -excels 82 -symbolizing 82 -learjet 82 -merabet 82 -decade-old 82 -stabs 82 -whiteside 82 -conjuring 82 -peek-a-boo 82 -gummer 82 -nathanial 82 -chews 82 -tri 82 -dekhar 82 -callen 82 -18.6 82 -lacazette 82 -fixed-wing 82 -sunning 82 -duggars 82 -delbert 82 -noddy 82 -pecan 82 -closeted 82 -evicting 82 -munchkin 82 -folio 82 -cobblestone 82 -lario 82 -avigdor 82 -mid-twenties 82 -decathlon 82 -oxitec 82 -sod 82 -mowatt 82 -sited 82 -factoring 82 -reneging 82 -22m 82 -karikari-apau 82 -musgrove 82 -karkoc 82 -survivable 82 -80p 82 -upland 82 -serenade 82 -teases 82 -bata 82 -orientated 82 -deleon 82 -nakamoto 82 -rabin 82 -materazzi 82 -al-saadi 82 -stauffer 82 -galfy 82 -tootsie 82 -sgueglia 82 -dodd-frank 82 -gonzalez-angulo 82 -routemaster 82 -blubber 82 -masten 82 -gaol 82 -cacao 82 -zermatt 82 -favorability 82 -dc-10 82 -maranello 82 -koroma 82 -apparition 82 -clemons 82 -immunology 82 -seventh-grader 82 -espanol 82 -29-year 82 -chesterton 82 -55million 82 -frothy 82 -13:42 82 -stooped 82 -peeks 82 -suckling 82 -morelia 82 -lazaro 82 -re-establishing 82 -pining 82 -on-and-off 82 -camcorder 82 -mikaeel 82 -400ft 82 -sleep-deprived 82 -460,000 82 -safiro 82 -naturist 82 -plastiki 82 -fellenbaum 82 -irvin 82 -rafter 82 -pleistocene 82 -squirming 82 -petre 82 -scabs 82 -beharry 82 -al-madinah 82 -9,800 82 -19:25 82 -mid-40s 82 -rambunctious 82 -nazar 82 -elinor 82 -baritone 82 -dishwashers 82 -infantino 82 -maurier 82 -jabbing 82 -deflategate 82 -esperanza 82 -self-professed 82 -hitzlsperger 82 -praveen 82 -small-time 82 -acquittals 82 -spoofs 82 -workloads 82 -spink 82 -deductible 82 -hoverboard 82 -marchesa 82 -darlinghurst 82 -03:23 82 -cursive 82 -brags 82 -antibiotic-resistant 82 -gohmert 82 -assailed 82 -outkast 82 -14:24 82 -scissor 82 -mandel 82 -04:41 82 -abiraterone 82 -14billion 82 -firstborn 82 -chung-yong 82 -subprime 82 -holler 82 -dzhokar 81 -yauch 81 -17:43 81 -nbclp.defaultwidth 81 -carhart 81 -washburn 81 -candour 81 -ashfield 81 -log-in 81 -argent 81 -ljajic 81 -ladybird 81 -self-expression 81 -propagandist 81 -multi-talented 81 -phoney 81 -skimmers 81 -mohmand 81 -mammalian 81 -brinkmanship 81 -perpetuates 81 -nationalized 81 -unabomber 81 -silencers 81 -14:10 81 -christin 81 -togolese 81 -home-based 81 -18:18 81 -thune 81 -1.55 81 -governorate 81 -bleu 81 -centralised 81 -shrimps 81 -indices 81 -muralitharan 81 -chapels 81 -increments 81 -halton 81 -suborbital 81 -pheasants 81 -newbold 81 -shaves 81 -04:27 81 -resiliency 81 -globetrotting 81 -preterm 81 -obispo 81 -shampoos 81 -iâ 81 -bartelt 81 -giorgos 81 -goodlatte 81 -13-inch 81 -17:28 81 -disown 81 -cambodians 81 -ambivalence 81 -lian 81 -barbosa 81 -belvoir 81 -10/1 81 -14:53 81 -14:52 81 -papillomavirus 81 -post-racial 81 -policy-making 81 -tepper 81 -weasel 81 -theorized 81 -seafaring 81 -blatt 81 -appetising 81 -hungrier 81 -qassam 81 -azarov 81 -nu 81 -unawares 81 -100k 81 -canaan 81 -supplementary 81 -benaglio 81 -bootleg 81 -03:02 81 -frosted 81 -revolted 81 -performance-related 81 -prestwick 81 -meaningfully 81 -depressions 81 -centrelink 81 -chahal 81 -spreadsheets 81 -dines 81 -o'dell 81 -16:55 81 -seagal 81 -attila 81 -velshi 81 -confuses 81 -appendage 81 -505 81 
-bantams 81 -15:09 81 -03:55 81 -22.50 81 -anti-freeze 81 -thirsk 81 -20:05 81 -20:07 81 -all-encompassing 81 -self-indulgent 81 -groupies 81 -headlong 81 -ineos 81 -wgc-bridgestone 81 -turnip 81 -03:35 81 -lateef 81 -farhad 81 -bentonville 81 -snowdrops 81 -toothy 81 -dash-cam 81 -hoekstra 81 -j.crew 81 -respiration 81 -silsby 81 -steamship 81 -persisting 81 -annotated 81 -halasz 81 -ten-month 81 -nabulsi 81 -furman 81 -outrages 81 -menthol 81 -leeming 81 -playable 81 -alger 81 -gsm 81 -radiates 81 -ambushes 81 -28c 81 -nuke 81 -thi 81 -jb 81 -fda-approved 81 -shanksville 81 -decoded 81 -kelner 81 -shoebox 81 -realignment 81 -17:58 81 -adf 81 -wispy 81 -14:26 81 -free-fall 81 -roxana 81 -brasher 81 -bugle 81 -fitzmaurice 81 -westport 81 -bathily 81 -bullfight 81 -haydock 81 -socialised 81 -baillon 81 -aronofsky 81 -congestive 81 -anti-missile 81 -19:13 81 -feign 81 -francine 81 -bywater 81 -mumbai-style 81 -ascertained 81 -99th 81 -ordination 81 -below-par 81 -capsaicin 81 -bulow 81 -bevington 81 -impediments 81 -exfoliating 81 -17:14 81 -flashpoints 81 -7,600 81 -biela 81 -lublin 81 -hs 81 -04:33 81 -04:39 81 -02:55 81 -ivie 81 -muslim-american 81 -wissam 81 -achiever 81 -chitty 81 -llewyn 81 -mcwherter 81 -bagnall 81 -23.7 81 -urinals 81 -rinaldi 81 -gassing 81 -telekom 81 -gulnaz 81 -pinilla 81 -alva 81 -yusor 81 -lyricist 81 -cavemen 81 -twenty-nine 81 -scrawl 81 -ghonim 81 -luff 81 -vilnius 81 -cabal 81 -140million 81 -rerun 81 -shames 81 -culvert 81 -gelatine 81 -05:27 81 -blimps 81 -righteousness 81 -gesticulating 81 -birk 81 -glorifies 81 -shoop 81 -suckers 81 -keefe 81 -deckchair 81 -18:40 81 -enniskillen 81 -nbclp.defaultheight 81 -abdullatif 81 -unrivaled 81 -steeple 81 -densities 81 -mush 81 -gingerich 81 -stornoway 81 -staining 81 -three-shot 81 -02:59 81 -jet-ski 81 -conspirator 81 -incriminate 81 -ent 81 -digby 81 -episodic 81 -tectonics 81 -configurations 81 -herbivores 81 -seevakumaran 81 -highline 81 -delightfully 81 -murder-for-hire 81 -03:29 81 -assent 81 -shiers 81 -womanizer 81 -inadequately 81 -rainn 81 -kenwyne 81 -voyeur 81 -caribe 81 -tiki 81 -jean-luc 81 -speculations 81 -ex-soviet 81 -rancid 81 -hilt 81 -airbrushing 81 -synced 81 -danby 81 -fundraise 81 -mana 81 -rona 81 -orbs 81 -worksop 81 -dadaab 81 -cattery 81 -femoral 81 -leadbitter 81 -pressler 81 -galasso 81 -haruna 81 -marchand 81 -washboard 81 -giampaolo 81 -tic 81 -fagge 81 -cyber-attack 81 -qari 81 -alleviated 81 -unseated 81 -zanotti 81 -spank 81 -remedied 81 -14:23 81 -stoney 81 -circumcisions 81 -eggers 81 -badstuber 81 -chasen 81 -dalek 81 -seventh-placed 81 -epitomises 81 -04:47 81 -one-month-old 81 -steinmeier 81 -amur 81 -halfpipe 81 -butenko 81 -chandon 80 -squatter 80 -av 80 -a2 80 -biplane 80 -lebowski 80 -evansville 80 -bozeman 80 -grandees 80 -honore 80 -zumwalt 80 -blumberg 80 -hand-washing 80 -gherardini 80 -19.2 80 -giunta 80 -10,400 80 -eilat 80 -pulley 80 -14:15 80 -szabo 80 -bypasses 80 -al-ghamdi 80 -sarkar 80 -qe2 80 -loss-making 80 -reclined 80 -mockingly 80 -novo 80 -sociological 80 -slipway 80 -openside 80 -all-action 80 -chattering 80 -offsetting 80 -accede 80 -froth 80 -keratin 80 -borrallo 80 -counsels 80 -shaver 80 -blinks 80 -halcyon 80 -hegemony 80 -sana'a 80 -jackpots 80 -voight 80 -pro-kremlin 80 -uncontested 80 -mesolithic 80 -forty-five 80 -mubenga 80 -nurburgring 80 -fastest-selling 80 -babysat 80 -succulent 80 -khawam 80 -.0 80 -camila 80 -g-force 80 -stewardesses 80 -620,000 80 -kanu 80 -maclaine 80 -qaeda-inspired 80 -harborview 80 -kumari 80 -coeur 80 -air-to-air 
80 -05:33 80 -shim 80 -ohio-based 80 -fibula 80 -splurged 80 -unfiltered 80 -re-use 80 -a14 80 -billingsley 80 -natascha 80 -deryn 80 -18:54 80 -kark 80 -auspices 80 -riddance 80 -halloun 80 -mongoose 80 -13:51 80 -warm-weather 80 -premonition 80 -islamophobic 80 -357 80 -100-mile 80 -denby 80 -magellan 80 -04:12 80 -7ins 80 -shrestha 80 -detergents 80 -baha 80 -03:53 80 -re-enactors 80 -hemy 80 -harris-perry 80 -:--rrb- 80 -gans 80 -diagon 80 -mazzarri 80 -pertains 80 -bundaberg 80 -8,300 80 -bluebells 80 -18:16 80 -astros 80 -outstrips 80 -ruane 80 -16s 80 -finnis 80 -booksellers 80 -keke 80 -soundtracks 80 -rimes 80 -utero 80 -2004-05 80 -normalizing 80 -greenhalgh 80 -shepherded 80 -louis-dreyfus 80 -orly 80 -filibusters 80 -defame 80 -lamprey 80 -phillippe 80 -shorelines 80 -discounters 80 -spanner 80 -persecute 80 -aw15 80 -torrington 80 -attaining 80 -gratefully 80 -outermost 80 -civitas 80 -wingate 80 -shehzad 80 -equalling 80 -asch 80 -quills 80 -amat 80 -mecklenburgh 80 -trussell 80 -04:56 80 -free-scoring 80 -molesters 80 -anarchic 80 -insolvent 80 -sobibor 80 -yasmine 80 -livesey 80 -zeng 80 -hark 80 -kinga 80 -chillaxing 80 -lino 80 -doohan 80 -hemline 80 -altima 80 -encircling 80 -firebombed 80 -malacca 80 -long-ball 80 -trad 80 -childminders 80 -hames 80 -thickened 80 -corless 80 -bastions 80 -have-nots 80 -reham 80 -meander 80 -soundproof 80 -adama 80 -bonny 80 -holby 80 -koi 80 -quadriga 80 -catarina 80 -wallowing 80 -shaarawy 80 -14:02 80 -14:09 80 -three-wheeled 80 -lorain 80 -five-page 80 -04:17 80 -duxford 80 -encampments 80 -six-game 80 -arthritic 80 -compacted 80 -breckenridge 80 -michoacana 80 -.223 80 -fincham 80 -terrano 80 -funes 80 -mental-health 80 -gauthier 80 -dishonor 80 -now-deceased 80 -putty 80 -winnable 80 -cady 80 -chacon 80 -02:39 80 -14:41 80 -jarre 80 -rainstorm 80 -jetta 80 -jafari 80 -braddock 80 -dragonflies 80 -catchers 80 -tpc 80 -tzipi 80 -westcott 80 -balpa 80 -foul-smelling 80 -faculties 80 -15:18 80 -blais 80 -monette 80 -volition 80 -baileys 80 -200g 80 -jacquelyn 80 -36dd 80 -jimena 80 -commercialization 80 -streeter 80 -atwal 80 -rebellions 80 -caravaggio 80 -raith 80 -myatt 80 -18:45 80 -15:33 80 -grandmaster 80 -internationale 80 -zhengzhou 80 -afellay 80 -bummed 80 -winked 80 -lippestad 80 -kea 80 -seasonally 80 -nuclei 80 -comolli 80 -unfurl 80 -ep 80 -soapy 80 -jw 80 -aggravation 80 -elson 80 -weasley 80 -bakke 80 -tabb 80 -on-the-go 80 -jarod 80 -iovine 80 -particulate 80 -1oz 80 -constrain 80 -revitalized 80 -s.e. 
80 -arable 80 -aqueduct 80 -bahadur 80 -17.8 80 -fillon 80 -necrosis 80 -trickett 80 -30g 80 -selly 80 -crore 80 -eggnog 80 -devilish 80 -updike 80 -murdough 80 -goldsworthy 80 -levitating 80 -mabuse 80 -1801 80 -18-wheeler 80 -cristal 80 -sangeeta 80 -nock 80 -menacingly 80 -windscreens 80 -sundae 80 -inconspicuous 80 -memorialized 80 -proview 80 -egynews 80 -ill-tempered 80 -sallie 80 -stereoscopic 80 -shoves 80 -monogamy 80 -saldana 80 -farringdon 80 -reciprocated 80 -highest-earning 80 -renderings 80 -carmakers 80 -angelus 80 -wails 80 -117,000 80 -high-fiving 80 -hunnam 80 -faintest 80 -pripyat 80 -brafman 80 -dethroned 80 -utc 80 -denbighshire 80 -re-evaluated 80 -nedum 80 -36-year 79 -17:45 79 -zacarias 79 -14:38 79 -14:32 79 -mouret 79 -garbled 79 -q1 79 -¨ 79 -09 79 -ramseys 79 -baitullah 79 -anti-riot 79 -shafi 79 -backsides 79 -amaro 79 -age-appropriate 79 -destin 79 -whimper 79 -900million 79 -basnet 79 -hooray 79 -59.99 79 -hurtado 79 -macadamia 79 -foxley 79 -quorum 79 -pf 79 -1cm 79 -subtitled 79 -gummy 79 -whine 79 -nonbinding 79 -prophets 79 -braver 79 -waal 79 -kaiserslautern 79 -469 79 -468 79 -punctual 79 -krays 79 -collinge 79 -heparin 79 -10.20 79 -fells 79 -partygoer 79 -dyck 79 -nance 79 -liao 79 -avi 79 -125million 79 -zali 79 -dandenong 79 -irrepressible 79 -great-grandparents 79 -.3 79 -engelbert 79 -cbbc 79 -exertions 79 -scrutinizing 79 -104th 79 -newshour 79 -booz 79 -gazeta 79 -catastrophically 79 -teardrop 79 -kombi 79 -nc 79 -nd 79 -sign-off 79 -jaxon 79 -paragon 79 --6 79 -stryker 79 -conlan 79 -jayson 79 -plumadore 79 -376 79 -perrett 79 -19:32 79 -psilocybin 79 -bogan 79 -defour 79 -goings-on 79 -eikenberry 79 -studious 79 -giselle 79 -rockhampton 79 -unskilled 79 -manuela 79 -arterial 79 -drooling 79 -birdcage 79 -trike 79 -353 79 -ferreyr 79 -akinfeev 79 -02:41 79 -hostage-taker 79 -yarbough 79 -atchafalaya 79 -eon 79 -wexler 79 -03:54 79 -exerts 79 -friar 79 -pohl 79 -salient 79 -utica 79 -geckos 79 -94th 79 -dumbest 79 -pachuca 79 -jonglei 79 -wherewithal 79 -lyla 79 -lanai 79 -liberally 79 -vilsack 79 -shikhar 79 -choral 79 -ackerman 79 -ashkelon 79 -eco-tourism 79 -hunnisett 79 -proteus 79 -colditz 79 -balyo 79 -stadion 79 -gouging 79 -03:15 79 -grata 79 -cornfield 79 -seven-point 79 -qld 79 -doutzen 79 -19:33 79 -313 79 -sleeker 79 -scholl 79 -gordon-levitt 79 -18-day 79 -pricked 79 -rhian 79 -jammeh 79 -balk 79 -buryakov 79 -sunscreens 79 -subsidizing 79 -oddities 79 -addy 79 -grafting 79 -two-foot 79 -brainwash 79 -hader 79 -stipulations 79 -ilo 79 -ily 79 -kropp 79 -04:06 79 -willock 79 -cezanne 79 -obrador 79 -cristo 79 -kitts 79 -abell 79 -lautner 79 -bellowing 79 -muguruza 79 -roca 79 -abolitionist 79 -razek 79 -blowback 79 -erodes 79 -eavesdropped 79 -balazs 79 -cobbler 79 -benik 79 -kroft 79 -14.4 79 -wilby 79 -dutiful 79 -reitman 79 -bulldoze 79 -archduke 79 -bandied 79 -nikos 79 -masjid 79 -nauseating 79 -snort 79 -schrader 79 -04:16 79 -viviane 79 -abhor 79 -agnelli 79 -busking 79 -tricorder 79 -non-smokers 79 -etchells 79 -spot-kicks 79 -commentaries 79 -opposite-sex 79 -aegis 79 -jerramy 79 -sharpening 79 -backflip 79 -frasier 79 -spruill 79 -abstentions 79 -18.4 79 -noll 79 -chepstow 79 -abh 79 -overwork 79 -altruism 79 -unfavourable 79 -torkington 79 -sapstead 79 -appalachians 79 -luce 79 -campion 79 -coven 79 -zeb 79 -voles 79 -tween 79 -irn-bru 79 -semi-nude 79 -intruding 79 -unwin 79 -skirted 79 -al-kutobi 79 -ungoverned 79 -emmy-winning 79 -03:41 79 -scammer 79 -berber 79 -marilia 79 -2002-03 79 -s.c. 
79 -15:59 79 -rina 79 -airey 79 -catacombs 79 -bentham 79 -dimples 79 -evenson 79 -stonework 79 -goodenough 79 -9-11 79 -15,500 79 -tarlov 79 -cringe-worthy 79 -400-meter 79 -semi-permanent 79 -madikizela-mandela 79 -amygdala 79 -1866 79 -ibis 79 -vejjajiva 79 -self-loathing 79 -ziad 79 -6,100 79 -siemionow 79 -rusedski 79 -wilmot 79 -jabba 79 -novgorod 79 -psychosocial 79 -hosseini 79 -climactic 79 -durm 79 -videla 79 -512 79 -throbbing 79 -jjb 79 -crash-landing 79 -gruff 79 -non-verbal 79 -03:26 79 -orissa 79 -archeologist 79 -hurwitz 79 -incalculable 79 -taghavi 79 -touchpad 79 -glaister 79 -biti 79 -layman 79 -labia 79 -noosa 79 -gatecrashed 79 -weald 79 -03:01 79 -pro12 79 -fichter 79 -spearing 79 -tubman 79 -awford 79 -handlebar 79 -bicyclists 79 -flummoxed 79 -grumble 79 -scopes 79 -bidwell 79 -umbrage 79 -hawn 79 -counterfeiters 79 -blackbird 79 -rosas 79 -norden 79 -frustrates 79 -strang 79 -moreira 79 -lacertosa 79 -stepdad 79 -qom 79 -lhota 79 -routers 79 -cava 79 -mudd 79 -volleying 79 -pitchfork 79 -db 79 -detonators 79 -woolworth 79 -sprain 78 -tomica 78 -middlesborough 78 -whalers 78 -longwood 78 -wetting 78 -yiddish 78 -semi-finalists 78 -debunk 78 -shailene 78 -astride 78 -couwels 78 -40km 78 -nooks 78 -buckeye 78 -thinly-veiled 78 -centcom 78 -levon 78 -jund 78 -8.10 78 -amplifier 78 -non-violence 78 -skewered 78 -mahut 78 -inga 78 -wpp 78 -nooses 78 -egyptian-born 78 -onil 78 -nadeau 78 -merriman 78 -vehement 78 -avitto 78 -17:09 78 -a303 78 -urach 78 -juicer 78 -dishonorable 78 -billabong 78 -pratchett 78 -dissuaded 78 -misbehaviour 78 -sulawesi 78 -explorations 78 -rionda 78 -mills-westley 78 -quneitra 78 -12.20 78 -intakes 78 -wexham 78 -writhed 78 -poughkeepsie 78 -panos 78 -steyer 78 -17:24 78 -al-faisal 78 -feta 78 -galvanise 78 -trended 78 -spritz 78 -sayar 78 -dehumanizing 78 -postmaster 78 -r.j. 
78 -reem 78 -u.s.-russian 78 -million-pound 78 -outpace 78 -16:11 78 -veneers 78 -premiering 78 -influencers 78 -glassy 78 -arbroath 78 -metatarsal 78 -hierarchical 78 -photo-shoot 78 -voids 78 -self-destruct 78 --1 78 --3 78 -niña 78 -hohaia 78 -viewable 78 -cybercriminals 78 -meteoroid 78 -under-16 78 -alludes 78 -middle-age 78 -panton 78 -stallions 78 -blackbeard 78 -kandy 78 -inimitable 78 -duets 78 -poseidon 78 -perennially 78 -sherlach 78 -plame 78 -16:58 78 -visualization 78 -palmer-tomkinson 78 -02:47 78 -full-fat 78 -doyen 78 -squint 78 -judah 78 -judas 78 -chickadee 78 -corrects 78 -battleships 78 -dogfight 78 -11.50 78 -kildare 78 -lasalle 78 -zoella 78 -jailers 78 -personhood 78 -l' 78 -demonized 78 -nixed 78 -armrest 78 -coordinators 78 -nuzzi 78 -frontbenchers 78 -mediating 78 -anti-rape 78 -03:12 78 -ophthalmology 78 -cair 78 -schafer 78 -33.5 78 -simpkins 78 -widescreen 78 -rocknroll 78 -third-grade 78 -mccreath 78 -buscemi 78 -worst-ever 78 -embedding 78 -imad 78 -scurry 78 -disraeli 78 -sculley 78 -syndication 78 -17:13 78 -kingsholm 78 -elam 78 -elan 78 -strudwick 78 -all-girls 78 -stoltz 78 -poynton 78 -courtyards 78 -farnells 78 -bombardments 78 -manzanares 78 -browsed 78 -greyhounds 78 -munday 78 -obliterate 78 -racine 78 -9c 78 -oli 78 -holladay 78 -carcinogens 78 -telescopic 78 -galactico 78 -newscaster 78 -chipper 78 -helmsman 78 -14:28 78 -okorocha 78 -rossy 78 -herdman 78 -hangings 78 -giacomo 78 -after-dinner 78 -juninho 78 -rosalie 78 -i/o 78 -longworth 78 -jassim 78 -inter-korean 78 -ajay 78 -fine-tune 78 -cholesterol-lowering 78 -mesopotamia 78 -ecker 78 -babysitters 78 -skinhead 78 -wral 78 -wpbf 78 -prick 78 -delphine 78 -combe 78 -wokingham 78 -linus 78 -moncton 78 -cortana 78 -yoshihiko 78 -sternly 78 -dispelling 78 -anti-tax 78 -compels 78 -liana 78 -rockaways 78 -emsley 78 -conferring 78 -abaya 78 -appendages 78 -sedition 78 -westfalenstadion 78 -e-mailing 78 -saxophonist 78 -stylishly 78 -16:46 78 -jeremie 78 -mega-rich 78 -exacted 78 -pinhole 78 -wride 78 -inflaming 78 -samra 78 -undertakers 78 -goodfellow 78 -businesspeople 78 -17:34 78 -eckersley 78 -nonna 78 -trampolines 78 -sealey 78 -a40 78 -clarifies 78 -summarized 78 -permian 78 -14:42 78 -tangerines 78 -harriman 78 -fatherly 78 -melksham 78 -cpc 78 -nala 78 -lugovoi 78 -bloor 78 -shana 78 -hydrating 78 -incontinent 78 -tynemouth 78 -hou 78 -corny 78 -u.s.-flagged 78 -tracer 78 -maldini 78 -najaf 78 -senser 78 -eccentricity 78 -bicentennial 78 -backlogs 78 -ariosto 78 -belleville 78 -on-the-ground 78 -shuttling 78 -carotid 78 -glow-in-the-dark 78 -5-5 78 -banishing 78 -inhibits 78 -18:43 78 -15:37 78 -ceding 78 -trophy-laden 78 -swigging 78 -dc-3 78 -monolith 78 -timesheets 78 -wfla 78 -seifert 78 -authoritarianism 78 -maltreatment 78 -16:45 78 -faletau 78 -eastlands 78 -wordsworth 78 -monjack 78 -feig 78 -kozinski 78 -a-line 78 -cuoco 78 -26c 78 -i.d. 
78 -damilola 78 -entryway 78 -18:24 78 -15:11 78 -nbc4 78 -brainstorm 78 -1846 78 -danziger 78 -editor-at-large 78 -othello 78 -theta 78 -staid 78 -eharmony 78 -stereotyped 78 -thurlow 78 -tortures 78 -glick 78 -jester 78 -4km 78 -lakota 78 -17.3 78 -margaux 78 -wynne 78 -trumpets 78 -30p 78 -cera 78 -fertilizers 78 -recieved 78 -inefficiency 78 -ljubicic 78 -second-leg 78 -genova 78 -sleuth 78 -paradoxically 78 -cadogan 78 -plato 78 -grimaced 78 -ubisoft 78 -wmo 78 -deciphering 78 -mutu 78 -hoarded 78 -no-man 78 -cornelia 78 -hook-up 78 -newby 78 -sabra 78 -lauper 78 -soraya 78 -hardback 78 -kimchi 78 -mqm 78 -kobi 78 -chauhan 78 -bdr 78 -malema 78 -glassware 78 -18.9 78 -australopithecus 78 -19:09 78 -uswitch.com 78 -adria 78 -agonised 78 -porsches 78 -recessive 78 -energy-saving 78 -cassin 78 -slavica 78 -playmates 78 -ibragim 78 -lauding 78 -cliff-top 78 -stelter 78 -bookshops 78 -stetson 77 -opry 77 -refuting 77 -replication 77 -trigg 77 -raincoat 77 -unfilled 77 -bassey 77 -1.37 77 -maggio 77 -132,000 77 -wold 77 -384 77 -silences 77 -cellulose 77 -pyd 77 -32-year 77 -anvil 77 -egotistical 77 -oesophageal 77 -rediscovering 77 -chartres 77 -wek 77 -toshack 77 -videogame 77 -schatz 77 -mothballed 77 -naghmeh 77 -maktabi 77 -link-up 77 -100lbs 77 -layouts 77 -bulking 77 -rendell 77 -loy 77 -marlowe 77 -fave 77 -axl 77 -imessage 77 -carthage 77 -paramour 77 -galvez 77 -chaperones 77 -gascoine 77 -plumage 77 -redeeming 77 -kemal 77 -tarantulas 77 -farooqi 77 -acs 77 -second-bottom 77 -strathearn 77 -boulevards 77 -hoot 77 -replenished 77 -rockin 77 -inequities 77 -huddleston 77 -23m 77 -adjudication 77 -5kg 77 -ruinous 77 -pomeroy 77 -ascents 77 -ugandans 77 -13billion 77 -threefold 77 -squirmed 77 -tailspin 77 -optometrist 77 -sinuses 77 -wilf 77 -spiller 77 -caan 77 -uga 77 -ceasing 77 -desalination 77 -pejic 77 -gosford 77 -ellement 77 -new-build 77 -'12 77 -moazzam 77 -untangle 77 -consequent 77 -jewelers 77 -lovelock 77 -animatedly 77 -pacts 77 -loach 77 -stennis 77 -co-ceo 77 -15:20 77 -greco 77 -pester 77 -hard-nosed 77 -hostage-takers 77 -kuntal 77 -stuns 77 -brogan 77 -anslow 77 -16:50 77 -shadowing 77 -clacton-on-sea 77 -adulterous 77 -rte 77 -dalia 77 -fuzz 77 -replaceable 77 -15:01 77 -mccurry 77 -03:57 77 -03:56 77 -locums 77 -rojava 77 -extra-curricular 77 -minibar 77 -dfb 77 -stam 77 -pre-paid 77 -haag 77 -maltby 77 -ooze 77 -wadsworth 77 -nea 77 -honig 77 -three-metre 77 -high-rises 77 -hscic 77 -speckled 77 -boldest 77 -confidants 77 -alireza 77 -aligns 77 -glioblastoma 77 -scampered 77 -layaway 77 -d'ambrosio 77 -19:38 77 -19:31 77 -mckayla 77 -faxed 77 -mailman 77 -deah 77 -truelove 77 -nostril 77 -croquet 77 -midge 77 -nevermind 77 -lorazepam 77 -ashtray 77 -104,000 77 -j.r.r. 
-[... deletion hunk truncated: remainder of a removed word-frequency vocabulary file, one "<token> <count>" pair per line, ordered by descending count (77 down to 64 in this span); full token list omitted for brevity ...]
64 -unbuttoned 64 -anatolia 64 -matchsticks 64 -chemmy 64 -lommel 64 -hitchbot 64 -machismo 64 -morphology 64 -woodgate 64 -perversely 64 -comaneci 64 -reyhanli 64 -motioned 64 -compute 64 -humbug 64 -dirksen 64 -shaynak 64 -adjective 64 -dedham 64 -loring 64 -pressley 64 -stillbirths 64 -aeroscraft 64 -benadryl 64 -0-60 64 -reassessing 64 -pallister 64 -aline 64 -60billion 64 -avidly 64 -foss 64 -fortis 64 -01:46 64 -disturbs 64 -breadcrumbs 64 -inefficiencies 64 -peñaflorida 64 -meirion 64 -addicks 64 -missguided 64 -transplanting 64 -managua 64 -chatroom 64 -debrief 64 -flapped 64 -vicariously 64 -litigate 64 -leutner 64 -ill-treated 64 -half-baked 64 -casas 64 -six-second 64 -allayed 64 -blaz 64 -scribble 64 -grassi 64 -apace 63 -helipads 63 -18-hour 63 -rottweilers 63 -scorch 63 -mashru 63 -mazza 63 -caning 63 -pluralistic 63 -bruntrager 63 -nigerian-born 63 -engels 63 -shukrijumah 63 -cinched 63 -dordogne 63 -acceded 63 -421 63 -spectral 63 -blobs 63 -q7 63 -masi 63 -reston 63 -predictability 63 -kickers 63 -excellency 63 -groucho 63 -dietmar 63 -quinto 63 -bankruptcies 63 -impermissible 63 -hudner 63 -easterling 63 -loca 63 -nablus 63 -sanofi 63 -sheikhs 63 -marte 63 -lupe 63 -eberle 63 -r-iowa 63 -whimsy 63 -hendy 63 -hamman 63 -amigos 63 -kilbane 63 -darla 63 -scratch-off 63 -frankie-rose 63 -berkut 63 -wombats 63 -smurf 63 -leyla 63 -jacmel 63 -grotzinger 63 -nealon 63 -tatooine 63 -houston-based 63 -1.79 63 -bic 63 -ash-smith 63 -28.8 63 -proliferated 63 -barbuda 63 -post-gazette 63 -non-combat 63 -frow 63 -sturgis 63 -middling 63 -iconography 63 -brega 63 -subcontractors 63 -endearment 63 -hardcover 63 -souvannarath 63 -glossed 63 -unruffled 63 -festivus 63 -yoho 63 -septum 63 -eugenio 63 -53million 63 -merino 63 -gaiman 63 -lahr 63 -ex-pats 63 -bilderberg 63 -24.7 63 -20-inch 63 -firebox 63 -eich 63 -pillowcases 63 -stoops 63 -02:03 63 -engender 63 -treatise 63 -re-home 63 -bexsero 63 -overdrawn 63 -hopkinson 63 -zarzuela 63 -photojournalism 63 -clevenger 63 -tabatha 63 -distributions 63 -rakossi 63 -obelisk 63 -catch-22 63 -metalist 63 -fuelband 63 -cupped 63 -unimaginably 63 -grunts 63 -minehead 63 -freekick 63 -jaunts 63 -3.95 63 -nuon 63 -helston 63 -buell 63 -legislated 63 -fission 63 -plumstead 63 -02:44 63 -wahid 63 -525,000 63 -weariness 63 -twenty-something 63 -glickman 63 -protrude 63 -anti-violence 63 -pervades 63 -clews 63 -6-foot-4 63 -gmo 63 -pretenders 63 -privately-educated 63 -beachwear 63 -commercialism 63 -tachycardia 63 -bettering 63 -ani 63 -incites 63 -self-obsessed 63 -agreed-upon 63 -mendenhall 63 -three-legged 63 -hoolahan 63 -helplines 63 -first-innings 63 -rejections 63 -vollmer 63 -roden 63 -bonnaroo 63 -casteel 63 -montessori 63 -al-hakim 63 -mek 63 -wide-brimmed 63 -meowing 63 -erasure 63 -dayu 63 -atwell 63 -rocket-powered 63 -then-senator 63 -hath 63 -03:13 63 -bex 63 -spellings 63 -mowat 63 -mid-staffordshire 63 -libreville 63 -kala 63 -self-preservation 63 -99,000 63 -buetow 63 -ga. 
63 -purview 63 -mistimed 63 -klizan 63 -cullum 63 -naz 63 -headpieces 63 -kololo 63 -14/08/2012 63 -hyun 63 -dawning 63 -rounders 63 -screeched 63 -ex-premier 63 -usk 63 -overhauls 63 -punchy 63 -conversing 63 -15-day 63 -mossberg 63 -inkings 63 -sunningdale 63 -smalley 63 -vandalizing 63 -manes 63 -wallflower 63 -obstructions 63 -non-discrimination 63 -1.24 63 -denigrating 63 -debrecen 63 -shawls 63 -coningsby 63 -chilpancingo 63 -mattison 63 -seo 63 -737s 63 -convalescent 63 -400th 63 -lokhova 63 -coniston 63 -contravening 63 -coughlan 63 -amoral 63 -mers-cov 63 -keza 63 -run-out 63 -katey 63 -mahil 63 -thumbing 63 -duplicates 63 -lenore 63 -copts 63 -pedy 63 -por 63 -jet-powered 63 -lalit 63 -shein 63 -liquorice 63 -unpack 63 -zinjibar 63 -21:01 63 -hold-ups 63 -pokémon 63 -shahada 63 -unquestioned 63 -sinfield 63 -colloquially 63 -mineral-rich 63 -okazaki 63 -recant 63 -melvyn 63 -8/1 63 -gaeta 63 -shantel 63 -nahas 63 -kost 63 -galle 63 -state-of-the 63 -exorcise 63 -unmask 63 -paleontology 63 -kerfuffle 63 -penetrates 63 -sabahy 63 -whereupon 63 -mondelez 63 -miniskirts 63 -showy 63 -suntan 63 -paintball 63 -stencils 63 -pre-packaged 63 -breakouts 63 -zhen 63 -455 63 -francisca 63 -hamstrings 63 -afghan-pakistan 63 -134,000 63 -fervour 63 -cost-saving 63 -chudley 63 -susteren 63 -hudson-smith 63 -reidy 63 -wooley 63 -ivs 63 -kovtun 63 -w2 63 -grace-and-favour 63 -willa 63 -dauphin 63 -mitting 63 -10mph 63 -cutthroat 63 -xhaka 63 -self-centered 63 -ozturk 63 -canelo 63 -piz 63 -preys 63 -pescara 63 -subtitle 63 -sambolin 63 -offs 63 -unionize 63 -city-wide 63 -organza 63 -looney 63 -sinmun 63 -vkontakte 63 -meekly 63 -charleigh 63 -mencap 63 -369 63 -globovision 63 -mastro 63 -hookup 63 -deep-lying 63 -lederhosen 63 -forty-three 63 -surly 63 -insigne 63 -go-around 63 -pre-determined 63 -lucile 63 -statoil 63 -poulson 63 -convinces 63 -soapbox 63 -19m 63 -yanira 63 -floridian 63 -20:31 63 -randomized 63 -insignificance 63 -rebutted 63 -schaeffer 63 -hirise 63 -azerbaijani 63 -alpert 63 -seko 63 -enrolment 63 -collapsible 63 -extractions 63 -zte 63 -laron 63 -paraiso 63 -malformed 63 -adebayo 63 -pare 63 -certifying 63 -malign 63 -sexiness 63 -ionosphere 63 -gooden 63 -dignify 63 -20:13 63 -garrick 63 -post-partum 63 -homed 63 -third-quarter 63 -neuralgia 63 -jesmond 63 -pettitte 63 -795 63 -straighter 63 -bavarians 63 -515 63 -pheonix 63 -facilitation 63 -nebulae 63 -moretz 63 -behrami 63 -d4 63 -alfano 63 -creigh 63 -decimate 63 -beet 63 -impartially 63 -dalzell 63 -pendants 63 -illinois-based 63 -winthrop 63 -kenzie 63 -acacia 63 -ketchum 63 -haut 63 -guesswork 63 -defenseman 63 -joffrey 63 -rigidly 63 -pump-action 63 -canoeist 63 -nevill 63 -hardliner 63 -foia 63 -krissy 63 -mansur 63 -180million 63 -choco 63 -halewood 63 -inexorably 63 -gypsum 63 -newberry 63 -c3 63 -1798 63 -rothley 63 -19:28 63 -650million 63 -proportionately 63 -guetta 63 -sunrises 63 -terrafugia 63 -texaco 63 -shvedova 63 -177,000 63 -papilloma 63 -rivalling 63 -befuddled 63 -warily 63 -zindzi 63 -monopolies 63 -abseiled 63 -means-tested 63 -thurber 63 -ibisevic 63 -raef 63 -squawk 63 -kennard 63 -heidfeld 63 -40-hour 63 -worshipper 63 -minto 63 -waley 63 -goofing 63 -deviations 63 -burqas 63 -point-to-point 63 -trite 63 -beutler 63 -idiopathic 63 -ilbo 63 -springwatch 63 -hoban 63 -shain 63 -coasted 63 -shazam 63 -fistfight 63 -beshear 63 -ikbal 63 -unpacking 63 -fashioning 63 -oncologists 63 -refraining 63 -entertainments 63 -itv4 63 -yukos 63 -greenford 63 -mawson 63 -indisputably 63 -collard 63 -groom-to-be 
63 -27.4 63 -pro-assad 63 -caressing 63 -d-florida 63 -fortuno 63 -heymann 63 -queueing 63 -britta 62 -mcginnis 62 -sherchan 62 -phrasing 62 -five-member 62 -neurosurgeons 62 -roosegaarde 62 -fourth-quarter 62 -self-funded 62 -nueva 62 -pop-culture 62 -phenom 62 -correlations 62 -chiropractic 62 -correia 62 -apostrophes 62 -sary 62 -2007/08 62 -19.1 62 -minimums 62 -culliver 62 -smirnoff 62 -gameover 62 -impassively 62 -incentivise 62 -changeover 62 -seven-foot 62 -warrick 62 -beamond 62 -afro-caribbean 62 -tassels 62 -bottom-up 62 -federalism 62 -preponderance 62 -sayer 62 -skelter 62 -lada 62 -anti-semite 62 -low-slung 62 -novi 62 -insession 62 -tulum 62 -belford 62 -right-winger 62 -board-certified 62 -bartz 62 -rosenker 62 -naà 62 -final-round 62 -tabler 62 -broach 62 -arizona-based 62 -13-years-old 62 -https 62 -whirring 62 -ignazio 62 -sikorsky 62 -tartar 62 -mannered 62 -bigamist 62 -elina 62 -montfort 62 -foreshore 62 -zissman 62 -ashbourne 62 -winnall 62 -rihanoff 62 -pyre 62 -highly-trained 62 -abernethy 62 -caley 62 -orang-utan 62 -post-menopausal 62 -byword 62 -schroder 62 -lymington 62 -surefire 62 -voyeuristic 62 -deller 62 -5-month-old 62 -floggings 62 -h.r. 62 -discolouration 62 -aphrodite 62 -firebombing 62 -100-plus 62 -utters 62 -dot-com 62 -twinkie 62 -ocearch 62 -5live 62 -espousing 62 -sandlin 62 -agitators 62 -fireproof 62 -microsd 62 -wryly 62 -dinka 62 -alfresco 62 --7 62 -ips 62 -upstage 62 -tulare 62 -vacuous 62 -self-pity 62 -architecturally 62 -teatime 62 -10,800 62 -372 62 -periscope 62 -kearse 62 -westland 62 -well-lit 62 -targetting 62 -o'laughlin 62 -gutters 62 -483 62 -bassano 62 -blaenau 62 -wymott 62 -rottman 62 -hazem 62 -chretien 62 -20:22 62 -neruda 62 -unprocessed 62 -defecated 62 -look-alike 62 -15:04 62 -necklines 62 -bendable 62 -dacre 62 -156,000 62 -adorably 62 -laidback 62 -rioter 62 -workweek 62 -commercial-free 62 -yoann 62 -20:09 62 -axani 62 -holgate 62 -vause 62 -fifth-generation 62 -cogswell 62 -notifies 62 -andor 62 -slip-ups 62 -fayyad 62 -frostrup 62 -enslaving 62 -massaro 62 -moisturisers 62 -frontlines 62 -leyhill 62 -light-up 62 -sjs 62 -nagel 62 -mendocino 62 -self-catering 62 -peppering 62 -denial-of-service 62 -skewer 62 -zuhair 62 -inbetweeners 62 -sub-continent 62 -shae 62 -doll-like 62 -thwaites 62 -uptight 62 -perching 62 -bampton 62 -zeppelins 62 -harnden 62 -cognitively 62 -solvents 62 -12.40 62 -kw 62 -tort 62 -azim 62 -montolivo 62 -amalfi 62 -1787 62 -higher-ups 62 -isfahan 62 -herculaneum 62 -tarot 62 -brinkmann 62 -feghouli 62 -al-hasawi 62 -aries 62 -metaphorical 62 -siesta 62 -summerville 62 -knockoff 62 -rummage 62 -3.35 62 -gidley 62 -doering 62 -us-style 62 -aldgate 62 -somer 62 -uprooting 62 -belk 62 -solvency 62 -26,500 62 -enviably 62 -brainwaves 62 -secularist 62 -morison 62 -planter 62 -irritants 62 -triple-dip 62 -brugge 62 -motson 62 -six-man 62 -bhandari 62 -kolb 62 -riverview 62 -super-yachts 62 -seven-times 62 -basham 62 -cta 62 -1.44 62 -plumbed 62 -168,000 62 -86f 62 -396 62 -21:02 62 -martians 62 -junket 62 -girders 62 -sandhu 62 -tyner 62 -yakima 62 -nonplussed 62 -hazara 62 -downgrades 62 -u.s.-israeli 62 -bermudez 62 -darent 62 -478 62 -roughing 62 -ayvani 62 -mayley 62 -cagle 62 -30lbs 62 -eight-years-old 62 -foust 62 -willson 62 -moch 62 -despotic 62 -hassles 62 -sandstorms 62 -holtzclaw 62 -breslin 62 -02:38 62 -d'agostini 62 -rosier 62 -greenblatt 62 -tupperware 62 -cpj 62 -lovehoney 62 -chantel 62 -six-years-old 62 -paradoxical 62 -200km 62 -adiz 62 -flinching 62 -half-brothers 62 -piot 62 
-jutland 62 -weinberger 62 -gerlach 62 -15:56 62 -garners 62 -pro-ukrainian 62 -loudoun 62 -misrepresentations 62 -leander 62 -tva 62 -z10 62 -sealand 62 -hand-delivered 62 -chilterns 62 -jericho 62 -oaps 62 -grosses 62 -second-rate 62 -adow 62 -espinal 62 -caffall 62 -cripps 62 -angelino 62 -cristy 62 -skyway 62 -indefatigable 62 -anaya 62 -abad 62 -oat 62 -blackmailer 62 -deeb 62 -enveloping 62 -desta 62 -glammed 62 -eisenstaedt 62 -hide-and-seek 62 -salamanders 62 -lawbreakers 62 -mccloskey 62 -inferences 62 -sclc 62 -loons 62 -macedo 62 -195,000 62 -ayda 62 -nestles 62 -orc 62 -a-10 62 -counterterror 62 -20:16 62 -jamboree 62 -car-maker 62 -winced 62 -fierro 62 -hairbrush 62 -leery 62 -speedskating 62 -surtees 62 -liberalization 62 -hermosa 62 -starke 62 -panmunjom 62 -horsey 62 -chesham 62 -henn 62 -ummah 62 -tsars 62 -segunda 62 -threadbare 62 -17.9 62 -lovestruck 62 -rizwan 62 -salespeople 62 -replenishing 62 -retouched 62 -bongs 62 -co-commentator 62 -crossbreed 62 -unquestionable 62 -parham 62 -456 62 -brandeis 62 -cooing 62 -falconry 62 -foreshadowed 62 -bielsa 62 -dybala 62 -well-groomed 62 -khadijah 62 -vasectomies 62 -arsal 62 -family-oriented 62 -mid-2013 62 -rothman 62 -cu 62 -sneezed 62 -pst 62 -tirades 62 -azawad 62 -spall 62 -mulvey 62 -iveta 62 -dance-off 62 -terrors 62 -turtleneck 62 -centauri 62 -bullhorn 62 -pitot 62 -chiba 62 -micro-organisms 62 -rason 62 -big-hearted 62 -dripped 62 -janowski 62 -betjeman 62 -hotpants 62 -coober 62 -borghese 62 -prefectures 62 -kile 62 -munley 62 -alban 62 -buckwild 62 -fieri 62 -roseann 62 -23ft 62 -hogarth 62 -pipping 62 -peirce 62 -schrier 62 -13lbs 62 -anti-mafia 62 -anecdotally 62 -maddow 62 -westbourne 62 -kaftan 62 -springville 62 -colley 62 -odubajo 62 -commode 62 -1.17 62 -single-use 62 -50-60 62 -diyarbakir 62 -berlinetta 62 -jokanovic 61 -coherence 61 -blasters 61 -rook 61 -hypnotherapist 61 -'30s 61 -carper 61 -eales 61 -vats 61 -televisa 61 -enema 61 -120ft 61 -heavy-lift 61 -dungeness 61 -sonnenberg 61 -back-line 61 -irregularly 61 -rintoul 61 -archbold 61 -shashi 61 -vasilyev 61 -koop 61 -jonny_singer 61 -assertiveness 61 -katrice 61 -bencic 61 -girdle 61 -suggestively 61 -kutner 61 -bearskin 61 -wyckoff 61 -energetically 61 -interlude 61 -multicoloured 61 -demel 61 -berenson 61 -reptilian 61 -seaview 61 -semiautonomous 61 -21m 61 -trickle-down 61 -bruyneel 61 -ringtones 61 -sammer 61 -cross-legged 61 -gadsden 61 -oxon 61 -seeley 61 -sleight 61 -letdown 61 -vfl 61 -appraiser 61 -avn 61 -hand-built 61 -goner 61 -lidar 61 -thereabouts 61 -chocolatier 61 -shoo 61 -luisana 61 -curti 61 -pressurise 61 -endocrinology 61 -magnetosphere 61 -abdul-jabbar 61 -riverbanks 61 -squinting 61 -esoteric 61 -1820s 61 -stachel 61 -chins 61 -loulou 61 -21.4 61 -laid-off 61 -trucked 61 -minimalism 61 -mimicry 61 -mottram 61 -wormhole 61 -bobbies 61 -daventry 61 -kletzky 61 -innards 61 -harvin 61 -ghosn 61 -305,000 61 -cheshunt 61 -re-signing 61 -iqs 61 -espn.com 61 -salle 61 -smokeless 61 -garbine 61 -20:45 61 -haphazardly 61 -head-butting 61 -demographer 61 -irrevocable 61 -panhandling 61 -15:28 61 -manon 61 -underwriter 61 -dairies 61 -pelts 61 -critiqued 61 -11-plus 61 -mohave 61 -sakharov 61 -rabu 61 -resourcefulness 61 -musacchio 61 -milena 61 -clutha 61 -barwell 61 -dalman 61 -riise 61 -pinup 61 -arte 61 -7-8 61 -20:26 61 -mf 61 -16:59 61 -16:57 61 -heglig 61 -hotton 61 -hennis 61 -southland 61 -rivet 61 -asil 61 -zeros 61 -kasandra 61 -burleigh 61 -antagonist 61 -cardiopulmonary 61 -xhosa 61 -spangled 61 -freel 61 -hilal 61 
-radoslaw 61 -nabisco 61 -hillsboro 61 -gbowee 61 -1/3 61 -take-out 61 -pravda 61 -pejorative 61 -suiting 61 -tessier 61 -categorical 61 -slasher 61 -shuffles 61 -buy-back 61 -superglue 61 -khao 61 -broken-hearted 61 -maree 61 -tiffin 61 -union-tribune 61 -dorothea 61 -misappropriating 61 -absinthe 61 -purred 61 -call-in 61 -heyman 61 -kibera 61 -549 61 -halderman 61 -bellis 61 -laugher 61 -tursunov 61 -imitations 61 -mclellan 61 -condit 61 -newlove 61 -stabilisation 61 -insinuating 61 -potting 61 -addo 61 -eyebrow-raising 61 -candied 61 -aw13 61 -carport 61 -1780 61 -firming 61 -negotiable 61 -jorden 61 -granovskaia 61 -digitised 61 -odorless 61 -enablers 61 -customisation 61 -well-wishes 61 -gavroche 61 -fastball 61 -@craighope_dm 61 -5bn 61 -newsok 61 -nagar 61 -60km 61 -valdebebas 61 -mumia 61 -irkutsk 61 -polyps 61 -placings 61 -brittanee 61 -incrementally 61 -hortons 61 -worshiped 61 -gremlins 61 -chinchilla 61 -10-game 61 -2.85 61 -reconstituted 61 -low-quality 61 -coombes 61 -bade 61 -commissary 61 -super-earths 61 -ccs 61 -goof 61 -evacuee 61 -tonsillectomy 61 -echelon 61 -conant 61 -1995-96 61 -mattek-sands 61 -folha 61 -tanked 61 -thaddeus 61 -dharamsala 61 -1.28 61 -fawzi 61 -shinde 61 -22.8 61 -nars 61 -breech 61 -spurn 61 -roslyn 61 -ramesh 61 -130mph 61 -strycova 61 -child-rearing 61 -ilan 61 -nevaeh 61 -thandi 61 -front-end 61 -drug-smuggling 61 -vang 61 -miniskirt 61 -porcupines 61 -requiem 61 -solidifies 61 -1770 61 -12-strong 61 -bol 61 -mdna 61 -lfp 61 -janey 61 -guttmann 61 -merrily 61 -jacko 61 -spargo-mabbs 61 -low-pressure 61 -inter-services 61 -allsop 61 -fortresses 61 -crs 61 -four-shot 61 -cyberwarfare 61 -optimise 61 -four-mile 61 -popularize 61 -5,900 61 -chipmunks 61 -rupo 61 -obliging 61 -snarl 61 -subdural 61 -duvernay 61 -slouchy 61 -four-set 61 -01:39 61 -evergrande 61 -german-owned 61 -pickpocket 61 -putative 61 -sittings 61 -slither 61 -nir 61 -victorian-style 61 -10news 61 -2-month-old 61 -brenna 61 -batley 61 -mass-market 61 -klemm 61 -lumpectomy 61 -glanville 61 -cortisone 61 -seath 61 -bachman 61 -200kg 61 -thakur 61 -on-the-job 61 -under-inflated 61 -zahlavova 61 -single-parent 61 -state-based 61 -noye 61 -tarar 61 -simplification 61 -fantasized 61 -hof 61 -hoh 61 -racially-charged 61 -02:11 61 -a66 61 -disobey 61 -under-resourced 61 -zipline 61 -quvenzhané 61 -business-class 61 -proprietors 61 -jordans 61 -alexandru 61 -pallone 61 -showrunner 61 -demeans 61 -200ml 61 -raze 61 -pingers 61 -mcmurdo 61 -50,000-volt 61 -20:56 61 -ghd 61 -daylights 61 -edifice 61 -peacekeeper 61 -patios 61 -tvnz 61 -janner 61 -buttermilk 61 -avenida 61 -typeface 61 -renzo 61 -shearling 61 -ssris 61 -18:47 61 -15:39 61 -berardi 61 -vallejo 61 -tungsten 61 -15-man 61 -entanglement 61 -crowdsourced 61 -peruse 61 -offsets 61 -non-state 61 -kumsusan 61 -bhurji 61 -2001-02 61 -u.s.-cuba 61 -humpty 61 -escalante 61 -newsgathering 61 -agha-soltan 61 -roughness 61 -all-pro 61 -kuiper 61 -eg 61 -dinesh 61 -e-3 61 -commonality 61 -collies 61 -oswalt 61 -deliberative 61 -arsen 61 -yannis 61 -brava 61 -suffragette 61 -pro-reform 61 -eelam 61 -crucifixes 61 -passmore 61 -518 61 -stooge 61 -wyclef 61 -hakamada 61 -generalized 61 -remonstrate 61 -brooms 61 -mottled 61 -doner 61 -best-looking 61 -compressor 61 -side-to-side 61 -airworthiness 61 -fromme 61 -phrased 61 -upshaw 61 -500lb 61 -tummies 61 -perfectionism 61 -amicus 61 -luke_augustus29 61 -45billion 61 -nejame 61 -d.c.-based 61 -oka 61 -inauspicious 61 -blackberrys 61 -acrid 61 -standford 61 -botton 61 -everdeen 61 -bicentenary 
61 -extraditing 61 -flightaware 61 -nastiest 61 -party-goer 61 -1.13 61 -cardiologists 61 -funnily 61 -exteriors 61 -courthouses 61 -6kg 61 -hatchlings 61 -1793 61 -yuen 61 -papi 61 -coffs 61 -unapologetically 61 -longed-for 61 -sulzberger 61 -kernels 61 -koat 61 -commending 61 -outspent 61 -tasker 61 -shoulder-to-shoulder 61 -diffused 61 -minty 61 -bonilla 61 -participatory 61 -56million 61 -merlot 61 -mid-nineties 61 -pontius 61 -toronado 61 -deptford 61 -reines 61 -symbolised 61 -twentynine 61 -romaine 61 -jpn 61 -harmonica 61 -ukrinform 61 -rajendra 61 -gondolas 61 -mcaleese 61 -sensationalism 61 -.8 61 -bankrupted 61 -christmas-themed 61 -27.7 61 -ilkeston 61 -amble 61 -khakis 61 -multitask 61 -birkbeck 61 -lillo 61 -expandable 61 -stoicism 60 -munson 60 -exhorted 60 -monocytogenes 60 -relapses 60 -no-contact 60 -soundoff 60 -classless 60 -skylights 60 -pigtails 60 -vasco 60 -dmi 60 -cavett 60 -rett 60 -lackadaisical 60 -teodoro 60 -reprised 60 -workmate 60 -dopey 60 -achy 60 -akil 60 -dorfman 60 -tipperary 60 -shallower 60 -bergstrom 60 -reimagined 60 -hayson 60 -homewood 60 -zingers 60 -ebbsfleet 60 -freeborn 60 -hackles 60 -digne 60 -raisa 60 -bourton 60 -balbi 60 -spaccia 60 -kolar 60 -glebe 60 -abaaoud 60 -cum 60 -meaden 60 -sunlit 60 -arabiya 60 -adrianne 60 -hubbub 60 -portage 60 -binks 60 -guardado 60 -captioning 60 -isabela 60 -internet-enabled 60 -championship-winning 60 -ex-chelsea 60 -higher-rate 60 -fetishes 60 -skinheads 60 -8:20 60 -roehampton 60 -mig 60 -rosekind 60 -isakson 60 -trapaga 60 -khutor 60 -fifteenth 60 -haring 60 -unenforceable 60 -ogwyn 60 -marginalize 60 -one-bed 60 -fuzhou 60 -dongle 60 -electioneering 60 -mahli 60 -ponchaud 60 -southside 60 -441 60 -al-saud 60 -shankman 60 -jihadism 60 -fascinates 60 -ex-new 60 -underpinnings 60 -giger 60 -16:13 60 -szymanski 60 -pillion 60 -110m 60 -umberto 60 -immunotherapy 60 -toppers 60 -wcco 60 -istomin 60 -665 60 -eight-game 60 -calculators 60 -sager 60 -cordesman 60 -trainspotting 60 -valli 60 -29.4 60 -16:33 60 -hoodwinked 60 -self-governing 60 -poppers 60 -eatocracy 60 -dyczynski 60 -chibnall 60 -skewers 60 -wrong-footed 60 -ruppersberger 60 -revisits 60 -ricin-laced 60 -astrologer 60 -tolbert 60 -csizsik-csatary 60 -el-mahroug 60 -pranked 60 -gledhill 60 -kelleher 60 -niang 60 -nine-minute 60 -16:27 60 -allard 60 -m2 60 -no-fee 60 -lankov 60 -brand-name 60 -omega-3s 60 -rabinowitz 60 -sofie 60 -lionheart 60 -15:06 60 -charliesale 60 -million-strong 60 -brielle 60 -burrowbridge 60 -'50 60 -ahem 60 -matter-of-fact 60 -re-posted 60 -yevgeny 60 -linemen 60 -vcjd 60 -teagan 60 -over-run 60 -mnn 60 -counter-insurgency 60 -men-only 60 -40per 60 -oilfield 60 -cloaking 60 -oriol 60 -atvs 60 -melodramatic 60 -trumping 60 -stonehaven 60 -cloakroom 60 -adaptability 60 -leavitt 60 -flue 60 -high-impact 60 -outnumbering 60 -stents 60 -overshadows 60 -maksim 60 -krakauer 60 -handprints 60 -luan 60 -azov 60 -zabul 60 -cabbages 60 -40-mile 60 -coronel 60 -lightness 60 -quade 60 -wickford 60 -mckellar 60 -headlight 60 -amado 60 -judson 60 -schuette 60 -970 60 -verviers 60 -lipton 60 -bergen-belsen 60 -infantrymen 60 -ironclad 60 -downham 60 -loris 60 -emad 60 -pre-nuptial 60 -stauss 60 -boars 60 -dalliance 60 -rajaratnam 60 -nava 60 -bolanos 60 -ruetten 60 -macias 60 -hades 60 -federica 60 -shanghai-based 60 -holzer 60 -liquors 60 -implores 60 -deansgate 60 -subdivisions 60 -aids-related 60 -lutfur 60 -eurocopter 60 -mirchandani 60 -nokes 60 -septuagenarian 60 -waterpark 60 -freighters 60 -glancy 60 -farhan 60 -symbiotic 60 
-symbolising 60 -maritza 60 -pradeep 60 -child-bearing 60 -brainpower 60 -dynastic 60 -remembrances 60 -12-point 60 -purveyor 60 -paseo 60 -kleiner 60 -inarritu 60 -ryn 60 -erasmus 60 -lodi 60 -bergdorf 60 -cronuts 60 -fortunato 60 -warria 60 -omran 60 -3:20 60 -22.2 60 -reinvestment 60 -rewiring 60 -applicator 60 -doze 60 -auvergne 60 -worktops 60 -freelancers 60 -14.9 60 -unaccustomed 60 -ramirez-cruz 60 -cerebellum 60 -lajeunesse 60 -husted 60 -renminbi 60 -walk-through 60 -restaurateurs 60 -limos 60 -hatley 60 -harun 60 -abseil 60 -fantasised 60 -1.47 60 -lancelot 60 -16:41 60 -bodymoor 60 -vibrators 60 -training-ground 60 -bakar 60 -vied 60 -cpap 60 -391 60 -tuysuz 60 -six-story 60 -43.5 60 -cernan 60 -indemnity 60 -mislabeled 60 -westlife 60 -clapp 60 -milks 60 -6.18 60 -returnees 60 -morne 60 -proenca 60 -haywards 60 -reconfigured 60 -non-starter 60 -ascribed 60 -yerger 60 -encino 60 -usga 60 -senor 60 -ill-feeling 60 -5.7-inch 60 -wilsons 60 -cortland 60 -futerman 60 -archaeopteryx 60 -scarpa 60 -unamid 60 -bulldozing 60 -kristof 60 -e7 60 -massacring 60 -hahaha 60 -12-member 60 -humbert 60 -juma 60 -conscripts 60 -politicize 60 -papademos 60 -leichhardt 60 -martinique 60 -starks 60 -achondroplasia 60 -lusk 60 -guilford 60 -wild-card 60 -louw 60 -berisha 60 -nonu 60 -sjogren 60 -kawashima 60 -schwimmer 60 -repudiated 60 -fairbank 60 -quill 60 -bridgnorth 60 -vitamix 60 -seahorses 60 -prods 60 -vonderrit 60 -iribe 60 -stringfellow 60 -non-english 60 -stews 60 -hoarau 60 -ex-army 60 -20:53 60 -skateboarders 60 -dribbled 60 -voltaire 60 -herero 60 -csu 60 -asante 60 -ashkar 60 -sepulveda 60 -battery-operated 60 -triantafilo 60 -by-products 60 -letterhead 60 -stony-faced 60 -merz 60 -amphipolis 60 -déjà 60 -bonnets 60 -reprocessing 60 -panayiotou 60 -wildwood 60 -dais 60 -constrict 60 -16:44 60 -espy 60 -royalists 60 -centre-halves 60 -rua 60 -relearn 60 -01:58 60 -sellotape 60 -frankfort 60 -evernote 60 -refurbishments 60 -suter 60 -lipped 60 -submariners 60 -parents-to-be 60 -20:14 60 -jalapeno 60 -edgier 60 -mutassim 60 -hurriyet 60 -hartfield 60 -chabot 60 -chávez 60 -reissue 60 -strettle 60 -2.55 60 -beefy 60 -jesper 60 -aksel 60 -molokai 60 -brooksbank 60 -t5 60 -artis 60 -righting 60 -helleson 60 -goths 60 -fillies 60 -startle 60 -shoulder-fired 60 -25st 60 -ultra-low 60 -navarro-canales 60 -podmore 60 -berliners 60 -ighalo 60 -antolin 60 -21ft 60 -styers 60 -randomness 60 -1790 60 -9,300 60 -tabriz 60 -x-files 60 -bessie 60 -clairvoyant 60 -ponzo 60 -isha 60 -quick-witted 60 -depression-era 60 -slathered 60 -albu 60 -podcasts 60 -m20 60 -dept 60 -mauve 60 -alcorn 60 -phylicia 60 -kimmerle 60 -lowes 60 -incredulously 60 -decimating 60 -verdun 60 -01:48 60 -sanctis 60 -dutta 60 -mela 60 -wah 60 -mutilations 60 -drunk-driving 60 -walkover 60 -go-karting 60 -monochromatic 60 -co-sponsor 60 -three-test 60 -low-ranking 60 -bridged 60 -sciatica 60 -high-achieving 60 -fakery 59 -digitalglobe 59 -ihsan 59 -appeasing 59 -buono 59 -soleimani 59 -salina 59 -1.38 59 -svenson 59 -427 59 -computer-controlled 59 -brommel 59 -peterhead 59 -piston 59 -incisors 59 -tu-95 59 -lamond 59 -decries 59 -militaristic 59 -butternut 59 -19.8 59 -19.9 59 -millicent 59 -mazatlan 59 -neutering 59 -1min 59 -stelios 59 -vindicates 59 -submariner 59 -deduce 59 -tranquiliser 59 -stimulator 59 -hellman 59 -lombardy 59 -summonsed 59 -dlamini 59 -pnd 59 -ena 59 -blithely 59 -paquin 59 -qusair 59 -maryann 59 -bergerac 59 -ravishing 59 -lighterlife 59 -sparky 59 -policyholders 59 -huybrechts 59 -dressers 59 
-great-great-grandfather 59 -catalogs 59 -beowulf 59 -quijano 59 -re-build 59 -ramdev 59 -cooperatives 59 -refaeli 59 -pickler 59 -abdalla 59 -pureed 59 -re-ignited 59 -koin 59 -pinder 59 -wilcock 59 -limps 59 -legumes 59 -kavanaugh 59 -632 59 -lingzi 59 -near-infrared 59 -signer 59 -css 59 -imperialist 59 -fri 59 -eagan 59 -nehru 59 -realtime 59 -arash 59 -chuang 59 -randal 59 -meads 59 -acord 59 -fitchburg 59 -time-out 59 -cushioning 59 -el-keib 59 -shabab 59 -3 59 -hollowed-out 59 -otamendi 59 -half-siblings 59 -marchionne 59 -delors 59 -r-michigan 59 -wholegrain 59 -well-designed 59 -u-turns 59 -0800 555111 59 -london-bound 59 -blood-curdling 59 -collating 59 -recoiled 59 -predation 59 -rosolie 59 -miming 59 -21.6 59 -berenice 59 -02:01 59 -schar 59 -swanston 59 -flasks 59 -stablemate 59 -bitumen 59 -escrow 59 -aromatherapy 59 -womanizing 59 -hollywood-style 59 -soria 59 -blood-splattered 59 -animatronic 59 -anscombe 59 -record-extending 59 -taos 59 -resta 59 -zaheer 59 -goodband 59 -tridevil 59 -jacek 59 -740,000 59 -d-west 59 -hypoxia 59 -chartering 59 -bhatia 59 -486 59 -kalimantan 59 -farrer 59 -doan 59 -willamette 59 -full-frontal 59 -inciweb 59 -purser 59 -20:27 59 -long-run 59 -ammann 59 -352 59 -rafiki 59 -inducements 59 -uswitch 59 -pda 59 -unemotional 59 -whittemore 59 -mcstays 59 -sprawls 59 -dinghies 59 -platypus 59 -mown 59 -foresees 59 -stoneman 59 -uninitiated 59 -de-facto 59 -cherishes 59 -epitaph 59 -palmor 59 -evi 59 -yousif 59 -stoughton 59 -intrauterine 59 -mamdouh 59 -adelie 59 -kwh 59 -melodic 59 -idahosa 59 -broun 59 -inhabitant 59 -mccroskey 59 -schuylkill 59 -linzi 59 -rcm 59 -outmoded 59 -turntable 59 -warding 59 -inflection 59 -machen 59 -luckey 59 -malinga 59 -deconstructed 59 -inattention 59 -biltmore 59 -canucks 59 -byram 59 -prowled 59 -boorish 59 -chastising 59 -interbred 59 -virender 59 -caithness 59 -cueto 59 -scrimmage 59 -lurie 59 -deriding 59 -800th 59 -wide-reaching 59 -pistachios 59 -100-day 59 -xenon 59 -13-minute 59 -top-of-the-line 59 -reo 59 -cormorant 59 -bryer 59 -mclellands 59 -calderdale 59 -state-issued 59 -iranian-backed 59 -albiol 59 -helmsley 59 -genomic 59 -fiddler 59 -abdicating 59 -lindner 59 -meninga 59 -shrouds 59 -bodyism 59 -stitch-up 59 -becket 59 -dementieva 59 -self-righteous 59 -shoals 59 -cindi 59 -laughlin 59 -farrugia 59 -varoufakis 59 -113,000 59 -airbrush 59 -nine-week 59 -seventh-day 59 -mother-of 59 -duracell 59 -attias 59 -hawarden 59 -koralewski 59 -calico 59 -taiga 59 -teesdale 59 -crowne 59 -hypoallergenic 59 -disinformation 59 -1,000-a-night 59 -wanamaker 59 -437 59 -434 59 -o-levels 59 -utensil 59 -urbana 59 -maciej 59 -nme 59 -vestiges 59 -samour 59 -iu 59 -monbeg 59 -self-assured 59 -mendip 59 -eco-home 59 -debt-ceiling 59 -kain 59 -ashlyn 59 -wolinski 59 -nine-years-old 59 -telephoto 59 -broody 59 -bejeweled 59 -squishy 59 -naughtie 59 -pasalic 59 -coders 59 -hoody 59 -denotes 59 -stadler 59 -25-man 59 -745 59 -n'zonzi 59 -joneses 59 -skippers 59 -confederates 59 -linz 59 -mcroberts 59 -pineapples 59 -isna 59 -high-flyers 59 -vali 59 -quashing 59 -crocked 59 -whitefield 59 -linkage 59 -geologically 59 -perfunctory 59 ------ 59 -geisinger 59 -gansler 59 -kays 59 -pullback 59 -bionics 59 -helium-filled 59 -ohanian 59 -9.0-magnitude 59 -h_mackay 59 -helter 59 -al-khelaifi 59 -three-pointer 59 -galavis 59 -sabet 59 -risers 59 -owings 59 -thorntons 59 -aipac 59 -borja 59 -crowd-pleasing 59 -sumter 59 -birkett 59 -bian 59 -mamie 59 -precludes 59 -michala 59 -abramoff 59 -recreations 59 -re-examination 59 -cashew 59 
-wanchope 59 -moisturise 59 -janesville 59 -depleting 59 -sharpshooter 59 -forty-two 59 -adekoya 59 -hispaniola 59 -slackline 59 -trig 59 -astakhov 59 -diclofenac 59 -deshawn 59 -graduations 59 -minas 59 -torsten 59 -panelled 59 -bagshot 59 -crespi 59 -yevhen 59 -oppressors 59 -thame 59 -noncompliance 59 -flopping 59 -exempts 59 -moisturizer 59 -verheijen 59 -10-second 59 -20:59 59 -priestess 59 -inboxes 59 -fidyka 59 -ketones 59 -16:24 59 -inglot 59 -double-murder 59 -alom 59 -pilling 59 -alpacas 59 -bylaws 59 -chancellery 59 -parwan 59 -bohol 59 -urticaria 59 -cahuzac 59 -64-bit 59 -mundine 59 -crowd-sourced 59 -4/20 59 -alfaro 59 -gunnery 59 -brannon 59 -colonoscopies 59 -commendations 59 -scarpetta 59 -luangwa 59 -abadi 59 -puffer 59 -kev 59 -lismore 59 -tolman 59 -bdo 59 -nicolaus 59 -bone-chilling 59 -myfox 59 -theatrically 59 -wplg 59 -slevin 59 -waffen 59 -pinkman 59 -noyce 59 -elkin 59 -tollcross 59 -peruvians 59 -coady 59 -tammie 59 -highbrow 59 -narrow-minded 59 -cognizant 59 -zappos 59 -resetting 59 -pro-immigration 59 -banafsha 59 -morsels 59 -12-14 59 -truest 59 -calypso 59 -vexed 59 -shuddering 59 -wintertime 59 -retallick 59 -prestbury 59 -p90x 59 -malians 59 -whitson 59 -gloat 59 -1750 59 -manney 59 -vedova 59 -charterhouse 59 -kuol 59 -bagan 59 -enya 59 -kaliningrad 59 -standley 59 -iag 59 -ever-more 59 -peterhansel 59 -tighar 59 -crawlies 59 -misjudgment 59 -harrah 59 -ether 59 -03:07 59 -03:03 59 -35mm 59 -owusu 59 -obiang 59 -105million 59 -adults-only 59 -earthen 59 -mid-50s 59 -barksdale 59 -pre-dates 59 -savar 59 -winner-take-all 59 -reichstag 59 -kirobo 59 -schams 59 -pummelled 59 -damas 59 -sumarti 59 -grubs 59 -non-cancerous 59 -expansions 59 -burchill 59 -tear-jerking 59 -elmbridge 59 -bitchy 59 -anencephaly 59 -bassil 59 -arizonans 59 -revitalization 59 -al-kassasbeh 59 -amfar 59 -leibowitz 59 -rotational 59 -jordaan 59 -hina 59 -cringed 59 -rics 59 -synapses 59 -softest 59 -mountford 59 -chealander 59 -ellis-bextor 59 -bust-ups 59 -roams 59 -msu 59 -32ft 59 -slacker 59 -carlina 59 -multicolored 59 -ramage 59 -impound 59 -ebola-affected 59 -porthleven 59 -perpetuity 59 -wincing 59 -thorney 59 -blue-and-white 59 -assimilated 58 -keatings 58 -atia 58 -chopin 58 -meakin 58 -fatherless 58 -mid-1950s 58 -suds 58 -fakih 58 -marne 58 -14:37 58 -plaything 58 -anti-poaching 58 -camino 58 -center-back 58 -myriam 58 -vieques 58 -palates 58 -schemer 58 -sunfish 58 -moormann 58 -9ins 58 -administratively 58 -fixed-term 58 -gallegos 58 -massara 58 -drawbridge 58 -geneva-based 58 -aleksandra 58 -gallops 58 -specially-made 58 -bialek 58 -chestnuts 58 -vicarious 58 -denbigh 58 -legionella 58 -goalline 58 -aegypti 58 -moda 58 -zero-sum 58 -immunised 58 -chanda 58 -dauntless 58 -yavapai 58 -ten-fold 58 -weirder 58 -one-page 58 -cleats 58 -respirators 58 -dalla 58 -mikkel 58 -guzzling 58 -maathai 58 -cranks 58 -far-off 58 -thresher 58 -straights 58 -462 58 -lomond 58 -buzzword 58 -16-24 58 -leaderless 58 -moto2 58 -tremble 58 -roomba 58 -superstardom 58 -pitchside 58 -spliced 58 -sola 58 -swe 58 -impairs 58 -henriksen 58 -c-4 58 -tanni 58 -edoardo 58 -roose 58 -certifications 58 -mouthfuls 58 -baraa 58 -ellbretland 58 -gartside 58 -stephenville 58 -443 58 -423 58 -denunciation 58 -apaches 58 -whishaw 58 -charly 58 -viner 58 -weavers 58 -110mph 58 -loudon 58 -!!!!!! 
58 -chf 58 -lustrous 58 -high-flyer 58 -megumi 58 -fulcher 58 -peachy 58 -covina 58 -blatz 58 -cornerstones 58 -26.2-mile 58 -15:44 58 -15:46 58 -iihs 58 -fogg 58 -hedge-fund 58 -biopharmaceutical 58 -aftercare 58 -nonverbal 58 -909090 58 -paulie 58 -dispenses 58 -raincoats 58 -champaign 58 -recon 58 -visionaries 58 -fashion-conscious 58 -unattached 58 -day-by-day 58 -asylums 58 -20:47 58 -sure-fire 58 -16:35 58 -16:34 58 -week-old 58 -378 58 -stupak 58 -taper 58 -enteroviruses 58 -wrona 58 -trailblazers 58 -bosman 58 -dress-up 58 -ania 58 -half-ton 58 -3407 58 -poshest 58 -on-scene 58 -iso 58 -30kg 58 -beano 58 -volcanism 58 -drop-out 58 -crossbench 58 -pickford 58 -mak 58 -canandaigua 58 -ammon 58 -polymers 58 -mincemeat 58 -molton 58 -uncontacted 58 -perseid 58 -bah 58 -risa 58 -incontrovertible 58 -majuro 58 -leedy 58 -klinger 58 -bullseye 58 -ustinov 58 -unaltered 58 -canons 58 -squib 58 -penance 58 -well-oiled 58 -socotra 58 -r2-d2 58 -nafeek 58 -araguz 58 -mournful 58 -ney 58 -lovelorn 58 -527 58 -webbing 58 -chevening 58 -frameworks 58 -sequential 58 -appraisals 58 -stampa 58 -comme 58 -paraffin 58 -riddler 58 -medan 58 -blotches 58 -nowicki 58 -pantries 58 -howland 58 -dimas 58 -ebola-like 58 -arbuthnot 58 -haslet-davis 58 -softbank 58 -oxygenated 58 -inspector-general 58 -jiro 58 -joost 58 -whipsnade 58 -estyn 58 -shirin 58 -sentimentality 58 -konna 58 -austro-hungarian 58 -repositioning 58 -ronni 58 -malaya 58 -multistate 58 -hegarty 58 -inaccuracy 58 -enola 58 -phew 58 -one-armed 58 -secondment 58 -mantises 58 -quorn 58 -mary-kate 58 -waterstone 58 -hyun-ah 58 -budden 58 -unifil 58 -remotest 58 -aint 58 -tamarin 58 -300lbs 58 -whittier 58 -holmby 58 -qur 58 -barthel 58 -lucifer 58 -motm 58 -roaccutane 58 -bushby 58 -recesses 58 -syco 58 -trapani 58 -tethering 58 -toler 58 -turbocharged 58 -raad 58 -red-headed 58 -shumlin 58 -3-inch 58 -guilt-free 58 -razgrad 58 -irena 58 -cobblestones 58 -devore 58 -8,600 58 -disinfection 58 -adamu 58 -mariella 58 -officer-involved 58 -meath 58 -4-5-1 58 -vaping 58 -leopoldo 58 -beliebers 58 -montmartre 58 -ascends 58 -confectionary 58 -musket 58 -kayongo 58 -bristol-based 58 -transcontinental 58 -chauncey 58 -tipples 58 -incorporation 58 -colonized 58 -414 58 -416 58 -kristopher 58 -low-dose 58 -throwers 58 -86m 58 -off-peak 58 -sobchak 58 -splintering 58 -formanek 58 -bacile 58 -defensible 58 -westernised 58 -nps 58 -rupp 58 -kelleys 58 -isherwood 58 -220million 58 -razia 58 -amon 58 -ketone 58 -duncroft 58 -hier 58 -webbed 58 -crowson 58 -darcis 58 -ldp 58 -nibbled 58 -chummy 58 -bischoff 58 -777-200er 58 -barracuda 58 -exclusionary 58 -granules 58 -gossard 58 -elouise 58 -douglin 58 -clinique 58 -batiste 58 -lympne 58 -fitzherbert 58 -charlestown 58 -elana 58 -6/10 58 -siebert 58 -kasprzak 58 -hakeem 58 -shani 58 -:30 58 -basquiat 58 ------- 58 -grimly 58 -wranglers 58 -pti 58 -rationalize 58 -dumpty 58 -face-saving 58 -fiber-optic 58 -motability 58 -vichy 58 -pigmentosa 58 -specially-adapted 58 -oce 58 -left-winger 58 -parasailing 58 -tv2 58 -obasanjo 58 -ohioans 58 -coronial 58 -tass 58 -coombe 58 -baden-powell 58 -singalong 58 -20:54 58 -budgie 58 -anthony_hay 58 -hiv-infected 58 -mckechnie 58 -clear-eyed 58 -curacao 58 -unwrapping 58 -362 58 -124,000 58 -tiling 58 -electro 58 -sledges 58 -yeardley 58 -leashes 58 -amberley 58 -barbaro 58 -role-play 58 -2027 58 -doukara 58 -canonized 58 -geopolitics 58 -ariz. 
58 -flat-pack 58 -bream 58 -sheard 58 -twitchy 58 -two-horse 58 -betsey 58 -fail-safe 58 -noemi 58 -donal 58 -dunks 58 -cone-shaped 58 -khatoon 58 -glenfield 58 -made-for-tv 58 -217mph 58 -bolingbrook 58 -j' 58 -keshia 58 -ides 58 -nawaf 58 -prosaic 58 -timberland 58 -tosic 58 -platelet 58 -staterooms 58 -re-offend 58 -1843 58 -tgi 58 -double-check 58 -410,000 58 -glasman 58 -two-faced 58 -20:11 58 -14:44 58 -breakups 58 -garraway 58 -horrendously 58 -die-in 58 -vydra 58 -mcvie 58 -100,000-a-year 58 -light-coloured 58 -fifth-grader 58 -achingly 58 -teng 58 -bannockburn 58 -c-word 58 -2018-19 58 -restock 58 -streaky 58 -gloating 58 -second-string 58 -second-guessing 58 -singaporeans 58 -saucedo 58 -admonition 58 -inverclyde 58 -al-habashi 58 -nonhuman 58 -pasternak 58 -17:20 58 -capsicum 58 -tenney 58 -deland 58 -triptych 58 -half-blood 58 -bertarelli 58 -guenther 58 -over-eating 58 -bridport 58 -comp 58 -tisch 58 -refueled 58 -careening 58 -leatherback 58 -bordier 58 -morgenstein 58 -fast-rising 58 -disobedient 58 -blanked 58 -ambushing 58 -36.5 58 -cortical 58 -hooping 58 -feedings 58 -harvard-smithsonian 58 -jesuits 58 -sudeikis 58 -toe-curling 58 -buress 58 -palins 58 -multi-faith 58 -ck 58 -dillingham 58 -araujo 58 -asiatic 58 -.44 58 -ably 58 -satanists 58 -ssc 58 -ajmol 58 -lansdowne 58 -single-day 58 -westhauser 58 -seeman 58 -bafta-winning 58 -norgay 58 -assyrians 58 -antelopes 58 -constructor 58 -wrotham 58 -well-founded 58 -oxygenation 58 -conrado 58 -rosina 58 -uk-born 58 -fallis 58 -twinge 58 -al-adel 58 -grasso 58 -hate-crime 58 -foibles 58 -take-down 58 -sedlacek 58 -deceleration 58 -millett 58 -stice 58 -illustrators 57 -oldies 57 -simonson 57 -papoulias 57 -teetered 57 -bankhead 57 -b-team 57 -prerecorded 57 -archangel 57 -sportscar 57 -klosters 57 -paroles 57 -melua 57 -denizens 57 -zhuo 57 -sangary 57 -brynne 57 -emanuella 57 -well-developed 57 -four-seater 57 -1020 57 -sodini 57 -1.32 57 -runaround 57 -ozarks 57 -qsymia 57 -yeats 57 -scandal-plagued 57 -well-adjusted 57 -381 57 -reverts 57 -wrana 57 -chatrier 57 -orde 57 -clawson 57 -koon 57 -gers 57 -marksandspencer.com 57 -horsewoman 57 -cutback 57 -one-fourth 57 -midget 57 -roldan 57 -cagayan 57 -amplifies 57 -sphynx 57 -battle-scarred 57 -reclines 57 -155mph 57 -unfurling 57 -755 57 -fraying 57 -kal 57 -mendelsohn 57 -fumbles 57 -ahmedzay 57 -rutting 57 -monotony 57 -miran 57 -dragoon 57 -belles 57 -dimitris 57 -bayne 57 -ktvi 57 -bir 57 -28.6 57 -bartels 57 -sangster 57 -banality 57 -franchisee 57 -faughey 57 -consign 57 -minefields 57 -33/1 57 -banqueting 57 -follow-on 57 -plaice 57 -yalta 57 -fifty-five 57 -clackmannanshire 57 -east-southeast 57 -superlative 57 -corinth 57 -rom-com 57 -fathers4justice 57 -bekele 57 -tommie 57 -handkerchiefs 57 -outperforming 57 -wined 57 -madea 57 -consett 57 -crickmore 57 -chinn 57 -24.3 57 -suzhou 57 -abenomics 57 -blauser 57 -02:05 57 -wasatch 57 -harpers 57 -colic 57 -gazelles 57 -stewed 57 -2033 57 -jahangir 57 -jisr 57 -15:41 57 -photocopy 57 -brodkin 57 -nicolae 57 -continuance 57 -long-lived 57 -clubber 57 -vaccaro 57 -bonser 57 -lap-band 57 -showmanship 57 -almaty 57 -treads 57 -backstop 57 -chrisley 57 -heins 57 -inflatables 57 -al-qassam 57 -yuki 57 -varney 57 -ahmadzai 57 -weekender 57 -coupland 57 -untried 57 -40cm 57 -gause 57 -stressed-out 57 -yepes 57 -isadore 57 -chocolat 57 -self-propelled 57 -self-fulfilling 57 -weise 57 -lunching 57 -pronouns 57 -peace-loving 57 -stopgap 57 -playgroup 57 -wolsey 57 -ferro 57 -tarred 57 -hedren 57 -drugeon 57 -geyer 57 -diktat 
57 -lamas 57 -doormat 57 -pistes 57 -connah 57 -gritted 57 -septa 57 -tinderbox 57 -retching 57 -particulates 57 -teleportation 57 -mejias 57 -jocks 57 -portcullis 57 -kyw 57 -ledford 57 -dialogues 57 -ghoncheh 57 -re-united 57 -unvarnished 57 -eurosport 57 -karie 57 -sophos 57 -raitt 57 -reacher 57 -materiel 57 -dynamically 57 -discoverer 57 -bacillus 57 -svr 57 -6-foot-2 57 -162,000 57 -bakken 57 -dnipropetrovsk 57 -toothpick 57 -ayton 57 -disciplinarian 57 -efford 57 -grist 57 -remiss 57 -minaret 57 -mystifying 57 -co-opted 57 -03:10 57 -hypothesized 57 -musculoskeletal 57 -azam 57 -serrato 57 -deplete 57 -great-aunt 57 -westside 57 -commandment 57 -prostrate 57 -mutineers 57 -wring 57 -mail-order 57 -jurisdictional 57 -gaviria 57 -ayinde 57 -lovemaking 57 -glossop 57 -three-year-olds 57 -619 57 -nightstand 57 -flowered 57 -tidings 57 -retrieves 57 -dames 57 -independiente 57 -98th 57 -brno 57 -beeps 57 -matthaus 57 -refocused 57 -wrongdoings 57 -eweida 57 -windies 57 -levesconte 57 -all-you-can-eat 57 -hounye 57 -brunettes 57 -chudleigh 57 -neuropathy 57 -radiated 57 -kiro-tv 57 -rock-solid 57 -paralytic 57 -katyn 57 -catheters 57 -magnification 57 -nakamura 57 -translational 57 -biya 57 -vasily 57 -therapeutics 57 -pacchieri 57 -usp 57 -spindly 57 -conveyer 57 -kester 57 -pawnbroker 57 -suárez 57 -oneida 57 -schutz 57 -u.n.-arab 57 -molding 57 -cabriolet 57 -assemblywoman 57 -kabbalah 57 -devaluation 57 -slink 57 -leonel 57 -causation 57 -bedsheet 57 -adenovirus 57 -olmos 57 -frowns 57 -barrows 57 -metaphorically 57 -ender 57 -grender 57 -seder 57 -dadt 57 -gatti 57 -embargoes 57 -honan 57 -fall/winter 57 -rohr 57 -brockman 57 -wheelhouse 57 -facie 57 -18-months-old 57 -self-harmed 57 -unmissable 57 -urwin 57 -scampton 57 -vesnina 57 -clip-on 57 -roosting 57 -divo 57 -matchwinner 57 -scrawling 57 -ndtv 57 -halpin 57 -cleavers 57 -halos 57 -adulterated 57 -clamored 57 -trouncing 57 -bused 57 -+44 57 -ingots 57 -hotdog 57 -palmed 57 -394 57 -high-velocity 57 -orem 57 -20-plus 57 -tip-top 57 -leidy 57 -eckstein 57 -glimmers 57 -purley 57 -winans 57 -offstage 57 -deneuve 57 -farndon 57 -artichoke 57 -tax-avoidance 57 -unshakeable 57 -hialeah 57 -thaugsuban 57 -loonies 57 -charon 57 -meltwater 57 -dewayne 57 -ex-cia 57 -baldy 57 -mayon 57 -malignaggi 57 -abt 57 -truthfulness 57 -scalable 57 -qipco 57 -mentorship 57 -jarkko 57 -scottie 57 -niklas 57 -newcombe 57 -refuges 57 -weeded 57 -leaver 57 -spongy 57 -cpa 57 -hotdogs 57 -459 57 -staggers 57 -69.99 57 -kruidbos 57 -klotz 57 -downriver 57 -underwriters 57 -maitlis 57 -howitzer 57 -sitters 57 -jaap 57 -dougan 57 -male-only 57 -netbook 57 -outshine 57 -lower-league 57 -heysel 57 -retinitis 57 -juke 57 -thruway 57 -cabinet-level 57 -moallem 57 -m.i.a. 
57 -imprints 57 -abbi 57 -nodules 57 -then-fiancée 57 -cadence 57 -e-petition 57 -clear-out 57 -interviewee 57 -normal-sized 57 -kindergartens 57 -10-time 57 -gedion 57 -sawdust 57 -gladbach 57 -thurso 57 -risk-based 57 -redone 57 -eldon 57 -zervas 57 -whaanga 57 -reliefs 57 -ex-chief 57 -chancery 57 -whsmith 57 -whitburn 57 -americorps 57 -cockatoo 57 -critchlow 57 -forewarned 57 -tidworth 57 -bannu 57 -coppers 57 -haque 57 -hitches 57 -hollered 57 -foretold 57 -kaden 57 -rashida 57 -carnal 57 -belden 57 -parklife 57 -5.95 57 -20:32 57 -call-out 57 -cecelia 57 -adjutant 57 -346 57 -50c 57 -ei 57 -asp 57 -elwazer 57 -farriss 57 -estefan 57 -mccarran 57 -mulder 57 -supervolcano 57 -gadgetry 57 -decontaminate 57 -1842 57 -culley 57 -pickaxe 57 -spineless 57 -plotline 57 -20:17 57 -thohir 57 -dilution 57 -skintight 57 -ogre 57 -baldrick 57 -five-metre 57 -fifth-floor 57 -stateroom 57 -mahone 57 -transponders 57 -2600 57 -rfs 57 -coors 57 -iriyanto 57 -womanly 57 -dicey 57 -stiner 57 -flexibly 57 -corduroy 57 -tinkered 57 -holyhead 57 -tew 57 -scoutmaster 57 -stoppard 57 -double-header 57 -fantasise 57 -top-of-the-table 57 -bluffing 57 -fiend 57 -taub 57 -hydroxide 57 -ravioli 57 -kimble 57 -credential 57 -sunburnt 57 -arkady 57 -non-halal 57 -burnings 57 -cataloguing 57 -dubliner 57 -painkilling 57 -amstetten 57 -loko 57 -grondona 57 -toussaint 57 -msps 57 -1803 57 -@hiddencash 57 -mid-2015 57 -do-over 57 -transverse 57 -bantleman 57 -à 57 -116,000 57 -denigrated 57 -ardi 57 -sy 57 -tirol 57 -invigorating 57 -wilford 57 -poindexter 57 -disbelievers 57 -stangroom 57 -gemmell 57 -shadid 57 -bolting 57 -unobtrusive 57 -hensarling 57 -crosse 57 -coatesville 57 -01:40 57 -rafale 57 -20:06 57 -kabang 57 -hungaroring 57 -modernism 57 -overstaying 57 -leelah 57 -ecmo 57 -anti-trust 57 -declassify 57 -over-reliance 57 -resentments 57 -transmissible 57 -perea 57 -chutzpah 57 -post-surgery 57 -co-created 57 -borden 57 -lauri 57 -119,000 57 -ultra-high 57 -electrolysis 57 -pinkett 57 -sensationalist 57 -1.14 57 -courgette 57 -sats 57 -box-to-box 57 -nori 56 -advantaged 56 -thankless 56 -jawed 56 -correlates 56 -mattiacci 56 -co-founding 56 -keyless 56 -burnat 56 -red-and-white 56 -calvo 56 -36-hour 56 -pecked 56 -rennison 56 -pro-morsi 56 -bilson 56 -smash-and-grab 56 -danner 56 -gelatin 56 -suggs 56 -revolutionizing 56 -diren 56 -truong 56 -isinbayeva 56 -heenes 56 -samaria 56 -bednar 56 -rideout 56 -asturias 56 -warriner 56 -pippin 56 -plenary 56 -enabler 56 -boots.com 56 -40kg 56 -131,000 56 -19.4 56 -battiston 56 -choupette 56 -exacerbates 56 -scaffolder 56 -alums 56 -smythson 56 -40-50 56 -ajit 56 -acheson 56 -musee 56 -propublica 56 -tuol 56 -willenhall 56 -brigid 56 -rollings 56 -reparation 56 -raji 56 -laughton 56 -sepia 56 -kenai 56 -fraley 56 -shawshank 56 -savant 56 -moneysavingexpert.com 56 -merfeld 56 -re-creation 56 -asexual 56 -820,000 56 -jakes 56 -rybolovleva 56 -raworth 56 -revives 56 -follies 56 -150g 56 -scotrail 56 -orland 56 -palk 56 -muema 56 -shamsi 56 -nsl 56 -imgur 56 -high-skilled 56 -rebooked 56 -depress 56 -abramowitz 56 -keeled 56 -elmendorf 56 -soni 56 -suleyman 56 -d'honneur 56 -maffei 56 -three-fold 56 -euromonitor 56 -464 56 -28.4 56 -1440 56 -colfer 56 -hurrying 56 -trendiest 56 -hewell 56 -iud 56 -dalziel 56 -marineland 56 -productively 56 -introspective 56 -selee 56 -rigau 56 -bloodstock 56 -skylines 56 -delevigne 56 -miron 56 -143rd 56 -vossen 56 -christiaan 56 -antithetical 56 -teahouse 56 -tenby 56 -bayh 56 -valdivia 56 -rosters 56 -reselling 56 -redecorated 56 
-tiburon 56 -flu-related 56 -farber 56 -lumsden 56 -21.3 56 -multilingual 56 -communicative 56 -mojang 56 -specificity 56 -02:07 56 -pacifier 56 -readable 56 -liquidate 56 -riverton 56 -10-8 56 -yaqoob 56 -kamin 56 -bournville 56 -ysgol 56 -dreamgirls 56 -ignominious 56 -15:48 56 -entanglements 56 -pre-cancerous 56 -sportswomen 56 -ginseng 56 -cerise 56 -light-weight 56 -nudges 56 -mutharika 56 -al-brega 56 -fleshy 56 -serignese 56 -shutout 56 -bruzas 56 -cally 56 -nursultan 56 -garcia-margallo 56 -aubergine 56 -triumvirate 56 -tripe 56 -worn-out 56 -manos 56 -gisela 56 -bond-style 56 -archivists 56 -faberge 56 -saxo 56 -urmston 56 -bhattacharjee 56 -goshen 56 -butting 56 -20:25 56 -outpourings 56 -salome 56 -cerberus 56 -par-three 56 -mondella 56 -millenium 56 -a30 56 -brain-eating 56 -epidemiological 56 -godman 56 -ribena 56 -marwijk 56 -bruton 56 -dileo 56 -rivard 56 -paulus 56 -pinsent 56 -marita 56 -dampener 56 -cakir 56 -celestin 56 -potash 56 -turia 56 -possums 56 -shwe 56 -semifinalists 56 -graco 56 -eimiller 56 -benicio 56 -evertonians 56 -late-stage 56 -hiers 56 -pappas 56 -sadomasochism 56 -+2 56 -mccreery 56 -ljubljana 56 -shirking 56 -harter 56 -shape-shifting 56 -salamanca 56 -la-based 56 -geldenhuys 56 -irritations 56 -mutts 56 -artsy 56 -airprox 56 -goulburn 56 -phonesavanh 56 -christiansen 56 -gault 56 -fairest 56 -spellbinding 56 -rues 56 -x-wing 56 -herath 56 -sayre 56 -163,000 56 -shak 56 -23billion 56 -manassas 56 -frana 56 -lorelei 56 -lindley 56 -tatarstan 56 -re-join 56 -pershing 56 -ex-player 56 -crypts 56 -tunguska 56 -internet.org 56 -okaloosa 56 -allocations 56 -glib 56 -trivago 56 -wickens 56 -back-four 56 -ytn 56 -salafists 56 -israeli-occupied 56 -child-free 56 -estepp 56 -politicised 56 -boneless 56 -dorman 56 -micaela 56 -ridgefield 56 -leakers 56 -doin 56 -invigorated 56 -non-stick 56 -wightman 56 -hangman 56 -trigger-happy 56 -beckley 56 -cherwell 56 -nath 56 -cleverer 56 -monnin 56 -rorschach 56 -all-but 56 -revolvers 56 -redder 56 -dun 56 -letzgo 56 -constricted 56 -sizemore 56 -dinara 56 -fnb 56 -bolsters 56 -fourth-grader 56 -hythe 56 -machinist 56 -seidel 56 -real-terms 56 -pilfered 56 -veritas 56 -182,000 56 -uwaydah 56 -raglan 56 -cygnet 56 -taylors 56 -15kg 56 -chairperson 56 -22.9 56 -prematurity 56 -unmade 56 -plitt 56 -satorova 56 -28-24 56 -eights 56 -anti-japanese 56 -macarena 56 -issy 56 -baniyas 56 -elissa 56 -galette 56 -alternated 56 -baffle 56 -vincennes 56 -het 56 -hew 56 -attica 56 -eu-wide 56 -sagrada 56 -y. 
56 -duplicated 56 -psyched 56 -fifth-largest 56 -cold-case 56 -o'flynn 56 -poms 56 -freshener 56 -under-reported 56 -699 56 -shannan 56 -taobao 56 -419 56 -ranjit 56 -dratel 56 -profiler 56 -bloodhounds 56 -super-earth 56 -397 56 -nesirky 56 -21:03 56 -5-foot 56 -meister 56 -barenaked 56 -patinkin 56 -deion 56 -fellini 56 -jaffer 56 -exum 56 -banaz 56 -hammans 56 -carting 56 -tedx 56 -brudenell-bruce 56 -rudge 56 -blood-covered 56 -foals 56 -befallen 56 -maldivian 56 -sanctimonious 56 -forty-four 56 -dominicking_dm 56 -troupes 56 -rahimi 56 -shalt 56 -ribbsaeter 56 -dunstan 56 -namib 56 -23.4 56 -pulsing 56 -tarpaulins 56 -kosta 56 -pvt 56 -alhimidi 56 -175million 56 -depute 56 -mailboxes 56 -dunleavy 56 -pushpa 56 -iger 56 -pedaling 56 -rosebud 56 -bethpage 56 -coves 56 -mansouret 56 -dollop 56 -satish 56 -al-hillis 56 -crampton 56 -tie-in 56 -wolverines 56 -off-white 56 -no-balls 56 -world-first 56 -lollapalooza 56 -sooo 56 -byproducts 56 -ditka 56 -stonewalled 56 -37-year 56 -upfield 56 -meadowcroft 56 -omits 56 -jaborian 56 -telephony 56 -plonk 56 -warping 56 -desegregation 56 -justly 56 -popovic 56 -mccance 56 -eun 56 -octavio 56 -vardag 56 -cervarix 56 -fishtail 56 -norgaard 56 -blackbirds 56 -sext 56 -crier 56 -debrett 56 -cis 56 -stylistic 56 -toll-free 56 -frise 56 -peebles 56 -ultranationalist 56 -cmdr 56 -quelling 56 -brackley 56 -machus 56 -vtv 56 -dwain 56 -melia 56 -gerada 56 -pavlos 56 -dram 56 -engelbrecht 56 -druid 56 -repentant 56 -branco 56 -icebreakers 56 -dirie 56 -wyshak 56 -20:30 56 -20:33 56 -confidence-building 56 -eckhart 56 -ian_ladyman_dm 56 -self-destructing 56 -pulverized 56 -cliched 56 -166,000 56 -tantric 56 -nerazzurri 56 -parisse 56 -beswick 56 -loony 56 -cmb 56 -pugsley 56 -mrozek 56 -anti-fraud 56 -lentini 56 -burdick 56 -lubricants 56 -riggitano 56 -cadel 56 -conscripted 56 -22ft 56 -waikato 56 -brukner 56 -brezhnev 56 -kraemer 56 -nz$ 56 -goalwards 56 -morristown 56 -pugnacious 56 -five-times 56 -borgen 56 -inadequacies 56 -dippy 56 -03:20 56 -faella 56 -slugging 56 -spiritualist 56 -delft 56 -prospectors 56 -sign-ups 56 -renae 56 -wiel 56 -3,280 56 -unm 56 -jettison 56 -sportscenter 56 -luann 56 -primo 56 -pocket-sized 56 -termite 56 -cluj 56 -payet 56 -co-leader 56 -kfar 56 -mercantile 56 -sushil 56 -permanence 56 -souks 56 -non-member 56 -ilham 56 -bartiromo 56 -harshness 56 -cyberwar 56 -cooperates 56 -plodding 56 -2,750 56 -lambast 56 -superlatives 56 -figs 56 -amplifying 56 -tidwell 56 -getup 56 -tableau 56 -videoing 56 -easy-to-use 56 -taff 56 -widgets 56 -horlivka 56 -hatter 56 -nitric 56 -befits 56 -rabaa 56 -stepmom 56 -craighope01 56 -stalingrad 56 -chappaqua 56 -mcnuff 56 -2x 56 -zakharchenko 56 -talitha 56 -loin 56 -miso 56 -hipps 56 -al-habib 56 -overawed 56 -baddest 56 -engendered 56 -loyally 56 -animal-rights 56 -decorators 56 -condense 56 -2.95 56 -urchins 56 -aubrey-ward 56 -stenhouse 56 -singer/songwriter 56 -two-seat 56 -coit 56 -genesee 56 -smarties 56 -blue-green 56 -ostriches 56 -change4life 56 -707 56 -sqm 56 -favreau 56 -agonizingly 56 -boerner 56 -strife-torn 56 -big-city 56 -disbarred 56 -satherley 55 -unsinkable 55 -roundworm 55 -ensenada 55 -azteca 55 -molasses 55 -fastening 55 -paglia 55 -ganesh 55 -battler 55 -uppsala 55 -bussed 55 -pascall 55 -steppe 55 -beeped 55 -gillman 55 -higdon 55 -spurts 55 -improprieties 55 -shepherding 55 -yuck 55 -gendarmerie 55 -hummingbirds 55 -minter 55 -19.3 55 -mythic 55 -devenney 55 -meritocracy 55 -bagh 55 -bruhl 55 -must-haves 55 -cnc 55 -tebowing 55 -granholm 55 -loved-ones 55 -cottam 
55 -jepson 55 -breaded 55 -ninette 55 -entebbe 55 -impostor 55 -leder 55 -407 55 -malika 55 -suppers 55 -fps 55 -fontainebleau 55 -kickstarted 55 -gemstone 55 -matiullah 55 -cheetos 55 -geoglyphs 55 -zinn 55 -mcandrew 55 -costar 55 -manifold 55 -blood-thinning 55 -turismo 55 -cogent 55 -paktia 55 -deron 55 -oilers 55 -brydon 55 -blevins 55 -rin 55 -174,000 55 -metra 55 -homers 55 -leela 55 -near-misses 55 -greipel 55 -low-speed 55 -fluctuates 55 -hissed 55 -mauritian 55 -football-mad 55 -undefined 55 -ns&i 55 -unsw 55 -savoring 55 -255,000 55 -sethi 55 -reorganize 55 -anti-chinese 55 -lifejacket 55 -anti-protest 55 -orellana 55 -cuter 55 -acceptability 55 -valastro 55 -mollusc 55 -yaroslavl 55 -guler 55 -toral 55 -bragman 55 -divina 55 -2.36 55 -woolacombe 55 -pro-british 55 -wfts 55 -02:02 55 -newsfeed 55 -gendarme 55 -derecho 55 -15:43 55 -carpeting 55 -taranis 55 -inventories 55 -chindamo 55 -fitzwilliam 55 -fondle 55 -100s 55 -islay 55 -grotesquely 55 -iuds 55 -anstey 55 -brassington 55 -joly 55 -scarano 55 -katt 55 -bogdanov 55 -40lbs 55 -mccalla 55 -sulfide 55 -dmaa 55 -kisco 55 -20:41 55 -radicalize 55 -jewel-encrusted 55 -ten-man 55 -sportsaid 55 -self-destruction 55 -patina 55 -tasters 55 -aye 55 -claiborne 55 -judicious 55 -dewi 55 -well-travelled 55 -vapors 55 -galea 55 -repositioned 55 -gas-powered 55 -britani 55 -remonstrated 55 -barone 55 -shearers 55 -tibbs 55 -scapegoating 55 -breast-fed 55 -declassification 55 -sameer 55 -overdrafts 55 -polis 55 -blow-dried 55 -20:29 55 -490,000 55 -cleanses 55 -younan 55 -mutombo 55 -shelia 55 -giddings 55 -200,000-a-week 55 -bosnich 55 -two-door 55 -postiga 55 -dellacqua 55 -sonali 55 -favouritism 55 -rickshaws 55 -pogue 55 -osi 55 -oswaldo 55 -benzo 55 -syntagma 55 -chancey 55 -chirping 55 -emms 55 -jeane 55 -spray-on 55 -earley 55 -practicable 55 -dangi 55 -mcs 55 -moises 55 -fermanagh 55 -buav 55 -ntege 55 -marinas 55 -rosette 55 -10-match 55 -ucas 55 -marshalled 55 -risk-free 55 -jeroen 55 -scot-free 55 -seven-under 55 -;-rrb- 55 -hdtv 55 -tunnicliffe 55 -povero 55 -tuft 55 -latiker 55 -stv 55 -two-decade 55 -penteado 55 -smellie 55 -formers 55 -ices 55 -40-day 55 -parque 55 -ebola-infected 55 -0800 55 -31.6 55 -frith 55 -ormrod 55 -olli 55 -sohn 55 -39.50 55 -colm 55 -pugachev 55 -farkas 55 -tila 55 -brehm 55 -self-absorbed 55 -mocha 55 -15oz 55 -broeksmit 55 -branning 55 -acars 55 -cris 55 -cooped 55 -scrupulous 55 -honeypot 55 -menaced 55 -jamaat 55 -verdon 55 -calving 55 -theorised 55 -contemptible 55 -keil 55 -abalimba 55 -polunin 55 -maia 55 -overpayments 55 -subasic 55 -machete-wielding 55 -aylett 55 -cet 55 -hypothyroidism 55 -runescape 55 -boken 55 -inedible 55 -longterm 55 -biddy 55 -wringing 55 -cadillacs 55 -tacitly 55 -bosham 55 -elevates 55 -hernias 55 -carmageddon 55 -mid-1800s 55 -pettigrew 55 -seven-years-old 55 -cornbread 55 -adulyadej 55 -hopewell 55 -bewdley 55 -lancasters 55 -gittens 55 -705 55 -weiser 55 -parodying 55 -trickster 55 -viera 55 -ccd 55 -dictation 55 -glutes 55 -snooty 55 -goeth 55 -1813 55 -bough 55 -severino 55 -third-world 55 -438 55 -22.3 55 -22.6 55 -hemsley 55 -moebius 55 -writer-director 55 -beets 55 -truvada 55 -lanyard 55 -aram 55 -popalzai 55 -pillbox 55 -gamesmanship 55 -pentangelo 55 -untouchables 55 -52.5 55 -sashimi 55 -j.t. 
55 -murmurs 55 -regenerated 55 -prebble 55 -39.6 55 -abides 55 -jacuzzis 55 -genomics 55 -nimitz 55 -maharaja 55 -inflates 55 -edo 55 -team-sheet 55 -anaesthetised 55 -sharland 55 -valeri 55 -slow-growing 55 -625,000 55 -westeros 55 -ham-fisted 55 -squeaking 55 -palmieri 55 -ball-sized 55 -conceptions 55 -trager 55 -skint 55 -answerable 55 -bobi 55 -rosenblum 55 -screenwriting 55 -kono 55 -pryke 55 -buffets 55 -rosenstein 55 -sub-committee 55 -not-so-subtle 55 -ingle 55 -strove 55 -dinklage 55 -conservativehome 55 -behr 55 -takashi 55 -multimillionaires 55 -nagpur 55 -camryn 55 -01:37 55 -supplementing 55 -ginobili 55 -elects 55 -viscountess 55 -7p 55 -sav 55 -wilkes-barre 55 -iwobi 55 -eggshells 55 -radiating 55 -lubna 55 -dimiceli 55 -inclinations 55 -ginza 55 -shereka 55 -bedazzled 55 -outgrow 55 -vastness 55 -nsc 55 -piszczek 55 -3km 55 -unfavourably 55 -659 55 -meiktila 55 -coking 55 -worley 55 -ellingson 55 -sugarloaf 55 -sulu 55 -sulk 55 -pilbara 55 -millionairess 55 -ditta 55 -ditty 55 -sedwill 55 -diamondbacks 55 -newsreaders 55 -anti-rejection 55 -flyovers 55 -shop-bought 55 -prescribes 55 -kaneohe 55 -paok 55 -6g 55 -cornucopia 55 -show-off 55 -ptc 55 -alphonse 55 -heckles 55 -pastis 55 -seaworthy 55 -baloo 55 -thistlethwaite 55 -rhoden 55 -roggio 55 -semantic 55 -tolled 55 -mistreat 55 -quiche 55 -echolocation 55 -government-wide 55 -wrongdoers 55 -joão 55 -manzanillo 55 -ewes 55 -trencin 55 -cailin 55 -concerto 55 -oomph 55 -criss-crossed 55 -bertram 55 -wehrmacht 55 -kathlynn 55 -badakhshan 55 -hatchery 55 -mancera 55 -cranium 55 -whizzes 55 -tien 55 -propagating 55 -plibersek 55 -milberg 55 -clove 55 -dinkheller 55 -jobar 55 -restlessness 55 -15,000-a-year 55 -argan 55 -wian 55 -clutterbuck 55 -baptists 55 -studs-up 55 -16:43 55 -remastered 55 -347 55 -35billion 55 -segregationist 55 -serpents 55 -vitals 55 -airlineratings.com 55 -quiver 55 -ganji 55 -government-held 55 -co-existed 55 -sinclaire 55 -03:49 55 -anji 55 -rupaul 55 -relenza 55 -zulfiqar 55 -albi 55 -sokratis 55 -marxists 55 -11.25 55 -sandell 55 -bega 55 -exhibitionist 55 -schooler 55 -pokot 55 -probity 55 -20:18 55 -20:15 55 -wisest 55 -macanthony 55 -co-producer 55 -redraw 55 -simran 55 -demoralized 55 -braunfels 55 -lactation 55 -peron 55 -ticket-holders 55 -floes 55 -511 55 -metsker 55 -prater 55 -valegro 55 -+33 55 -aetna 55 -tommaso 55 -pelling 55 -12/1 55 -ribera 55 -matej 55 -belmore 55 -prepackaged 55 -giamatti 55 -expend 55 -hewer 55 -dehlin 55 -berthed 55 -decaires 55 -papastathopoulos 55 -officialdom 55 -bassingbourn 55 -technicalities 55 -keele 55 -539 55 -miscalculations 55 -tidied 55 -longingly 55 -pai 55 -obtains 55 -jael 55 -stimpson 55 -boardwalks 55 -03:05 55 -south-southwest 55 -skiffs 55 -tisci 55 -rbc 55 -carnahan 55 -days-long 55 -iturbe 55 -half-a-dozen 55 -raffaella 55 -taaffe 55 -pathak 55 -snacked 55 -enquire 55 -winningest 55 -blood-red 55 -dadds 55 -perpignan 55 -pandey 55 -cliques 55 -reubens 55 -uist 55 -unitary 55 -smithereens 55 -equalizing 55 -viciousness 55 -holdsclaw 55 -sunnyside 55 -invertebrate 55 -phuoc 55 -golson 55 -redwoods 55 -serums 55 -figurative 55 -floatation 55 -allude 55 -deputise 55 -vulgarity 55 -yobo 55 -18.1 55 -rapp 55 -pinto-walsh 55 -14:57 55 -forty-six 55 -back-room 55 -bacher 55 -mahe 55 -h2o 55 -protectively 55 -hulbert 55 -derisory 55 -krzyzewski 55 -meilutyte 55 -bobolas 55 -215,000 55 -estée 55 -lazily 55 -katter 55 -becki 55 -boughton 55 -dilnot 55 -defrosting 55 -krusinski 55 -1.12 55 -self-confident 55 -lannister 55 -markea 55 -pucci 55 -neurones 
55 -guerin 55 -undivided 54 -runt 54 -abadie 54 -drmic 54 -ballpoint 54 -laxman 54 -basset 54 -surest 54 -einar 54 -taxpayer-backed 54 -428 54 -730,000 54 -demarcation 54 -four-person 54 -botanists 54 -'99 54 -lunatics 54 -06 54 -downplays 54 -fancier 54 -pshonka 54 -alethea 54 -arby 54 -daftary 54 -charlatan 54 -manilow 54 -kool 54 -hdl 54 -sacrilegious 54 -coed 54 -arwen 54 -.25 54 -mullane 54 -misiewicz 54 -jax 54 -tpims 54 -nbclp.vidpid 54 -squyres 54 -interminable 54 -hirsute 54 -tse 54 -bunton 54 -406 54 -gentlemanly 54 -fuad 54 -nbclp.cmsid 54 -shamu 54 -gollum 54 -iran-contra 54 -quotient 54 -cori 54 -shamans 54 -fen 54 -tranquilliser 54 -aac 54 -bennison 54 -vamp 54 -glinting 54 -stewie 54 -casciaro 54 -ktvk 54 -razor-thin 54 -god-fearing 54 -airdrie 54 -joon-seok 54 -paperwhite 54 -sapling 54 -yorkie 54 -wide-spread 54 -capo 54 -ragland 54 -gainey 54 -eulogies 54 -citizenfour 54 -slavic 54 -nbclp.currentsiteloc 54 -wrist-worn 54 -over-75s 54 -carlotta 54 -slats 54 -wvir 54 -vaudeville 54 -648 54 -chosun 54 -re-married 54 -engler 54 -hooke 54 -post-graduate 54 -bogey-free 54 -sendak 54 -maclaren 54 -02:06 54 -directories 54 -dispassionate 54 -unexplainable 54 -faq 54 -faure 54 -shirtfront 54 -mmm 54 -homeownership 54 -liban 54 -dachshunds 54 -barbieri 54 -kaaba 54 -10-2 54 -yuasa 54 -radnor 54 -english-only 54 -treme 54 -gluing 54 -penghu 54 -meritorious 54 -25g 54 -scrums 54 -spidey 54 -use-of-force 54 -nbclp.vidsec 54 -littlewoods.com 54 -dissociative 54 -leopardstown 54 -bagshawe 54 -prestwich 54 -anti-russian 54 -youcef 54 -hydrants 54 -21-gun 54 -20:50 54 -refills 54 -underfoot 54 -finucane 54 -toogood 54 -margaritas 54 -dandong 54 -600th 54 -deep-pocketed 54 -pouncey 54 -smorgasbord 54 -moll 54 -turnstile 54 -fitzgibbon 54 -bnsf 54 -rostron 54 -worldpanel 54 -20:23 54 -wagstaff 54 -broadbeach 54 -uka 54 -barbed-wire 54 -conquers 54 -smidgen 54 -maisani 54 -spacing 54 -lmfao 54 -balaclava-clad 54 -15:05 54 -doctor-patient 54 -fash 54 -+20 54 -claustrophobia 54 -charlotte-mecklenburg 54 -desjarlais 54 -geir 54 -depreciation 54 -licensees 54 -cahoots 54 -anti-english 54 -paire 54 -despaired 54 -gatting 54 -spray-painting 54 -strontium 54 -kogarah 54 -pyt 54 -straight-line 54 -``` 54 -fly-in 54 -foyle 54 -silverwater 54 -harpenden 54 -zune 54 -l3 54 -misspoke 54 -18-week 54 -business-friendly 54 -top-to-bottom 54 -renunciation 54 -1645 54 -seewald 54 -thornley 54 -warders 54 -karrueche 54 -janiero 54 -avoiders 54 -hot-air 54 -thamesmead 54 -zionism 54 -figoski 54 -cataloging 54 -ddt 54 -sochaux 54 -1834 54 -moxie 54 -quang 54 -rhyming 54 -gantt 54 -gaffe-prone 54 -ladakh 54 -kadhim 54 -favourited 54 -kun 54 -verging 54 -25ml 54 -marshfield 54 -lagrange 54 -fastidious 54 -mobilising 54 -side-footed 54 -johnlewis.com 54 -unpretentious 54 -peed 54 -billion-a-year 54 -meaghan 54 -ranbaxy 54 -mobot 54 -sajak 54 -dweller 54 -squashing 54 -50-day 54 -omsk 54 -el-sissi 54 -dancy 54 -andina 54 -liane 54 -ares 54 -cabello 54 -accelerometers 54 -open-topped 54 -1,000-year-old 54 -curating 54 -taney 54 -bebeto 54 -thirty-seven 54 -tino 54 -alek 54 -one-inch 54 -hanbury 54 -hottie 54 -feruz 54 -yardstick 54 -waxy 54 -cadena 54 -suchet 54 -towson 54 -cramlington 54 -wareing 54 -encodeuricomponent 54 -douglas-home 54 -dux 54 -maskell 54 -overhang 54 -sorcerer 54 -35.2 54 -ex-offenders 54 -compensates 54 -585 54 -command-and-control 54 -stairwells 54 -mechanized 54 -rhys-jones 54 -inhabits 54 -computer-based 54 -ilyushin 54 -overridden 54 -jaffar 54 -litigated 54 -televangelist 54 -follicle 54 
-2k 54 -dns 54 -quinnell 54 -springdale 54 -bri 54 -six-wheeled 54 -rba 54 -schnauzer 54 -cobras 54 -perpendicular 54 -montserrat 54 -seaborne 54 -kroll 54 -xiamen 54 -fact-checking 54 -diarist 54 -extenuating 54 -chairlift 54 -outlasted 54 -t-cells 54 -morgantown 54 -end-stage 54 -125mph 54 -cappuccinos 54 -pawnbrokers 54 -bleasdale 54 -followill 54 -pita 54 -arteaga 54 -cto 54 -triano 54 -twirled 54 -menon 54 -tetrad 54 -nabbing 54 -mutating 54 -haldeman 54 -plasters 54 -no2 54 -curtice 54 -edinburgh-based 54 -boxnation 54 -perp 54 -re-launched 54 -j-league 54 -nowzad 54 -newts 54 -kewell 54 -south-facing 54 -yurt 54 -ablation 54 -immortals 54 -romping 54 -sulking 54 -skopje 54 -birks 54 -peculiarly 54 -lukla 54 -jouejati 54 -nobby 54 -fokker 54 -sativex 54 -gratuitously 54 -jannah 54 -17-month 54 -asiri 54 -debtor 54 -ellman 54 -speedster 54 -trod 54 -aggies 54 -pyotr 54 -smedinghoff 54 -eleven-year-old 54 -catawba 54 -kitties 54 -bingle 54 -burlingame 54 -reclassify 54 -radiologists 54 -smoulders 54 -tejada 54 -trutv 54 -nine-man 54 -habitability 54 -sunni-shiite 54 -petford 54 -pawing 54 -apc 54 -boies 54 -spiers 54 -scragg 54 -et/pt 54 -02:12 54 -02:15 54 -neva 54 -mccune 54 -sandys 54 -barnfield 54 -antipsychotic 54 -lombok 54 -azharuddin 54 -672 54 -waft 54 -sulaimaniya 54 -geng 54 -whitelock 54 -intersect 54 -patmore 54 -dark-coloured 54 -wealdstone 54 -softens 54 -home-schooling 54 -spurning 54 -ff 54 -seventy-five 54 -pytlarz 54 -re-introduce 54 -rubido 54 -king-size 54 -khazaee 54 -scriptwriter 54 -barbary 54 -kouchner 54 -rsc 54 -rajib 54 -365,000 54 -kegs 54 -bhatt 54 -fdic 54 -nbclp.vidsubsec 54 -heifer 54 -1,429 54 -hoosiers 54 -leeks 54 -deprimo 54 -deductibles 54 -emmental 54 -295,000 54 -windfalls 54 -fekitoa 54 -ex-nfl 54 -serendipitous 54 -thirty-four 54 -spin-offs 54 -fbu 54 -astrophysicists 54 -brittni 54 -uninspired 54 -subotic 54 -tramway 54 -ranjini 54 -techel 54 -pantyhose 54 -0.62 54 -gilt-edged 54 -malarkey 54 -birtwhistle 54 -trippy 54 -advocaat 54 -65mph 54 -neubauer 54 -castello 54 -fieldhouse 54 -ntaganda 54 -19-month 54 -popper 54 -tgv 54 -20:19 54 -irobot 54 -vergeer 54 -overeat 54 -kyrie 54 -0.02 54 -cults 54 -truer 54 -6:20 54 -ams 54 -aljaz 54 -multidisciplinary 54 -gnarled 54 -exclusives 54 -pca 54 -bublé 54 -ekaireb 54 -daynes 54 -aire 54 -salafis 54 -chol 54 -relegating 54 -off-load 54 -sudo 54 -mopped 54 -dey 54 -deo 54 -one-game 54 -underlings 54 -naturism 54 -gangly 54 -khz 54 -asos.com 54 -nbclp.currentpageloc 54 -lok 54 -alida 54 -markell 54 -tpm 54 -jahmel 54 -mahrough 54 -vikram 54 -zenn 54 -maneuverability 54 -sucart 54 -nudists 54 -hyperbaric 54 -croizon 54 -millay 54 -battisti 54 -glenny 54 -pentathlon 54 -femail@mailonline.co.uk 54 -lyth 54 -corluka 54 -clemence 54 -okra 54 -schuman 54 -diocesan 54 -shakedown 54 -vitaliy 54 -smokin 54 -serkis 54 -÷ 54 -nimr 54 -intersecting 54 -ywca 54 -ombre 54 -krg 54 -selter 54 -injurious 54 -chan-ocha 54 -goji 54 -dereham 54 -hywel 54 -maximilian 54 -milorad 54 -entrapped 54 -carshalton 54 -supercontinent 54 -septicemia 54 -deweese 54 -atmeh 54 -red-eyed 54 -tutti 54 -zero-gravity 54 -arse 54 -adem 54 -leboeuf 54 -drowns 54 -estoril 54 -merges 54 -hilux 54 -morelos 54 -caryn 54 -reveler 54 -panamera 54 -immobilised 54 -abided 54 -horwich 54 -belbek 54 -rothermere 54 -runnymede 54 -barrassment 54 -messam 54 -pollinators 54 -quashie 54 -hazelwood 54 -djourou 54 -greatest-ever 54 -dà 54 -decriminalized 54 -first-timers 54 -lip-syncing 54 -maja 53 -chaim 53 -locklear 53 -mundi 53 -devoutly 53 -dabbawalas 
53 -scarily 53 -swinney 53 -effing 53 -salivary 53 -fictionalized 53 -mapps 53 -curtain-raiser 53 -moskovitz 53 -11-minute 53 -haughey 53 -algerie 53 -bellarabi 53 -adastra 53 -aunties 53 -feathering 53 -svenningsen 53 -samy 53 -tarter 53 -pitta 53 -wishaw 53 -hardens 53 -pattemore 53 -tenured 53 -thruster 53 -overexposed 53 -ingalls 53 -consignments 53 -duda 53 -labella 53 -nitro 53 -galvanizing 53 -reais 53 -houchin 53 -quinta 53 -wood-paneled 53 -jonze 53 -20-second 53 -14/1 53 -incan 53 -ensaf 53 -ill-considered 53 -impedes 53 -fulsome 53 -tish 53 -wide-scale 53 -first-rate 53 -wordpress 53 -exelon 53 -zieler 53 -deep-space 53 -over-reacted 53 -mccombe 53 -detours 53 -epidemiologists 53 -rospa 53 -herbst 53 -sheerness 53 -longer-lasting 53 -180-degree 53 -labradoodle 53 -trevi 53 -olivas 53 -pinheiro 53 -2014-now 53 -galas 53 -looper 53 -lefebvre 53 -torez 53 -mazhar 53 -wildey 53 -all-girl 53 -pontificate 53 -lurked 53 -slann 53 -watermark 53 -overvalued 53 -p-3 53 -chancel 53 -4.10 53 -atc 53 -dafydd 53 -216,000 53 -knxv 53 -imtiaz 53 -o'shaughnessy 53 -f-bomb 53 -gigante 53 -imperiled 53 -gender-specific 53 -self-centred 53 -day-trippers 53 -sidhu 53 -100-metre 53 -thirlwall 53 -aymara 53 -passat 53 -grungy 53 -gilpin 53 -rilya 53 -avignon 53 -yehya 53 -sleeplessness 53 -catty 53 -albano 53 -greeley 53 -verrier 53 -lebrun 53 -inks 53 -recalcitrant 53 -tristen 53 -semantics 53 -interwoven 53 -al-ansi 53 -860,000 53 -yehuda 53 -kingsmeadow 53 -16:16 53 -apologists 53 -14th-century 53 -orgasmic 53 -frimpong 53 -02:09 53 -creaky 53 -robbo 53 -potiskum 53 -wittenberg 53 -browner 53 -milonov 53 -moronic 53 -mcinnis 53 -furedi 53 -d'aloisio 53 -reverberating 53 -ghorbani 53 -2.29 53 -accusatory 53 -fanzone 53 -mazur 53 -lookin 53 -soundcloud 53 -disparaged 53 -selva 53 -balsamic 53 -beggs 53 -1.80 53 -talafair 53 -preteen 53 -torturers 53 -mw 53 -gox 53 -herbalife 53 -diluting 53 -wagers 53 -phish 53 -harrop 53 -skylab 53 -screenplays 53 -hematoma 53 -maw 53 -stilted 53 -ached 53 -rubinstein 53 -albertville 53 -boyette 53 -degenerated 53 -irregularity 53 -opinium 53 -stigmas 53 -irrawaddy 53 -akhmetov 53 -non-parole 53 -dors 53 -susic 53 -20-page 53 -maniacal 53 -reoffend 53 -skullcap 53 -out-of-favour 53 -kollection 53 -asymmetry 53 -524 53 -fanta 53 -chesimard 53 -trembled 53 -snowpack 53 -hydrothermal 53 -auguste 53 -stowing 53 -blenders 53 -long-duration 53 -warr 53 -squibb 53 --30 53 -mixers 53 -closeup 53 -locomotion 53 -bake-off 53 -rampart 53 -subcontracted 53 -sniped 53 -dinkins 53 -banger 53 -burpees 53 -long-winded 53 -steinman 53 -cockle 53 -hussien 53 -in-cell 53 -agca 53 -1796 53 -acors 53 -ikeme 53 -usagi 53 -upend 53 -mcguiness 53 -glass-fronted 53 -23,500 53 -dayna 53 -albie 53 -extolled 53 -rotund 53 -five-strong 53 -foreshadowing 53 -300g 53 -ride-on 53 -three-and-a-half-year 53 -pertain 53 -santacon 53 -on-the-run 53 -aquarius 53 -music-streaming 53 -merica 53 -specialities 53 -good-bye 53 -enders 53 -dumont 53 -moschino 53 -eight-under 53 -grating 53 -baytown 53 -dabney 53 -suzanna 53 -scowling 53 -bi-annual 53 -watercolor 53 -outwit 53 -coolum 53 -espouses 53 -vietto 53 -arellano-felix 53 -schadenfreude 53 -jatropha 53 -four-term 53 -700th 53 -constructs 53 -break-out 53 -fonseka 53 -rcgp 53 -b-17 53 -buin 53 -uplifted 53 -nickolay 53 -sabi 53 -verna 53 -tutus 53 -deltona 53 -12a 53 -dolla 53 -ransack 53 -seale 53 -icap 53 -millbank 53 -duh 53 -democratically-elected 53 -magnanimous 53 -industrialisation 53 -breathlessly 53 -duguid 53 -sutyagin 53 -guesthouses 53 
-four-car 53 -morissette 53 -suriname 53 -u.s.-afghan 53 -furtherance 53 -beaked 53 -mato 53 -choupo-moting 53 -neurologic 53 -treacle 53 -romeu 53 -burglarizing 53 -sem 53 -languishes 53 -do-nothing 53 -ik 53 -20:34 53 -vitor 53 -nickels 53 -cacti 53 -life-and-death 53 -grandmother-of-two 53 -00:56 53 -martynenko 53 -4,000-year-old 53 -fascinate 53 -lifter 53 -left-field 53 -victorian-era 53 -trooped 53 -privatize 53 -dlr 53 -colonia 53 -tettey 53 -1.46 53 -kafka 53 -opprobrium 53 -ashour 53 -warminster 53 -crombie 53 -paralyse 53 -accesses 53 -belorussian 53 -slimane 53 -asimo 53 -barthelemy 53 -pre-selected 53 -conduction 53 -daltrey 53 -jumpstart 53 -bereszynski 53 -man-eating 53 -maness 53 -tamzin 53 -low-tax 53 -ladbroke 53 -groaned 53 -psychotropic 53 -wolfram 53 -niculescu 53 -01:35 53 -675,000 53 -nightcrawler 53 -dilation 53 -aquifers 53 -customization 53 -spooning 53 -pursuers 53 -huntly 53 -statuettes 53 -fine-tuned 53 -clarksdale 53 -vertebral 53 -stannard 53 -rehoused 53 -khattab 53 -ex-employee 53 -cml 53 -billow 53 -prokopi 53 -bashes 53 -pipa 53 -dsm-5 53 -20-years-old 53 -nine-hole 53 -covey 53 -retested 53 -snagging 53 -g3 53 -aggregated 53 -methamphetamines 53 -rollercoasters 53 -pawan 53 -timescales 53 -sigman 53 -pereyra 53 -guilherme 53 -senza 53 -proportioned 53 -harvieu 53 -ucsf 53 -bellamar 53 -marbled 53 -mcilory 53 -olin 53 -cambria 53 -chignon 53 -abuelazam 53 -bleaker 53 -rizzi 53 -trifecta 53 -highest-level 53 -acquiesced 53 -airframe 53 -dcs 53 -levity 53 -mook 53 -climaxed 53 -al-wuhayshi 53 -minnow 53 -ipa 53 -bandeau 53 -elicits 53 -nemes 53 -incarcerate 53 -benzodiazepines 53 -collina 53 -pepys 53 -bentiu 53 -16:26 53 -1715 53 -sanlu 53 -juggler 53 -enlists 53 -newson 53 -5mph 53 -monarchies 53 -pataki 53 -upwardly 53 -guantánamo 53 -nlrb 53 -adios 53 -variance 53 -clayson 53 -demonised 53 -pavlof 53 -wtvd 53 -bremer 53 -zig 53 -meru 53 -merv 53 -student-athlete 53 -18,600 53 -7,100 53 -stamos 53 -chinneck 53 -impolite 53 -swainson 53 -gilet 53 -charliefscott 53 -96-year-old 53 -breadwinners 53 -bazaars 53 -paki 53 -cheban 53 -einhorn 53 -fissile 53 -müller 53 -lurches 53 -inlets 53 -winging 53 -typifies 53 -co-sleeping 53 -634 53 -4-foot 53 -herniated 53 -sotnikova 53 -high-power 53 -over-hyped 53 -samoans 53 -kurniawan 53 -vagus 53 -mavrias 53 -desi 53 -overspend 53 -12-10 53 -almonte 53 -gracias 53 -judeh 53 -altimeter 53 -ricard 53 -abbreviations 53 -jabari 53 -rock-throwing 53 -waddled 53 -forefinger 53 -enriquez 53 -shahab 53 -tillie 53 -perranporth 53 -brouhaha 53 -shimmery 53 -borre 53 -leogane 53 -reya 53 -gloag 53 -5:20 53 -si.com 53 -pay-tv 53 -dk 53 -co-chaired 53 -tiber 53 -bo-kyung 53 -bores 53 -hrc 53 -off-color 53 -bumbum 53 -brookdale 53 -al-qaida-linked 53 -caress 53 -haslet 53 -pettis 53 -simkins 53 -demilitarization 53 -dowson 53 -flipboard 53 -9:40 53 -talha 53 -flamethrower 53 -blizerian 53 -refunding 53 -newspoll 53 -mexes 53 -messham 53 -ramblings 53 -multi-billionaire 53 -degrasse 53 -trinket 53 -whoop 53 -bedsores 53 -rollin 53 -crisscrossed 53 -cn 53 -cp 53 -kinston 53 -waltons 53 -mahesh 53 -16,400 53 -distantly 53 -senatore 53 -raptures 53 -indra 53 -oldsmobile 53 -mid-70s 53 -topples 53 -cowling 53 -raed 53 -high-fructose 53 -29m 53 -auto-pilot 53 -ghirga 53 -5.10 53 -quaresma 53 -lavin 53 -holkham 53 -marchese 53 -basit 53 -margherita 53 -appetizing 53 -jeffress 53 -symptom-free 53 -735 53 -playpen 53 -chubbs 53 -sarge 53 -airpower 53 -grice 53 -moroney 53 -groupings 53 -sachets 53 -nabeel 53 -kilian 53 -messianic 53 -doh 
53 -inequity 53 -hollosy 53 -1.19 53 -peekaboo 53 -non-custodial 53 -scrotal 53 -medcalf 53 -40billion 53 -#ferguson 53 -dack 53 -beatified 53 -mother-of-seven 53 -three-member 53 -berm 53 -buckman 53 -111th 53 -ksat 53 -canavan 53 -rhodesian 53 -galata 53 -fatih 53 -urs 53 -costin 53 -2,000-year-old 52 -chivalrous 52 -atif 52 -excoriated 52 -resveratrol 52 -drink-fuelled 52 -crowd-sourcing 52 -inwood 52 -siracusa 52 -rusher 52 -devens 52 -remaking 52 -1.26 52 -nightspots 52 -ecj 52 -best-performing 52 -regurgitate 52 -testa 52 -tarbuck 52 -early-onset 52 -recedes 52 -emmitt 52 -cordingley 52 -comforter 52 -necrophilia 52 -dismount 52 -bullitt 52 -hepworth 52 -jallah 52 -ans 52 -feehery 52 -vintage-inspired 52 -bolin 52 -socialization 52 -radiographer 52 -dasilva 52 -zagat 52 -cappadocia 52 -headlands 52 -high-sugar 52 -everson 52 -tingly 52 -pre-show 52 -armitstead 52 -mohican 52 -kneeled 52 -poxon 52 -maudsley 52 -ginia 52 -higher-quality 52 -watchlist 52 -20.8 52 -20.9 52 -demoulas 52 -iwf 52 -ouagadougou 52 -nymph 52 -low-hanging 52 -brezler 52 -leticia 52 -kaz 52 -healthkit 52 -besting 52 -flatbread 52 -thursby 52 -aurelio 52 -miscarrying 52 -sabres 52 -ruppert 52 -sodje 52 -palmeiras 52 -gaitan 52 -oxidation 52 -28.3 52 -sunita 52 -yen-hsun 52 -ex-news 52 -lasse 52 -tolling 52 -crenshaw 52 -shaherkani 52 -trapper 52 -ibsen 52 -streptococcal 52 -catronio 52 -shinto 52 -beavercreek 52 -franchised 52 -gt3 52 -resourced 52 -parsi 52 -tenterhooks 52 -i-80 52 -typist 52 -half-life 52 -flitcroft 52 -monnig 52 -kimpton 52 -avo 52 -baddies 52 -myeloma 52 -.7 52 -four-figure 52 -omarosa 52 -jebali 52 -provenzano 52 -hooky 52 -30.2 52 -marini 52 -nine-and-a-half 52 -shopaholic 52 -essa 52 -highsmith 52 -giambattista 52 -24.6 52 -cloistered 52 -lastminute.com 52 -somali-born 52 -diamorphine 52 -waders 52 -montesano 52 -labours 52 -hesperia 52 -enraging 52 -tete 52 -soldiering 52 -llangollen 52 -bortles 52 -hemlock 52 -coppedge 52 -vacating 52 -rigueur 52 -sono 52 -20:44 52 -renter 52 -low-power 52 -karolinska 52 -vertiginous 52 -paju 52 -professes 52 -upskirt 52 -archeology 52 -sfgate 52 -a10 52 -confiding 52 -bellow 52 -antagonists 52 -cherbourg 52 -colorfully 52 -bianco 52 -redeployment 52 -alcopops 52 -dyess 52 -gustave 52 -whaler 52 -brideshead 52 -18-20 52 -varndell 52 -penarth 52 -hettie 52 -framers 52 -gumbo 52 -20:24 52 -skinnier 52 -horden 52 -moner 52 -boylan 52 -138,000 52 -twycross 52 -hackathon 52 -ecumenical 52 -prorsum 52 -xoom 52 -bregman 52 -ventilators 52 -al. 52 -look-a-like 52 -matlin 52 -d'antoni 52 -25.4 52 -re-live 52 -home-owners 52 -decriminalize 52 -single-story 52 -12.01 52 -sisterly 52 -aldeburgh 52 -ronnies 52 -didion 52 -chun-ying 52 -arevalo 52 -saker 52 -heartbreakingly 52 -compactor 52 -jaiden 52 -quitter 52 -purr 52 -maelor 52 -spectacled 52 -bluebella 52 -lidia 52 -stavros 52 -doggett 52 -al-maqdis 52 -trivialised 52 -indiscreet 52 -seema 52 -890 52 -ex-minister 52 -processional 52 -animate 52 -sadd 52 -str 52 -hammill 52 -fifo 52 -ismailia 52 -kuban 52 -marrapodi 52 -hagerty 52 -baston 52 -regrouping 52 -ark. 
52 -90th-minute 52 -garcia-cisneros 52 -ying-jeou 52 -trevena 52 -kaiya 52 -Ötzi 52 -yellowing 52 -616 52 -magnusson 52 -kt 52 -m11 52 -accuweather.com 52 -horrocks 52 -fugue 52 -pritchett 52 -phonecall 52 -paiva 52 -1080 52 -railroaded 52 -movahedi 52 -campbell-brown 52 -hillcrest 52 -columba 52 -150,000-a-week 52 -regimens 52 -abbiati 52 -anyhow 52 -lulz 52 -rometty 52 -eger 52 -biochemist 52 -platte 52 -manston 52 -velvety 52 -baghlan 52 -messner 52 -dedman 52 -hirsi 52 -bengtsson 52 -01:50 52 -b37 52 -ohno 52 -scamper 52 -sildenafil 52 -mariel 52 -smoothest 52 -anti- 52 -stammer 52 -calderwood 52 -attests 52 -yam 52 -indiscipline 52 -hustings 52 -streiff 52 -hasselblad 52 -caned 52 -holton 52 -rahat 52 -433 52 -22.1 52 -chafing 52 -step-dad 52 -dada 52 -jorgelina 52 -lilongwe 52 -kennewick 52 -ofer 52 -stanislaw 52 -backfiring 52 -768 52 -elinda 52 -r-maine 52 -jus 52 -rice-davies 52 -shot-stopper 52 -pan-african 52 -validates 52 -29.50 52 -bihlmaier 52 -pukki 52 -khel 52 -amiin 52 -cranny 52 -e-verify 52 -discolored 52 -tanisha 52 -3:00 52 -pk 52 -immigrate 52 -evoque 52 -shirty 52 -scottish-born 52 -kalynda 52 -screengrab 52 -wooster 52 -umana 52 -aerials 52 -imprecise 52 -bewitched 52 -kms 52 -jameel 52 -deflects 52 -hedwig 52 -narrowboat 52 -pico 52 -shahidullah 52 -dockside 52 -souk 52 -hg 52 -slicks 52 -flounder 52 -8.05 52 -kitschy 52 -peptides 52 -carpool 52 -through-ball 52 -yoke 52 -weeks-long 52 -jaqueline 52 -maulana 52 -capewell 52 -47.5 52 -volga 52 -purifying 52 -masterplan 52 -try-scoring 52 -middle-earth 52 -acquires 52 -marcheline 52 -sowed 52 -essen 52 -arching 52 -hotting 52 -nowitzki 52 -mattar 52 -452 52 -alway 52 -woollahra 52 -laxative 52 -maxillofacial 52 -calyx 52 -breasted 52 -arabic-language 52 -ashik 52 -xtra 52 -midseason 52 -lynwood 52 -berelowitz 52 -esky 52 -lansky 52 -ritualistic 52 -binged 52 -mawhinney 52 -taubira 52 -anti-americanism 52 -zeebrugge 52 -dipper 52 -28-nation 52 -2009-2011 52 -hoerler 52 -o.c. 
52 -wimp 52 -insurgencies 52 -xcor 52 -well-run 52 -dockerty 52 -life-prolonging 52 -holi 52 -harkness 52 -mushers 52 -belligerence 52 -bakhtov 52 -slutty 52 -twang 52 -cometary 52 -johnsen 52 -neary 52 -passbook 52 -josephus 52 -eraser 52 -scowl 52 -republican-leaning 52 -shouldering 52 -kellen 52 -emmerich 52 -reeks 52 -lewiston 52 -laser-guided 52 -vice-chair 52 -laffer 52 -chiarelli 52 -tropicana 52 -1:20 52 -pursing 52 -panics 52 -rabia 52 -samper 52 -rezaie 52 -ex-boss 52 -diverged 52 -weingarten 52 -cassation 52 -regretfully 52 -adolph 52 -moghadam 52 -proclamations 52 -patter 52 -olympic-sized 52 -taher 52 -fiedler 52 -finnbogason 52 -130ft 52 -gershon 52 -despises 52 -everytime 52 -20:39 52 -liaised 52 -brats 52 -gatecrashers 52 -lavergne 52 -hankin 52 -kingston-upon-thames 52 -bertil 52 -creane 52 -humongous 52 -marzipan 52 -behrens 52 -jaffray 52 -d'mello 52 -dennison 52 -520,000 52 -dataset 52 -pre-grammy 52 -schaible 52 -roasts 52 -multi-party 52 -glazebrook 52 -oreos 52 -barbell 52 -showground 52 -eris 52 -honeymoons 52 -krabi 52 -libellous 52 -canoodling 52 -simcox 52 -beasley-murray 52 -well-executed 52 -propositions 52 -12-15 52 -then-governor 52 -druze 52 -repels 52 -sandcastles 52 -0.04 52 -13-month 52 -pinpoints 52 -yosef 52 -mendel 52 -geely 52 -benzino 52 -creeped 52 -newseum 52 -paddies 52 -carlsen 52 -meiji 52 -waris 52 -thoughtfulness 52 -sr-72 52 -wok 52 -unclothed 52 -cacher 52 -lankford 52 -adapters 52 -vanya 52 -toothpastes 52 -anti-gaddafi 52 -implantable 52 -metallics 52 -speeded 52 -vesely 52 -cartographer 52 -elonis 52 -parcells 52 -11-inch 52 -norgrove 52 -unaddressed 52 -gunderson 52 -misperceptions 52 -130-year-old 52 -cranfield 52 -disbanding 52 -o157 52 -qianlong 52 -r-oklahoma 52 -triglycerides 52 -bios 52 -zuber 52 -implicates 52 -narayan 52 -tear-gas 52 -cretan 52 -inexorable 52 -well-defined 52 -millfield 52 -giada 52 -valais 52 -burton-on-trent 52 -schall 52 -shias 52 -oettinger 52 -baseballs 52 -fascia 52 -markin 52 -tamron 52 -female-only 52 -destabilization 52 -148,000 52 -season-opener 52 -garcia-juaregui 52 -licensee 52 -400g 52 -aib 52 -adequacy 52 -bukit 52 -jablonski 52 -mcmenamin 52 -cut-back 52 -muscle-bound 52 -hideki 52 -mirlande 52 -sallis 52 -relapsing 52 -bohemia 52 -decamped 52 -mintz 52 -annotations 52 -ssl 52 -surtax 52 -highly-respected 52 -planetarium 52 -whet 52 -kross 52 -usability 52 -havelange 52 -exhaustively 52 -jolleys 52 -eddington 52 -br 52 -chiller 52 -faircloth 52 -embarrassments 52 -diverge 52 -rueda 52 -pre-booked 52 -jessy 52 -newly-wed 52 -shuter 52 -pally 52 -micrometres 52 -wildcards 52 -brannigan 52 -jaish 52 -rial 52 -tetra 52 -ewart 52 -ayan 52 -cetera 52 -second-guess 52 -genealogist 52 -kinross 52 -calibrate 52 -luol 52 -thakrar 52 -botulism 52 -drool 52 -herrin 51 -9,200 51 -objectification 51 -brunning 51 -dazzles 51 -hecker 51 -taiyuan 51 -kitchenware 51 -besal 51 -debilitated 51 -trundling 51 -holywood 51 -fluttered 51 -moffitt 51 -multiparty 51 -millsap 51 -bur 51 -chart-topper 51 -424 51 -3.49 51 -tonteria 51 -hanoun 51 -buble 51 -hellas 51 -larnaca 51 -jenks 51 -kelp 51 -hitto 51 -alda 51 -spruced 51 -malinois 51 -half-truths 51 -uk-bound 51 -cna 51 -notaro 51 -shkodran 51 -kilowatts 51 -eight-mile 51 -fulk 51 -boogaard 51 -extendable 51 -pagones 51 -nbclp.arandomnumber 51 -pna 51 -dalliances 51 -thredbo 51 -yentob 51 -adderley 51 -sanatorium 51 -uncircumcised 51 -cassai 51 -cous 51 -truus 51 -unseasonable 51 -violetta 51 -ledesma 51 -bolivians 51 -320million 51 -inrix 51 -mis 51 -haitham 51 
-jailbird 51 -weepu 51 -zulus 51 -fetches 51 -asterisk 51 -lesa 51 -heat-trapping 51 -boult 51 -o'hagan 51 -avebury 51 -kilns 51 -rix 51 -akmal 51 -hounsou 51 -salangi 51 -behemoths 51 -hankering 51 -mckiernan 51 -epidermal 51 -fable 51 -warlike 51 -otley 51 -hixon 51 -suomi 51 -irritates 51 -flat-bed 51 -milik 51 -brinks 51 -barbecued 51 -effecting 51 -ribner 51 -revolutionising 51 -cardiothoracic 51 -passau 51 -oakmont 51 -mazic 51 -sadeq 51 -malakai 51 -co-president 51 -emiratis 51 -chalkboard 51 -seltzer 51 -utilization 51 -impatiently 51 -kassam 51 -2mm 51 -stressors 51 -brights 51 -sahil 51 -chunying 51 -carmelita 51 -runion 51 -ets 51 -kroner 51 -khimki 51 -111,000 51 -gatherers 51 -jousting 51 -120m 51 -kltv 51 -most-visited 51 -harking 51 -reeking 51 -cumbernauld 51 -logbook 51 -blantyre 51 -olten 51 -1997-98 51 -cavers 51 -re-arrest 51 -13oz 51 -chiapperini 51 -thermostats 51 -stillwell 51 -moyo 51 -sapporo 51 -operable 51 -fyi 51 -draco 51 -netherlands-based 51 -back-to-work 51 -death-penalty 51 -eav 51 -hibernating 51 -shipmates 51 -woah 51 -389 51 -371 51 -jaua 51 -salahuddin 51 -mehos 51 -dispiriting 51 -ferri 51 -swaggering 51 -embeds 51 -voeckler 51 -wests 51 -foye 51 -cotton-top 51 -rustle 51 -bonus-point 51 -outriders 51 -emcee 51 -segregate 51 -djotodia 51 -yongkang 51 -pleaser 51 -hanratty 51 -misperception 51 -howards 51 -twitter.com 51 -perishable 51 -ayalon 51 -hazed 51 -hairdryers 51 -holt-singh 51 -aycliffe 51 -barrowman 51 -portrush 51 -sentient 51 -supplementation 51 -02:45 51 -sporyshev 51 -dasher 51 -interdisciplinary 51 -three-pronged 51 -journeying 51 -nightie 51 -remortgaged 51 -1.43 51 -mascarell 51 -25.2 51 -r2d2 51 -flirts 51 -inler 51 -victorino 51 -use-by 51 -dockers 51 -wiggett 51 -yeh 51 -neutralized 51 -8,200 51 -bullet-ridden 51 -gradel 51 -stockholders 51 -336 51 -mini-break 51 -corroboration 51 -star-crossed 51 -10-20 51 -cantankerous 51 -lm 51 -vertigo-inducing 51 -arian 51 -tegel 51 -taverns 51 -angulo 51 -chigwell 51 -castelao 51 -criminalization 51 -mid-west 51 -bolsheviks 51 -hogmanay 51 -chesterman 51 -colo 51 -accidently 51 -incapacitate 51 -yesim 51 -mixed-sex 51 -gelled 51 -tugendhat 51 -cerrillo 51 -conciliation 51 -convivial 51 -out-of-body 51 -cink 51 -montclair 51 -unpick 51 -28-day 51 -1817 51 -mulla 51 -porterfield 51 -brozovic 51 -faustino 51 -portishead 51 -hiles 51 -fuerteventura 51 -telemarketing 51 -catch-all 51 -tribble 51 -ila 51 -furnaces 51 -instrumentation 51 -clothe 51 -rebuilds 51 -mortlake 51 -parreira 51 -roizen 51 -12,800 51 -debi 51 -yarns 51 -laura_mail 51 -re-emerging 51 -norrie 51 -trafigura 51 -urena 51 -pregnancy-related 51 -gatewood 51 -presby 51 -showboating 51 -ornithology 51 -grommet 51 -11-week-old 51 -drachma 51 -sloshing 51 -carwyn 51 -peguero 51 -dieticians 51 -now-husband 51 -reroute 51 -shakeel 51 -bojana 51 -widmer 51 -set-back 51 -grammy-nominated 51 -karsten 51 -whetstone 51 -wwdc 51 -koreatown 51 -bendjelloul 51 -compendium 51 -yogyakarta 51 -at-large 51 -al-shishani 51 -abrahamson 51 -rossa 51 -senselessly 51 -codebreakers 51 -newall 51 -1.27 51 -reevaluate 51 -storm-force 51 -22.7 51 -erith 51 -one-over 51 -fwc 51 -soviet-style 51 -summarizing 51 -penetrative 51 -miro 51 -business-like 51 -20:38 51 -roni 51 -donaghey 51 -gulping 51 -part-way 51 -608 51 -populus 51 -kumra 51 -detonations 51 -certainties 51 -gair 51 -autobahn 51 -ilkley 51 -kanawha 51 -mosquitos 51 -amore 51 -viviana 51 -wrekin 51 -cafferty 51 -meera 51 -ecole 51 -farmbox 51 -15,700 51 -donie 51 -naturel 51 -agrarian 51 
-judgemental 51 -god-like 51 -german-speaking 51 -earth-shattering 51 -arcing 51 -alen 51 -anti-graft 51 -paymaster 51 -cor 51 -distinguishable 51 -breadline 51 -co-sponsors 51 -ht 51 -baby-sitting 51 -colada 51 -fume 51 -palaeontology 51 -baijiu 51 -schematics 51 -nrdc 51 -jacquie 51 -fox-pitt 51 -hartsfield 51 -hard-drinking 51 -anti-balaka 51 -traceability 51 -todmorden 51 -bronwen 51 -octane 51 -dabbing 51 -cicero 51 -characterizations 51 -baffles 51 -paracel 51 -whiley 51 -streete 51 -jamila 51 -20g 51 -pankaj 51 -pinging 51 -intracranial 51 -majewska 51 -exhuming 51 -art. 51 -scoping 51 -marant 51 -debriefing 51 -anteater 51 -expendables 51 -lashkar-e-taiba 51 -millisecond 51 -trojans 51 -abdel-rahman 51 -40-something 51 -tyton 51 -big-game 51 -predictors 51 -2:40 51 -lambourn 51 -vas 51 -lifespans 51 -hanson-young 51 -tianna 51 -gozo 51 -hypersensitivity 51 -youssouf 51 -enticement 51 -eugenics 51 -belching 51 -sexted 51 -lineups 51 -counterbalance 51 -sterilise 51 -biro 51 -nanometres 51 -pheromones 51 -lone-wolf 51 -schladming 51 -arreola 51 -hungarian-born 51 -globetrotter 51 -boykin 51 -michibata 51 -wasteney 51 -arsenault 51 -unseaworthy 51 -osterholm 51 -wnep 51 -sabir 51 -enhancer 51 -phillipa 51 -unai 51 -nsue 51 -martindale 51 -converter 51 -wabc-tv 51 -anorak 51 -hammonds 51 -shevell 51 -scotti 51 -krupp 51 -i-75 51 -metrodome 51 -four-test 51 -godalming 51 -kraken 51 -02:58 51 -kibbutz 51 -al-shaabab 51 -zhivago 51 -cota 51 -ischemic 51 -jovanovski 51 -inch-perfect 51 -arecibo 51 -dodges 51 -epson 51 -well-positioned 51 -becci 51 -bushey 51 -d&d 51 -sonnets 51 -cratered 51 -willenborg 51 -symington 51 -pumice 51 -infusing 51 -non-smoking 51 -archuleta 51 -628 51 -spuds 51 -dimitry 51 -chinese-language 51 -rwe 51 -dollywood 51 -oblast 51 -splurging 51 -reinares 51 -up-do 51 -al-khalifa 51 -photoshopping 51 -abdoulaye 51 -1828 51 -anti-theft 51 -awakens 51 -great-great-great 51 -untruths 51 -belie 51 -westen 51 -chafe 51 -loa 51 -fruitcakes 51 -hoult 51 -naseem 51 -sert 51 -coatbridge 51 -munchausen 51 -300-pound 51 -esta 51 -rezaian 51 -537 51 -lordship 51 -kashgar 51 -super-size 51 -ok! 
51 -26.9 51 -bollettieri 51 -hossu 51 -rots 51 -577 51 -predated 51 -dibell 51 -castelveter 51 -dignitary 51 -redesigning 51 -1797 51 -taylforth 51 -skagit 51 -2200 51 -harmonie-rose 51 -newland 51 -rottnest 51 -singin 51 -kesteven 51 -damaturu 51 -synthesizer 51 -bamu 51 -dalmatian 51 -muggy 51 -marveling 51 -wormwood 51 -ever-evolving 51 -lovechild 51 -emissary 51 -highly-charged 51 -leibovich 51 -catnip 51 -quintin 51 -reddish-brown 51 -lutyens 51 -pepfar 51 -collate 51 -eludes 51 -nusaybah 51 -reorganized 51 -osuna 51 -abram 51 -zante 51 -sextuplets 51 -verandah 51 -litigious 51 -peeved 51 -reflexology 51 -defoggi 51 -isgrove 51 -parkside 51 -kiwomya 51 -bicarbonate 51 -powerlessness 51 -crowell 51 -wal 51 -glazin 51 -akcakale 51 -1x 51 -deferral 51 -dahan 51 -hijinks 51 -sammon 51 -sympathiser 51 -fisticuffs 51 -saphir 51 -27.9 51 -morganelli 51 -once-in-a-decade 51 -pilfering 51 -1730 51 -geisel 51 -get-togethers 51 -newkirk 50 -meissen 50 -post-conflict 50 -desensitized 50 -dohuk 50 -onyewu 50 -okavango 50 -irrigate 50 -popov 50 -harty 50 -re-runs 50 -cajoling 50 -fragmentary 50 -vinogradov 50 -igbo 50 -lowrey 50 -bagpuss 50 -1.29 50 -elst 50 -weems 50 -ectodermal 50 -motoart 50 -long-dead 50 -canute 50 -invite-only 50 -extroverted 50 -belfie 50 -nostrum 50 -dai-ichi 50 -outlast 50 -cutaway 50 -bucky 50 -388 50 -clode 50 -misskelley 50 -sub-prime 50 -wodehouse 50 -6p 50 -milliband 50 -chaka 50 -46.5 50 -sea-based 50 -bodhi 50 -sarita 50 -old-time 50 -aponte 50 -sigel 50 -68f 50 -justino 50 -lillehammer 50 -debbi 50 -rothbury 50 -epp 50 -bookmark 50 -garmin-sharp 50 -cordery 50 -environs 50 -bolkiah 50 -flaunts 50 -riathalsam 50 -extrapolated 50 -toluca 50 -razzie 50 -complexions 50 -divest 50 -dibaba 50 -blitzkrieg 50 -carves 50 -ketsana 50 -strava 50 -morgues 50 -imber 50 -trialist 50 -e10 50 -eight-under-par 50 -guidroz 50 -riedel 50 -flip-flopping 50 -dependant 50 -erm 50 -baukus 50 -ragtag 50 -chianti 50 -cieran 50 -schreibvogel 50 -mujiasih 50 -made-to-measure 50 -kaftans 50 -impure 50 -rubbers 50 -rate-fixing 50 -393 50 -merhige 50 -laissez-faire 50 -14:59 50 -indicting 50 -jee 50 -goines 50 -under-20s 50 -aikines-aryeetey 50 -ceylon 50 -forex 50 -commentate 50 -2mp 50 -duesler 50 -dilley 50 -photobombing 50 -4bn 50 -lippman 50 -stoudemire 50 -bffs 50 -glidden 50 -fam 50 -exemplify 50 -dariusz 50 -suffragettes 50 -farmlands 50 -school-based 50 -gombe 50 -desirability 50 -manteca 50 -malpas 50 -fat-burning 50 -tsvetana 50 -re-introduced 50 -pro-al 50 -intubated 50 -ill-effects 50 -ultrabooks 50 -secondaries 50 -hcpc 50 -nocerino 50 -seabass 50 -netherton 50 -butterball 50 -laiki 50 -ichthyosis 50 -varner 50 -showstopping 50 -europcar 50 -zinger 50 -rfl 50 -scumbags 50 -20:49 50 -1545 50 -dugald 50 -geophysics 50 -worthiness 50 -four-strong 50 -clarion-ledger 50 -distilleries 50 -14-point 50 -3500 50 -overslept 50 -biter 50 -fronsman 50 -trieste 50 -bund 50 -calderoli 50 -disassociate 50 -pinkney 50 -church-going 50 -langtree 50 -carpaccio 50 -mar-a-lago 50 -chiming 50 -radwan 50 -bod 50 -patriarchy 50 -gudmundsson 50 -monocle 50 -four-decade 50 -capel 50 -alloys 50 -northside 50 -china-based 50 -wegener 50 -didnâ 50 -reah 50 -gay-friendly 50 -coterie 50 -cornrows 50 -ish 50 -kosik 50 -suspicious-looking 50 -eighteen-year-old 50 -obstructionist 50 -licence-fee 50 -cherry-picking 50 -colgate 50 -sing-along 50 -caricatured 50 -wait-and-see 50 -butane 50 -oda 50 -gargiulo 50 -eos 50 -innately 50 -bahn 50 -ninety-nine 50 -caucasians 50 -vexing 50 -hadassah 50 -antagonise 50 -maniacs 
50 -bede 50 -beaven 50 -incubated 50 -sasaki 50 -yer 50 -dorgan 50 -shearman 50 -1,080 50 -ballgame 50 -dori 50 -virgen 50 -dsm 50 -dingwall 50 -murrah 50 -turkestan 50 -kweku 50 -concoct 50 -puncher 50 -doghouse 50 -ramsden 50 -giteau 50 -jager 50 -khar 50 -justo 50 -presuming 50 -d.j. 50 -lexy 50 -creedon 50 -talansky 50 -father-of-seven 50 -renege 50 -trouble-free 50 -minibuses 50 -novaya 50 -assembles 50 -butchery 50 -kumaritashvili 50 -denoting 50 -kvue 50 -4,900 50 -laureus 50 -credit-card 50 -non-profits 50 -anaerobic 50 -milliken 50 -sinoti 50 -cockrell 50 -devours 50 -brooklands 50 -geostationary 50 -telepathic 50 -schleicher 50 -antananarivo 50 -shantytowns 50 -cooperstown 50 -bontinck 50 -tailback 50 -muñoz 50 -medrano 50 -abiola 50 -pulleys 50 -salar 50 -begrudgingly 50 -pujols 50 -roadsides 50 -flitting 50 -inundating 50 -halogen 50 -25km 50 -guestbook 50 -tomei 50 -eastside 50 -zarqawi 50 -ofwat 50 -tenn. 50 -antipsychotics 50 -drop-down 50 -jamaican-born 50 -digestives 50 -moxey 50 -applewhite 50 -antikythera 50 -steampunk 50 -unspectacular 50 -archrival 50 -churns 50 -retardants 50 -hrafnsson 50 -zanesville 50 -metre-long 50 -yana 50 -tammany 50 -borschberg 50 -seedings 50 -henriques 50 -redecorate 50 -pre-k 50 -vander 50 -paper-thin 50 -wimmer 50 -azriel 50 -sulphide 50 -emotionally-charged 50 -ardennes 50 -andal 50 -immovable 50 -karey 50 -zoologists 50 -gaz 50 -heathfield 50 -unos 50 -iheanacho 50 -kerrey 50 -djalili 50 -soothed 50 -soothes 50 -97-year-old 50 -unrequited 50 -rabble 50 -mondol 50 -snowiest 50 -16-page 50 -sedating 50 -sediq 50 -swinburn 50 -live-tweeted 50 -kabayeva 50 -timbrell 50 -i3 50 -koc 50 -winterburn 50 -2:20 50 -light-headed 50 -exhaled 50 -vane 50 -9.35 50 -840,000 50 -40-plus 50 -osgood 50 -rosh 50 -imitators 50 -fizzed 50 -saleroom 50 -climate-change 50 -team-building 50 -swingeing 50 -minute-by-minute 50 -culdrose 50 -free-running 50 -linehan 50 -frankincense 50 -uyuni 50 -virologist 50 -irrationally 50 -extravagantly 50 -actuality 50 -toffs 50 -pederson 50 -bruin 50 -revitalizing 50 -ange 50 -griff 50 -bridgehampton 50 -race-based 50 -open-necked 50 -verkaik 50 -bodo 50 -dorney 50 -bick 50 -lemongrass 50 -01:32 50 -spanair 50 -christiana 50 -sax 50 -sidestepping 50 -moshi 50 -ketogenic 50 -849 50 -xiaojun 50 -bustan 50 -ouija 50 -cooper-hohn 50 -roshan 50 -illuminations 50 -parvaiz 50 -02:34 50 -pasteurised 50 -cohan 50 -ill-treating 50 -barletta 50 -smarting 50 -daverin 50 -grimmer 50 -globemaster 50 -651 50 -adm 50 -gohel 50 -paucity 50 -teeter 50 -backstreets 50 -majewski 50 -israel-gaza 50 -gx 50 -steinfeld 50 -zip-up 50 -copernicus 50 -miyamoto 50 -lambeau 50 -pittodrie 50 -ww 50 -piekarsky 50 -schenectady 50 -trumpeter 50 -140mph 50 -kielder 50 -japanese-american 50 -tardy 50 -pelley 50 -dryden 50 -rinks 50 -perceiving 50 -shabelle 50 -2/1 50 -fricke 50 -805 50 -maas 50 -diakite 50 -tendonitis 50 -trumbull 50 -v-shaped 50 -indelibly 50 -persians 50 -multi-faceted 50 -rattlesnakes 50 -parent-teacher 50 -restorations 50 -copywriter 50 -f3 50 -351 50 -korotaeva 50 -five-second 50 -salutary 50 -leeds-based 50 -coryton 50 -concocting 50 -krenwinkel 50 -rehydrate 50 -baptisms 50 -holstein 50 -co-counsel 50 -elleithee 50 -reheated 50 -gritting 50 -ritson 50 -post-apartheid 50 -bündchen 50 -496 50 -sammi 50 -swisher 50 -rlc 50 -enclose 50 -poldark 50 -enclosing 50 -tammi 50 -record-equaling 50 -blears 50 -haries 50 -ebury 50 -pithy 50 -hanes 50 -smackdown 50 -pimple 50 -novara 50 -brindle 50 -veronique 50 -ansah 50 -snizhne 50 -thomlinson 50 
-surfs 50 -radonski 50 -30-strong 50 -polyamorous 50 -trondheim 50 -stupendous 50 -rivalled 50 -6:40 50 -ninemsn 50 -categorise 50 -kocher 50 -otten 50 -recapturing 50 -lemoine 50 -mudie 50 -seven-member 50 -240million 50 -southwards 50 -renn 50 -jumbled 50 -villareal 50 -armrests 50 -sunanda 50 -greige 50 -vuvuzelas 50 -serfontein 50 -exaggerations 50 -qaim 50 -umaru 50 -braham 50 -alanis 50 -brisket 50 -foday 50 -moniz 50 -mainlanders 50 -talladega 50 -garbo 50 -kazuo 50 -pusher 50 -eaterie 50 -kingmaker 50 -jean-jacques 50 -privatized 50 -werewolves 50 -rell 50 -insipid 50 -steiber 50 -honeymooned 50 -unsociable 50 -likability 50 -kao 50 -controllable 50 -turrion 50 -mizead 50 -mihayo 50 -ndp 50 -olay 50 -self-respecting 50 -waddling 50 -24st 50 -psychopathy 50 -cobblers 50 -harlington 50 -luker 50 -pabst 50 -sud 50 -sup 50 -anti-hazing 50 -towne 50 -avaaz 50 -lanning 50 -trade-offs 50 -02:20 50 -crowd-pleaser 50 -pillaged 50 -underreported 50 -chemcam 50 -much-publicized 50 -manx 50 -scotstoun 50 -mcquiston 50 -humdrum 50 -niceties 50 -flanking 50 -143,000 50 -hyperinflation 50 -211-game 50 -recalibrate 50 -wolong 50 -787-9 50 -comert 50 -nenad 50 -katmandu 50 -lambton 50 -nocco 50 -modibo 50 -destitution 50 -floodlit 50 -riskiest 50 -deletes 50 -klingenmeyer 50 -mariposa 50 -holdout 50 -b2 50 -kpa 50 -bombed-out 50 -572 50 -blights 50 -type-c 50 -michonne 50 -unsportsmanlike 50 -abdomens 50 -hirshberg 50 -chesters 50 -muslim-americans 50 -artichokes 50 -meteoroids 50 -43million 50 -low-enriched 50 -perinatal 50 -tignous 50 -miraval 50 -looter 50 -eight-months 50 -blab 50 -tenterden 50 -backache 50 -15-inch 50 -queretaro 50 -borderlands 50 -forster-caskey 50 -714 50 -712 50 -tyndall 49 -thalia 49 -sprig 49 -a340 49 -squatted 49 -jrr 49 -frugality 49 -rafie 49 -cruellest 49 -9.10 49 -entree 49 -copland 49 -demoralizing 49 -samu 49 -1.33 49 -1.34 49 -drawl 49 -cresting 49 -concert-goers 49 -circumvented 49 -recitation 49 -nanodiamonds 49 -bharat 49 -disloyalty 49 -bennu 49 -straitjacket 49 -droids 49 -thins 49 -02:14 49 -isro 49 -pulliam 49 -hominins 49 -50-yard 49 -catrina 49 -belmond 49 -table-topping 49 -ma'a 49 -raghad 49 -made-to-order 49 -detoxification 49 -express-news 49 -iwata 49 -smudged 49 -inorganic 49 -waxes 49 -sidell 49 -sytsma 49 -dumbo 49 -emigrants 49 -serbians 49 -dergarabedian 49 -mcadoo 49 -prather 49 -boubacar 49 -nine-point 49 -sate 49 -'till 49 -al-dulaimi 49 -sirisena 49 -blacken 49 -cryonics 49 -kenzo 49 -claudius 49 -747s 49 -sith 49 -albers 49 -avalanna 49 -kerswell 49 -touma 49 -megabits 49 -laurens 49 -khedair 49 -heart-healthy 49 -whooped 49 -arndale 49 -selborne 49 -oscillating 49 -demarai 49 -toole 49 -flood-prone 49 -mokhtar 49 -glitters 49 -panty 49 -klout 49 -periodical 49 -gaya 49 -rego 49 -conforms 49 -godoy 49 -harland 49 -enlivened 49 -cielo 49 -haya 49 -bronfman 49 -laith 49 -goodson 49 -899 49 -four-course 49 -satmar 49 -harri 49 -cafeterias 49 -attack-minded 49 -tigerair 49 -sunburned 49 -644 49 -portend 49 -backlit 49 -wellchild 49 -kondogbia 49 -okawa 49 -mirth 49 -sandcastle 49 -syllables 49 -edmiston 49 -progeny 49 -prophylactic 49 -slc 49 -khosla 49 -militarised 49 -brodeur 49 -darnall 49 -procopio 49 -pro-obama 49 -stalagmites 49 -shipboard 49 -copse 49 -nuestra 49 -kimiko 49 -24.2 49 -deaves 49 -sunnyvale 49 -school-leavers 49 -mudflats 49 -lanuf 49 -rollicking 49 -energizing 49 -2.47 49 -wpix 49 -rolland 49 -husbandry 49 -arina 49 -cmes 49 -phu 49 -gian 49 -demonise 49 -664 49 -yearling 49 -duong 49 -tastiest 49 -insuring 49 -picasa 49 
-peephole 49 -borodin 49 -kati 49 -bonjean 49 -hankinson 49 -jarryd 49 -mcginlay 49 -tampons 49 -20:43 49 -20:48 49 -tremaine 49 -big-box 49 -then-new 49 -parlous 49 -29.1 49 -tolerates 49 -374 49 -smooching 49 -flashmob 49 -seung-hui 49 -mcluckie 49 -saltburn 49 -unglamorous 49 -fieldwork 49 -initiates 49 -denman 49 -waka 49 -extender 49 -clench 49 -faxes 49 -iridium 49 -roi 49 -wicksteed 49 -ait 49 -20:21 49 -pifer 49 -groundsmen 49 -zippy 49 -harrod 49 -half-sisters 49 -carriageways 49 -bishkek 49 -4.35 49 -gately 49 -evelina 49 -belanger 49 -chancellors 49 -hornsey 49 -niekerk 49 -choudhuri 49 --13 49 -zandt 49 -earth-size 49 -citric 49 -thebes 49 -abounds 49 -45-7 49 -chiesa 49 -547 49 -thandie 49 -staunchest 49 -guttural 49 -illingworth 49 -distillers 49 -knudsen 49 -mongols 49 -kommersant 49 -landsberg 49 -mid-day 49 -indus 49 -345,000 49 -cocking 49 -1640 49 -muzzled 49 -heerenveen 49 -open-water 49 -hinduja 49 -atolls 49 -mid-winter 49 -microbe 49 -khieu 49 -otay 49 -gorani 49 -mariota 49 -quant 49 -provocateurs 49 -trisomy 49 -turki 49 -doorknobs 49 -sayle 49 -mentally-ill 49 -validating 49 -phenylbutazone 49 -tredegar 49 -chatterley 49 -bidet 49 -causer 49 -hoes 49 -ochlik 49 -33.3 49 -sanitize 49 -electrolytes 49 -f-1 49 -manas 49 -sparkes 49 -vowel 49 -presumptuous 49 -shao 49 -pathetically 49 -nijmegen 49 -springwood 49 -zschaepe 49 -geraldton 49 -deadmau5 49 -bonnard 49 -lorin 49 -goldblum 49 -yusef 49 -darjeeling 49 -meggs 49 -1819 49 -margolies 49 -stynes 49 -merrett 49 -non-commissioned 49 -cold-weather 49 -drumsticks 49 -woio 49 -11kg 49 -six-person 49 -adis 49 -akhandananda 49 -underperformed 49 -562 49 -lantos 49 -campbells 49 -sieges 49 -scalpels 49 -hawaiians 49 -nikko 49 -klugman 49 -encyclopaedia 49 -swamping 49 -selwyn 49 -j.m. 
49 -musty 49 -whitchurch 49 -scrupulously 49 -coghlan 49 -impulsiveness 49 -dum 49 -primacy 49 -sloped 49 -lubanga 49 -20-odd 49 -tiya 49 -selimovic 49 -pegging 49 -nowzaradan 49 -schlegel 49 -thickly 49 -olley 49 -back-heel 49 -coulton 49 -khon 49 -autobiographies 49 -binchester 49 -bodey 49 -popp 49 -karel 49 -exhausts 49 -sandilands 49 -anti-slavery 49 -tula 49 -steadying 49 -hannes 49 -sediba 49 -330ft 49 -two-litre 49 -grasps 49 -single-handed 49 -denver-based 49 -plumley 49 -glared 49 -sex-abuse 49 -jordyn 49 -dugmore 49 -uzbeks 49 -mangold 49 -meritless 49 -self-restraint 49 -ia 49 -sadomasochistic 49 -deceptions 49 -krupa 49 -encasing 49 -golly 49 -roadhouse 49 -attics 49 -khin 49 -snags 49 -10-men 49 -melancon 49 -sensuous 49 -herzigova 49 -785 49 -kadcyla 49 -under-fives 49 -lulled 49 -o'doherty 49 -mile-wide 49 -ramtha 49 -orientations 49 -payack 49 -suzette 49 -hedged 49 -exfoliation 49 -porky 49 -98-year-old 49 -fulfils 49 -triple-digit 49 -seventh-grade 49 -nation-wide 49 -zatuliveter 49 -hemet 49 -johnstown 49 -scissorhands 49 -monetize 49 -rulli 49 -2:00 49 -bada 49 -cremations 49 -plexiglas 49 -hira 49 -high-density 49 -merkley 49 -baddeley 49 -471 49 -pasting 49 -agintas 49 -11,700 49 -middle-school 49 -drivel 49 -mathilda 49 -35-yard 49 -seven-mile 49 -reintegrated 49 -23.2 49 -bola 49 -wallwork 49 -feltman 49 -abo 49 -hurlingham 49 -convalescing 49 -persuasions 49 -bugarach 49 -kilotons 49 -crannies 49 -02:36 49 -aviemore 49 -reshma 49 -crevasses 49 -dundas 49 -short-sightedness 49 -6.75 49 -enemas 49 -brazil-born 49 -deyn 49 -suzan 49 -five-course 49 -petroglyphs 49 -hooten 49 -melson 49 -sansom 49 -sorenstam 49 -excreted 49 -attentively 49 -cspi 49 -pinderfields 49 -camargo 49 -harefield 49 -carpathia 49 -humerus 49 -slocombe 49 -chana 49 -plies 49 -blasé 49 -kiowa 49 -pressly 49 -transcending 49 -vodkas 49 -knoefel 49 -ambani 49 -stéphane 49 -out-of-contract 49 -nuvaring 49 -right-hander 49 -conjunctivitis 49 -stashes 49 -coin-operated 49 -bobblehead 49 -armadillos 49 -niu 49 -well-appointed 49 -nonbelievers 49 -donte 49 -deflate-gate 49 -spatula 49 -policymaking 49 -9/4 49 -hellqvist 49 -chicharito 49 -gena 49 -assault-style 49 -idolatry 49 -birthrate 49 -burntwood 49 -henke 49 -urca 49 -members-only 49 -montecito 49 -mehran 49 -guitarists 49 -macaw 49 -lui 49 -ghouls 49 -zoned 49 -month-to-month 49 -stubbed 49 -edsel 49 -irwindale 49 -horwitz 49 -abm 49 -bolian 49 -aditya 49 -dimpled 49 -chain-reaction 49 -800ft 49 -nehoray 49 -mckelvie 49 -apothecary 49 -palmers 49 -familiarise 49 -inayat 49 -hypermobility 49 -shahbaz 49 -fine-tuning 49 -wayland 49 -likeliest 49 -incineration 49 -cussing 49 -tchaikovsky 49 -whitcomb 49 -intelligence-led 49 -mustachioed 49 -ottery 49 -cease-fires 49 -gnc 49 -smiler 49 -myocarditis 49 -coxes 49 -birds-eye 49 -7mm 49 -radcliff 49 -arno 49 -zara.com 49 -ey 49 -withington 49 -jha 49 -fatten 49 -wallman 49 -nsaids 49 -copson 49 -callender 49 -tartare 49 -stingy 49 -re-branded 49 -decriminalising 49 -oestrike 49 -mariko 49 -4u 49 -whitesides 49 -croods 49 -d'huez 49 -paddle8 49 -easa 49 -saddique 49 -glc 49 -tripwire 49 -intercede 49 -retracting 49 -boney 49 -gollin 49 -lekhwiya 49 -cowles 49 -hitchhike 49 -canaletto 49 -doctoring 49 -hominids 49 -creche 49 -tackett 49 -growls 49 -ibooks 49 -shahan 49 -stabenow 49 -nine-under 49 -subglacial 49 -shepperton 49 -re-admitted 49 -kilmer 49 -limburg 49 -post-intelligencer 49 -student-led 49 -mutua 49 -noisily 49 -fretted 49 -cnnmexico 49 -chomp 49 -million-a-year 49 -joie 49 -unhappily 49 
-noam 49 -loveridge 49 -hewlin 49 -archrivals 49 -chukchi 49 -60-mile 49 -okapi 49 -wiry 49 -greenbrier 49 -greco-roman 49 -madras 49 -qualitative 49 -online-only 49 -26.7 49 -widely-used 49 -barklie 49 -chit 49 -undercuts 49 -hodgkins 49 -bretagne 49 -mamba 49 -vfb 49 -jeopardizes 49 -csl 49 -116th 49 -80-year 49 -prejudge 49 -stoyanov 49 -36.1 49 -normalised 49 -knecht 49 -tourre 49 -avicii 49 -kwazulu-natal 49 -outflows 49 -556 49 -aia 49 -futon 49 -ermine 49 -fowkes 49 -sofyen 49 -aoife 49 -10-metre 49 -barium 49 -klippel 49 -intranet 49 -resents 49 -patterdale 49 -prussian 49 -kosovan 49 -yuriy 49 -revue 49 -deasy 49 -lorimer 49 -sandbox 49 -holroyd 49 -eviscerated 49 -paulk 49 -marcell 49 -wigdor 49 -vignettes 49 -llewelyn-bowen 49 -barfield 49 -impersonations 49 -supt. 49 -neutralizing 49 -spindler 49 -burak 49 -cornella 49 -precipitate 49 -jinn 49 -yazdanpanah 49 -egyptologist 49 -neurotransmitters 49 -kerkowski 49 -reshuffled 49 -panzer 49 -r-utah 49 -waterlooville 49 -efsf 49 -falwell 49 -oriana 49 -hendrik 49 -karenina 49 -hamrick 49 -kota 49 -preened 49 -hassane 49 -arirang 49 -dowden 49 -holness 49 -puny 49 -tarver 49 -reiki 49 -cross-cultural 49 -shazad 49 -ulman 49 -cormann 49 -lopilato 49 -bogs 48 -westerman 48 -choy 48 -cuatro 48 -8-foot 48 -acupuncturist 48 -jillette 48 -ranch-style 48 -morakot 48 -earthworms 48 -magnifies 48 -leveler 48 -menz 48 -vosper 48 -pinstriped 48 -socino 48 -rohner 48 -@barackobama 48 -ect 48 -vardon 48 -barents 48 -serenading 48 -leitch 48 -televise 48 -hainey 48 -carbon-fiber 48 -morphs 48 -03 48 -foxborough 48 -preamble 48 -gomera 48 -derren 48 -high-earning 48 -aquamarine 48 -darley 48 -morlidge 48 -bolaven 48 -sandia 48 -biglia 48 -campsie 48 -44million 48 -babble 48 -southpaw 48 -bogie 48 -obita 48 -1:00 48 -menstruating 48 -cinch 48 -atzeni 48 -savic 48 -hall-style 48 -deveraux 48 -garrincha 48 -zombieland 48 -neots 48 -syracuse.com 48 -aimless 48 -petrus 48 -kiara 48 -cowburn 48 -retinoblastoma 48 -blaster 48 -beaudoin 48 -guestrooms 48 -custis 48 -precondition 48 -zambrano 48 -decriminalizing 48 -beekeeping 48 -ilona 48 -dinh 48 -twice-weekly 48 -chavis 48 -chamois 48 -jardin 48 -flickers 48 -korn 48 -diani 48 -brockenhurst 48 -466 48 -plumper 48 -cross-breed 48 -28.7 48 -dehaan 48 -calloway 48 -anti-clockwise 48 -timea 48 -camuto 48 -yakutia 48 -fire-breathing 48 -glioma 48 -dolled 48 -kastenbaum 48 -dramatised 48 -impaler 48 -noncombat 48 -zein 48 -skimp 48 -tahari 48 -traverso 48 -dhesi 48 -rosalyn 48 -burrowes 48 -kudrow 48 -lenighan 48 -edema 48 -doody 48 -musée 48 -pontypool 48 -insofar 48 -fishel 48 -fussing 48 -off-shoot 48 -refloat 48 -fifa.com 48 -orgreave 48 -soldiered 48 -9.75 48 -labor-intensive 48 -glaubers 48 -deride 48 -paice 48 -admiringly 48 -moroccan-born 48 -snaring 48 -wariness 48 -holford 48 -toft 48 -shenzhou 48 -riband 48 -cooey 48 -qat 48 -mccolgan 48 -garside 48 -glorification 48 -nares 48 -republique 48 -disease-free 48 -urbi 48 -penn. 
48 -summarised 48 -stubbings 48 -ikram 48 -linea 48 -u.s.-mexican 48 -cursor 48 -skechers 48 -21.1 48 -nanning 48 -congresses 48 -mellberg 48 -kibble 48 -spearfishing 48 -blotting 48 -mertz 48 -maqsood 48 -flickered 48 -colloquial 48 -10-week-old 48 -uruguayans 48 -postecoglou 48 -two-metre 48 -francona 48 -blencowe 48 -nightdress 48 -unspeakably 48 -katv 48 -buckwheat 48 -luxuriously 48 -georgia-based 48 -crociere 48 -anker 48 -36ft 48 -lashawn 48 -untethered 48 -7.35 48 -straub 48 -gheorghe 48 -brindisi 48 -latches 48 -subliminal 48 -34dd 48 -sugden 48 -carbide 48 -pancho 48 -nflpa 48 -tebbs 48 -lerwick 48 -causalities 48 -chisholms 48 -fanboys 48 -southee 48 -robertshaw 48 -methylamphetamine 48 -pseudo 48 -conglomerates 48 -face-lift 48 -leigh-on-sea 48 -labropoulou 48 -streller 48 -mortis 48 -wunderkind 48 -undergrad 48 -32.8 48 -reay 48 -savored 48 -wapt 48 -gainiyeva 48 -attwell 48 -gagne 48 -dera 48 -liotta 48 -end-of-terrace 48 -israelites 48 -ledgett 48 -ichthyosaur 48 -a34 48 -montagu 48 -yushchenko 48 -anti-competitive 48 -ochs 48 -acetone 48 -shirdon 48 -phangan 48 -aviles 48 -sixth-grader 48 -benihana 48 -5/4/80 48 -oss 48 -kotov 48 -m-16 48 --12 48 -earp 48 -eco-system 48 -r-tennessee 48 -chartres-abbott 48 -hadzic 48 -arstechnica.com 48 -super-fan 48 -strauss-khan 48 -tardiness 48 -posner 48 -hosing 48 -romulus 48 -beeson 48 -naira 48 -splashy 48 -anh 48 -then-candidate 48 -mime 48 -belfield 48 -spinney 48 -government-approved 48 -octopussy 48 -class-a 48 -placental 48 -cottesloe 48 -abortive 48 -bretag 48 -nuzzle 48 -stalybridge 48 -retorts 48 -substrate 48 -backfires 48 -tucci 48 -wall-mounted 48 -nederlander 48 -ngn 48 -shiba 48 -20:40 48 -nicolaides 48 -information-sharing 48 -400lbs 48 -extraterrestrials 48 -ghobadi 48 -bazooka 48 -coldness 48 -fadhel 48 -975 48 -thought-out 48 -makris 48 -notation 48 -lohr 48 -deadpanned 48 -half-full 48 -ferndale 48 -kiruna 48 -fido 48 -gastropub 48 -haddon 48 -hamlett 48 -fredrickson 48 -marfan 48 -metalwork 48 -franco-german 48 -faraday 48 -chamberlin 48 -let-off 48 -noguchi 48 -preeminent 48 -awick 48 -nolte 48 -harmonic 48 -boba 48 -kishore 48 -duchenne 48 -free-to-air 48 -virtual-reality 48 -humiliations 48 -democrat-controlled 48 -buckyballs 48 -stripped-down 48 -mucous 48 -gendered 48 -coyly 48 -wcnc 48 -100-pound 48 -test-fired 48 -under-secretary 48 -rizzle 48 -below-freezing 48 -42.6 48 -3.10 48 -quagliarella 48 -labiaplasty 48 -tappan 48 -knitters 48 -thatcherite 48 -dyken-rouen 48 -underachievement 48 -glowingly 48 -umberger 48 -redpath 48 -dinars 48 -bushmen 48 -ceasar 48 -holmfirth 48 -anastacia 48 -mauritanian 48 -581 48 -mcgonigle 48 -begic 48 -flat-footed 48 -crittenton 48 -johana 48 -maginnis 48 -holdouts 48 -dalmatians 48 -sacchi 48 -seamers 48 -tawana 48 -b.i.g. 48 -theologians 48 -biddulph 48 -palazuelos 48 -sanz 48 -cross-eyed 48 -detonates 48 -discontinuing 48 -drouet 48 -cineworld 48 -centipede 48 -dreyfus 48 -eight-storey 48 -mindsets 48 -tough-talking 48 -passchendaele 48 -lidocaine 48 -herbivore 48 -coste 48 -maturo 48 -miquel 48 -nos. 
48 -kyrece 48 -6.10 48 -hex 48 -cac 48 -mendis 48 -laotian 48 -buskers 48 -coefficient 48 -gonzo 48 -defiled 48 -tartaglia 48 -tarmoh 48 -abalone 48 -illicitly 48 -grinders 48 -yon 48 -winner-takes-all 48 -romney-ryan 48 -five-wicket 48 -lazarevic 48 -refundable 48 -sci 48 -reactivate 48 -cannibalistic 48 -krone 48 -haugen 48 -postural 48 -elvan 48 -civet 48 -malverde 48 -thaler 48 -designations 48 -scribes 48 -montalvo 48 -hobday 48 -turman 48 -longford 48 -a300 48 -mhz 48 -locates 48 -h7 48 -bociurkiw 48 -karyn 48 -hara 48 -astrobotic 48 -pouts 48 -oco-2 48 -moath 48 -expansionist 48 -lucentis 48 -565 48 -darcie 48 -average-sized 48 -kiteboarding 48 -basketballs 48 -latrine 48 -hayton 48 -navigable 48 -inversions 48 -up-to-the-minute 48 -frontera 48 -aslef 48 -superseding 48 -shapeless 48 -tamales 48 -monégasque 48 -liskeard 48 -go-karts 48 -groomers 48 -uscis 48 -game-time 48 -d-north 48 -shakir 48 -smallholding 48 -kronor 48 -adjudicated 48 -creamery 48 -salameh 48 -mcgeorge 48 -superhighway 48 -hyperventilating 48 -inflow 48 -geolocation 48 -cudicini 48 -shovelling 48 -exterminator 48 -tchenguiz 48 -do-gooder 48 -sixteen-year-old 48 -mihai 48 -casework 48 -wistfully 48 -serino 48 -sparkman 48 -canty 48 -sonnet 48 -propagation 48 -fill-in 48 -arroja 48 -buzzy 48 -counter-terrorist 48 -loudmouth 48 -nouns 48 -linde 48 -alumna 48 -spectra 48 -harbisson 48 -family-orientated 48 -keflezighi 48 -aragones 48 -mahler 48 -bucs 48 -wellman 48 -u.s.-pakistan 48 -lie-in 48 -12-week-old 48 -16-week 48 -kasia 48 -simgholam 48 -rino 48 -'08 48 -toma 48 -mini-bar 48 -grobbelaar 48 -plasticine 48 -exempting 48 -firebird 48 -morgans 48 -zbigniew 48 -tca 48 -sma 48 -lockstep 48 -dennett 48 -schoolfriends 48 -cerny 48 -scouler 48 -scorcher 48 -tablecloths 48 -wracking 48 -hobo 48 -callebs 48 -timepieces 48 -v1 48 -hypoplastic 48 -fielders 48 -highfield 48 -blacksburg 48 -goforth 48 -capitalization 48 -heisenberg 48 -hoye 48 -scroungers 48 -9-12 48 -asprilla 48 -duomo 48 -veneto 48 -tweedy 48 -w8 48 -ashmore 48 -fashionably 48 -3:40 48 -615 48 -campeche 48 -pando 48 -jcpenney 48 -nycfc 48 -philadelphia-based 48 -carrefour 48 -idiocy 48 -typos 48 -fernbridge 48 -reville 48 -dissipates 48 -snarls 48 -sunniest 48 -pinki 48 -kelston 48 -gordo 48 -twenty-somethings 48 -kellermann 48 -featureless 48 -laughingly 48 -preloaded 48 -eilidh 48 -glt 48 -mantras 48 -afton 48 -pichai 48 -proportionality 48 -belsize 48 -gentoo 48 -identifier 48 -hofmann 48 -white-haired 48 -iga 48 -aeromexico 48 -worships 48 -brusk 48 -stealthily 48 -burks 48 -sin-bin 48 -516 48 -v10 48 -crewmates 48 -grandview 48 -birkhall 48 -krell 48 -deform 48 -planking 48 -leven 48 -gerontology 48 -binns 48 -g-string 48 -ak47s 48 -d5 48 -strictures 48 -then-vice 48 -25-30 48 -newlands 48 -monarchist 48 -dian 48 -methyl 48 -1823 48 -remonstrating 48 -jenrick 48 -gugulethu 48 -fixed-rate 48 -dvorak 48 -straight-up 48 -shaul 48 -moonlit 48 -petered 48 -hwy 48 -crikey 48 -delany 48 -topsoil 48 -superficially 48 -mo'nique 48 -makati 48 -secretarial 48 -andreotti 48 -beckerman 48 -horse-power 48 -serry 48 -shafei 48 -26.3 48 -budimlic 48 -ipsosmori 48 -carissa 48 -politic 48 -high-res 48 -narrates 48 -eliasson 48 -lindstrom 48 -government-appointed 48 -tarcisio 48 -al-shugur 48 -marquees 48 -bright-eyed 48 -politifact 48 -chokes 48 -rioja 48 -koolhaas 48 -sarries 48 -komissarova 48 -sestak 48 -indulgences 48 -porush 48 -droopy 48 -nff 48 -watzke 48 -orbi 48 -14-foot 48 -leprechaun 48 -puglia 48 -terreblanche 48 -dawah 48 -90m 48 -eurocontrol 48 
-necessitate 48 -breitbart.com 48 -schnitt 48 -virals 48 -4:40 48 -cranch 48 -shuafat 48 -lakenheath 48 -demographers 48 -swiftwater 48 -walkden 48 -farmingdale 48 -160mph 48 -27st 48 -optimized 48 -westmorland 48 -jamel 48 -coworth 48 -georginio 48 -debora 48 -4/4/81 48 -amenhotep 48 -lipitor 48 -caddis 48 -long-ago 48 -beloit 48 -melisa 48 -padiham 48 -subic 48 -gazzard 48 -over-stretched 48 -unchanging 48 -barcelona-based 48 -eckerd 48 -weisfeldt 48 -anachronism 48 -respess 48 -constipated 48 -msg 48 -spring-like 48 -koren 48 -florenzi 48 -mele 48 -paracuaro 48 -below-average 48 -lafontaine 48 -rijksmuseum 48 -12-ounce 48 -fukuoka 48 -dioxin 48 -javan 48 -riker 48 -cadavers 48 -laporta 48 -27.3 48 -shriek 48 -pompadour 48 -stop-off 48 -zuri 48 -best-preserved 48 -unmistakably 48 -hidic 48 -coxon 48 -pinkston 48 -sandrine 48 -leathery 48 -waseca 48 -whiteknighttwo 48 -decorates 48 -530,000 48 -washroom 48 -50cm 48 -officeholders 47 -techie 47 -pleats 47 -automate 47 -1gb 47 -dependencies 47 -kesse 47 -pyeongchang 47 -itzcoatl 47 -youtuber 47 -chuckulnaskit 47 -scrummaging 47 -harbhajan 47 -femi 47 -misadventures 47 -mellen 47 -followings 47 -kuti 47 -481 47 -robillard 47 -foiling 47 -snazzy 47 -hunniford 47 -bub 47 -gabbidon 47 -telluride 47 -rework 47 -frequents 47 -sharpshooters 47 -rossiya 47 -multan 47 -qashqai 47 -ciao 47 -darzi 47 -park51 47 -six-term 47 -simplex 47 -labouring 47 -gulch 47 -re-opens 47 -madd 47 -incas 47 -veneno 47 -surnamed 47 -chicagoans 47 -non-whites 47 -broadsheet 47 -krugman 47 -152,000 47 -sphinxes 47 -mirco 47 -republicanism 47 -high-spec 47 -cerqua 47 -rate-rigging 47 -gratuities 47 -fletch 47 -market-based 47 -waca 47 -t1 47 -over-excited 47 -mugello 47 -differentiated 47 -adrianna 47 -powerade 47 -wpa 47 -m'vila 47 -trippers 47 -deutsch 47 -iwc 47 -darkroom 47 -cryotherapy 47 -kermode 47 -technocrats 47 -troisi 47 -p3 47 -fez 47 -bexhill 47 -udar 47 -abetz 47 -15per 47 -toiletry 47 -minstrel 47 -lunchroom 47 -radicalising 47 -reloading 47 -brusque 47 -emenike 47 -schmelzer 47 -fireside 47 -yiwu 47 -muscatine 47 -toumani 47 -jakisic 47 -darrien 47 -job-seekers 47 -bij 47 -khou.com 47 -windfarms 47 -guttmacher 47 -disconnection 47 -17-point 47 -hopton 47 -172,000 47 -ramapo 47 -caversham 47 -swooning 47 -afsar 47 -group-stage 47 -matsui 47 -outsmart 47 -r.r. 
47 -lps 47 -havasu 47 -savvas 47 -beastly 47 -kesner 47 -nondisclosure 47 -fatu 47 -crystallized 47 -politicization 47 -oversharing 47 -disinterest 47 -oy 47 -leg-spinner 47 -bryans 47 -brutish 47 -verzilov 47 -non-issue 47 -matalin 47 -throb 47 -kestrel 47 -46-year 47 -10-11 47 -idealized 47 -kartika 47 -446 47 -ice-skating 47 -gokhan 47 -sargeant 47 -tarn 47 -youn 47 -keyword 47 -divisiveness 47 -botticelli 47 -georgiana 47 -sheyi 47 -3.5-inch 47 -ugo 47 -fishmongers 47 -handicaps 47 -well-qualified 47 -tetrahydrocannabinol 47 -gatekeepers 47 -offal 47 -kyly 47 -karnamaya 47 -rouzier 47 -1994-95 47 -tum 47 -bme 47 -mortgaged 47 -stofan 47 -dinar 47 --8 47 -pittsburg 47 -telepresence 47 -7/1 47 -chidambaram 47 -bazalgette 47 -kingstown 47 -cina 47 -venky 47 -mootz 47 -emulates 47 -bushmeat 47 -2.27 47 -nhs-funded 47 -lindhout 47 -indictable 47 -windhoek 47 -borgata 47 -wagstaffe 47 -leishman 47 -congressionally 47 -25cm 47 -busing 47 -ellie-mae 47 -663 47 -worshiping 47 -yorba 47 -berkhamsted 47 -dall 47 -kaduna 47 -ground-level 47 -leed 47 -hougaard 47 -rosenfield 47 -al-sabah 47 -huard 47 -17.50 47 -gondwana 47 -intrigues 47 -kameni 47 -bhagat 47 -vaxevanis 47 -giardina 47 -kozlowski 47 -farne 47 -kcen 47 -myopic 47 -agg 47 -wicklow 47 -pallotta 47 -kipsang 47 -after-show 47 -sharelinktop 47 -on-hand 47 -thrillseeker 47 -cartwheel 47 -overpasses 47 -mcmuffin 47 -picutred 47 -kum 47 -decompress 47 -duesseldorf 47 -uars 47 -frawley 47 -turkey-syria 47 -jinked 47 -annoyances 47 -dory 47 -nutrient-rich 47 -lerman 47 -26-mile 47 -nemcova 47 -lukyanova 47 -pre-emptively 47 -fallowfield 47 -heavily-pregnant 47 -montauk 47 -costanza 47 -sex-related 47 -sauerkraut 47 -234,000 47 -borst 47 -kham 47 -mcmahill 47 -balloted 47 -five-part 47 -falafel 47 -bikies 47 -lessens 47 -sanitizing 47 -titleholder 47 -bernadino 47 -micron 47 -22-hour 47 -firma 47 -apricots 47 -short-haired 47 -echeverria 47 -around-the-world 47 -intercollegiate 47 -marche 47 -raizy 47 -blackmun 47 -thimerosal 47 -dithered 47 -chinthu 47 -appetizer 47 -ramparts 47 -deas 47 -1799 47 -fifty-one 47 -prefix 47 -mirfield 47 -irradiated 47 -second-grade 47 -1665 47 -trentadue 47 -personalization 47 -stearman 47 -jaroslav 47 -oxted 47 -put-down 47 -bilbies 47 -geer 47 -krims 47 -,500 47 -undeserving 47 -1818 47 -rpgs 47 -g-spot 47 -callista 47 -manhattan-based 47 -lingfield 47 -spanish-american 47 -freemasons 47 -ganeshan 47 -upriver 47 -u-shaped 47 -49million 47 -niswender 47 -rosalynn 47 -twosome 47 -twyford 47 -918 47 -ricocheting 47 -ruefully 47 -torchlight 47 -unanimity 47 -menasche 47 -chief-of-staff 47 -niland 47 -maryland-based 47 -low-light 47 -anyways 47 -washed-up 47 -winstanley 47 -water-logged 47 -lamberth 47 -syrian-turkish 47 -golightly 47 -1.06 47 -usefully 47 -colonels 47 -family-sized 47 -djuricic 47 -redden 47 -30-plus 47 -articlechannelfollowbutton 47 -i-5 47 -mingles 47 -paculis 47 -harb 47 -iodide 47 -lorgat 47 -rollback 47 -70p 47 -also-rans 47 -downforce 47 -biscardi 47 -586 47 -mangue 47 -mujuru 47 -emanate 47 -senior-level 47 -jinking 47 -braiding 47 -praline 47 -hiva 47 -mixed-use 47 -hickling 47 -croce 47 -mcauslan 47 -romps 47 -48million 47 -timur 47 -toggle 47 -ascribe 47 -sodom 47 -gallego 47 -leominster 47 -codified 47 -batmaz 47 -unachievable 47 -deangelo 47 -arison 47 -manser 47 -wynonna 47 -stanislav 47 -ryker 47 -attaché 47 -pickets 47 -azamara 47 -mementoes 47 -tupolev 47 -wakayama 47 -nola.com 47 -hush-hush 47 -piaf 47 -languid 47 -tlas 47 -vincente 47 -ivanpah 47 -dissonance 47 -sderot 47 -cort 47 
-reedy 47 -417 47 -fosse 47 -top-scored 47 -maryanne 47 -haywire 47 -398 47 -ex-partners 47 -jolting 47 -21:05 47 -expeditious 47 -boatload 47 -didn 47 -enthuses 47 -hook-handed 47 -stagnate 47 -18-man 47 -corman 47 -aiyana 47 -urszula 47 -long-exposure 47 -hadza 47 -bests 47 -whatley 47 -dumitru 47 -729 47 -harford 47 -fait 47 -lenas 47 -dropcam 47 -al-sheikh 47 -christoper 47 -malak 47 -silvercrest 47 -nastier 47 -pearmain 47 -4 1/2 47 -drash 47 -479 47 -yip 47 -aneurism 47 -blindfolds 47 -chug 47 -whats 47 -canady 47 -muffle 47 -31-year 47 -saudia 47 -ebola-hit 47 -erath 47 -half-finished 47 -realign 47 -thoroughness 47 -mcduffie 47 -bettley 47 -noorani 47 -australasian 47 -02:33 47 -02:35 47 -crider 47 -stilnox 47 -derman 47 -s'mores 47 -wolcott 47 -bankia 47 -solarium 47 -karmen 47 -aggregates 47 -pharo 47 -chanced 47 -manoir 47 -cajoled 47 -mattson 47 -sweetman 47 -hublot 47 -wresting 47 -bingeing 47 -baaps 47 -melaku 47 -englander 47 -zamata 47 -sanghera 47 -hiller 47 -lapre 47 -40-strong 47 -slogging 47 -wizz 47 -maraschino 47 -dewenter 47 -booze-fuelled 47 -theropod 47 -pebley 47 -lindisfarne 47 -impinge 47 -off-the-shoulder 47 -yaroslava 47 -cheatham 47 -fdp 47 -takamatsu 47 -militarism 47 -mutilate 47 -bolo 47 -one-word 47 -histamine 47 -bamburgh 47 -d-missouri 47 -drysdale 47 -belstaff 47 -quantock 47 -porta 47 -chaparral 47 -brollies 47 -denzil 47 -zuhri 47 -pirouette 47 -baumet 47 -puritan 47 -zmuda 47 -gamestop 47 -stunting 47 -palaszczuk 47 -bulatov 47 -fun-filled 47 -neknomination 47 -alayed 47 -498 47 -girlish 47 -babygro 47 -fairey 47 -62million 47 -purim 47 -botafogo 47 -dah 47 -cynon 47 -fondue 47 -yemeni-american 47 -compulsively 47 -20:37 47 -besigye 47 -paperless 47 -then-chief 47 -verifies 47 -razing 47 -guanajuato 47 -stir-fry 47 -curricula 47 -gold-digger 47 -gorka 47 -11alive 47 -giersch 47 -fur-trimmed 47 -chamoun 47 -el-zour 47 -quintanilla 47 -manna 47 -neasden 47 -denture 47 -camarillo 47 -maddalena 47 -trickiest 47 -no-contest 47 -neutralised 47 -kuna 47 -eustice 47 -pro-syrian 47 -yutu 47 -vedad 47 -mcpartlin 47 -ifixit 47 -sleepwear 47 -cassock 47 -vanesa 47 -dicing 47 -caden 47 -patisserie 47 -mykola 47 -onondaga 47 -well-read 47 -boedecker 47 -encapsulate 47 -hayle 47 -calorie-laden 47 -0.06 47 -tattersalls 47 -eccentricities 47 -cartographers 47 -murano 47 -wrought-iron 47 -veganism 47 -consents 47 -29.95 47 -aylesford 47 -pannone 47 -conscientiousness 47 -517 47 -39million 47 -perrier 47 -degrades 47 -nagged 47 -malleable 47 -vice-versa 47 -loathsome 47 -moomin 47 -12.10 47 -figment 47 -morey 47 -nanoscale 47 -4kg 47 -rnas 47 -nanotubes 47 -xxxxx 47 -12/5 47 -yamuna 47 -curragh 47 -leftie 47 -co-chairs 47 -hurrell 47 -noticias 47 -wilts 47 -capybara 47 -boyzone 47 -cerf 47 -barmby 47 -misfired 47 -24ft 47 -supermajority 47 -relegate 47 -hessian 47 -i-report 47 -futurama 47 -repatriating 47 -olav 47 -95million 47 -rollinson 47 -psalms 47 -titcomb 47 -quezon 47 -26.6 47 -celik 47 -stonie 47 -sealife 47 -kiraly 47 -trotta 47 -rote 47 -kidscape 47 -shafia 47 -sutay 47 -clincher 47 -pressure-cooker 47 -strapline 47 -12-person 47 -brigden 47 -knighton 47 -ryman 47 -1795 47 -stebbing 47 -yearwood 47 -internazionale 47 -quattro 47 -abdulkadir 47 -55m 47 -wolston 47 -jeffords 47 -star-forming 47 -frederiksen 47 -interdiction 47 -backgammon 47 -1540 47 -dilate 47 -neurotransmitter 47 -selous 47 -krall 47 -ultramarathon 47 -a.m.-5 47 -186,000 47 -boorman 47 -mcleary 47 -depeche 47 -kareen 47 -sluice 47 -lumb 47 -rivaling 47 -wanderer 47 -d'alene 47 
-judeo-christian 47 -colourfully 47 -technocratic 47 -amersham 47 -unsurvivable 47 -sellafield 47 -spitalfields 47 -ebosse 47 -ferzat 47 -anti-whaling 47 -thoreau 47 -imus 47 -folkes 47 -gotye 47 -backhanded 47 -newbies 47 -n.w.a. 47 -showjumper 47 -lichtsteiner 47 -abeyta 47 -harpist 47 -creased 47 -guerillas 47 -stapleford 47 -scobie 47 -epworth 47 -rigondeaux 47 -weatherhead 47 -godinez-avila 47 -pruned 47 -gielgud 47 -refracted 47 -post-pregnancy 47 -science-based 47 -lukic 47 -dog-fighting 47 -9:20 47 -multivitamin 47 -droop 47 -allis 46 -unbowed 46 -aed 46 -derkosh 46 -lady-in-waiting 46 -hilltops 46 -equitably 46 -tacks 46 -lujan 46 -olathe 46 -slowness 46 -m65 46 -outlooks 46 -aduriz 46 -scythe 46 -hedi 46 -bermane 46 -kimye 46 -radio-controlled 46 -blixt 46 -bolsover 46 -mase 46 -missing-person 46 -dominos 46 -shoppe 46 -¦ 46 -815 46 -correio 46 -luiten 46 -herbivorous 46 -loews 46 -rediscovery 46 -miyazaki 46 -walbrook 46 -speakes 46 -bradman 46 -ector 46 -staci 46 -hazlewood 46 -anteaters 46 -russian-built 46 -trayers 46 -shindig 46 -unsurprised 46 -rasen 46 -buckhead 46 -quince 46 -muirhead 46 -confection 46 -expendable 46 -beke 46 -bibb 46 -mishcon 46 -176,000 46 -lundgren 46 -amorim 46 -1999-2000 46 -beslan 46 -homie 46 -rearview 46 -monroy 46 -steam-powered 46 -assaf 46 -catton 46 -boyega 46 -transcendental 46 -p6 46 -childrenswear 46 -zeki 46 -20-years 46 -ancona 46 -10,200 46 -sanitised 46 -barral 46 -mughniyeh 46 -sundry 46 -utterances 46 -deterrents 46 -surpluses 46 -kolbeinn 46 -burney 46 -u.a.e. 46 -vouched 46 -cocooned 46 -onlive 46 -bureaucracies 46 -atl 46 -fraga 46 -turkson 46 -hustling 46 -1992-95 46 -honeymooning 46 -amour 46 -commercialise 46 -csp 46 -georgette 46 -hylands 46 -churchman 46 -expressjet 46 -crazies 46 -tyagi 46 -cabling 46 -collings 46 -combatting 46 -ducasse 46 -pasceri 46 -sneyd 46 -parsnip 46 -kemar 46 -willems 46 -replaying 46 -u.s.-iran 46 -macneil 46 -donohoe 46 -incubating 46 -summarize 46 -tilapia 46 -koko 46 -ticket-holder 46 -veltman 46 -nizhny 46 -lashkar-e-jhangvi 46 -coining 46 -apodaca 46 -flotsam 46 -one-goal 46 -blotted 46 -jacinta 46 -larval 46 -weirdness 46 -stafylidis 46 -fuselages 46 -642 46 -two-for-one 46 -42-page 46 -barrasso 46 -sahib 46 -plaits 46 -eide 46 -redeveloping 46 -kumbh 46 -lorca 46 -aarthun 46 -abdou 46 -screwdrivers 46 -woodlawn 46 -third-most 46 -ridgeback 46 -switch-on 46 -outliers 46 -albatrosses 46 -chynn 46 -well-maintained 46 -softie 46 -wjxt 46 -jaywalking 46 -f.w. 46 -narnia 46 -interning 46 -crashers 46 -psc 46 -naveen 46 -autocue 46 -light-sensitive 46 -ganim 46 -trackside 46 -euroscepticism 46 -marfa 46 -frack 46 -chery 46 -apportion 46 -bilston 46 -leapfrogging 46 -kick-offs 46 -octagonal 46 -dirtier 46 -moye 46 -overstating 46 -britian 46 -passenger-side 46 -vagaries 46 -hobica 46 -naturists 46 -enin 46 -usis 46 -towcester 46 -astrophotographer 46 -devaluing 46 -coalesce 46 -30-month 46 -prepubescent 46 -unicorns 46 -7/5 46 -blooper 46 -jammer 46 -flaccid 46 -j.b. 
46 -buckner 46 -kaelin 46 -most-capped 46 -eucharist 46 -year-over-year 46 -logjam 46 -recites 46 -torry 46 -maiga 46 -immigrations 46 -diuretic 46 -ballgown 46 -bacchus 46 -rachida 46 -chameleons 46 -35-minute 46 -777s 46 -podemos 46 -infomercial 46 -cubist 46 -pseudomonas 46 -137,000 46 -glyndebourne 46 -demography 46 -48m 46 -mycobacterium 46 -inoculated 46 -mid-year 46 -ensnare 46 -nutcase 46 -rieckhoff 46 -khaldoon 46 -obstructionism 46 -hereafter 46 -steelworks 46 -ogling 46 -888 46 -kalac 46 -trudi 46 -husk 46 -a38 46 -chhang 46 -holywell 46 -hakin 46 -klamath 46 -47,500 46 -wethington 46 -const 46 -kilkenny 46 -haneda 46 -foothill 46 -solvable 46 -sheba 46 -rah 46 -low-density 46 -maladies 46 -goldberger 46 -osa 46 -devitt 46 -euclid 46 -testes 46 -engrained 46 -pashtuns 46 -kirribilli 46 -farm-to-table 46 -bulges 46 -animalistic 46 -federighi 46 -baidoa 46 -demonising 46 -excellently 46 -balham 46 -kshb 46 -céline 46 -jakadrien 46 -child-abuse 46 -angharad 46 -rauseo 46 -2,000-mile 46 -woolsey 46 -jubb 46 -silver-haired 46 -desertification 46 -marois 46 -paradon 46 -barbier 46 -acolytes 46 -swannell 46 -mambo 46 -placentas 46 -raff 46 -ashish 46 -customizable 46 -elauf 46 -tegra 46 -200lb 46 -falkingham 46 -flinched 46 -clunkers 46 -hard-charging 46 -12in 46 -dano 46 -high-scoring 46 -non-consensual 46 -brookhaven 46 -facilitators 46 -resoundingly 46 -benedetti 46 -obscenely 46 -jarosz 46 -543 46 -elt 46 -indoctrinate 46 -bezel 46 -kamar 46 -scarsdale 46 -comma 46 -petrovic 46 -radziwon-chapman 46 -mpa 46 -cash-rich 46 -vrooman 46 -venturi 46 -beagley 46 -poli 46 -184,000 46 -noa 46 -intuit 46 -duopoly 46 -tizen 46 -1816 46 -eleonora 46 -rinsed 46 -conestoga 46 -petrobras 46 -joaquim 46 -betfred 46 -neonatologist 46 -compagnie 46 -reauthorized 46 -sub-species 46 -sawa 46 -apprised 46 -jimbo 46 -reann 46 -sket 46 -disliking 46 -200-acre 46 -roly 46 -@neymarjr 46 -french-algerian 46 -outgrowth 46 -maggiolo 46 -bipedal 46 -palettes 46 -earful 46 -cooperatively 46 -kaino 46 -goerges 46 -interdependent 46 -boulter 46 -1811 46 -gidget 46 -musing 46 -cannings 46 -sung-yeung 46 -riccardi 46 -pantsuit 46 -curveball 46 -madre 46 -simvastatin 46 -camembert 46 -meacher 46 -mediaset 46 -german-occupied 46 -rong 46 -forestall 46 -5-3-2 46 -peto 46 -sopwith 46 -adeline 46 -endoscopic 46 -gynecological 46 -devereaux 46 -squeaked 46 -sine 46 -breeden 46 -near-identical 46 -anti-retroviral 46 -willy-nilly 46 -koepka 46 -neoclassical 46 -sidebottom 46 -tolga 46 -gas-guzzling 46 -almanza 46 -vladmir 46 -bure 46 -karishma 46 -kintyre 46 -tressel 46 -touchingly 46 -enquiring 46 -gudrun 46 -sixth-formers 46 -oconee 46 -come-from-behind 46 -linkin 46 -sainte 46 -sunblock 46 -al-khansa 46 -debenham 46 -koons 46 -littlest 46 -l-shaped 46 -3.05 46 -self-effacing 46 -edm 46 -savyon 46 -fund-raisers 46 -ganged 46 -poconos 46 -samer 46 -doctrinal 46 -yate 46 -bolivarian 46 -azusa 46 -oden 46 -morehead 46 -741 46 -749 46 -leavy 46 -min-seok 46 -alibis 46 -tugboats 46 -miramax 46 -895 46 -koizumi 46 -anti-syrian 46 -hajar 46 -danone 46 -perrie 46 -wiretapped 46 -treanor 46 -alinea 46 -spry 46 -foa 46 -trespasser 46 -braff 46 -palcohol 46 -rawan 46 -zorro 46 -redditor 46 -shilpa 46 -shamil 46 -draven 46 -intonation 46 -mauldin 46 -750m 46 -helmet-mounted 46 -leper 46 -halterneck 46 -hayashi 46 -horlock 46 -naught 46 -mccool 46 -pampers 46 -lethality 46 -agape 46 -crosscountry 46 -grates 46 -bacardi 46 -dominicans 46 -volodymyr 46 -7.62 46 -suni 46 -nuristan 46 -wlwt 46 -nicollette 46 -14cm 46 -kensit 46 -giwa 46 
-nrk 46 -gentrified 46 -engstrom 46 -overconfident 46 -ploetz 46 -irvington 46 -rustin 46 -65m 46 -palmerston 46 -puntland 46 -chaffins 46 -ifaw 46 -12.95 46 -stiffened 46 -motiveless 46 -kinvig 46 -lago 46 -carnes 46 -compton-rock 46 -250ft 46 -duplicity 46 -nosed 46 -kojo 46 -natural-born 46 -20lbs 46 -megi 46 -salaried 46 -aquaculture 46 -bicker 46 -tumbledown 46 -gauged 46 -mishal 46 -dunton 46 -bibeau 46 -roney 46 -tapeworms 46 -yanis 46 -lancers 46 -ails 46 -speakerphone 46 -farbrace 46 -200,000-a-year 46 -basinger 46 -assuredly 46 -managerless 46 -bonifield 46 -christmassy 46 -glasgow-based 46 -green-light 46 -hairpiece 46 -sitka 46 -melina 46 -macao 46 -seahawk 46 -giggly 46 -33ft 46 -fairburn 46 -flaking 46 -massachusetts-dartmouth 46 -partitioned 46 -soloman 46 -topanga 46 -damsel 46 -storro 46 -manjoo 46 -najjar 46 -492 46 -juana 46 -dangerousness 46 -greenlee 46 -bisexuality 46 -hanen 46 -tac 46 -highnesses 46 -nocera 46 -north-northwest 46 -kristel 46 -1775 46 -heung-min 46 -f-18 46 -styx 46 -nutribullet 46 -purples 46 -gatling 46 -saidy 46 -600-year-old 46 -haatchi 46 -minden 46 -karunaratne 46 -tele 46 -crozier 46 -hylton 46 -anti-ship 46 -biding 46 -mehmanparast 46 -seven-star 46 -12-1 46 -techies 46 -pitbulls 46 -lohman 46 -hapoel 46 -haniya 46 -hodder 46 -oatley 46 -second-oldest 46 -ungainly 46 -vina 46 -heurelho 46 -laine 46 -dial-up 46 -levitan 46 -pierluigi 46 -50-metre 46 -chatrooms 46 -796 46 -nouvel 46 -scotland-williams 46 -racially-motivated 46 -40-acre 46 -hodgkiss 46 -jukes 46 -stobart 46 -bricked 46 -lise 46 -renate 46 -mini-dress 46 -huzar 46 -holiday-makers 46 -roadworthy 46 -fausto 46 -graver 46 -89th-minute 46 -cambra 46 -stockists 46 -gundotra 46 -hoskin 46 -sippy 46 -agnostic 46 -gloriana 46 -1483 46 -hende 46 -basma 46 -franchising 46 -restful 46 -date-krumm 46 -wafer-thin 46 -signers 46 -kuma 46 -x-47b 46 -koller 46 -pae 46 -roan 46 -@cnnopinion 46 -preen 46 -loughrey 46 -stevens-johnson 46 -cheree 46 -poof 46 -defecation 46 -pushups 46 -gingrey 46 -burmila 46 -paediatricians 46 -chock 46 -underused 46 -white-knuckle 46 -parker-bowles 46 -cellophane 46 -6km 46 -vfw 46 -seaorbiter 46 -eastland 46 -olympique 46 -devaux 46 -jozef 46 -second-quarter 46 -pandev 46 -418 46 -36.7 46 -arrayed 46 -pathogenic 46 -huq 46 -ryabkov 46 -bossing 46 -caps/goals 46 -hard-wired 46 -aranguiz 46 -ioan 46 -ehrman 46 -rayburn 46 -unappetising 46 -atul 46 -cleverest 46 -cuny 46 -lathrop 46 -elwa 46 -greenside 46 -seperate 46 -patronize 46 -tumilson 46 -mid-60s 46 -scherr 46 -goblet 46 -cygnets 46 -travelocity 46 -stigmatize 46 -repackaged 46 -principe 46 -vestments 46 -apprehensions 46 -gumede 46 -11-1 46 -ballinger 46 -motegi 46 -downmarket 46 -herein 46 -artest 46 -leuven 46 -saraswati 46 -veena 46 -wheeldon 46 -casto 46 -decanter 46 -overexposure 46 -plaxo 46 -maturation 46 -tf-x 46 -cookware 46 -bushkin 46 -athina 46 -ripening 46 -ob-gyn 46 -galanos 46 -vpn 46 -dou 46 -guarani 46 -houma 46 -mccarney 46 -sambo 46 -longer-range 46 -ravenscroft 46 -rapier 46 -fto 46 -roughest 46 -leclerc 46 -elmhurst 46 -deckhand 46 -sawaya 46 -o'dempsey 46 -weiler 46 -facey 46 -kirton 46 -715 46 -acuna 46 -first-of-its-kind 46 -bailed-out 45 -e.j. 
45 -motherless 45 -bygones 45 -times-dispatch 45 -undiminished 45 -terezin 45 -fairley 45 -sarra 45 -lazing 45 -near-total 45 -marysville-pilchuck 45 -belgrave 45 -english-born 45 -satanist 45 -dmc 45 -courier-journal 45 -timekeeping 45 -garlett 45 -brunell 45 -mahina 45 -tiendalli 45 -luminescent 45 -shenk 45 -mildest 45 -quinonez 45 -speier 45 -7.9-inch 45 -funder 45 -slights 45 -backlight 45 -b-movie 45 -writhe 45 -ensler 45 -adirondacks 45 -cialis 45 -portofino 45 -nationale 45 -tagle 45 -permutations 45 -sbu 45 -zircon 45 -unfunny 45 -nite 45 -sambadrome 45 -methylprednisolone 45 -customizing 45 -rearranging 45 -elasticated 45 -climatologist 45 -catskills 45 -ocean-going 45 -iraqi-born 45 -pavlyuchenko 45 -4.95 45 -al-hajj 45 -souris 45 -nineteen-year-old 45 -haggle 45 -superdry 45 -fidgeting 45 -ashli 45 -canford 45 -retraced 45 -ischaemic 45 -smelting 45 -tebartz-van 45 -nunley 45 -684 45 -issuers 45 -rathbone 45 -paul_newmandm 45 -fensome 45 -volk 45 -2011/2012 45 -pavlo 45 -fidgety 45 -highest-ever 45 -garabrant 45 -1,500-page 45 -midler 45 -beitar 45 -lightman 45 -footwell 45 -high-wire 45 -balshaw 45 -melanomas 45 -interment 45 -228,000 45 -pidgeon 45 -sita 45 -tomatina 45 -mids 45 -chayce 45 -non-biological 45 -other-worldly 45 -baldry 45 -ghawi 45 -objectified 45 -goyal 45 -1998-99 45 -ayoub 45 -re-named 45 -aurelie 45 -bould 45 -skywalk 45 -multi-billion-dollar 45 -parquet 45 -wagged 45 -463 45 -cuzco 45 -28.1 45 -briers 45 -squealed 45 -aerion 45 -over-ruled 45 -electricals 45 -al-douri 45 -whiten 45 -bisphenol 45 -delectable 45 -ekberg 45 -shrews 45 -waterworld 45 -capsizes 45 -uea 45 -laity 45 -dotty 45 -margarito 45 -six-acre 45 -pahrump 45 -european-style 45 -392 45 -ob 45 -people-to-people 45 -ralphie 45 -baddiel 45 -airedale 45 -crampons 45 -de'marquise 45 -knvb 45 -reprogrammed 45 -andretti 45 -templates 45 -chorister 45 -unescorted 45 -varsha 45 -ackerson 45 -kaiden 45 -gaudi 45 -francais 45 -dugouts 45 -harpal 45 -imdb.com 45 -hammett 45 -soundbites 45 -fifty-six 45 -kitesurfing 45 -anti-union 45 -reimer 45 -fingleton 45 -beauden 45 -napper 45 -teather 45 -15:47 45 -5mm 45 -dnainfo.com 45 -unknowing 45 -− 45 -chagas 45 -n1 45 -wholehearted 45 -decal 45 -bergamo 45 -dba 45 -lofthouse 45 -seacom 45 -work/life 45 -nazi-themed 45 -bittar 45 -flavouring 45 -epitomizes 45 -musonda 45 -mladenov 45 -borodai 45 -knickerbocker 45 -one-touch 45 -fire-fighters 45 -cartoon-like 45 -auriemma 45 -rolls-royces 45 -burgoyne 45 -zwanzger 45 -mswati 45 -beyer 45 -cicadas 45 -nikolas 45 -mussel 45 -lasseter 45 -604 45 -609 45 -pachauri 45 -retailed 45 -sovereigns 45 -exfoliate 45 -split-screen 45 -loni 45 -gigantism 45 -laundromat 45 -liddy 45 -lefevre 45 -palawan 45 -mousetrap 45 -berni 45 -karr 45 -prudish 45 -bobak 45 -reversals 45 -sopo 45 -losey 45 -auger 45 -constanta 45 -efren 45 -loosehead 45 -notepaper 45 -stubs 45 -ischannel 45 -sharaf 45 -ghailani 45 -half-built 45 -deputising 45 -fairing 45 -laure 45 -504 45 -503 45 -precipitating 45 -kamrava 45 -mosier 45 -myopia 45 -crighton 45 -grugy 45 -yanomami 45 -624 45 -montego 45 -11.10 45 -maundy 45 -outfitter 45 -experiential 45 -donda 45 -sulky 45 -houser 45 -andalusian 45 -clearinghouse 45 -taubman 45 -6-foot-5 45 -6-foot-3 45 -culottes 45 -santini 45 -99.5 45 -misspelt 45 -stoichkov 45 -10.99 45 -abobaker 45 -siegal 45 -clenches 45 -one-up 45 -birmingham-shuttlesworth 45 -ridicules 45 -521 45 -varvara 45 -shyly 45 -1644 45 -kero 45 -numerals 45 -titmuss 45 -tangier 45 -anchin 45 -nedved 45 -kitting 45 -ilonen 45 -heart-broken 
45 -cyberstalking 45 -wildland 45 -unmistakeable 45 -relives 45 -leniently 45 -bad-boy 45 -flatscreen 45 -carneiro 45 -kan. 45 -dallaire 45 -ferkova 45 -amaechi 45 -adleta 45 -mousley 45 -sweated 45 -meo 45 -mez 45 -badgered 45 -31.7 45 -mounties 45 -545 45 -co-executive 45 -robo 45 -bravura 45 -vedran 45 -waistcoats 45 -hopi 45 -jokowi 45 -612 45 -wyland 45 -closter 45 -seleznyov 45 -ev 45 -trick-or-treaters 45 -s.h.i.e.l.d. 45 -lushan 45 -19,500 45 -zaccheroni 45 -50per 45 -gai 45 -under-strength 45 -albitz 45 -ifill 45 -1788 45 -four-bed 45 -craniosynostosis 45 -newbie 45 -farfan 45 -three-months-old 45 -glenconner 45 -39-year 45 -couscous 45 -pinner 45 -educationally 45 -willits 45 -25kg 45 -miskiw 45 -meliandou 45 -tamely 45 -kensal 45 -haniyeh 45 -transporters 45 -mordechai 45 -aider 45 -mismanaging 45 -klapheke 45 -sturtz 45 -iranian-americans 45 -invective 45 -interdependence 45 -subscribing 45 -callebaut 45 -zircons 45 -500-pound 45 -meatless 45 -wilfrid 45 -slavisa 45 -fiba 45 -flagrantly 45 -552 45 -javaheri 45 -head-maarek 45 -stiliyan 45 -gulped 45 -murrysville 45 -sundby 45 -woolman 45 -stepsons 45 -kloman 45 -hyams 45 -sabratha 45 -haemoglobin 45 -docker 45 -hokey 45 -mannarino 45 -butlin 45 -semi-truck 45 -purslow 45 -tangy 45 -thermals 45 -setters 45 -ketv 45 -bludgeon 45 -inshallah 45 -lauriewhitwell 45 -lampitt 45 -golborne 45 -dunsby 45 -brabourne 45 -litchfield 45 -122,000 45 -kleybanova 45 -openly-gay 45 -dewan 45 -gerbils 45 -weigh-ins 45 -pemba 45 -preflight 45 -paragliders 45 -aldwych 45 -perishing 45 -urbane 45 -touchy-feely 45 -windshields 45 -ripen 45 -just-released 45 -stani-reginald 45 -libelous 45 -geier 45 -vanderpump 45 -irises 45 -lembit 45 -ice-free 45 -bastareaud 45 -playdate 45 -stubby 45 -blunk 45 -395,000 45 -stieg 45 -thrillseekers 45 -maypole 45 -intruded 45 -top-scorer 45 -royton 45 -mar. 45 -ottowa 45 -ex-boyfriends 45 -590,000 45 -synch 45 -10.25 45 -mcateer 45 -geosciences 45 -magi 45 -ore. 
45 -playboys 45 -doble 45 -weitz 45 -segregating 45 -o'bannon 45 -ramprakash 45 -gunness 45 -restorers 45 -inbred 45 -ingersoll 45 -west-southwest 45 -afterparty 45 -gorski 45 -shout-out 45 -mubadala 45 -totalitarianism 45 -x-box 45 -lubchenco 45 -unitarian 45 -alpe 45 -caggie 45 -kristallnacht 45 -telefonica 45 -gloster 45 -hibernians 45 -itinerant 45 -1.60 45 -stobbart 45 -marthakelner 45 -01:33 45 -hesse 45 -one-line 45 -sae 45 -130billion 45 -ciccone 45 -riddles 45 -23.3 45 -alanah 45 -enlargements 45 -cade 45 -granby 45 -mcclanahan 45 -sexed 45 -llandaff 45 -cashpoints 45 -sprains 45 -madhouse 45 -unluckiest 45 -ona 45 -energising 45 -polycarbonate 45 -meanest 45 -meles 45 -veale 45 -tranquilized 45 -decriminalise 45 -quark 45 -cheongsam 45 -fergusson 45 -gd 45 -shoelace 45 -ruched 45 -smithson 45 -3,250 45 -purina 45 -rahnavard 45 -mychal 45 -directorships 45 -dryas 45 -beane 45 -e-fits 45 -nakhuda 45 -bomblets 45 -gekas 45 -ceaseless 45 -hysen 45 -wansbeck 45 -alis 45 -delle 45 -buzzwords 45 -anti-british 45 -ratatouille 45 -crack-smoking 45 -github 45 -al-hijrah 45 -incinerators 45 -lenzie 45 -volcanology 45 -part-funded 45 -constituting 45 -standoffs 45 -olivares 45 -agi 45 -o-level 45 -15:52 45 -pim 45 -pio 45 -lizbeth 45 -piri 45 -domnica 45 -02:04 45 -aikman 45 -niamey 45 -wbbm 45 -anti-morsy 45 -mowgli 45 -dulled 45 -micro-usb 45 -hayling 45 -devey 45 -bombe 45 -jasmeen 45 -helly 45 -high-fived 45 -ferrers 45 -bottle-fed 45 -nyack 45 -schmaderer 45 -viagogo 45 -horsebox 45 -overwhelms 45 -ifc 45 -ya'alon 45 -fatwas 45 -sobered 45 -faulds 45 -voisin 45 -dis 45 -eighth-placed 45 -teitel 45 -skysat-1 45 -497 45 -carneau 45 -two-stroke 45 -aina 45 -karamanlis 45 -steppes 45 -nuba 45 -osler 45 -audibly 45 -lensing 45 -openssl 45 -inviolable 45 -short-listed 45 -4-7 45 -tritium 45 -laval 45 -bratz 45 -eight-legged 45 -loosing 45 -y’ 45 -carbuncle 45 -circumventing 45 -muath 45 -ppg 45 -2.17 45 -two-room 45 -lace-up 45 -collectables 45 -sharp-tongued 45 -body-building 45 -greased 45 -jaar 45 -shintaro 45 -andreessen 45 -magneto 45 -lieut 45 -linham 45 -strummer 45 -surrey-based 45 -magag 45 -crawfish 45 -confound 45 -bareminerals 45 -37.9 45 -now-dead 45 -oil-based 45 -cynic 45 -canvasses 45 -terrapins 45 -furrowed 45 -thickens 45 -shoshana 45 -duc 45 -short-circuit 45 -octomom 45 -mullock 45 -extrapolate 45 -popstars 45 -triples 45 -meandered 45 -ray-ban 45 -kokomo 45 -endometrial 45 -513 45 -orobator 45 -freestyling 45 -fratto 45 -formulations 45 -seiu 45 -aneurysms 45 -roques 45 -phanfone 45 -garthwaite 45 -pikachu 45 -squawking 45 -henk 45 -cowgirl 45 -filibustered 45 -incentivised 45 -all-seater 45 -horse-trading 45 -ergonomic 45 -tufnell 45 -queenie 45 -scouser 45 -kd 45 -vivre 45 -slanderous 45 -lubrication 45 -rastan 45 -woodville 45 -dorson 45 -prehistory 45 -strasse 45 -rodley 45 -best-value 45 -harrassment 45 -palaeolithic 45 -536 45 -cassava 45 -bergner 45 -weale 45 -horwell 45 -teamsheet 45 -ndlovu 45 -demeaned 45 -limpopo 45 -disenchantment 45 -dscc 45 -google.com 45 -dog-friendly 45 -oed 45 -plath 45 -quadrant 45 -mandolin 45 -installer 45 -gediman 45 -g4 45 -mordovia 45 -haemorrhages 45 -parklands 45 -goudie 45 -catharsis 45 -myung 45 -take-offs 45 -co-signed 45 -cg 45 -sonographer 45 -larder 45 -21-month 45 -natural-looking 45 -sentries 45 -kimbrough 45 -amoled 45 -atlanta-area 45 -pendergrass 45 -miuccia 45 -ayrow 45 -arrigo 45 -grittier 45 -privately-funded 45 -c2 45 -bama 45 -re-launch 45 -hardison 45 -plateaued 45 -dauphine 45 -zarutsky 45 -manpads 45 -chalky 45 
-dejagah 45 -schreiner 45 -muthanna 45 -gawp 45 -dangerfield 45 -nicking 45 -mendy 45 -audrina 45 -opossum 45 -curbishley 45 -mashaal 45 -hany 45 -agitate 45 -elliman 45 -haggan 45 -pacy 45 -bustos 45 -mcneely 45 -uta 45 -sighing 45 -hitchhiked 45 -upholstered 45 -quipping 45 -sex-offender 45 -mike_dickson_dm 45 -jack_gaughan 45 -paraphrasing 45 -juande 45 -connecticut-based 45 -maho 45 -vanegas 45 -selwood 45 -eleftheria 45 -gondii 45 -tribesman 45 -party-line 45 -astrodome 45 -unruh 45 -spamhaus 45 -printout 45 -spud 45 -rf 45 -canonisation 45 -pdt 45 -serpas 45 -kadish 45 -chee 45 -illuminati 45 -date-rape 45 -capella 45 -guist 45 -revs 45 -sqn 45 -call-ups 45 -27.2 45 -27.1 45 -highers 45 -natisha 45 -hooch 45 -salesperson 45 -lecroy 45 -haldane 45 -sunder 45 -godiva 45 -hoped-for 45 -vernal 45 -smokehouse 45 -guillory 45 -dene 45 -employable 44 -hernandez-llach 44 -gun-walking 44 -rain-swollen 44 -refraction 44 -chatterbox 44 -contravenes 44 -froch-groves 44 -recession-hit 44 -toboggan 44 -4:00 44 -allstate 44 -subreddit 44 -beaching 44 -porcine 44 -prayerful 44 -ledezma 44 -arnott 44 -mathai 44 -krzysztof 44 -argentinas 44 -02:10 44 -tiangong-1 44 -willacy 44 -socked 44 -sqft 44 -initiations 44 -losada 44 -lemus 44 -pro-gaddafi 44 -bahebeck 44 -chishti 44 -homeschooled 44 -kansai 44 -iwate 44 -four-metre 44 -d'avino 44 -oscillations 44 -wilber 44 -funnelling 44 -fsis 44 -tanking 44 -limehouse 44 -seismology 44 -illiberal 44 -umpiring 44 -char 44 -chay 44 -heaton-harris 44 -glitterati 44 -intermission 44 -salubrious 44 -fluoridation 44 -hollows 44 -stavridis 44 -tenpenny 44 -frank-walter 44 -hideously 44 -guttering 44 -mackinlay 44 -charlier 44 -gerdes 44 -20.1 44 -savoured 44 -medhurst 44 -lonegan 44 -radha 44 -novelties 44 -co-chairmen 44 -extra-judicial 44 -intimates 44 -capitulate 44 -meet-up 44 -rescheduling 44 -wiesberger 44 -deadwood 44 -75p 44 -758 44 -vervia 44 -farnsworth 44 -thales 44 -preemptively 44 -uche 44 -vassell 44 -sicken 44 -croxteth 44 -peden 44 -buffaloes 44 -over-subscribed 44 -newly-opened 44 -zing 44 -sidelining 44 -825,000 44 -clangers 44 -mondadori 44 -rigsby 44 -webpages 44 -manged 44 -loathes 44 -najat 44 -9.58 44 -188,000 44 -frost/nixon 44 -federline 44 -smadi 44 -devolving 44 -xiii 44 -impreza 44 -jebb 44 -drago 44 -bare-bones 44 -disunity 44 -30ml 44 -kilter 44 -misao 44 -skims 44 -40.5 44 -marylynn 44 -serenely 44 -ramanujan 44 -lenhart 44 -dockett 44 -cacace 44 -mid-thirties 44 -kieffer 44 -fetz 44 -haematoma 44 -boysen 44 -dream-like 44 -bosnians 44 -fraser-pryce 44 -congleton 44 -hashimi 44 -on-street 44 -outlook.com 44 -bissell 44 -grimilda 44 -aswad 44 -cosima 44 -crocuses 44 -guarino 44 -sealion 44 -self-critical 44 -158,000 44 -haylee 44 -homescreen 44 -tarr 44 -pixies 44 -unpopulated 44 -neuroticism 44 -murrow 44 -heart-felt 44 -harkins 44 -anti-secrecy 44 -jiggling 44 -shivered 44 -7.55 44 -bigland 44 -21.9 44 -phlegm 44 -protein-rich 44 -seyi 44 -impey 44 -ridgemont 44 -muddar 44 -marquise 44 -staley 44 -sprott 44 -foschi 44 -well-traveled 44 -fayad 44 -hillandale 44 -gaul 44 -mirra 44 -zesty 44 -bandanna 44 -'15 44 -endley 44 -baloch 44 -1650 44 -rummel 44 -gruesomely 44 -supernovas 44 -anti-hero 44 -toxoplasma 44 -dmytro 44 -20:42 44 -deepa 44 -parable 44 -android-powered 44 -prostituted 44 -snobbish 44 -oft-repeated 44 -encouragingly 44 -caremark 44 -toss-up 44 -cocoons 44 -pre-natal 44 -ketoacidosis 44 -frisson 44 -olmsted 44 -blustering 44 -catcalls 44 -ema 44 -1603 44 -1605 44 -hasawi 44 -remonstrates 44 -484 44 -tarry 44 -conforming 
44 -granados 44 -coattails 44 -pin-point 44 -wingfield 44 -gnaw 44 -arsons 44 -beeney 44 -dundalk 44 -up-market 44 -márquez 44 -soler 44 -chiu 44 -insinuation 44 -non-contact 44 -breakage 44 -jags 44 -spanish-born 44 -personalisation 44 -massenet 44 -wraysbury 44 -akins 44 -50f 44 -alt 44 -anhydrous 44 -laign 44 -seven-page 44 -selleck 44 -25.3 44 -eod 44 -232,000 44 -checklists 44 -thirty-nine 44 -grijalva 44 -severin 44 -re-vote 44 -blom 44 -pervaded 44 -kezia 44 -araldo 44 -arlo 44 -disenfranchise 44 -paled 44 -unemployable 44 -safa 44 -underclass 44 -sanusi 44 -surmise 44 -galleon 44 -similar-sized 44 -bullman 44 -lincs 44 -zapping 44 -jyllands 44 -scandinavians 44 -kaleidoscopic 44 -one-acre 44 -pro-growth 44 -car-free 44 -dompierre 44 -brunson 44 -rabbo 44 -carre 44 -carri 44 -cloture 44 -singlehandedly 44 -low-resolution 44 -suharto 44 -taylan 44 -resins 44 -zeller 44 -kafala 44 -bignell 44 -7:20 44 -outfitting 44 -41million 44 -place2be 44 -nisa 44 -elston 44 -heavyset 44 -sappho 44 -pantene 44 -aune 44 -microbiome 44 -gelatinous 44 -unwelcoming 44 -dias-griffin 44 -blanch 44 -meacham 44 -pitch-side 44 -5d 44 -arkwright 44 -pelton 44 -remes 44 -cabut 44 -vilifying 44 -levar 44 -overseers 44 -thomasson 44 -studdard 44 -top-ups 44 -waregem 44 -7:00 44 -mazover 44 -29.2 44 -morayfield 44 -pertussis 44 -revell 44 -nazi-era 44 -turncoat 44 -335,000 44 -yakutsk 44 -level-par 44 -chaneya 44 -vagrants 44 -10per 44 -mengyuan 44 -kostova 44 -under-secretary-general 44 -ajc 44 -albiceleste 44 -malformations 44 -sandpit 44 -gladwin 44 -dozer 44 -ngoc 44 -karrie 44 -kempes 44 -domaine 44 -northwich 44 -mythbusters 44 -1,120 44 -402 44 -naan 44 -cokes 44 -refocusing 44 -pres. 44 -doig 44 -183,000 44 -thr 44 -brummie 44 -hillgrove 44 -vexatious 44 -01:59 44 -mixon 44 -chillier 44 -shoda 44 -pisano 44 -firetruck 44 -virology 44 -then-partner 44 -pujayasa 44 -mausoleums 44 -manbij 44 -cash-in-hand 44 -osteosarcoma 44 -hagley 44 -kauto 44 -1,360 44 -hastening 44 -enriches 44 -ethicist 44 -petulance 44 -coinage 44 -korda 44 -eagerly-awaited 44 -tubbataha 44 -ryo 44 -indianna 44 -stoehr 44 -peamount 44 -shively 44 -hiatt 44 -two-drug 44 -mearns 44 -confused.com 44 -fifty-three 44 -436 44 -lump-sum 44 -stationing 44 -twelve-year-old 44 -coletti 44 -zusi 44 -parikh 44 -finns 44 -270million 44 -wormald 44 -bummer 44 -goold 44 -mignonet 44 -life-extending 44 -trungpa 44 -insinuations 44 -frito-lay 44 -11.0 44 -leitrim 44 -palatine 44 -fhp 44 -bullfights 44 -rethought 44 -burp 44 -25s 44 -sobel 44 -karine 44 -eckel 44 -southworth 44 -deryl 44 -smulls 44 -polarising 44 -faddy 44 -lloyd-webber 44 -anpr 44 -infront 44 -overlaying 44 -smyczek 44 -weapons-related 44 -mandera 44 -avedon 44 -aykroyd 44 -antilles 44 -tolhurst 44 -quixote 44 -scatters 44 -self-protection 44 -back-end 44 -1,320 44 -demonstrative 44 -neighbourly 44 -toksvig 44 -buggery 44 -zulte 44 -dilip 44 -recrimination 44 -interflora 44 -leatherman 44 -confirmations 44 -consul-general 44 -traumatising 44 -r-alabama 44 -esiason 44 -livonia 44 -court-martialed 44 -patronised 44 -chien 44 -slayed 44 -anti-malaria 44 -off-the-ball 44 -aritz 44 -bice 44 -fyodor 44 -coldwell 44 -01:34 44 -mariela 44 -self-immolated 44 -suny 44 -nonwhite 44 -3-foot 44 -re-energize 44 -proactiv 44 -oberon 44 -sandbagging 44 -kwtv 44 -fair-minded 44 -12mph 44 -posers 44 -fajardo 44 -gebregeorgis 44 -451 44 -puppetry 44 -twenty-first 44 -masekela 44 -outpatients 44 -auf 44 -aut 44 -duckett 44 -punts 44 -carbine 44 -bribe-taking 44 -groff 44 -bianchini 44 -konias 44 
-propositioning 44 -harmer 44 -clydebank 44 -creeds 44 -457 44 -sterger 44 -etoundi 44 -putted 44 -kearsley 44 -trade-in 44 -2-d 44 -varanasi 44 -goblins 44 -828 44 -valon 44 -hieroglyphics 44 -wooler 44 -aziza 44 -polding 44 -reappears 44 -fisheye 44 -incomers 44 -sparkbrook 44 -newburn 44 -8.9-inch 44 -djoko 44 -inconveniences 44 -alig 44 -tring 44 -trinh 44 -oxides 44 -proscribed 44 -matsuyama 44 -heraldic 44 -sterilizations 44 -ardley 44 -vlogger 44 -elephantiasis 44 -disaffection 44 -emergent 44 -bedbug 44 -slinging 44 -biron 44 -corrales 44 -10ml 44 -great-nephew 44 -pathmark 44 -kxan 44 -pivoted 44 -gundlach 44 -575,000 44 -discos 44 -'07 44 -cambs 44 -vinaigrette 44 -licenced 44 -d-texas 44 -38-year 44 -priapism 44 -spine-tingling 44 -compresses 44 -susquehanna 44 -pallekele 44 -pieper 44 -decays 44 -wolfswinkel 44 -carpal 44 -domenici 44 -bingsu 44 -halima 44 -luu 44 -discoloration 44 -1718 44 -pail 44 -toscano 44 -anti-mubarak 44 -hernández 44 -clubb 44 -fringing 44 -epics 44 -clothier 44 -boustany 44 -moradi 44 -ploys 44 -kayne 44 -waterspout 44 -lacina 44 -maputo 44 -scannell 44 -pettersen 44 -point-and-shoot 44 -eighth-graders 44 -mischievously 44 -maisonette 44 -preddy 44 -kirch 44 -curbside 44 -kimmitt 44 -alcohol-based 44 -aboutalebi 44 -six-months 44 -sterilize 44 -wordplay 44 -carnan 44 -somali-americans 44 -smoggy 44 -lacie 44 -signposted 44 -ppe 44 -sexually-transmitted 44 -full-blooded 44 -lustre 44 -second-worst 44 -fractionally 44 -crowbars 44 -cantone 44 -minivans 44 -flugence 44 -nation-building 44 -champagnes 44 -decedent 44 -turnips 44 -governorships 44 -bego 44 -deportees 44 -smee 44 -vegetarianism 44 -attorney-client 44 -forfeiting 44 -afflictions 44 -ragan 44 -amitai 44 -tavon 44 -cloutier 44 -12-18 44 -accomodation 44 -identifiers 44 -ghassan 44 -moonves 44 -tunstall 44 -racier 44 -morgannwg 44 -thunders 44 -yamaguchi 44 -biles 44 -demurred 44 -professionalized 44 -anti-lock 44 -cleansers 44 -clammy 44 -andrzej 44 -subcutaneous 44 -beady 44 -aber 44 -cricklewood 44 -government-commissioned 44 -villanova 44 -tamura 44 -slacklining 44 -hazeltine 44 -kadir 44 -kaffir 44 -beek 44 -1822 44 -fizzle 44 -barnwell 44 -tet 44 -steinway 44 -daenerys 44 -manderson 44 -perecman 44 -hasler 44 -sappin 44 -stiglitz 44 -schooldays 44 -formulaic 44 -drm 44 -und 44 -capacitive 44 -cpp 44 -snot 44 -saunter 44 -self-interested 44 -crossbones 44 -cataloged 44 -althorp 44 -ktm 44 -cumulus 44 -kroening 44 -oks 44 -pollinate 44 -belper 44 -sorrells 44 -dida 44 -reisner 44 -durrani 44 -argonauts 44 -clenbuterol 44 -whistle-stop 44 -roslin 44 -country-wide 44 -fire-damaged 44 -1806 44 -hanke 44 -mcglone 44 -joe_strange 44 -l.a 44 -backpage 44 -in-law 44 -mayang 44 -crimp 44 -scribbles 44 -bettie 44 -suzman 44 -candida 44 -helpings 44 -fixer-upper 44 -haunches 44 -mcsorley 44 -multibillion 44 -re-design 44 -baulked 44 -clobber 44 -snappers 44 -pajares 44 -leadbetter 44 -d'orsay 44 -6,000-a-year 44 -licorice 44 -hasson 44 -high-minded 44 -nazario 44 -hospitalisations 44 -musso 44 -kerman 44 -sorley 44 -globalsecurity.org 44 -fehrnstrom 44 -vassar 44 -rehm 44 -optioned 44 -stankovic 44 -jamaat-e-islami 44 -ledecky 44 -6-foot-tall 44 -dreamhouse 44 -non-specific 44 -ghosted 44 -unimaginative 44 -54million 44 -winnie-the-pooh 44 -vaporized 44 -tulsi 44 -franca 44 -solidifying 44 -lacock 44 -unknowable 44 -reenact 44 -rat-infested 44 -cbb 44 -champlain 44 -rock-climbing 44 -deyoung 44 -crock 44 -shelbrooke 44 -rudderless 44 -feller 44 -typecast 44 -doi 44 -mantova 44 -catwell 44 -millaa 44 
-incl 44 -stojanovic 44 -27.6 44 -fadell 44 -telegraaf 44 -huckleberry 44 -celestine 44 -macbooks 44 -1280 44 -roubaix 44 -self-administered 44 -fist-pumping 44 -dg 44 -boehm 44 -sciglio 44 -kojima 44 -60-hour 43 -stylers 43 -macht 43 -weidman 43 -17.99 43 -consorting 43 -pseudoephedrine 43 -mcgreavy 43 -recliners 43 -hfc 43 -kayes 43 -anode 43 -grierson 43 -alderney 43 -hartl 43 -13-time 43 -10bn 43 -mujahedin 43 -drop-goal 43 -electrolux 43 -grabovo 43 -complementing 43 -morientes 43 -daehli 43 -court-approved 43 -two-stage 43 -pippie 43 -38m 43 -yanez 43 -debits 43 -insinuate 43 -puffiness 43 -prefectural 43 -riazor 43 -siva 43 -idealist 43 -decca 43 -rappelling 43 -jame 43 -high-functioning 43 -mwc 43 -gelling 43 -temerity 43 -preservationists 43 -minimizes 43 -foxcroft 43 -camra 43 -nikolic 43 -squidgy 43 -playmakers 43 -i-10 43 -12,600 43 -marmot 43 -goulart 43 -4500 43 -contoured 43 -pakay 43 -virulently 43 -recompense 43 -lowther-pinkerton 43 -vinceti 43 -paye 43 -sorghum 43 -kazi 43 -robards 43 -hilversum 43 -355,000 43 -tarantini 43 -blond-haired 43 -lindquist 43 -whitehorse 43 -deming 43 -melli 43 -stopwatch 43 -aab 43 -purebred 43 -bosma 43 -libdem 43 -lemond 43 -147,000 43 -libre 43 -misfire 43 -marchi 43 -a320-200 43 -foreclose 43 -juli 43 -fowles 43 -zanu 43 -fritts 43 -customarily 43 -mannington 43 -huddart 43 -normans 43 -gimp 43 -casado 43 -flunked 43 -chain-smoking 43 -melba 43 -studer 43 -day-night 43 -shara 43 -1.76 43 -lystra 43 -directness 43 -461 43 -campy 43 -836 43 -texas-mexico 43 -bacharach 43 -wameling 43 -conifers 43 -firewalls 43 -chislehurst 43 -asthmatics 43 -cena 43 -kelsall 43 -intermarriage 43 -sensenbrenner 43 -stipulating 43 -domscheit-berg 43 -kidnaps 43 -detroit-bound 43 -post-2014 43 -joinery 43 -confides 43 -faraz 43 -uncorroborated 43 -leedham 43 -prolapse 43 -non-compliant 43 -egleston 43 -bedminster 43 -lactic 43 -renegades 43 -converters 43 -teignmouth 43 -varna 43 -11-12 43 -montpelier 43 -glosses 43 -poisoner 43 -nervosa 43 -obliges 43 -al-habsi 43 -menin 43 -171,000 43 -bittorrent 43 -multi-car 43 -pakistan-afghanistan 43 -cairney 43 -chubb 43 -regrowth 43 -spotlighted 43 -tittensor 43 -skillen 43 -hambantota 43 -distiller 43 -daren 43 -hand-wringing 43 -gigolo 43 -riflemen 43 -morozov 43 -21.8 43 -privateer 43 -oneworld 43 -carli 43 -acas 43 -shaykh 43 -buzzes 43 -barrages 43 -eben 43 -pattharamon 43 -rambled 43 -honeytrap 43 -minimum-wage 43 -dinnertime 43 -menkhausen 43 -blips 43 -post-katrina 43 -puig 43 -icicle 43 -rangana 43 -106th 43 -drillers 43 -3200 43 -jwoww 43 -gas-fired 43 -provincetown 43 -blackcurrant 43 -mcmurray 43 -cypher 43 -swimmingly 43 -estella 43 -htun 43 -1701 43 -belieber 43 -identically 43 -all-in 43 -corky 43 -tendai 43 -coote 43 -20:51 43 -ann-marie 43 -b-52s 43 -philanderer 43 -clotted 43 -eadie 43 -fruin 43 -tremblay 43 -t44 43 -popsicle 43 -dogecoin 43 -hignett 43 -stengel 43 -undertone 43 -spla 43 -northstar 43 -luv 43 -sante 43 -chelone 43 -reat 43 -mattmorlidge 43 -repressing 43 -excerpted 43 -htein 43 -boks 43 -pimms 43 -1760 43 -pottinger 43 -f430 43 -government-sanctioned 43 -boobies 43 -stubb 43 -deviating 43 -arabi 43 -mcgrady 43 -mickens 43 -ancier 43 -goal-scorer 43 -faccenda 43 -nigam 43 -detracts 43 -623 43 -upminster 43 -leonhart 43 -stuntwoman 43 -ephedrine 43 -dufault 43 -moneymaker 43 -satterberg 43 -donner 43 -westwater 43 -oxymoron 43 -crutchley 43 -spayed 43 -devito 43 -bedi 43 -soren 43 -d-pennsylvania 43 -jandali 43 -hopson 43 -vermillion 43 -mobile-phone 43 -groban 43 -lutnick 43 
-pro-marijuana 43 -agyness 43 -noire 43 -four-and-a-half-year 43 -shaolin 43 -compere 43 -drunken-driving 43 -inattentive 43 -langridge 43 -mid-2012 43 -kalantar 43 -callas 43 -jmw 43 -sherbet 43 -lowden 43 -scurrilous 43 -mitzi 43 -rinpoche 43 -spargo 43 -syntax 43 -boomf 43 -nauseated 43 -re-instated 43 -rutger 43 -bakiev 43 -beeswax 43 -uruzgan 43 -nunchucks 43 -hijra 43 -brummer 43 -roomful 43 -pcbs 43 -164,000 43 -bathes 43 -stuffs 43 -103rd 43 -anthropomorphic 43 -enraptured 43 -sportswriter 43 -monroy-bracamonte 43 -meecham 43 -rattan 43 -devedjian 43 -causey 43 -19ft 43 -mizen 43 -sondheim 43 -thedirty.com 43 -lonzo 43 -screensaver 43 -541 43 -nonmilitary 43 -langlois 43 -birkenau 43 -ridgewood 43 -hansard 43 -burping 43 -memorised 43 -spectrograph 43 -fifty-two 43 -career-threatening 43 -belkin 43 -woodchester 43 -kepler-186f 43 -2006/07 43 -ushakov 43 -forty-one 43 -pols 43 -lakeview 43 -debater 43 -amge 43 -mounir 43 -claridges 43 -uthman 43 -7-foot 43 -rocher 43 -goldsands 43 -leisser 43 -unfollow 43 -maplin 43 -allport 43 -breastfeeds 43 -hyslop 43 -hick 43 -holst 43 -wanes 43 -roskilly 43 -inoffensive 43 -haden 43 -uberx 43 -sarfraz 43 -1,950 43 -cutmore 43 -plunket 43 -fitchett 43 -ozcan 43 -knoxy 43 -chesil 43 -hydrophobic 43 -brockport 43 -lenku 43 -babb 43 -much-criticised 43 -celluloid 43 -buettner 43 -forbes.com 43 -harwell 43 -auteurs 43 -moraes 43 -grumbles 43 -ceviche 43 -arvind 43 -lette 43 -oud 43 -step-children 43 -tubb 43 -zhirinovsky 43 -trammell 43 -frau 43 -street-porter 43 -preliminarily 43 -dua 43 -saux 43 -truncheons 43 -1914-18 43 -pisani 43 -deraney 43 -fianceé 43 -dalrymple 43 -high-water 43 -snead 43 -70f 43 -radina 43 -co-existence 43 -gurdon 43 -heitman 43 -lycopene 43 -flavonoids 43 -decolletage 43 -riba 43 -theodor 43 -overy 43 -yokkaichi 43 -wanstead 43 -arguidos 43 -anti-death 43 -janusz 43 -darmian 43 -high-rolling 43 -zapp 43 -ex-coach 43 -resuscitating 43 -southfield 43 -krumm 43 -coletta 43 -sartain 43 -tress 43 -34.7 43 -cityscapes 43 -primatologist 43 -woodards 43 -basques 43 -camuti 43 -24.95 43 -waterworks 43 -hesitancy 43 -horncastle 43 -garnet 43 -environmentalism 43 -hansford 43 -reappearing 43 -aspersions 43 -calista 43 -jeong 43 -visitengland 43 -seeiso 43 -fitts 43 -labyrinthine 43 -tthe 43 -vorster 43 -part-timers 43 -newhart 43 -branston 43 -diamondback 43 -half-centuries 43 -412 43 -413 43 -grizzled 43 -parsing 43 -muttiah 43 -ex-fiancée 43 -glenelg 43 -enlarging 43 -joaan 43 -yakov 43 -chinese-born 43 -noh 43 -gazetta 43 -cormac 43 -panova 43 -revelatory 43 -powerbrokers 43 -transpire 43 -darwish 43 -plc. 
43 -staving 43 -magomedov 43 -valuev 43 -abertawe 43 -greenwell 43 -flatters 43 -hashem 43 -hand-eye 43 -guedioura 43 -tri-city 43 -above-inflation 43 -spectroscopy 43 -snipping 43 -tomorrowland 43 -trimarco 43 -12cm 43 -arbiters 43 -01:38 43 -845 43 -amps 43 -cassey 43 -norsigian 43 -ith 43 -hartmann 43 -wiggly 43 -forty-eight 43 -cockermouth 43 -flamboyance 43 -pnas 43 -forges 43 -visors 43 -kingsmill 43 -skarsgard 43 -shamima 43 -divestment 43 -house-sitting 43 -agnetha 43 -tendinitis 43 -nugroho 43 -beautifying 43 -1.88 43 -healthy-looking 43 -fancy-dress 43 -well-fed 43 -60cm 43 -merwe 43 -657 43 -658 43 -splatter 43 -german-based 43 -kiana 43 -lagers 43 -ceballos 43 -tsuyoshi 43 -subtitling 43 -g2 43 -kudu 43 -wyss 43 -whodunnit 43 -proliferating 43 -crisco 43 -halvorson 43 -guiseppe 43 -caicedo 43 -loosens 43 -10,600 43 -nansen 43 -imbalanced 43 -ultra-rare 43 -yael 43 -duff-gordon 43 -paramus 43 -heretics 43 -ayutthaya 43 -tesney 43 -then-defense 43 -pallavi 43 -holl 43 -02:18 43 -a64 43 -annoyingly 43 -dibble 43 -pascagoula 43 -nim 43 -church-goers 43 -saloons 43 -unreformed 43 -macaroons 43 -rabbinical 43 -bytes 43 -hoofed 43 -crouse 43 -filipina 43 -larks 43 -lennie 43 -ligon 43 -plaited 43 -dissipating 43 -skiles 43 -wyness 43 -heartbreaker 43 -tcu 43 -underlie 43 -zivotofsky 43 -holzapfel 43 -barthez 43 -balint 43 -@pontifex 43 -densest 43 -kgs 43 -alon 43 -packwood 43 -slaboszewski 43 -cistern 43 -issuer 43 -cannabinoids 43 -kalla 43 -truscott 43 -robustness 43 -goglia 43 -vb 43 -vx 43 -long-eared 43 -deface 43 -shinnie 43 -maqueira 43 -lehr 43 -niazi 43 -catfight 43 -full-strength 43 -eagerly-anticipated 43 -494 43 -brimstone 43 -rotator 43 -ranjan 43 -gst 43 -farsala 43 -tyabb 43 -mid-summer 43 -plainview 43 -hawksley 43 -hanni 43 -nozzles 43 -bpi 43 -disbursed 43 -matip 43 -muslim-dominated 43 -613 43 -odometer 43 -20:35 43 -zebo 43 -romani 43 -lisle 43 -i-70 43 -hiscock 43 -lather 43 -12p 43 -deegan 43 -pimlott 43 -delinquents 43 -thomsen 43 -self-rule 43 -toru 43 -sahraoui 43 -ishinomaki 43 -torchwood 43 -studland 43 -nahid 43 -snorkelers 43 -waterproofing 43 -superwoman 43 -martoma 43 -debenhams.com 43 -afrikaner 43 -contortionists 43 -brokeback 43 -judoka 43 -pastrana 43 -subtract 43 -32,400 43 -yesh 43 -usweekly 43 -raspy 43 -cesspit 43 -auma 43 -mock-ups 43 -badea 43 -jakob-park 43 -reiss.com 43 -tasering 43 -frehse 43 -cave-in 43 -fur-lined 43 -p-51 43 -starck 43 -25/1 43 -peeta 43 -kabc-tv 43 -blackwelder 43 -dutro 43 -15-point 43 -paleolithic 43 -trulli 43 -rizal 43 -sa-11 43 -25-34 43 -booktrust 43 -single-celled 43 -elbert 43 -duis 43 -35.6 43 -gateways 43 -tw 43 -hurghada 43 -akshay 43 -cajole 43 -compulsions 43 -gbs 43 -nebulous 43 -co-directed 43 -ze 43 -snog 43 -jaxa 43 -seven-man 43 -blowouts 43 -great-niece 43 -rubina 43 -hendersonville 43 -stage-managed 43 -ager 43 -lochaber 43 -capito 43 -rindge 43 -seventeen-year-old 43 -9,900 43 -zou 43 -mullaney 43 -larch 43 -587 43 -cebull 43 -klansman 43 -tobacco-related 43 -all-electric 43 -farleigh 43 -lachapelle 43 -culhane 43 -satterfield 43 -guttman 43 -24billion 43 -vere 43 -siddhartha 43 -1792 43 -turnouts 43 -ryley 43 -muscling 43 -inskip 43 -off-grid 43 -barnette 43 -groubert 43 -biscay 43 -ancestry.com 43 -canto 43 -scriptwriters 43 -oig 43 -zucchini 43 -treblinka 43 -saada 43 -hurun 43 -shrivelled 43 -weight-lifting 43 -wimps 43 -kirke 43 -undrafted 43 -18-yard 43 -skewing 43 -best-kept 43 -solara 43 -longhurst 43 -600ft 43 -bastin 43 -pacu 43 -usmnt 43 -100-strong 43 -neediest 43 -gumball 43 -koester 43 
-spammers 43 -molybdenum 43 -molinaro 43 -deployable 43 -despres 43 -boreham 43 -expletive-filled 43 -tobacco-free 43 -baggio 43 -chipmunk 43 -kitchin 43 -mosh 43 -macdowell 43 -marzullo 43 -ishiguro 43 -six-months-old 43 -justus 43 -erudite 43 -gazette-journal 43 -qiu 43 -soirees 43 -gillon 43 -adana 43 -cristea 43 -emmeline 43 -birkhead 43 -giannantonio 43 -uts 43 -marylin 43 -blackie 43 -trundle 43 -11/4 43 -unpaved 43 -jackals 43 -albufeira 43 -drood 43 -fiumicino 43 -xlvii 42 -pakistani-american 42 -co-anchors 42 -sandor 42 -updyke 42 -ocracoke 42 -drinkaware 42 -mishmash 42 -myelodysplastic 42 -carnations 42 -preexisting 42 -bramlage 42 -anda 42 -perlin 42 -wolpe 42 -rawls 42 -cornbury 42 -castano 42 -calhanoglu 42 -rolle 42 -six-speed 42 -romina 42 -virginie 42 -isark 42 -preface 42 -vice-captains 42 -01:08 42 -molde 42 -six-day-old 42 -soliman 42 -oryx 42 -meccano 42 -chippendale 42 -six-under-par 42 -ruemmler 42 -liturgy 42 -kessel 42 -wednesbury 42 -quizzical 42 -eight-year-olds 42 -agribusiness 42 -munition 42 -mum-of-one 42 -hit-list 42 -seraphine 42 -seiler 42 -levitate 42 -lifejackets 42 -nasim 42 -16-minute 42 -umbro 42 -lipsky 42 -roig 42 -stonegate 42 -itemized 42 -kammer 42 -sixers 42 -grieveson 42 -interactivity 42 -johnson-sirleaf 42 -top-three 42 -ozawa 42 -hassanal 42 -vatnajokull 42 -zeynep 42 -bledisloe 42 -kenseth 42 -estela 42 -canadian-egyptian 42 -epi 42 -45-year 42 -trawick 42 -dumbed 42 -bushra 42 -pathe 42 -smarty 42 -aranda 42 -bibby 42 -schlitterbahn 42 -posited 42 -unshakable 42 -hibbs 42 -incirlik 42 -aeros 42 -people-watching 42 -corexit 42 -maxey 42 -gries 42 -studebaker 42 -ascencao 42 -solorio 42 -back-door 42 -perchlorate 42 -nymphomaniac 42 -linlithgow 42 -utley 42 -neuromuscular 42 -garibay 42 -commercialized 42 -swash 42 -booklets 42 -man-management 42 -moorfields 42 -enshrining 42 -finlayson 42 -insole 42 -vape 42 -scraper 42 -pressurising 42 -makeweight 42 -tallapoosa 42 -forlornly 42 -meyrick 42 -146,000 42 -nischelle 42 -barot 42 -konstantopoulos 42 -safran 42 -breese 42 -rapacious 42 -maddula 42 -scamp 42 -javaid 42 -greenish 42 -deryke 42 -02:26 42 -02:23 42 -02:21 42 -immy 42 -bakkali 42 -od 42 -accessorise 42 -yaakov 42 -bodywear 42 -briefest 42 -proselytizing 42 -mofaz 42 -64f 42 -diversionary 42 -kazlowski 42 -boseman 42 -kheir 42 -97.5 42 -shivani 42 -javon 42 -raynes 42 -wort 42 -reauthorize 42 -fujifilm 42 -uneasiness 42 -elspeth 42 -horsemanship 42 -qs 42 -feasibly 42 -wood-panelled 42 -khalili 42 -ninth-grade 42 -gamergate 42 -sinofsky 42 -roisin 42 -dorvilier 42 -ruggiero 42 -subduing 42 -giap 42 -unforeseeable 42 -inhibition 42 -drewniak 42 -rekos 42 -1970s-style 42 -sportsweek 42 -hamdeen 42 -bolaris 42 -diffraction 42 -co-educational 42 -short-list 42 -taufa 42 -wfor 42 -overrides 42 -rapraeger 42 -nbc10 42 -29.8 42 -flirtations 42 -indexed 42 -entomology 42 -weng 42 -carillion 42 -o'mahony 42 -greenlight 42 -naysmith 42 -eight-part 42 -mcsally 42 -pittsfield 42 -clustering 42 -mcclymont 42 -low-altitude 42 -great-great-grandchildren 42 -160m 42 -farrier 42 -ngozi 42 -601 42 -hellyer 42 -buttercream 42 -criss-cross 42 -pacification 42 -devante 42 -solders 42 -gamepad 42 -freestone 42 -weimaraner 42 -jurado 42 -11.35 42 -mid-to-late 42 -truncheon 42 -public-health 42 -self-improvement 42 -partridges 42 -hesmondhalgh 42 -javascript 42 -estimations 42 -porfirio 42 -0.15 42 -jafar 42 -pre-pregnancy 42 -overseer 42 -warzones 42 -scarfs 42 -whicker 42 -sculls 42 -faggots 42 -ansan 42 -hossam 42 -indyk 42 -trots 42 -zippori 42 
-expletive-ridden 42 -mile-and-a-half 42 -maaret 42 -methylisothiazolinone 42 -shey 42 -2cv 42 -prabal 42 -asic 42 -carmody 42 -caerleon 42 -mineralogy 42 -patria 42 -osh 42 -stifles 42 -pyroclastic 42 -jolts 42 -rich-poor 42 -junkyard 42 -pearman 42 -svk 42 -defcon 42 -sprigs 42 -philadelphia-area 42 -ganache 42 -50mm 42 -arab-american 42 -fattened 42 -al-sweady 42 -schulze 42 -birdwatcher 42 -plater 42 -clowney 42 -mohmed 42 -israeli-american 42 -daniell 42 -runnings 42 -bamieh 42 -armer 42 -r-wisconsin 42 -showboat 42 -kynaston 42 -mance 42 -1642 42 -1649 42 -gophers 42 -pikes 42 -spurlock 42 -kainth 42 -corrupts 42 -hutchinson-foster 42 -rcp 42 -flockhart 42 -low-paying 42 -karoo 42 -binman 42 -mademoiselle 42 -subduction 42 -multiplier 42 -varma 42 -unstructured 42 -refuelled 42 -nicolai 42 -changchun 42 -mettyear 42 -lyne 42 -locally-sourced 42 -yala 42 -kesinovic 42 -chauvinistic 42 -tavernier 42 -petersons 42 -supergroup 42 -pricetag 42 -nga 42 -resellers 42 -80billion 42 -cross-examining 42 -overplayed 42 -remortgage 42 -re-apply 42 -cicada 42 -wrtv 42 -wantaway 42 -inverse 42 -grecian 42 -sikhism 42 -cotte 42 -minidress 42 -jesperson 42 -touchid 42 -13th-century 42 -quarter-inch 42 -saito 42 -8:40 42 -kournikova 42 -507 42 -ringmaster 42 -rady 42 -dessie 42 -sadist 42 -awan 42 -a-grade 42 -sabu 42 -triads 42 -kainat 42 -kuzmanovic 42 -follow-through 42 -5.136 42 -steger 42 -corney 42 -burdening 42 -imola 42 -guanabara 42 -postmark 42 -gard 42 -pulsed 42 -lulworth 42 -totty 42 -sawn 42 -italianate 42 -4,250 42 -zabel 42 -dimanche 42 -bodurov 42 -cayla 42 -gals 42 -horley 42 -720,000 42 -d'auriol 42 -halverson 42 -moorea 42 -qatar-based 42 -inkjet 42 -familiarize 42 -hoole 42 -demystify 42 -leandra 42 -phablets 42 -unworn 42 -ide 42 -spitsbergen 42 -32-inch 42 -schaaf 42 -cnnstudentnews 42 -everytown 42 -timpson 42 -garwood 42 -pit-lane 42 -envelop 42 -bookcases 42 -defrost 42 -snowdrop 42 -edda 42 -zeddie 42 -perceptual 42 -scardino 42 -theocratic 42 -uric 42 -vania 42 -zbudowskyj 42 -baich 42 -ncp 42 -manfredi 42 -schur 42 -mccrery 42 -macduff 42 -fairford 42 -oliphant 42 -thavisha 42 -bellaire 42 -kurtis 42 -carlesha 42 -r&r 42 -angell 42 -outpointed 42 -caren 42 -blowhole 42 -aftermarket 42 -yeppoon 42 -vongfong 42 -arrasate 42 -bronies 42 -spokespersons 42 -1.23 42 -misnomer 42 -ape-like 42 -reinhold 42 -useable 42 -halim 42 -fwa 42 -tazreen 42 -20-1 42 -solos 42 -280million 42 -retinas 42 -ralls 42 -edina 42 -arad 42 -wriggles 42 -kassem 42 -nicolette 42 -gulab 42 -hometrack 42 -bondsman 42 -1,560 42 -circassian 42 -león 42 -olafur 42 -rajah 42 -reprogram 42 -rinat 42 -sapping 42 -edi 42 -nextdoor 42 -pullover 42 -azimi 42 -temperature-controlled 42 -misidentified 42 -equivalence 42 -majorly 42 -square-mile 42 -blood-sucking 42 -norley 42 -sunbathed 42 -doulton 42 -addendum 42 -kasami 42 -nelspruit 42 -seven-storey 42 -newsrooms 42 -boozer 42 -five-bed 42 -pickard 42 -muzaffar 42 -bollaert 42 -sportsperson 42 -harting 42 -arbeit 42 -sahintas 42 -a319 42 -keiren 42 -michigan-based 42 -immutable 42 -fitzrovia 42 -shar-pei 42 -dogaru 42 -alcove 42 -26-week 42 -hot-headed 42 -ulvaeus 42 -naima 42 -1,144 42 -jaundiced 42 -mclemore 42 -sinusitis 42 -690,000 42 -34-year 42 -flat-rate 42 -ringlets 42 -williams-thomas 42 -peart 42 -mcgivern 42 -full-grown 42 -1,440 42 -01:36 42 -fanconi 42 -parana 42 -disconnecting 42 -abut 42 -jazmine 42 -top-performing 42 -d'etre 42 -1250 42 -milagros 42 -utah-based 42 -loetz 42 -anesthetics 42 -lavelle 42 -jobbik 42 -backpass 42 -reconvenes 42 
-32.6 42 -23.1 42 -23.9 42 -koblenz 42 -serato 42 -hundredths 42 -lowest-ranked 42 -abv 42 -cerezo 42 -englishness 42 -sarawak 42 -fermenting 42 -yurts 42 -21billion 42 -17-mile 42 -low-security 42 -child-sized 42 -captial 42 -bovril 42 -kurkova 42 -lugner 42 -eldred 42 -nissen 42 -airlifting 42 -krasniqi 42 -moroni 42 -tosca 42 -vice-principal 42 -dusters 42 -undercroft 42 -sonner 42 -off-track 42 -sub-par 42 -juris 42 -out-of-form 42 -sisley 42 -stewartstown 42 -slimmed-down 42 -oiling 42 -chateaux 42 -amitabh 42 -timeshare 42 -suller 42 -spur-of-the-moment 42 -troutdale 42 -turkic-speaking 42 -gang-raping 42 -hillel 42 -samri 42 -pineau 42 -tioman 42 -02:17 42 -crennel 42 -srivastava 42 -mclain 42 -meanders 42 -biros 42 -motored 42 -unascertained 42 -barratts 42 -unremitting 42 -goldkorn 42 -piotrowski 42 -spinnaker 42 -700m 42 -east-central 42 -bukhari 42 -sagnol 42 -antoni 42 -bourne-arton 42 -cinema-goers 42 -minimalistic 42 -jellies 42 -montreux 42 -q400 42 -all-volunteer 42 -splendidly 42 -equerry 42 -then-first 42 -1713 42 -hyper-realistic 42 -twombly 42 -landa 42 -963 42 -jobsworths 42 -kick-starting 42 -sanele 42 -windsors 42 -approximation 42 -froman 42 -snowplows 42 -wafted 42 -gilkes 42 -prunes 42 -wrights 42 -aimer 42 -disembodied 42 -standouts 42 -zayas 42 -torvill 42 -copiapo 42 -dawid 42 -kalejaiye 42 -moustachioed 42 -sooners 42 -tuaregs 42 -conaway 42 -manicurist 42 -noticeboard 42 -ciders 42 -dc-9 42 -f-15s 42 -matusiewicz 42 -groveland 42 -bratwurst 42 -fairclough 42 -sinderbrand 42 -ninety-five 42 -markov 42 -gatos 42 -cortani 42 -lebo 42 -contextual 42 -badgering 42 -switchblade 42 -co-parent 42 -506 42 -seattle-tacoma 42 -yalding 42 -garsallaoui 42 -ej 42 -300,000-a-week 42 -palladino 42 -slow-cooked 42 -third-person 42 -komar 42 -ilias 42 -mid-continent 42 -welly 42 -limbu 42 -astroturf 42 -cylvia 42 -idled 42 -lilah 42 -pdl 42 -regurgitated 42 -shreateh 42 -democker 42 -maye 42 -high-volume 42 -philp 42 -munt 42 -stellenbosch 42 -o'riordan 42 -dobby 42 -inner-west 42 -conscript 42 -anesthesiologists 42 -nandos 42 -weighting 42 -matalon 42 -baturina 42 -marat 42 -ginormous 42 -dibs 42 -neuter 42 -gurdwara 42 -goal-kicking 42 -tretchikoff 42 -brightly-colored 42 -frogmarched 42 -incommunicado 42 -revisionist 42 -borek 42 -baggie 42 -ashlea 42 -non-christian 42 -40.2 42 -humphris 42 -ebanks 42 -spellbound 42 -openstreetmap 42 -ksenia 42 -300lb 42 -widowers 42 -martos 42 -64million 42 -jokester 42 -31-28 42 -tedium 42 -nolito 42 -aboubakar 42 -destefano 42 -astrological 42 -bellisario 42 -marxism 42 -miraflores 42 -cashless 42 -irish-american 42 -separations 42 -goldthorpe 42 -pilcher 42 -hypoxic 42 -spurting 42 -vendee 42 -montenegrin 42 -stuivenberg 42 -post-conviction 42 -messines 42 -earvin 42 -cruachan 42 -defeatist 42 -bijou 42 -aspden 42 -berke 42 -salvator 42 -dutchmen 42 -hobnobbing 42 -whittling 42 -encrypting 42 -415,000 42 -byker 42 -ci 42 -canonical 42 -low-interest 42 -choker 42 -refrains 42 -ali-khan 42 -usmani 42 -9-5 42 -icj 42 -ich 42 -5.35 42 -millman 42 -popkov 42 -dismissively 42 -waterfowl 42 -16mm 42 -559 42 -ogier 42 -khalilzad 42 -wattisham 42 -saqqara 42 -bridwell 42 -duthiers 42 -keshishian 42 -chalke 42 -bounties 42 -pinkham 42 -octopuses 42 -isthmus 42 -dorky 42 -flowerbeds 42 -pit-stop 42 -pauly 42 -skuse 42 -17,100 42 -high-precision 42 -leif 42 -toff 42 -sava 42 -sater 42 -kadmiri 42 -debarge 42 -hsc 42 -succour 42 -carys 42 -budget-conscious 42 -pro-russians 42 -himalaya 42 -blairites 42 -nicolelis 42 -electable 42 -volunteerism 42 
-low-maintenance 42 -staughton 42 -not-too-distant 42 -nics 42 -voyagers 42 -groundstrokes 42 -rn 42 -detoxify 42 -19billion 42 -charmingly 42 -d'art 42 -eaw 42 -eston 42 -pulsars 42 -gravelly 42 -vohra 42 -vltava 42 -3.65 42 -barossa 42 -pacesetters 42 -quiverfull 42 -car-jacking 42 -gawk 42 -westfall 42 -iodine-131 42 -enchautegui 42 -three-stage 42 -americano 42 -taber 42 -mortician 42 -prototyping 42 -dafoe 42 -4-methylcyclohexane 42 -717 42 -side-scan 41 -fernandez-gonzalez 41 -pandemrix 41 -twenty-year-old 41 -satirists 41 -curlers 41 -phallus 41 -parolee 41 -propylene 41 -ultimatums 41 -bugger 41 -cringing 41 -domjan 41 -carmouche 41 -eery 41 -cup-winner 41 -callow 41 -sima 41 -hendron 41 -al-thinni 41 -0-6 41 -mayflies 41 -rowlett 41 -bathymetric 41 -hydrochloric 41 -brangelina 41 -verlhac 41 -foodbanks 41 -avina 41 -rotaru 41 -anglo-french 41 -hakura 41 -nemetz 41 -unfeasible 41 -hamleys 41 -rogerio 41 -¶ 41 -verbals 41 -ethernet 41 -martinson 41 -arsenio 41 -gascon 41 -mattias 41 -verkerke 41 -101-year-old 41 -qwikster 41 -caroll 41 -thermage 41 -microsystems 41 -shukla 41 -goneva 41 -keddie 41 -manchu 41 -love-struck 41 -rivaled 41 -gaetano 41 -valladares 41 -islamist-led 41 -noncommittal 41 -scuba-diving 41 -peptide 41 -unbranded 41 -miserly 41 -ultra-violent 41 -birdshot 41 -anv_pl_def 41 -charleroi 41 -pro-isis 41 -yuichi 41 -unfailingly 41 -accessorising 41 -687 41 -emanated 41 -arzak 41 -cuffing 41 -elen 41 -fp1 41 -perenara 41 -antietam 41 -lek 41 -lem 41 -white-out 41 -iwm 41 -councilors 41 -kiddies 41 -duhamel 41 -life.com 41 -sociopathic 41 -2010-2012 41 -mccoys 41 -novotel 41 -powis 41 -requisitioned 41 -frolicked 41 -zina 41 -elsenham 41 -soccerex 41 -funchal 41 -lead-in 41 -yaroslav 41 -whooper 41 -multi-day 41 -niraj 41 -abbate 41 -apostate 41 -ibaraki 41 -cadres 41 -amna 41 -korey 41 -talaat 41 -lighty 41 -rixos 41 -wonsan 41 -improvising 41 -ktvt 41 -alexsandro 41 -castleberry 41 -reuters.com 41 -3-4-1-2 41 -one-horned 41 -issam 41 -genachowski 41 -co-ops 41 -radja 41 -split-level 41 -stuntmen 41 -worklessness 41 -skilling 41 -sagas 41 -schnatter 41 -seydoux 41 -ciavarella 41 -pippo 41 -saxena 41 -lindfield 41 -formalise 41 -paperboy 41 -somersaulted 41 -sillars 41 -02:25 41 -lemieux 41 -wehde 41 -graydon 41 -21.2 41 -kor 41 -brutsch 41 -drs. 41 -grecko 41 -145th 41 -tussaud 41 -baauer 41 -schenker 41 -aftertaste 41 -novellino 41 -44m 41 -shanties 41 -buckmaster 41 -dixson 41 -wasserstein 41 -aime 41 -endeavoured 41 -signalman 41 -panola 41 -souders 41 -lopped 41 -gmos 41 -bilge 41 -kentuckians 41 -protectorate 41 -wiggin 41 -kettyle 41 -blood-thirsty 41 -2012/2013 41 -wile 41 -mpts 41 -thumps 41 -bensouda 41 -crumpets 41 -unsurpassed 41 -large-screen 41 -durgahee 41 -privatising 41 -lotta 41 -ullman 41 -ilori 41 -combos 41 -africa-based 41 -comeuppance 41 -zoroastrianism 41 -leche 41 -oblong 41 -sandbach 41 -sachet 41 -dogmatic 41 -qcs 41 -serna 41 -kaspar 41 -unravels 41 -rothesay 41 -wessel 41 -reestablish 41 -m.j. 
41 -azar 41 -pendennis 41 -harriott 41 -tangling 41 -siting 41 -29.3 41 -62.5 41 -0844 472 4157 41 -kickball 41 -oradour-sur-glane 41 -milsom 41 -johndroe 41 -paperclip 41 -kellner 41 -hillbilly 41 -extricated 41 -megabus 41 -abc13 41 -ravage 41 -blimey 41 -crean 41 -critiquing 41 -emo 41 -epperson 41 -media-savvy 41 -heart-rate 41 -guisborough 41 -bom 41 -fertilisers 41 -time-honoured 41 -dineen 41 -5-foot-11 41 -delfouneso 41 -18-29 41 -self-discovery 41 -fouquet 41 -unwinnable 41 -flood-affected 41 -o'day 41 -soliris 41 -bodyshockers 41 -jono 41 -bxg 41 -pansies 41 -sandland 41 -zapeta 41 -pro-europe 41 -snipe 41 -hughey 41 -offutt 41 -huffed 41 -bodysuits 41 -aminu 41 -yearbooks 41 -porras 41 -endoscope 41 -angelika 41 -honoree 41 -bond-buying 41 -1-5 41 -non-public 41 -gacek 41 -hooiveld 41 -cornflower 41 -manish 41 -gueye 41 -posen 41 -barzagli 41 -25.7 41 -gobbler 41 -axford 41 -goal.com 41 -lovesick 41 -expressionist 41 -liebschner 41 -pennie 41 -parlayed 41 -chandhok 41 -anti-european 41 -bahari 41 -toothed 41 -bertagna 41 -leonards 41 -lake-effect 41 -aisling 41 -danforth 41 -kennaugh 41 -daman 41 -nautica 41 -six-metre 41 -wearily 41 -panga 41 -leaguer 41 -refrigerant 41 -1745 41 -accentuating 41 -duality 41 -rasul 41 -numbed 41 -lightning-fast 41 -zobkiw 41 -expanders 41 -al-zawahri 41 -fiegel 41 -celebrant 41 -labors 41 -trenchant 41 -storify 41 -robathan 41 -subhash 41 -reasonableness 41 -carlie 41 -blaec 41 -inhumanely 41 -ba'ath 41 -permeate 41 -magdi 41 -matagrano 41 -tera 41 -18-page 41 -zogby 41 -o'dea 41 -15mm 41 -paella 41 -whisks 41 -mres 41 -rehnquist 41 -coronet 41 -butterscotch 41 -alderton 41 -grzegorz 41 -egyptian-american 41 -soulmates 41 -thurley 41 -33.8 41 -morrey 41 -meh 41 -cushnie 41 -rusi 41 -31.4 41 -pedestrianised 41 -unloads 41 -farjo 41 -f-4 41 -jorgenson 41 -600lb 41 -chicane 41 -chistyakov 41 -purbeck 41 -spokeman 41 -antigen 41 -rei 41 -liberal-leaning 41 -favia 41 -glaringly 41 -ramada 41 -darmstadt 41 -osbornes 41 -spoofed 41 -buffoon 41 -rahma 41 -decimation 41 -multiforme 41 -jaine 41 -understandings 41 -karn 41 -soptic 41 -attested 41 -inoculation 41 -1785 41 -ipos 41 -menaces 41 -ubuntu 41 -differentiating 41 -mgb 41 -muzychko 41 -anti-vaccine 41 -72million 41 -high-brow 41 -primeval 41 -vetokele 41 -1.56 41 -hauliers 41 -one-upmanship 41 -vol 41 -kalymon 41 -prostituting 41 -al-rahman 41 -bronislaw 41 -wentz 41 -handrails 41 -dermis 41 -misspellings 41 -cyrano 41 -lodgers 41 -250km 41 -wombles 41 -palladian 41 -firmino 41 -conwoman 41 -ostler 41 -sheaf 41 -mannus 41 -walkie-talkies 41 -arochi 41 -franciscans 41 -ind 41 -16/1 41 -35.5 41 -two-factor 41 -heiden 41 -chavs 41 -haro 41 -okla. 41 -kochi 41 -ady 41 -authorship 41 -cobalt-60 41 -violence-plagued 41 -farquhar 41 -six-strong 41 -spinosaurus 41 -phonecalls 41 -manipur 41 -furness-smith 41 -detente 41 -haddow 41 -farouq 41 -60233 41 -tiggy 41 -jell-o 41 -baller 41 -al-khattab 41 -24-month 41 -soltero 41 -jairo 41 -fwd 41 -haqqanis 41 -yarm 41 -feathery 41 -wooly 41 -ezadeen 41 -althea 41 -aasiya 41 -afd 41 -fryett 41 -re-started 41 -mich. 
41 -three-day-old 41 -wicket-taker 41 -mistura 41 -logins 41 -roeser 41 -langkawi 41 -newry 41 -cau 41 -burbridge 41 -mohne 41 -magalluf 41 -dreamlike 41 -6.35 41 -8p 41 -marshlands 41 -catterall 41 -gas-rich 41 -boffins 41 -non-sexual 41 -beary 41 -harroun 41 -homeschooling 41 -scherer 41 -footgolf 41 -hake 41 -200-year 41 -medium-term 41 -loder 41 -thrun 41 -utterance 41 -timbaland 41 -cuervo 41 -uncoordinated 41 -agonized 41 -campaign-style 41 -roundtrip 41 -abdulhadi 41 -flatley 41 -skymall 41 -vaulter 41 -urine-soaked 41 -yokota 41 -reciprocity 41 -caines 41 -go-pro 41 -panstarrs 41 -seger 41 -codd 41 -seaways 41 -outhwaite 41 -asylum-seeker 41 -pareidolia 41 -oscillation 41 -counter-protest 41 -tamimi 41 -230ft 41 -stairways 41 -9/11-style 41 -xj 41 -courtier 41 -middle-eastern 41 -kwong 41 -kohan 41 -chui 41 -squaddies 41 -cast-offs 41 -dunmore 41 -steere 41 -jeez 41 -lippy 41 -wine-making 41 -geranium 41 -ad-free 41 -pivoting 41 -in-school 41 -blackest 41 -abp 41 -udi 41 -725,000 41 -machinists 41 -questlove 41 -flipbook 41 -friggin 41 -richelle 41 -a47 41 -must-read 41 -falters 41 -guiness 41 -paducah 41 -hedgerow 41 -synchronise 41 -obermiller 41 -cambridge-based 41 -debolt 41 -riverhead 41 -clorox 41 -neven 41 -rome-based 41 -allocates 41 -aced 41 -bretherton 41 -quiroz 41 -scrolled 41 -gg 41 -clanging 41 -rayna 41 -unselfishly 41 -khatana 41 -bullivant 41 -meshael 41 -jobsworth 41 -woolen 41 -greenpoint 41 -didgeridoo 41 -leyden 41 -dae-jung 41 -secrete 41 -10-acre 41 -shortens 41 -cochise 41 -desborough 41 -cearns 41 -astoundingly 41 -bilel 41 -f50 41 -caramelized 41 -perham 41 -barkin 41 -reynold 41 -delacruz 41 -ava-jayne 41 -polina 41 -zimmermann 41 -zimmermans 41 -easterbrook 41 -table-toppers 41 -jordana 41 -olongapo 41 -major-general 41 -kameron 41 -frohwein 41 -playthings 41 -instragram 41 -state-appointed 41 -808 41 -patenting 41 -as-levels 41 -skillet 41 -darijo 41 -inter-agency 41 -arber 41 -disappoints 41 -nespresso 41 -yavuz 41 -syne 41 -earthlings 41 -gallantly 41 -mikheil 41 -agee 41 -dust-covered 41 -vales 41 -hadj 41 -gut-busting 41 -malkin 41 -delfino 41 -pininfarina 41 -amran 41 -smutty 41 -dalit 41 -juan-carlos 41 -gravitating 41 -hoovers 41 -60kg 41 -fatbergs 41 -cadwallader 41 -margolis 41 -velour 41 -fugu 41 -spherules 41 -swale 41 -gikawa 41 -614 41 -malachy 41 -b3 41 -petsmart 41 -1010 41 -deadlift 41 -1.07 41 -shalam 41 -callagher 41 -gorgeously 41 -duchovny 41 -gerrymandering 41 -purewal 41 -trescothick 41 -hippodrome 41 -legging 41 -hovel 41 -3.85 41 -amplification 41 -westcliff 41 -self-employment 41 -burghley 41 -raskalov 41 -acrobatically 41 -holte 41 -venereal 41 -multivitamins 41 -hleb 41 -cordell 41 -rednecks 41 -lee-anna 41 -dehli 41 -refsdal 41 -tea-time 41 -grandaughter 41 -fox40 41 -basile 41 -askham 41 -anti-climax 41 -minoan 41 -adirondack 41 -thomaz 41 -soltani 41 -oudin 41 -mete 41 -co-commentary 41 -vayner 41 -castresana 41 -bagpipe 41 -teotihuacan 41 -giannini 41 -paro 41 -unspent 41 -schwartzman 41 -nawab 41 -kibo 41 -bluish 41 -stander 41 -birthmarks 41 -metastasized 41 -7cm 41 -designates 41 -baudouin 41 -homey 41 -102-year-old 41 -99-year-old 41 -gavrilo 41 -4.24 41 -sidekicks 41 -jezki 41 -lazard 41 -pacified 41 -scandalously 41 -ghoochannejhad 41 -dabbs 41 -melchert-dinkel 41 -nirbhaya 41 -slather 41 -eglin 41 -caressed 41 -do-or-die 41 -four-lane 41 -smut 41 -resets 41 -clinton-era 41 -hypertrophic 41 -sikora 41 -khushbu 41 -propagated 41 -saint-andre 41 -elderflower 41 -wickr 41 -ichihashi 41 -heerden 41 -vulin 41 -kraut 41 
-leismann 41 -anisa 41 -overdoing 41 -caufield 41 -radishes 41 -dreadlocked 41 -gun-free 41 -thibault 41 -cheska 41 -leoni 41 -mro 41 -newsreel 41 -hindle 41 -kirkbright 41 -scrapbooks 41 -falling-out 41 -40-page 41 -credo 41 -glenmore 41 -poveda 41 -obit 41 -blue-ribbon 41 -eggleston 41 -battista 41 -two-pronged 41 -crue 41 -laghman 41 -tenable 41 -bullfighters 41 -trendsetters 41 -four-wheeled 41 -then-mayor 41 -ala. 41 -corvallis 41 -kokkinakis 41 -thematic 41 -speicher 41 -daou 41 -deanne 41 -outboard 41 -attired 41 -ballplayer 41 -heffey 41 -amrit 41 -jenas 41 -eye-wateringly 41 -goaltender 41 -high-maintenance 41 -slouching 41 -stone-tipped 41 -sylvan 41 -equusearch 41 -redefinition 41 -seneng 41 -186mph 41 -king5 41 -verhoeven 41 -tahitian 41 -magnitudes 41 -jean-baptiste 41 -crosswords 41 -yearns 41 -djing 41 -rabobank 41 -psychoanalyst 41 -dunklin 41 -telesales 41 -garcia-bratcher 41 -eros 41 -mussa 41 -one-club 41 -gamper 41 -warder 41 -syria-related 41 -kinzey 41 -minarets 41 -buttoned-up 41 -trinian 41 -bursaspor 41 -sokol 41 -audiotapes 41 -cicely 41 -nita 41 -bottrill 41 -vice-admiral 41 -chavarria 41 -owain 41 -bothersome 41 -poitier 41 -nanometers 41 -anesthesiology 41 -sellars 41 -safechuck 41 -tillakaratne 41 -inflicts 41 -rekers 41 -nui 41 -foxtrot 41 -al-shami 41 -southerner 41 -multiplication 41 -8.55 41 -baria 41 -fine-grained 41 -scrounge 41 -must-visit 41 -anally 41 -enslave 41 -samphan 41 -1:40 41 -casal 41 -tol 41 -toxteth 41 -propagandists 41 -light-middleweight 41 -trappers 41 -espaillat 41 -littlehampton 41 -ethane 41 -supercopa 41 -12-page 41 -tancredo 41 -donchak 41 -proclaimers 41 -civilly 41 -ior 41 -geiser 41 -self-government 41 -chebarkul 40 -quibble 40 -ulla 40 -saris 40 -594 40 -carriere 40 -beliveau 40 -antivirals 40 -individualistic 40 -trigz 40 -fota 40 -caesareans 40 -gordley 40 -anschutz 40 -great-great-grandmother 40 -4.55 40 -re-interview 40 -maury 40 -blvd 40 -break-even 40 -7:50 40 -rough-and-tumble 40 -reinaldo 40 -shark-infested 40 -01:05 40 -01:00 40 -starry-eyed 40 -'90 40 -madondo 40 -schemed 40 -decisiveness 40 -demaryius 40 -thinness 40 -bingbing 40 -geordies 40 -alomar 40 -red-tape 40 -notley 40 -pre-kindergarten 40 -,19 40 -hinman 40 -uas 40 -fritsch 40 -texters 40 -worton 40 -boing 40 -pinnacles 40 -spaghettios 40 -soul-destroying 40 -rzeszowski 40 -datchet 40 -11-point 40 -guennec 40 -deduct 40 -wardlow 40 -rà 40 -jaffna 40 -mayorkas 40 -sinden 40 -powe 40 -ya'an 40 -forestieri 40 -eea 40 -besler 40 -city-owned 40 -whimpers 40 -1.58 40 -kruzan 40 -usurping 40 -tkm-ebola 40 -peritonitis 40 -portugese 40 -kingsland 40 -45-degree 40 -smartly-dressed 40 -pistorious 40 -white-washed 40 -vinay 40 -mody 40 -haass 40 -rostov-on-don 40 -21:10 40 -smoothness 40 -cirstea 40 -14-1 40 -saucepans 40 -cba 40 -postgame 40 -deal-making 40 -silcott 40 -leclair 40 -dotro 40 -rossman 40 -kadillak 40 -fastnet 40 -hushovd 40 -cogs 40 -nss 40 -telenovelas 40 -kakar 40 -buda 40 -azwan 40 -elongate 40 -fleck 40 -teeters 40 -egm 40 -kory 40 -yuzu 40 -histrionics 40 -1.78 40 -londono 40 -foils 40 -pull-out 40 -natick 40 -artemio 40 -pavelka 40 -conformed 40 -3a 40 -blackrock 40 -kliptown 40 -24.1 40 -aroud 40 -lithograph 40 -prez 40 -orangeburg 40 -brintha 40 -appleyard 40 -negates 40 -o'bagy 40 -scottishpower 40 -governess 40 -laxton 40 -2:50 40 -illustrative 40 -880,000 40 -15lbs 40 -pud 40 -seles 40 -pessimists 40 -02:24 40 -02:29 40 -14-inch 40 -hassen 40 -schnapps 40 -ow 40 -mildenhall 40 -aramaic 40 -gregson 40 -superjet 40 -sub-tropical 40 -sulfuric 40 
-store-bought 40 -steffan 40 -frisch 40 -emitter 40 -hyacinth 40 -zyro 40 -biff 40 -boucle 40 -inhales 40 -texter 40 -exaggerates 40 -30.6 40 -disavow 40 -cosmological 40 -all-expenses 40 -querying 40 -wtnh 40 -okada 40 -rosenzweig 40 -slickly 40 -geale 40 -impairing 40 -head-butt 40 -archiving 40 -two-set 40 -faugheen 40 -agreeableness 40 -trapuzzano 40 -diogo 40 -wate 40 -hatfields 40 -northland 40 -maxis 40 -tarnishes 40 -valuck 40 -multi-touch 40 -arinc 40 -third-bottom 40 -sediqqi 40 -paulding 40 -sky-rocketed 40 -menelik 40 -laika 40 -2031 40 -sabatini 40 -tithe 40 -plaines 40 -sausalito 40 -benzodiazepine 40 -slaving 40 -elichaoff 40 -reburial 40 -calorie-controlled 40 -lorax 40 -yelton 40 -grandfather-of-four 40 -videographers 40 -sorrowful 40 -recently-released 40 -ousama 40 -guccifer 40 -sjp 40 -109th 40 -east-northeast 40 -preplanned 40 -mcfarlan 40 -7/4 40 -dearing 40 -multi-organ 40 -weirdos 40 -zito 40 -frankfurter 40 -reappointed 40 -50.5 40 -weatherup 40 -medical-grade 40 -pabon 40 -isom 40 -wrack 40 -binnie 40 -inbev 40 -keat 40 -hofmeister 40 -rectangles 40 -nixie 40 -35-hour 40 -170million 40 -jil 40 -so-so 40 -miao 40 -roda 40 -cancer-fighting 40 -mnlf 40 -holuhraun 40 -prisoner-of-war 40 -illusory 40 -allpress 40 -hundredth 40 -stripy 40 -bleeken 40 -doppelgangers 40 -reinach 40 -referenda 40 -d'amico 40 -8,700 40 -teacups 40 -mcgettigan 40 -systemically 40 -stainless-steel 40 -azadeh 40 -heraklion 40 -nad-e 40 -asenjo 40 -white-sand 40 -yeezus 40 -mind-altering 40 -procrastination 40 -hagi 40 -sackings 40 -locos 40 -channell 40 -oklahomans 40 -ponytails 40 -coraline 40 -anonymised 40 -rain-hit 40 -arshack 40 -gaither 40 -509 40 -198,000 40 -30per 40 -luzerne 40 -kafr 40 -krivsun 40 -dinked 40 -overspent 40 -insomniac 40 -paul-henri 40 -bhalla 40 -morbillivirus 40 -somalia-based 40 -cyber-crime 40 -depaul 40 -hems 40 -rayden 40 -conceptually 40 -leonarda 40 -sulser 40 -wolfie 40 -padnos 40 -moyse 40 -herkimer 40 -lasogga 40 -saviours 40 -lederhaas-okun 40 -muay 40 -esophageal 40 -retinol 40 -over-fishing 40 -zackary 40 -guillon 40 -wegmans 40 -spymaster 40 -yetman 40 -fela 40 -punted 40 -isola 40 -left-wingers 40 -gossips 40 -richar 40 -pre-fight 40 -mccallister 40 -hairstyling 40 -sleaford 40 -dungannon 40 -f.c. 40 -underinflated 40 -kinghorn 40 -covetable 40 -suk-young 40 -frick 40 -old-world 40 -talkback 40 -lawnmowers 40 -twisty 40 -leasehold 40 -tdi 40 -blything 40 -orang-utans 40 -aysha 40 -schedulers 40 -yore 40 -post-revolution 40 -otmani 40 -henen 40 -bovis 40 -structuring 40 -sweetwater 40 -kirschenbaum 40 -torridge 40 -sammie 40 -apartheid-era 40 -chandrika 40 -presumes 40 -snowdrifts 40 -upenn 40 -standish 40 -non-members 40 -harrovian 40 -immigrating 40 -redcliffe 40 -klayman 40 -raynaud 40 -aneurin 40 -aus$ 40 -shaper 40 -acqua 40 -frontmen 40 -primogeniture 40 -midsize 40 -drizzled 40 -glees 40 -mini-van 40 -figureheads 40 -self-reflection 40 -bouteflika 40 -okeechobee 40 -slammer 40 -kool-aid 40 -nubia 40 -headmasters 40 -rance 40 -12,900 40 -cille 40 -obafemi 40 -tarpon 40 -panjshir 40 -ewa 40 -ironies 40 -strip-search 40 -billingshurst 40 -dalaman 40 -texarkana 40 -afrika 40 -vbs 40 -catapults 40 -denyer 40 -arrowed 40 -majka 40 -snowshoes 40 -knick 40 -yazid 40 -1.08 40 -jerri 40 -tugay 40 -bernera 40 -hospital-acquired 40 -adeleye 40 -m-pesa 40 -thumper 40 -welshpool 40 -fourteen-year-old 40 -mowry 40 -jerked 40 -r.d. 
40 -gape 40 -stupidest 40 -enfant 40 -modica 40 -casson 40 -lalicata 40 -exterminating 40 -erena 40 -mcdermid 40 -30-round 40 -brokenhearted 40 -doubletree 40 -hewn 40 -nellessen 40 -shifa 40 -avner 40 -hser 40 -royer 40 -jiroemon 40 -elop 40 -romanticism 40 -937 40 -navs 40 -jamey 40 -anti-obamacare 40 -limbering 40 -whatnot 40 -mette-marit 40 -wittels 40 -adorno 40 -gergiev 40 -picnicking 40 -westborough 40 -nary 40 -sambuca 40 -coomera 40 -beaty 40 -dadahanov 40 -siver 40 -faulk 40 -rossington 40 -free-form 40 -bickley 40 -ditzy 40 -dolomites 40 -romel 40 -bonne 40 -griped 40 -jarmila 40 -high-interest 40 -unashamed 40 -nemorin 40 -burl 40 -resnick 40 -14oz 40 -icefall 40 -mcentee 40 -islamist-dominated 40 -coleshill 40 -160ft 40 -wildfowl 40 -première 40 -merriam-webster 40 -pricy 40 -arch-federalist 40 -39.9 40 -ormerod 40 -solomons 40 -wuxi 40 -michell 40 -front-running 40 -goalbound 40 -zaliukas 40 -saly 40 -phelps-roper 40 -mini-skirts 40 -goulash 40 -self-penned 40 -99.7 40 -erring 40 -remakes 40 -sicko 40 -satsuma 40 -yuba 40 -ackermann 40 -supercritical 40 -vortices 40 -blinder 40 -noland 40 -doolan 40 -denoted 40 -21:06 40 -gerrans 40 -kronk 40 -ruzan 40 -analytic 40 -mcquivey 40 -thronging 40 -milgram 40 -broadcasted 40 -kazeem 40 -parakeets 40 -christenings 40 -minnetonka 40 -subsiding 40 -one-under 40 -well-drilled 40 -@cnnphotos 40 -trespassed 40 -gigabit 40 -post-op 40 -bloxwich 40 -70kg 40 -deactivating 40 -473 40 -self-radicalized 40 -ex-navy 40 -choudhary 40 -kasabian 40 -dimopoulou 40 -sneezy 40 -futbol 40 -aflame 40 -snowmobiling 40 -belter 40 -colorblind 40 -shrove 40 -holleman 40 -32.2 40 -bullpen 40 -perri 40 -jean-philippe 40 -ailina 40 -amalia 40 -pushkov 40 -7kg 40 -levitin 40 -ferriz 40 -60-70 40 -immunize 40 -seales 40 -02:31 40 -kokorin 40 -cmc 40 -cmv 40 -berrendo 40 -howley 40 -weeden 40 -victimize 40 -metzler 40 -farmhouses 40 -acquiesce 40 -budgettravel.com 40 -unep 40 -652 40 -fords 40 -george-harvan 40 -12-under 40 -buganda 40 -meyran 40 -soundscan 40 -polyp 40 -gf 40 -yuna 40 -wattage 40 -28st 40 -victim-blaming 40 -som 40 -morgenthau 40 -persepolis 40 -kunsthal 40 -anti-vaccination 40 -lotter 40 -zlitan 40 -walkable 40 -1610 40 -stallings 40 -smugly 40 -love-in 40 -zinner 40 -petejenson 40 -riske 40 -kingsway 40 -whio 40 -soundgarden 40 -zakariya 40 -zimonjic 40 -hyperlapse 40 -exhaling 40 -cardin 40 -astronomically 40 -schrimm 40 -horyn 40 -pharmacological 40 -pinkie 40 -morro 40 -good-quality 40 -baltimore-washington 40 -jiggle 40 -conroe 40 -asaph 40 -blow-out 40 -yomiuri 40 -widely-held 40 -off-course 40 -decentralization 40 -gastrectomy 40 -janos 40 -roxbury 40 -reynaldo 40 -hunstanton 40 -alexandros 40 -ballantine 40 -overseas-based 40 -tantum 40 -helvellyn 40 -lactate 40 -colbourne 40 -aquinas 40 -hodgepodge 40 -lumbered 40 -tehreek-e-insaf 40 -compilations 40 -agarwal 40 -prc 40 -balasubramaniam 40 -iraizoz 40 -cabrera-bello 40 -orbison 40 -feka 40 -residencies 40 -oppressor 40 -flood-ravaged 40 -trilateral 40 -couched 40 -asboy 40 -611 40 -mottershead 40 -yang-ho 40 -sixty-five 40 -live-streaming 40 -491 40 -six-member 40 -reuter 40 -stagg 40 -bacha 40 -zombie-like 40 -jet2.com 40 -newfield 40 -remodelling 40 -kader 40 -geoengineering 40 -gerasimenko 40 -bridgman 40 -pre-approved 40 -bonnett 40 -crofton 40 -tricksters 40 -faysal 40 -2003/04 40 -ehlers 40 -sanabria 40 -sickeningly 40 -zlata 40 -gnu 40 -hodgin 40 -bottom-of-the-table 40 -a.c. 
40 -mosby 40 -perlitz 40 -manganiello 40 -nandy 40 -qahtan 40 -34m 40 -steelworkers 40 -wertheimer 40 -kukowski 40 -sofiane 40 -yildirim 40 -salvia 40 -stakeout 40 -krampus 40 -shadi 40 -asi 40 -zoeller 40 -rts 40 -rebooting 40 -miranshah 40 -schwerner 40 -kevyn 40 -frommer 40 -broadhurst 40 -bellefonte 40 -uw 40 -fitz 40 -naff 40 -32,500 40 -silviniaco 40 -shoplift 40 -pentland 40 -schrems 40 -hayabusa 40 -montebello 40 -leafs 40 -unenthusiastic 40 -turcotte 40 -sags 40 -85,000-a-year 40 -snobby 40 -moneysupermarket 40 -assiduously 40 -bronzing 40 -saidakhmetov 40 -mandible 40 -mcalinden 40 -three-under-par 40 -agostinelli 40 -longbridge 40 -i-d 40 -mafia-style 40 -industrial-grade 40 -misbah 40 -sigrid 40 -garten 40 -judea 40 -congenial 40 -error-strewn 40 -dumbstruck 40 -ziga 40 -widget 40 -bigirimana 40 -79p 40 -crosbie 40 -allendale 40 -documentarian 40 -514 40 -energise 40 -croatians 40 -koen 40 -baio 40 -republican-dominated 40 -6-6 40 -accruing 40 -datasets 40 -kanoute 40 -mccaughey 40 -sberbank 40 -blakeway 40 -street-level 40 -weightman 40 -sr-71 40 -barangaroo 40 -lagwinowicz 40 -overlords 40 -sookie 40 -woolford 40 -deniability 40 -crasbo 40 --25 40 -bayview 40 -dellorto 40 -wertheim 40 -kingery 40 -5.55 40 -amped 40 -haslem 40 -ferretti 40 -gasper 40 -squabbled 40 -quadrennial 40 -weepy 40 -blaszczykowski 40 -ois 40 -kelsie 40 -multi-level 40 -well-coordinated 40 -hobbits 40 -wasim 40 -ultra-nationalist 40 -6:00 40 -medulloblastoma 40 -kovalainen 40 -woosnam 40 -entitling 40 -krupskaia 40 -linares 40 -malory 40 -brainstem 40 -969 40 -dredger 40 -bridgetown 40 -quarrels 40 -schunk 40 -quart 40 -peluso 40 -conservative-led 40 -altoona 40 -barford 40 -unerring 40 -rizzuto 40 -semple 40 -alite 40 -biggins 40 -fearn 40 -becerra 40 -drawstring 40 -flume 40 -ayub 40 -su-25 40 -xuan 40 -us-born 40 -five-acre 40 -okkhoy 40 -plonked 40 -whodunit 40 -prensa 40 -monkhouse 40 -riggins 40 -krusty 40 -aiders 40 -hillmann 40 -tonal 40 -shaath 40 -dpa 40 -muzzammil 40 -carrizales 40 -agathe 40 -excrete 40 -mechanised 40 -delauter 40 -troup 40 -deby 40 -ablett 40 -mischaracterized 40 -banishment 40 -sylla 40 -minx 40 -hammerstein 40 -juche 40 -90f 40 -jorg 40 -sulistyaningsih 40 -zhuhai 40 -mulally 40 -halabja 40 -time-poor 40 -sympathizer 40 -milanesi 40 -eloquence 40 -anglin 40 -jesinta 40 -programme-makers 40 -dead-ball 40 -namie 40 -squiddly 40 -babylonian 40 -discretely 40 -battersby 40 -cies 40 -brazos 40 -magnificence 40 -gramophone 40 -worst-performing 40 -hailo 40 -unquenchable 40 -imb 40 -superstructure 40 -ceawlin 40 -overprotective 40 -yarbrough 40 -sobhi 40 -cohen-ahnine 40 -nine-match 40 -news/washington 40 -unseal 40 -gameshow 40 -joburg 40 -huesca 40 -now-former 40 -tressa 40 -buhman 40 -mckevitt 40 -misjudgement 40 -dsi 40 -pepperdine 40 -airport-style 40 -frolics 40 -chem 40 -jarnigan 40 -mother-of-eight 40 -politically-charged 40 -horsfall 40 -dinsmore 40 -much-changed 40 -1b 40 -1g 40 -affordably 40 -citic 40 -xf 40 -chinky 40 -desseigne 40 -suckle 40 -ear-to-ear 40 -limbless 40 -clune 40 -oppressing 40 -716 40 -repurposing 40 -cost-effectiveness 39 -chomped 39 -trang 39 -dragster 39 -wallingford 39 -sorter 39 -ello 39 -unsporting 39 -davao 39 -slane 39 -segarra 39 -pro-kiev 39 -hadleigh 39 -10-pound 39 -youcaring.com 39 -readhead 39 -100-acre 39 -khumbu 39 -pinafore 39 -chequebook 39 -overpayment 39 -arnoldo 39 -trans-siberian 39 -45mins 39 -brindley 39 -nemeth 39 -christianson 39 -wakey 39 -wendt 39 -mildura 39 -unseating 39 -nenkham 39 -beta-carotene 39 -wead 39 -baltics 
39 -reimagining 39 -pecans 39 -step-son 39 -theriault 39 -bolelli 39 -obaid 39 -high-cost 39 -leering 39 -amari 39 -beautify 39 -over-priced 39 -gourcuff 39 -grubbs 39 -kuzya 39 -batth 39 -addlestone 39 -alemao 39 -fratton 39 -esquivel 39 -audra 39 -electrostatic 39 -huthart 39 -baobab 39 -maudit 39 -685 39 -rohrabacher 39 -changers 39 -ludlam 39 -scullion 39 -baute 39 -handprint 39 -postulated 39 -winterson 39 -nisar 39 -courgettes 39 -mateen 39 -ettore 39 -always-on 39 -biomimicry 39 -ex-soldiers 39 -sapa 39 -pea-sized 39 -teofilo 39 -well-worked 39 -taverna 39 -firmware 39 -hot-spot 39 -bailey-cole 39 -falla 39 -kerridge 39 -zickuhr 39 -manipulations 39 -inflationary 39 -maltais 39 -hetty 39 -u.s.-iranian 39 -vapours 39 -dari 39 -tainting 39 -zilli 39 -depalo 39 -boerrigter 39 -wrangles 39 -hypothalamic 39 -first-responders 39 -keyed 39 -dally 39 -burrage 39 -flavourings 39 -dibley 39 -helicoptered 39 -2016/17 39 -hooped 39 -joes 39 -kathrin 39 -trell 39 -high-crime 39 -tucudean 39 -rosenblat 39 -corfield 39 -harps 39 -lesh 39 -turkic 39 -5-hour 39 -letterboxes 39 -commas 39 -plausibly 39 -minnesota-based 39 -rantie 39 -chaperoned 39 -rixon 39 -1.73 39 -skyy 39 -ajaccio 39 -467 39 -inflators 39 -wimunc 39 -28.2 39 -hotten 39 -bushtucker 39 -ahadi 39 -ochberg 39 -majestically 39 -top-heavy 39 -luckett 39 -breadbasket 39 -bottom-line 39 -assoun 39 -redact 39 -appelbaum 39 -sagar 39 -uhrig 39 -paupers 39 -grouch 39 -vecchio 39 -up-or-down 39 -kingham 39 -carrizo 39 -single-decker 39 -four-piece 39 -top-security 39 -stepdaughters 39 -trautmann 39 -brimfield 39 -lumping 39 -dowlers 39 -anthonys 39 -kernizan 39 -guiliana 39 -now-shuttered 39 -sadek 39 -wusa 39 -electorates 39 -subhuman 39 -montezuma 39 -zamudio 39 -dietitians 39 -cucamonga 39 -scleroderma 39 -2/3 39 -under-used 39 -moama 39 -treble-winning 39 -unencumbered 39 -segall 39 -crime-scene 39 -demarchelier 39 -truckload 39 -platinum-selling 39 -kotak 39 -crisscross 39 -infomercials 39 -newly-weds 39 -magnates 39 -chonghaejin 39 -xers 39 -quarterfinalist 39 -hye 39 -hoyos 39 -flagg 39 -huda 39 -sharifi 39 -progreso 39 -kickboxer 39 -much-rumoured 39 -super-combined 39 -trialed 39 -glynis 39 -goosen 39 -trussed 39 -putters 39 -sleiman 39 -camilleri 39 -morita 39 -helsum 39 -five-and-a-half-year 39 -halilhodzic 39 -dolman 39 -atresia 39 -8km 39 -jaymie 39 -10-3 39 -jolyon 39 -sabatino 39 -domodedovo 39 -dairy-free 39 -hampshire-based 39 -wickramasinghe 39 -culpeper 39 -180ft 39 -kipp 39 -rhinestone 39 -tipler 39 -heine 39 -houseboats 39 -intergenerational 39 -gwoza 39 -boyden 39 -maplewood 39 -layup 39 -top-seeded 39 -stripey 39 -objectify 39 -slavishly 39 -cheeki 39 -hypertensive 39 -louay 39 -egon 39 -pendragon 39 -frew 39 -b.b. 
39 -scotusblog.com 39 -beddall 39 -mortems 39 -protruded 39 -final-day 39 -chaco 39 -adriaunna 39 -christies 39 -schellman 39 -2034 39 -furthered 39 -a-roads 39 -abhorrence 39 -robocall 39 -huygens 39 -simian 39 -gulps 39 -cilacap 39 -cofounder 39 -mateusz 39 -two-day-old 39 -bosley 39 -high-fliers 39 -gentleness 39 -00:55 39 -2005-2006 39 -shavers 39 -plasticity 39 -motor-racing 39 -moakler 39 -sultanas 39 -embalmer 39 -brushstrokes 39 -lesher 39 -brattleboro 39 -norville 39 -miccoli 39 -metabolise 39 -naturally-occurring 39 -tameria 39 -gp3 39 -instagrams 39 -stanning 39 -eighty-five 39 -32.1 39 -32.3 39 -aravindan 39 -jony 39 -binge-watching 39 -terminates 39 -mucky 39 -oxbow 39 -momeni 39 -himachal 39 -king-sized 39 -ex-royal 39 -20:28 39 -romancing 39 -m7 39 -mangalore 39 -amalgamated 39 -viber 39 -katarzyna 39 -starace 39 -marson 39 -cochlea 39 -33.2 39 -u.s.-iraqi 39 -goree 39 -plotlines 39 -hoad 39 -lazer 39 -pranab 39 -day-old 39 -kurzawa 39 -chaperoning 39 -97th 39 -boniface 39 -brooklyn-born 39 -overstep 39 -homeboy 39 -kls 39 -sherif 39 -xix 39 -decoys 39 -dexterous 39 -iduna 39 -25.1 39 -ironside 39 -bugsy 39 -grimsson 39 -whingeing 39 -diab 39 -sphincter 39 -amply 39 -taftanaz 39 -insubordination 39 -youtubers 39 -tevita 39 -coppell 39 -slauson 39 -israel-hamas 39 -hotties 39 -kerbs 39 -kilic 39 -cbs2 39 -ahh 39 -ayaan 39 -wolstenholme 39 -gendron 39 -razwan 39 -thies 39 -choristers 39 -devilme 39 -jaye 39 -gottschall 39 -risible 39 -tegally 39 -wisecracking 39 -demonstrable 39 -clatter 39 -interventionist 39 -tarka 39 -gera 39 -lufkin 39 -expending 39 -leer 39 -repented 39 -faiz 39 -côte 39 -dodgson 39 -despots 39 -bcg 39 -salon.com 39 -classiest 39 -castmates 39 -stingemore 39 -habibi 39 -restarts 39 -shudders 39 -asokkumar 39 -shanteau 39 -liddell-grainger 39 -erdely 39 -oneunited 39 -glassman 39 -religiosity 39 -glycaemic 39 -rotondo 39 -zunich 39 -slurping 39 -skelly 39 -bwelle 39 -triceps 39 -one-yard 39 -triathletes 39 -krug 39 -cait 39 -motteram 39 -presser 39 -offhand 39 -javeed 39 -azra 39 -ibo 39 -33.6 39 -malayan 39 -fitouri 39 -housemaster 39 -cfda 39 -gilbertson 39 -544 39 -hotchkiss 39 -cotta 39 -alexi 39 -pennsylvanian 39 -heleen 39 -honcho 39 -tendrils 39 -coll 39 -gatecrash 39 -cgc 39 -cgt 39 -slam-dunk 39 -ravichandran 39 -voykina 39 -crimeans 39 -carraway 39 -lcpl 39 -squall 39 -ret 39 -mcdaid 39 -loria 39 -outpaces 39 -germanotta 39 -couponing 39 -dorrian 39 -granddaddy 39 -brontë 39 -chavismo 39 -waldrom 39 -symbian 39 -denunciations 39 -whitewashing 39 -monusco 39 -copsey 39 -dahlinger 39 -tasking 39 -brennand 39 -terfel 39 -negroes 39 -satanism 39 -arslan 39 -hitchen 39 -banton 39 -2003-2004 39 -mattie 39 -anti-malarial 39 -levesque 39 -568 39 -effingham 39 -medium-size 39 -cookham 39 -topher 39 -albayrak 39 -cogan 39 -jost 39 -parkas 39 -newsprint 39 -ashtrays 39 -vitagliano 39 -xenia 39 -gyroscopes 39 -landstuhl 39 -ctia 39 -konya 39 -eggshell 39 -buyouts 39 -el-araby 39 -1.04 39 -middle-distance 39 -samak 39 -jasminder 39 -masque 39 -preppers 39 -surrealism 39 -yancey 39 -frattini 39 -fehon 39 -decoder 39 -elizondo 39 -stepp 39 -35.3 39 -35.1 39 -marden 39 -coddled 39 -karimov 39 -31,500 39 -35mg 39 -har 39 -beecham 39 -gilder 39 -dandach 39 -suraj 39 -eloy 39 -wordless 39 -20-day 39 -ccp 39 -hardee 39 -wilberforce 39 -allegory 39 -ghannouchi 39 -211,000 39 -7:40 39 -fulcrum 39 -semiconductors 39 -hamel 39 -bendik 39 -danijel 39 -10-under 39 -cashews 39 -minotaur 39 -beata 39 -turgid 39 -frivolity 39 -yarnton 39 -mabey 39 -fifty-eight 39 -farmhand 
39 -becchetti 39 -hicham 39 -multi-task 39 -dumper 39 -tharun 39 -last-32 39 -ganga 39 -kom 39 -granular 39 -annexes 39 -1606 39 -opie 39 -jud 39 -jut 39 -stile 39 -legally-binding 39 -fess 39 -maina 39 -abdelhamid 39 -fold-out 39 -eked 39 -prodigies 39 -vani 39 -vinicio 39 -physiologically 39 -sall 39 -diddly 39 -counter-piracy 39 -louse 39 -41p 39 -immaterial 39 -seamstresses 39 -child-care 39 -apologist 39 -spatter 39 -bandidos 39 -twitches 39 -yeboah 39 -eisner 39 -al-shihri 39 -monte-carlo 39 -dunas 39 -stuyvesant 39 -burhanuddin 39 -goelz 39 -raiford 39 -levens 39 -minimum-security 39 -stansbury 39 -caldecott 39 -arrestee 39 -macdermott 39 -garuda 39 -castaldo 39 -dissension 39 -dad-of-two 39 -travelsupermarket 39 -ota 39 -drudgery 39 -rosehip 39 -d-connecticut 39 -roxanna 39 -bracewell 39 -bensalem 39 -cuneyt 39 -single-seater 39 -lint 39 -maggiore 39 -pessina 39 -greensburg 39 -glycerin 39 -keeney 39 -120th 39 -2045 39 -overreached 39 -meisel 39 -hanbok 39 -carnivals 39 -perforation 39 -slippage 39 -mongering 39 -blacklisting 39 -1.69 39 -xs 39 -proclivities 39 -aqaba 39 -imparted 39 -incubate 39 -icky 39 -minard 39 -hallandale 39 -yuli 39 -herringbone 39 -matchplay 39 -readout 39 -lamouchi 39 -university-educated 39 -2800 39 -spicing 39 -uefa.com 39 -wilkin 39 -cavort 39 -heat-resistant 39 -nutmegged 39 -taji 39 -dethrone 39 -grauman 39 -hiv-1 39 -right-thinking 39 -perfumed 39 -liknes 39 -enshrines 39 -bursa 39 -exclaim 39 -brotherton 39 -cosmin 39 -cromarty 39 -qiao 39 -mlb.com 39 -twirls 39 -lela 39 -40/40 39 -55-inch 39 -montaño 39 -bradburn 39 -kyong 39 -tps 39 -eremenko 39 -quiros 39 -re-attached 39 -disgusts 39 -mcalester 39 -transcription 39 -mris 39 -prongs 39 -shang 39 -ambelas 39 -brac 39 -correspondences 39 -unwillingly 39 -natchez 39 -raivich 39 -bgr 39 -edney 39 -raw-rees 39 -5 1/2 39 -exculpatory 39 -food-related 39 -60-vote 39 -pitch-perfect 39 -non-jewish 39 -roath 39 -2.56 39 -hola 39 -independent-minded 39 -dahlgren 39 -mcnaught 39 -hubers 39 -garvin 39 -khyra 39 -retold 39 -stringfellows 39 -andie 39 -counterclaim 39 -mobilisation 39 -roya 39 -fine-dining 39 -csis 39 -russet 39 -piro 39 -rehash 39 -mignon 39 -sandeman 39 -yemenia 39 -whale-watching 39 -asensio 39 -u.s.-japan 39 -dynamos 39 -culbertson 39 -jeal 39 -gargoyles 39 -whitecaps 39 -guerreiro 39 -post/abc 39 -interpretive 39 -sicari 39 -soir 39 -77-year 39 -bonaventura 39 -afflicts 39 -marsico 39 -webley 39 -alok 39 -hellmann 39 -all-over 39 -leotards 39 -abstraction 39 -rampages 39 -bungay 39 -motherboard.tv 39 -khouri 39 -vj 39 -kiaran 39 -12-man 39 -non-working 39 -2029 39 -girder 39 -wassall 39 -first-quarter 39 -remizowski 39 -masik 39 -prive 39 -narrating 39 -eye-tracking 39 -18-34 39 -cheerio 39 -minichiello 39 -repertory 39 -garriott 39 -california-berkeley 39 -anti-nazi 39 -barraclough 39 -airgun 39 -byd 39 -4.5-inch 39 -raper 39 -soliders 39 -holdsworth-wild 39 -dunphy 39 -karlsson 39 -17billion 39 -american-based 39 -francie 39 -petter 39 -subsisting 39 -http 39 -bookish 39 -solder 39 -ruffling 39 -elastin 39 -sallow 39 -commentated 39 -2.19 39 -reve 39 -saidi 39 -world-beating 39 -benenden 39 -alm 39 -bridgford 39 -1.98 39 -yalland 39 -asb 39 -payan 39 -kcbs 39 -leakes 39 -dinsdale 39 -abdur 39 -635 39 -kadian 39 -pitroipa 39 -rock-star 39 -ceinws 39 -swift-tuttle 39 -denting 39 -loor 39 -drownings 39 -anti-christ 39 -in-line 39 -charleville 39 -gedling 39 -ghanem 39 -fowey 39 -galliard 39 -prioritizes 39 -phill 39 -gregorian 39 -lambrini 39 -dura 39 -terrorist-related 39 -lldc 39 
-belardine 39 -hin 39 -supplanted 39 -mockford 39 -chabon 39 -internist 39 -2400 39 -freckled 39 -shayla 39 -wdsu 39 -groundskeeper 39 -reentry 39 -1,000-mile 39 -monger 39 -16-foot 39 -waives 39 -lindberg 39 -keurig 39 -mimas 39 -bedder 39 -swifts 39 -reform-minded 39 -dh 39 -thuram 39 -seven-strong 39 -damazer 39 -krejci 39 -invalides 39 -lie-detector 39 -1826 39 -westcliff-on-sea 39 -coauthor 39 -aurochs 39 -highpoint 39 -babatunde 39 -lop 39 -deodorants 39 -caucus-goers 39 -ayotzinapa 39 -domenic 39 -sharecropper 39 -15.99 39 -cherry-picked 39 -electric-powered 39 -barbadian 39 -anti-seizure 39 -minutemen 39 -impregnate 39 -outshining 39 -luang 39 -gajjar 39 -26,000-a-year 39 -534 39 -kpakiwa 39 -seaports 39 -payed 39 -payen 39 -mumble 39 -hix 39 -pentagram 39 -bazar 39 -dahab 39 -crystallised 39 -tuolumne 39 -25-1 39 -resubmit 39 -biomechanics 39 -yolande 39 -26.1 39 -mayers 39 -revd 39 -departures.com 39 -traill 39 -mclaren-honda 39 -braswell 39 -offord 39 -meetup 39 -sterility 39 -nakata 39 -birger 39 -barcodes 39 -beci 39 -tko 39 -niggle 39 -matsumura 39 -medran 39 -dishonourable 39 -riken 39 -ex-cabinet 39 -zona 39 -wellspring 39 -durex 39 -smolensk 39 -beeny 39 -nakedness 39 -taiwan-based 39 -kincade 39 -achille 39 -500px 39 -2207 39 -heltebrake 39 -u16 39 -munchkins 39 -inducement 39 -kristensen 39 -mantell 39 -wilfork 39 -muslin 39 -furtive 39 -v-8 39 -mjallby 39 -acker 39 -turners 39 -frederickson 39 -stampeding 39 -braverman 39 -varkha 39 -dammartin-en-goele 39 -bdd 39 -reinvigorating 39 -bundlers 39 -radio-frequency 39 -shallue 39 -disfigure 39 -5:00 39 -legge 39 -lynchings 39 -liger 39 -hackman 39 -poppi 39 -189,000 39 -piraeus 39 -squished 39 -princier 39 -narrate 39 -nagatomo 39 -pedalled 39 -millican 39 -lacquered 39 -jelle 39 -cca 39 -souring 39 -kawaoka 39 -hindustan 39 -watercolors 39 -50mg 39 -seacoast 39 -jenga 39 -muniain 39 -imo 39 -curried 39 -wigmore 39 -kehl 39 -spectroscopic 39 -hoogland 39 -scanadu 39 -tombides 39 -uneaten 39 -mynarski 39 -meili 39 -cheyanne 39 -mossley 39 -prewar 39 -cryogenic 39 -speakeasy 39 -sigurdardottir 39 -coolio 39 -iot 39 -ceausescu 39 -sways 39 -frecklington 39 -gambles 39 -tuipulotu 39 -two-acre 39 -bayamon 39 -zedillo 39 -shelagh 39 -jogo 39 -r1 39 -non-threatening 39 -troughton 39 -romanced 39 -goya 39 -gourd 39 -freeland 39 -dimeglio 39 -candidacies 39 -oisin 39 -benevolence 39 -aphids 39 -catalysts 39 -sdr 39 -dollhouse 39 -portlandia 38 -makoto 38 -atid 38 -rune 38 -sixx 38 -overstreet 38 -knotts 38 -inaugurations 38 -reindeers 38 -millstone 38 -peachey 38 -fordow 38 -seay 38 -watmore 38 -dragovic 38 -niacin 38 -news9 38 -calvi 38 -26-28 38 -lukashevich 38 -corsicana 38 -harriett 38 -hakkasan 38 -70-day 38 -488 38 -offit 38 -poggiali 38 -opm 38 -mallette 38 -al-maktoum 38 -crissy 38 -counter-claim 38 -bouzid 38 -toupee 38 -rhine-westphalia 38 -pitti 38 -tmi 38 -dragoons 38 -shifty 38 -unneeded 38 -wanjiru 38 -01:01 38 -extremity 38 -one-liner 38 -fwd.us 38 -pomrenze 38 -madani 38 -vocalists 38 -isolationism 38 -under-appreciated 38 -shipbuilders 38 -off-stage 38 -shaka 38 -model-of-the-moment 38 -bomer 38 -philomene 38 -hauge 38 -vestibular 38 -67p/churyumov 38 -manageress 38 -calderón 38 -sixty-two 38 -murnaghans 38 -butner 38 -laboring 38 -selectman 38 -oneal 38 -fenced-off 38 -sb1070 38 -standard-issue 38 -sambisa 38 -facebook-owned 38 -explosively 38 -forty-seven 38 -britcher 38 -zippers 38 -slahi 38 -kyndall 38 -redoubled 38 -inditex 38 -hydrogenated 38 -ebon 38 -400lb 38 -muhajiroun 38 -sunglass 38 -reimbursing 38 
-seepage 38 -hoovering 38 -gerstenmaier 38 -1.53 38 -endemol 38 -katheryn 38 -henrico 38 -baily 38 -sheron 38 -dobbins 38 -fifteen-year-old 38 -tear-gassed 38 -leh 38 -hilla 38 -politi 38 -twersky 38 -tabit 38 -751 38 -mattioli 38 -reverberations 38 -sarmiento 38 -196,000 38 -ghostwriter 38 -kosice 38 -conifer 38 -jebel 38 -glasgow-born 38 -ironbridge 38 -delis 38 -3-7 38 -11,200 38 -nested 38 -guilds 38 -tipuric 38 -salwa 38 -loc 38 -hallucinogen 38 -naila 38 -fleetingly 38 -abraira 38 -zealot 38 -saavedra 38 -shunting 38 -coman 38 -movie-making 38 -lightner 38 -denborg 38 -lizi 38 -saiz 38 -repo 38 -joannides 38 -csr 38 -1.74 38 -kitv 38 -clandestinely 38 -gagan 38 -fencer 38 -330ml 38 -douglasville 38 -pangaea 38 -seceded 38 -perales 38 -vahid 38 -smeets 38 -endicott 38 -bet365 38 -135million 38 -refusals 38 -off-base 38 -crotty 38 -thompsons 38 -fgw 38 -suze 38 -under-30s 38 -notepads 38 -aldeguer 38 -datu 38 -labianca 38 -non-christians 38 -acm 38 -sardinian 38 -maroubra 38 -mealtime 38 -gowan 38 -mislaid 38 -antagonizing 38 -supremes 38 -02:22 38 -anti-us 38 -caswell 38 -shennan 38 -northjersey.com 38 -precipitously 38 -waghorn 38 -grega 38 -ex-model 38 -castellani 38 -kalman 38 -hristo 38 -sportspeople 38 -ashkenazi 38 -rudiger 38 -heffron 38 -ludacris 38 -obfuscation 38 -2.44 38 -condenses 38 -floodplain 38 -disallow 38 -self-mutilation 38 -huelva 38 -waterson 38 -30.4 38 -musgraves 38 -nevil 38 -eventer 38 -kulkarni 38 -510,000 38 -arranger 38 -octa-core 38 -misérables 38 -tamu 38 -outhouses 38 -zandra 38 -ellet 38 -bazlinton 38 -free-falling 38 -higginbottom 38 -chipmaker 38 -australian-based 38 -rumba 38 -bethlem 38 -pimples 38 -elke 38 -electrocardiogram 38 -2032 38 -kepiro 38 -hojbjerg 38 -tulley 38 -slumbering 38 -trampolining 38 -longfellow 38 -wafts 38 -tradeoffs 38 -mtdna 38 -paravant 38 -tue 38 -checque 38 -nm 38 -ninewells 38 -ercis 38 -scalextric 38 -loran 38 -greenan 38 -switzer 38 -guzzo 38 -tba 38 -husks 38 -jeezy 38 -wynyard 38 -slayton 38 -ankeny 38 -cardholder 38 -tumilty 38 -yall 38 -freas 38 -gladiatorial 38 -cremer 38 -250th 38 -1trillion 38 -skillforce 38 -mailonlinepictures@dailymail.co.uk 38 -pre-set 38 -bilking 38 -questioners 38 -wfmz 38 -moy 38 -abrahmsohn 38 -jharkhand 38 -osmosis 38 -zweig 38 -photobooth 38 -stormwater 38 -plushenko 38 -zandipour 38 -weinstock 38 -misophonia 38 -macedon 38 -777x 38 -regane 38 -00:54 38 -605 38 -three-wheeler 38 -bok 38 -cheryshev 38 -xojane 38 -naia 38 -swampland 38 -sylmar 38 -scandalised 38 -horsfield 38 -regaled 38 -underactive 38 -four-runway 38 -greenslate 38 -ex-chairman 38 -freelander 38 -laterally 38 -careflight 38 -gingers 38 -restocking 38 -diktats 38 -sanli 38 -squelch 38 -piketty 38 -hazen 38 -atef 38 -sports-related 38 -sparkhill 38 -7.10 38 -0.18 38 -quat 38 -sarris 38 -redwine 38 -kalas 38 -safia 38 -206,000 38 -riggi 38 -8.92 38 -wadongo 38 -rt. 
38 -alys 38 -near-freezing 38 -ashley_clements 38 -late-season 38 -house-hunting 38 -shes 38 -benders 38 -thrashes 38 -jakaya 38 -swiss-born 38 -soundproofed 38 -appetizers 38 -tarto 38 -goodchild 38 -hildreth 38 -lansdale 38 -headlamps 38 -dfc 38 -jarrar 38 -keothavong 38 -exhortations 38 -playwrights 38 -leeanne 38 -beall 38 -spratly 38 -cbs4 38 -blavatnik 38 -750ml 38 -bachus 38 -kailahun 38 -skydived 38 -two-tonne 38 -carolan 38 -hebridean 38 -hanky 38 -01:15 38 -datuk 38 -club-mate 38 -remixed 38 -chatah 38 -nunzio 38 -stoplight 38 -arik 38 -incorrigible 38 -shellacking 38 -benner 38 -so-far 38 -semak 38 -chaste 38 -6mm 38 -powerbase 38 -lambo 38 -sporn 38 -all-expenses-paid 38 -kaiba 38 -ashen 38 -kilty 38 -cuenca 38 -apfel 38 -parse 38 -staniford 38 -meditations 38 -professorial 38 -fifth-place 38 -violets 38 -m-cat 38 -dolgov 38 -grievously 38 -dhillon 38 -transcendence 38 -385,000 38 -yunis 38 -14.95 38 -salutation 38 -skylark 38 -energy-sapping 38 -joyless 38 -thumbed 38 -introverts 38 -auroral 38 -presidio 38 -dzemaili 38 -confections 38 -raimi 38 -gisin 38 -pin-ups 38 -rivero 38 -embellish 38 -handrail 38 -triple-a 38 -karts 38 -corkovic 38 -laker 38 -belarussian 38 -aggravates 38 -bei 38 -bez 38 -sardonic 38 -grammes 38 -dolton 38 -fallback 38 -deriving 38 -sexually-explicit 38 -condell 38 -efrain 38 -slivers 38 -epidermis 38 -benfleet 38 -uday 38 -headhunted 38 -propecia 38 -meadowlands 38 -36.4 38 -my-wardrobe 38 -naughtiest 38 -10x 38 -horak 38 -gar 38 -gab 38 -maule 38 -mini-bus 38 -gashed 38 -kondvar 38 -loreto 38 -2001-2002 38 -doylestown 38 -ile 38 -maxse 38 -huie 38 -bisexuals 38 -coun 38 -baggott 38 -riffing 38 -howey 38 -joely 38 -refillable 38 -manholes 38 -four-night 38 -lunchtimes 38 -spamalot 38 -kfmb 38 -yukiya 38 -mrf 38 -self-injury 38 -vittoria 38 -sophomores 38 -32billion 38 -wieber 38 -morag 38 -zoraida 38 -vasili 38 -kilmeade 38 -shevardnadze 38 -benke 38 -desroches 38 -adieu 38 -gonaives 38 -pender 38 -potteries 38 -pre-pubescent 38 -fly-tippers 38 -vintage-style 38 -jonge 38 -walsham 38 -quora 38 -bernhardt 38 -588 38 -ruan 38 -godden-edwards 38 -surat 38 -qinghai 38 -d'argent 38 -arvizo 38 -leaguers 38 -blemished 38 -humps 38 -zishan 38 -schwandt 38 -ieee 38 -kirstin 38 -aishwarya 38 -crieff 38 -ingmar 38 -drink-related 38 -@jarrettbellini 38 -eveningwear 38 -patras 38 -cotterell 38 -bursary 38 -peculiarities 38 -trialing 38 -russia-ukraine 38 -439 38 -longstaff 38 -syal 38 -cure-all 38 -oases 38 -25-years-old 38 -yarl 38 -anti-castro 38 -arrestees 38 -parasols 38 -yashin 38 -214,000 38 -grangetown 38 -unodc 38 -toribio 38 -bankrupting 38 -umesh 38 -time-tested 38 -kob 38 -manifesting 38 -korans 38 -perimeters 38 -natcho 38 -stilwell 38 -onazi 38 -congratulation 38 -hydrochloride 38 -oxidative 38 -wews 38 -ratifying 38 -wheelies 38 -mildew 38 -say-so 38 -haslemere 38 -polyphenols 38 -v-22 38 -unfussy 38 -bozella 38 -michels 38 -devotional 38 -yisrael 38 -696 38 -0300 38 -elsmore 38 -al-shariah 38 -1.42 38 -flibanserin 38 -legalizes 38 -voronezh 38 -zag 38 -horsman 38 -ph.d 38 -foxhole 38 -loewen 38 -single-seat 38 -mutianyu 38 -berkin 38 -singer-actress 38 -audio-visual 38 -high-dollar 38 -natures 38 -60per 38 -jailbreaking 38 -21:09 38 -eventualities 38 -mega-yacht 38 -oldie 38 -cruelty-free 38 -colonising 38 -lauderdale-hollywood 38 -boyne 38 -savaging 38 -wausau 38 -hallucinate 38 -walley 38 -rupa 38 -odle 38 -life-saver 38 -outwitted 38 -smirks 38 -press-enterprise 38 -camera-equipped 38 -f-35b 38 -4runner 38 -courtland 38 -takei 38 -megane 38 
-rocca 38 -chrysalis 38 -commandeer 38 -demiraj 38 -campania 38 -younker 38 -antagonize 38 -poulsen 38 -ex-kgb 38 -batavia 38 -twice-yearly 38 -wecht 38 -itc 38 -w.r. 38 -sediqi 38 -inside-out 38 -billingsgate 38 -high-dose 38 -withings 38 -sartin 38 -jinushi 38 -bartlam 38 -45-day 38 -calderin 38 -jiao 38 -festered 38 -hon. 38 -hag 38 -weatherford 38 -recommit 38 -bocelli 38 -sailboats 38 -jodhpur 38 -t-pain 38 -moukandjo 38 -fielder-civil 38 -self-healing 38 -au$ 38 -salvageable 38 -20k 38 -diverging 38 -8-3 38 -aptly-named 38 -documentary-style 38 -public-relations 38 -heavily-tattooed 38 -scotched 38 -bennion 38 -incapacitating 38 -kwarteng 38 -ricotta 38 -tijuca 38 -fordo 38 -snapp 38 -recitals 38 -45c 38 -southerland 38 -rutherglen 38 -government-in-exile 38 -lout 38 -redeploy 38 -mineta 38 -wwl 38 -camera-shy 38 -dauber 38 -ess 38 -30-3 38 -amess 38 -mozambican 38 -crossers 38 -mouton 38 -ocular 38 -brar 38 -mccue 38 -spahic 38 -bessette 38 -sachsgate 38 -sood 38 -filippetti 38 -haller 38 -slaughters 38 -cabu 38 -crocheted 38 -studiously 38 -six-shot 38 -sonntag 38 -deccan 38 -tintagel 38 -yerevan 38 -pre-release 38 -andressa 38 -mckillop 38 -ikeda 38 -och 38 -altaf 38 -crossbows 38 -masot 38 -shhh 38 -ad-hoc 38 -tv4 38 -horseradish 38 -sayyid 38 -pastebin 38 -pogo 38 -kejriwal 38 -sudoku 38 -iea 38 -victimizing 38 -barhoum 38 -driffield 38 -dm.later 38 -off-licences 38 -nizwa 38 -manton 38 -zita 38 -failsworth 38 -clarendon 38 -blood-borne 38 -f2 38 -formichetti 38 -g-forces 38 -beatson 38 -abdifatah 38 -tsim 38 -bielefeld 38 -outsell 38 -non-indigenous 38 -ahtisaari 38 -engrossing 38 -emissaries 38 -preempt 38 -sankurathri 38 -ginkgo 38 -beyondblue 38 -nine-inch 38 -ragnar 38 -gennevilliers 38 -unal 38 -yoav 38 -1,280 38 -murkier 38 -hellhole 38 -shead 38 -biographers 38 -parva 38 -lutterworth 38 -bumper-to-bumper 38 -jeweled 38 -dae 38 -lakh 38 -uniontown 38 -burdell 38 -prairies 38 -michaloliakos 38 -lube 38 -league-winning 38 -nrsc 38 -kalmbach 38 -skylanders 38 -43-year 38 -rushden 38 -finca 38 -medallions 38 -marrero 38 -oilfields 38 -hardscrabble 38 -lifg 38 -realistic-looking 38 -50k 38 -exorcists 38 -castrating 38 -ef 38 -bromide 38 -ruz 38 -lamotta 38 -hengelo 38 -kyiv 38 -eni 38 -1616 38 -sharpie 38 -hamyd 38 -myint 38 -d&g 38 -jussi 38 -hispania 38 -ancestry.co.uk 38 -fettle 38 -craned 38 -thaiya 38 -guimaraes 38 -sik 38 -capcom 38 -golders 38 -mafwenke 38 -harbord 38 -10.55 38 -monstrosities 38 -hill-wood 38 -ponderous 38 -campus-wide 38 -lindholm 38 -1755 38 -1620 38 -oakwell 38 -incinerate 38 -gorda 38 -father-and-son 38 -gautier 38 -fenced-in 38 -ige 38 -cupich 38 -sino-u.s. 38 -exoskeletons 38 -wurzelbacher 38 -10-bedroom 38 -mirga 38 -syncope 38 -bayes 38 -tatars 38 -949 38 -conry 38 -bormann 38 -accentuates 38 -sherwyn 38 -retford 38 -capricorn 38 -iaboni 38 -sunda 38 -reber 38 -christmastime 38 -lackawanna 38 -sidetracked 38 -thermoelectric 38 -ajamu 38 -bridie 38 -haber 38 -hennigan 38 -multi-vehicle 38 -nimmala 38 -renan 38 -artic 38 -pooja 38 -quaffing 38 -cost-benefit 38 -unevenly 38 -regenhard 38 -scuppering 38 -tranter 38 -ottomans 38 -chudinov 38 -implanon 38 -haughty 38 -nutini 38 -kimani 38 -exposto 38 -de-stress 38 -repainting 38 -kumi 38 -ansaldi 38 -caver 38 -forgers 38 -karkare 38 -ilna 38 -13,200 38 -26.4 38 -fatboy 38 -dual-use 38 -hawked 38 -gaza-based 38 -g.r.l. 
38 -mazes 38 -mellowed 38 -dad-of-three 38 -38billion 38 -krane 38 -rebukes 38 -probyn 38 -hang-out 38 -weight-related 38 -passivity 38 -hrabove 38 -dashboards 38 -decision-maker 38 -molby 38 -cyclospora 38 -duquesne 38 -mubi 38 -muhammadi 38 -photocopied 38 -caoimhe 38 -kacie 38 -rewire 38 -near-post 38 -bonin 38 -nourmohammadi 38 -9-1 38 -9-3 38 -icr 38 -plagiarizing 38 -shut-down 38 -scrabbling 38 -refiled 38 -gorse 38 -a/c 38 -recuperated 38 -interlinked 38 -triplex 38 -1/10 38 -gabonese 38 -haralson 38 -tutton 38 -vis 38 -cdf 38 -bda 38 -fresheners 38 -haimona 38 -wreaks 38 -baxendale 38 -mankiewicz 38 -argon 38 -five-place 38 -cornejo 38 -corporates 38 -grennan 38 -otc 38 -glossary 38 -bottomley 38 -subverted 38 -spasticity 38 -euphemisms 38 -pokey 38 -10g 38 -workwear 38 -uncouth 38 -stanger 38 -smale 38 -linney 38 -gallatin 38 -pavia 38 -westernized 38 -songbirds 38 -mauri 38 -fairbairn 38 -stuani 38 -p.o. 38 -bated 38 -pocket-lint 38 -cuarón 38 -daugher 38 -cound 38 -4-year 38 -double-bogey 38 -negros 38 -demonization 38 -out-of-towners 38 -okubote 38 -blakemore 38 -harrigan 38 -brutalised 38 -americares 38 -body-worn 38 -ro 38 -yelping 38 -screed 38 -glasshouse 38 -howden 38 -flamed 38 -scrunched 38 -emyr 38 -saintly 38 -tiley 38 -plimpton 38 -multigenerational 38 -speth 38 -grabber 38 -brookwood 38 -alyce 38 -insua 38 -amgen 38 -kosen 38 -gdc 38 -out-dated 38 -life-affirming 38 -automaton 38 -katya 38 -temarii 37 -wilco 37 -peaceable 37 -592 37 -okinawan 37 -aet 37 -balloonists 37 -furrow 37 -omo 37 -qumu 37 -willimon 37 -11-under 37 -fajr 37 -sabriya 37 -tredinnick 37 -unwinding 37 -insures 37 -stewing 37 -schip 37 -8cm 37 -tweetdeck 37 -a-10s 37 -kwang 37 -hearns 37 -pinstripes 37 -six-fold 37 -egg-shaped 37 -huelskamp 37 -pergola 37 -rnib 37 -qatif 37 -analogies 37 -hassall 37 -omelettes 37 -edimar 37 -arish 37 -plantains 37 -treasuries 37 -self-indulgence 37 -axles 37 -flannigan 37 -whitehill 37 -castrate 37 -cothran 37 -avital 37 -hebert 37 -anti-inflammatories 37 -soliloquy 37 -phailin 37 -mezhgan 37 -kongolo 37 -inexhaustible 37 -yamazaki 37 -basilan 37 -guayaquil 37 -mikkelsen 37 -metropolises 37 -778 37 -522 37 -madi 37 -calcite 37 -irungu 37 -locus 37 -caldicott 37 -pre-term 37 -stigmatizing 37 -beazley 37 -histrionic 37 -grillos 37 -medgar 37 -scramjet 37 -kochie 37 -spondike 37 -girl-next-door 37 -45.5 37 -abbotsbury 37 -beach-front 37 -derrico 37 -pirating 37 -maesteg 37 -dulux 37 -precedent-setting 37 -acclimated 37 -duston 37 -merest 37 -lititz 37 -befriends 37 -681 37 -myners 37 -dermonds 37 -emanates 37 -all-seeing 37 -katsnelson 37 -majlis 37 -40g 37 -formosa 37 -peacemaking 37 -volo 37 -multi-functional 37 -quiktrip 37 -jaruzelski 37 -fumigated 37 -chill-out 37 -iceni 37 -subtleties 37 -seat-belt 37 -corpsman 37 -turlock 37 -sankaran 37 -blow-by-blow 37 -champing 37 -co-editor 37 -anti-personnel 37 -eyeko 37 -zielinski 37 -sworn-in 37 -nanterre 37 -magnums 37 -uckfield 37 -cauley 37 -mestre 37 -agitator 37 -durdle 37 -rosenblatt 37 -silicate 37 -colkett 37 -dini 37 -8-year 37 -security-related 37 -shinkansen 37 -rfef 37 -normand 37 -lese 37 -arrendale 37 -1992-93 37 -admonishment 37 -ardmore 37 -21-foot 37 -normalisation 37 -cerebrospinal 37 -icelandair 37 -rabbatts 37 -silkworm 37 -bialik 37 -haakon 37 -beatable 37 -nrcc 37 -unidentifiable 37 -clumsiness 37 -igf-1 37 -aguer 37 -zabadani 37 -typographical 37 -step-up 37 -newbery 37 -kisser 37 -tips4jesus 37 -cheynes 37 -sagal 37 -bolotov 37 -tooele 37 -tinchy 37 -shanda 37 -43-8 37 -fully-equipped 37 -annmarie 
-[deleted data file, continued: word-frequency entries of the form `<token> <count>`, one per line, with counts descending from 37 through 33 across this span; thousands of entries elided]
33 -b-25 33 -choong 33 -wavertree 33 -arch-enemy 33 -daldry 33 -mersane 33 -haddon-cave 33 -pater 33 -problem-plagued 33 -burchell 33 -sunnylands 33 -2.60 33 -genia 33 -fox13 33 -skomal 33 -cleef 33 -semyon 33 -sowers 33 -90lbs 33 -second-ranking 33 -arline 33 -arabic-speaking 33 -quackery 33 -nondenominational 33 -shenker 33 -deviancy 33 -ditton 33 -schweich 33 -typhus 33 -galahad 33 -african-led 33 -inundation 33 -aik 33 -haren 33 -synthesized 33 -alongi 33 -taoyuan 33 -deploring 33 -limandri 33 -blarney 33 -jamia 33 -grima 33 -extrasolar 33 -smithy 33 -impeaching 33 -keiji 33 -29c 33 -rebar 33 -auteur 33 -wearhouse 33 -recharges 33 -pre-op 33 -sympathizes 33 -amputating 33 -770,000 33 -style.com 33 -afro-american 33 -sugarcoat 33 -bernese 33 -rogo 33 -karembeu 33 -riche 33 -marshalling 33 -149.99 33 -staake 33 -cherry-garrard 33 -miscegenation 33 -unlivable 33 -casadei 33 -two-and-half 33 -warley 33 -reorganise 33 -slurp 33 -co-codamol 33 -sawford 33 -quong 33 -genewatch 33 -alachua 33 -sagebrush 33 -five-person 33 -573 33 -aku 33 -avonmouth 33 -o'donovan 33 -saperstein 33 -unearthly 33 -noibi 33 -bakari 33 -profit-making 33 -un-named 33 -schoolmaster 33 -goons 33 -on-shore 33 -marengo 33 -ravasi 33 -bonham-carter 33 -password-protected 33 -polytechnique 33 -suker 33 -140lbs 33 -anti-ira 33 -45kg 33 -121,000 33 -barstow 33 -paraplegics 33 -hassani 33 -preset 33 -all-purpose 33 -delegitimize 33 -ieuan 33 -20-room 33 -dagmar 33 -rian 33 -ticketholders 33 -boneyard 33 -scuff 33 -al-momani 33 -rusnak 33 -javits 33 -vevo 33 -unprompted 33 -gradients 33 -never-say-die 33 -money-grabbing 33 -anel 33 -trumpington 33 -homme 33 -bergwall 33 -audiobooks 33 -laryngoscopy 33 -thracian 33 -28billion 33 -sharrock 33 -biologic 33 -nalmefene 32 -ishido 32 -thore 32 -rossig 32 -quarter-million 32 -head-turning 32 -musselwhite 32 -lockette 32 -schanze 32 -omb 32 -barbarous 32 -cottom 32 -heliopolis 32 -mutare 32 -dyken 32 -mun 32 -ando 32 -lynsi 32 -circuitous 32 -supertanker 32 -percolating 32 -m61 32 -utair 32 -unswerving 32 -revaluation 32 -wacko 32 -ndamukong 32 -0-5 32 -hustlers 32 -disentangle 32 -fillmore 32 -dioguardi 32 -mogensen 32 -ancients 32 -hair-cutting 32 -cwm 32 -coppinger 32 -ekso 32 -nairo 32 -doper 32 -mangudadatu 32 -houda 32 -absconds 32 -alberts 32 -stub 32 -cerulean 32 -kelt 32 -mcanea 32 -influencer 32 -ardabili 32 -neff 32 -latvians 32 -100-degree 32 -agc 32 -107th 32 -pb&j 32 -tonioli 32 -yuppie 32 -overexcited 32 -fannin 32 -paleontological 32 -mp3s 32 -hertel 32 -greville 32 -p8 32 -sackler 32 -unobtainable 32 -ncube 32 -view-master 32 -eclampsia 32 -exley 32 -macchiarini 32 -theblaze 32 -edington 32 -90km 32 -khairkhwa 32 -anti-catholic 32 -care.data 32 -pan-starrs 32 -hacktivists 32 -eek 32 -saki 32 -synonym 32 -midshipman 32 -gartshore 32 -foamy 32 -scobee 32 -3:16 32 -deimos 32 -flatt 32 -well-made 32 -sixty-nine 32 -colouration 32 -differentiates 32 -reichel 32 -01:22 32 -skatepark 32 -500-a-week 32 -egonu 32 -megalodon 32 -cavendish-coulson 32 -campagnaro 32 -substantively 32 -curmudgeonly 32 -sportiva 32 -ill-defined 32 -nourishes 32 -far-away 32 -aldebaran 32 -re-built 32 -nothingness 32 -hamzy 32 -frilled 32 -fes 32 -lateline 32 -spouted 32 -sapienza 32 -al-ahmad 32 -juiced 32 -nelli 32 -lipsey 32 -lettuces 32 -alladin 32 -gilhooly 32 -nalty 32 -time-limited 32 -campfires 32 -half-dressed 32 -shrouding 32 -briar 32 -dicky 32 -flamboyantly 32 -bosons 32 -fantasize 32 -16-team 32 -lamontagne 32 -@talalmusa 32 -channing-williams 32 -computed 32 -signet 32 
-grandfather-of-three 32 -demjanovich 32 -1.72 32 -whitsundays 32 -miny 32 -photo-shopped 32 -ricochets 32 -schwindt 32 -wathen 32 -riu 32 -chron.com 32 -trocadero 32 -reasserting 32 -macauley 32 -crazier 32 -pettus 32 -purer 32 -jetsons 32 -tenenbaum 32 -mindboggling 32 -dickenson 32 -12-second 32 -fluid-filled 32 -nantwich 32 -kosaka 32 -well-used 32 -jasmyn 32 -unama 32 -nhc 32 -i-limb 32 -anti-china 32 -naivete 32 -underhanded 32 -marginalizing 32 -melania 32 -minion 32 -holocene 32 -anzang 32 -batistuta 32 -ribosomes 32 -rozhetskin 32 -horseshoes 32 -makdissi 32 -eppie 32 -smika 32 -stabilizer 32 -crucify 32 -optimally 32 -taxpaying 32 -betar 32 -tailgaters 32 -un-british 32 -almas 32 -moana 32 -hobnobbed 32 -world-changing 32 -voelker 32 -pjs 32 -mispronounced 32 -manaslu 32 -coolmore 32 -award-nominated 32 -yakovlev 32 -krasnaya 32 -lyubov 32 -9,700 32 -hutcheon 32 -semesters 32 -longrich 32 -sihanoukville 32 -zofia 32 -essex-based 32 -kctv5 32 -2.46 32 -iyer 32 -litton 32 -thievery 32 -814 32 -yelland 32 -smallholder 32 -libertines 32 -emancipated 32 -tolo 32 -chote 32 -mcnutt 32 -blackfoot 32 -prosthetist 32 -ronstadt 32 -marlee 32 -scorpio 32 -sunstroke 32 -transracial 32 -worksheet 32 -toxicological 32 -croston 32 -kotaku 32 -most-searched 32 -soundbite 32 -25-year-olds 32 -neonicotinoids 32 -goose-stepping 32 -unmolested 32 -tonics 32 -alchemist 32 -delaet 32 -self-monitoring 32 -boudreaux 32 -rantings 32 -napped 32 -cannonballs 32 -learmount 32 -zubikarai 32 -13,700 32 -mendota 32 -couchman 32 -snafus 32 -rickmansworth 32 -kittinger 32 -wakeling 32 -under-the-radar 32 -tadeusz 32 -joya 32 -bufford 32 -bestowing 32 -debriefed 32 -leszek 32 -non-venomous 32 -roping 32 -virginal 32 -buddhas 32 -freckleton 32 -see-saw 32 -rule-making 32 -persevering 32 -15-30 32 -carn 32 -unhealthily 32 -multistory 32 -snowbank 32 -batson 32 -repetitions 32 -hoshyar 32 -frederica 32 -opryland 32 -miakienko 32 -promontory 32 -k.c. 32 -heitkamp 32 -panizza 32 -jetskis 32 -grafter 32 -dejonge 32 -sasse 32 -last-second 32 -mol 32 -japanese-americans 32 -talon 32 -santamarta 32 -ganguly 32 -ary 32 -en-suites 32 -cordoned-off 32 -anysha 32 -counter-sued 32 -ryokan 32 -weatherby 32 -perroncel 32 -100mg 32 -pre-sentencing 32 -dcms 32 -489 32 -niclas 32 -invoiced 32 -expos 32 -evgeniy 32 -image-sharing 32 -lewsey 32 -car-sized 32 -1996-97 32 -haiku 32 -snooped 32 -longhouse 32 -1.86 32 -barths 32 -stillaguamish 32 -linah 32 -hulton 32 -raymundo 32 -deers 32 -olufsen 32 -eastlake 32 -durantez 32 -soley 32 -lubet 32 -cadywould 32 -non-denominational 32 -inhabitable 32 -kells 32 -antar 32 -customising 32 -fast-changing 32 -kamens 32 -preeti 32 -nation-state 32 -steinhauer 32 -lakemba 32 -prachanda 32 -kikuyu 32 -china-u.s. 
32 -e.l 32 -disproves 32 -boggy 32 -harmoniously 32 -simister 32 -sabsabi 32 -impulsivity 32 -egrets 32 -warton 32 -ageist 32 -crosson 32 -defreitas 32 -asis 32 -baf 32 -jürgen 32 -peak-time 32 -mixed-breed 32 -anyplace 32 -oblique 32 -angostura 32 -gilfoyle 32 -trueman 32 -hemi 32 -mafraq 32 -choosy 32 -prismatic 32 -atorvastatin 32 -melancholic 32 -weedy 32 -havelock 32 -orthodontist 32 -spyros 32 -rahin 32 -babic 32 -38ft 32 -provencal 32 -epeat 32 -beyoncà 32 -georesonance 32 -skelcher 32 -sheedy 32 -pre-taped 32 -poltergeist 32 -dowie 32 -human-induced 32 -stoneley 32 -33m 32 -jabulani 32 -bertolet 32 -al-zawiya 32 -ratti 32 -overindulging 32 -l4 32 -l5 32 -out-of-character 32 -rashly 32 -hadith 32 -awesomeness 32 -bellator 32 -under-served 32 -forma 32 -melly 32 -shtick 32 -school-educated 32 -oona 32 -allofs 32 -strayer 32 -simchuk 32 -102nd 32 -infielder 32 -tern 32 -creegan 32 -al-shifa 32 -elswick 32 -macbride 32 -767-300 32 -erevia 32 -sarcophagi 32 -calabrian 32 -koonin 32 -remo 32 -trainings 32 -ag2r 32 -casilla 32 -reynoso 32 -steinbach 32 -googly 32 -creekmore 32 -grammatically 32 -drewett 32 -plumps 32 -overwrought 32 -visualizing 32 -bryanna 32 -sooth 32 -sheyla 32 -peep-toe 32 -jeon 32 -sowders 32 -delroy 32 -pickaxes 32 -zaher 32 -tenancies 32 -33.9 32 -ordsall 32 -a/w 32 -mes 32 -self-deportation 32 -platoons 32 -effluent 32 -20-strong 32 -saryan 32 -wildebeests 32 -short-sleeve 32 -44.5 32 -well-crafted 32 -1666 32 -1660 32 -catskill 32 -morrone 32 -rotisserie 32 -a.b. 32 -stefania 32 -dudamel 32 -internationally-renowned 32 -scalloped 32 -adela 32 -bilk 32 -detoxifying 32 -karat 32 -illusive 32 -satirised 32 -arscott 32 -cromartie 32 -shurn 32 -29-15 32 -mayim 32 -vignette 32 -yearley 32 -insp. 32 -hounslea 32 -fold-up 32 -salcido 32 -ryong 32 -warburtons 32 -treebhoowoon 32 -b.j. 
32 -tareena 32 -fazer 32 -vandalize 32 -retinue 32 -handovers 32 -ruthin 32 -geez 32 -midori 32 -crazily 32 -howcast 32 -1783 32 -recantation 32 -hengl 32 -:0 32 -kleinrock 32 -#rip 32 -verruckt 32 -yakin 32 -naw 32 -ten-inch 32 -72m 32 -722 32 -armah 32 -neuchatel 32 -re-brand 32 -warringah 32 -spacewalkers 32 -suvir 32 -glossip 32 -7-10 32 -judelson 32 -jalapeño 32 -platts 32 -astrand 32 -917 32 -signups 32 -mothers2mothers 32 -buchholz 32 -micromanage 32 -lendon 32 -superstores 32 -lollar 32 -burkhard 32 -crudup 32 -musgrave 32 -not-so 32 -abdul-aziz 32 -counter-extremism 32 -syrups 32 -tollefsbol 32 -sinned 32 -etherton 32 -umanos 32 -honeys 32 -holstered 32 -irks 32 -42.7 32 -prospector 32 -eisen 32 -kimoto 32 -grigio 32 -kmsp 32 -scalping 32 -jl 32 -zikuski 32 -polzer 32 -stringed 32 -heslop 32 -kanon 32 -blitzing 32 -tondo 32 -longlist 32 -kahului 32 -mcquarrie 32 -postmistress 32 -flaxseed 32 -satterthwaite 32 -2.80 32 -yacine 32 -boatyard 32 -durkee 32 -pre-crash 32 -necn 32 -autocar 32 -criquette 32 -zilge 32 -antihydrogen 32 -hehir 32 -receivership 32 -burhan 32 -candidature 32 -ka-shing 32 -misleadingly 32 -ciftci 32 -metaphysical 32 -devalues 32 -ionian 32 -littler 32 -tokelau 32 -nigro 32 -xylophone 32 -moringa 32 -four-day-old 32 -restauranteur 32 -strafford 32 -tourney 32 -redland 32 -bathwater 32 -hermaphrodite 32 -kafunda 32 -someones 32 -hookups 32 -turano 32 -43m 32 -200-strong 32 -hause 32 -sooooo 32 -hi-fi 32 -wintering 32 -kubot 32 -squaw 32 -sudamericana 32 -koepcke 32 -56.5 32 -manucho 32 -xprize 32 -tie-dye 32 -lampe 32 -airboard 32 -prade 32 -fauria 32 -on-rushing 32 -zeeshan 32 -beirut-based 32 -sickle-cell 32 -gilmartin 32 -busse 32 -philander 32 -764 32 -fast-acting 32 -oborne 32 -cst-100 32 -co-led 32 -sky-diving 32 -keast 32 -ophthalmologists 32 -week-out 32 -exhortation 32 -30-somethings 32 -hotly-contested 32 -mairs 32 -plunger 32 -goard 32 -reverberates 32 -fuengirola 32 -boonen 32 -chowder 32 -piedras 32 -anakin 32 -lilu 32 -saira 32 -misinterpreting 32 -teasers 32 -1536 32 -manifestos 32 -stealer 32 -nine-game 32 -zaky 32 -oxilofrine 32 -alawi 32 -firmin 32 -orono 32 -shearon 32 -fouda 32 -vegalta 32 -libero 32 -12-acre 32 -indystar 32 -measurably 32 -puhar 32 -mames 32 -16-18 32 -ghafoor 32 -turn-of-the-century 32 -maraniss 32 -200billion 32 -quaking 32 -elsey 32 -eberhard 32 -dyersburg 32 -charis 32 -bogut 32 -49.50 32 -dislodging 32 -pushcart 32 -theses 32 -fanzine 32 -ouamouno 32 -sutton-in-ashfield 32 -arca 32 -knees-up 32 -wilbanks 32 -mammatus 32 -noc 32 -rewired 32 -spectrometry 32 -ten-foot 32 -unionism 32 -motion-capture 32 -jacquelin 32 -rookwood 32 -cat-like 32 -manorial 32 -28-foot 32 -co-ordinators 32 -mud-covered 32 -4.85 32 -discontented 32 -nguema 32 -escapist 32 -h3 32 -hl 32 -wallen 32 -cabramatta 32 -15-16 32 -khairul 32 -serviceable 32 -domestic-violence 32 -julliard 32 -much-heralded 32 -koss 32 -bended 32 -ill-will 32 -neuropsychologist 32 -scheffler 32 -antechamber 32 -zapruder 32 -crag 32 -ameerah 32 -co-parenting 32 -lubricate 32 -250kg 32 -besson 32 -dunce 32 -radin 32 -godparent 32 -jeer 32 -roadwork 32 -bayshore 32 -absorber 32 -brown-haired 32 -albone 32 -elysée 32 -tomioka 32 -tempts 32 -cricketer-turned-politician 32 -configure 32 -desplat 32 -timmermans 32 -ochocinco 32 -bunol 32 -susie-belle 32 -snores 32 -spendthrift 32 -solemnity 32 -drench 32 -laddish 32 -twinned 32 -forerunners 32 -telles 32 -burundian 32 -fourfourtwo 32 -stanza 32 -schirra 32 -demographically 32 -stansell 32 -pastoralists 32 -evangelistic 32 
-england-based 32 -classicist 32 -tromso 32 -kaba 32 -fronds 32 -howlers 32 -woyjeck 32 -gl 32 -alliss 32 -boulahrouz 32 -sailings 32 -netherland 32 -13-mile 32 -right-side 32 -caserta 32 -826 32 -meshing 32 -yet-to-be 32 -janae 32 -supine 32 -gorbunova 32 -play.com 32 -uh-60 32 -burston 32 -underwritten 32 -delphi 32 -trapdoor 32 -pro-hunting 32 -lynsie 32 -ws 32 -albus 32 -phoenician 32 -chiou 32 -gà 32 -kirchhoff 32 -the_topspin 32 -ucsd 32 -destabilisation 32 -alaina 32 -value-added 32 -under-23 32 -italian-style 32 -darian 32 -post-communist 32 -winding-up 32 -dragana 32 -head-up 32 -alkadi 32 -pileups 32 -kovacevic 32 -bawden 32 -sotogrande 32 -nordmann 32 -beringia 32 -pegatron 32 -beckon 32 -vashi 32 -pix 32 -roye 32 -jagoba 32 -pocketbooks 32 -maddest 32 -powertrain 32 -triangulation 32 -ointments 32 -eighth-floor 32 -upal 32 -u19s 32 -erases 32 -al-zoubi 32 -parisa 32 -80g 32 -two-dozen 32 -tash 32 -offloads 32 -energy-rich 32 -green-lighted 32 -pumped-up 32 -telemarketers 32 -skinnygirl 32 -busta 32 -heidelbergensis 32 -homesteads 32 -ipp 32 -disagreeable 32 -paranthropus 32 -razr 32 -glumly 32 -agata 32 -discounter 32 -anti-vietnam 32 -ther 32 -antell 32 -u.s.-funded 32 -gimbel 32 -zebrafish 32 -soundscapes 32 -ferrera 32 -manliness 32 -quirkiest 32 -slow-speed 32 -rheumatic 32 -7.29 32 -eurythmics 32 -btec 32 -kristol 32 -pra 32 -pascrell 32 -sentra 32 -cloudier 32 -staccato 32 -2.38 32 -undimmed 32 -mnz 32 -ulric 32 -thapa 32 -malang 32 -margolin 32 -rs6 32 -overabundance 32 -hardwicke 32 -lampert 32 -buttes 32 -kinard 32 -peltz 32 -goiania 32 -abhishek 32 -busload 32 -belsen 32 -buchman 32 -catie 32 -tanfield 32 -masin 32 -guzzled 32 -arachnophobia 32 -munsters 32 -hahnemann 32 -dragan 32 -menounos 32 -confit 32 -hoblyn 32 -gwar 32 -2560 32 -cays 32 -honky 32 -weehawken 32 -similar-looking 32 -posies 32 -spr 32 -labour-supporting 32 -irn 32 -harsha 32 -dehumanized 32 -spielman 32 -uncommitted 32 -dagbladet 32 -neets 32 -nissa 32 -airlock 32 -youri 32 -chaoyang 32 -waber 32 -sereno 32 -ill-suited 32 -loop-the-loop 32 -2.16 32 -rsvp 32 -ky. 32 -wadebridge 32 -washingtonians 32 -4.05 32 -luskin 32 -granja 32 -priefer 32 -shante 32 -ex-lovers 32 -comodi 32 -hairdos 32 -e6 32 -aduba 32 -hezekiah 32 -dater 32 -sherone 32 -predispose 32 -″ 32 -hailstone 32 -amazons 32 -barish 32 -hollaback 32 -3-litre 32 -disc-shaped 32 -gongbay 32 -legwork 32 -farallon 32 -shuddered 32 -cruelest 32 -mizuno 32 -post-assad 32 -wic 32 -oldenburg 32 -jaitley 32 -drewitt-barlow 32 -generics 32 -vtech 32 -5.1-inch 32 -carolynne 32 -marika 32 -everingham 32 -13-point 32 -1.90 32 -jaimee 32 -juraboev 32 -battlefront 32 -wakefulness 32 -yimou 32 -cbre 32 -zwicker 32 -stigmatization 32 -war-style 32 -bigham 32 -burstin 32 -cringe-inducing 32 -urbanised 32 -career-defining 32 -buzzell 32 -muni 32 -egregiously 32 -white-owned 32 -25,500 32 -80-minute 32 -mcaleny 32 -estepona 32 -mouldings 32 -washingtonian 32 -aarti 32 -yugoslavian 32 -a.p. 
32 -8-9 32 -wilfong 32 -hammadi 32 -deselected 32 -wildin 32 -towler 32 -posses 32 -tena 32 -rakuten 32 -ilha 32 -jaramillo 32 -ballrooms 32 -africa-born 32 -scalpers 32 -lafite 32 -shored 32 -entreaties 32 -1588 32 -recouping 32 -hilson 32 -reserve-team 32 -lunched 32 -wolfowitz 32 -wo2 32 -boparan 32 -bethea 32 -thick-skinned 32 -minala 32 -ornithologist 32 -55,573 32 -motlanthe 32 -foshan 32 -retrofit 32 -260million 32 -horrify 32 -khameneh 32 -comerford 32 -40,000-a-year 32 -mutants 32 -chaff 32 -cerit 32 -biton 32 -vickii 32 -fayre 32 -squirts 32 -sowerby 32 -durr 32 -anacondas 32 -lazo 32 -i-35 32 -riina 32 -wretch 32 -mother-son 32 -lebanese-born 32 -peral 32 -swickle 32 -diamanti 32 -anouk 32 -flag-raising 32 -kikuchi 32 -satellite-based 32 -mixtape 32 -shaniya 32 -neuwirth 32 -streatfeild 32 -rimac 32 -korphe 32 -geena 32 -justyna 32 -531 32 -intelligence-sharing 32 -courbet 32 -700-year-old 32 -romneycare 32 -corroborates 32 -pere 32 -taiwo 32 -patridge 32 -kuzmin 32 -typewritten 32 -jeronimo 32 -diadem 32 -swissport 32 -esfandiari 32 -ladysmith 32 -zeiss 32 -warbler 32 -reger 32 -15-strong 32 -lagardere 32 -popularizing 32 -leonov 32 -kleeman 32 -inclusivity 32 -miglia 32 -carmax 32 -magnier 32 -pretences 32 -annus 32 -overstuffed 32 -moshtarak 32 -huggies 32 -50-meter 32 -alpeyrie 32 -mambazo 32 -b-eat 32 -thanasi 32 -etosha 32 -titian 32 -leiweke 32 -sundar 32 -1452 32 -used-car 32 -bafflement 32 -loanees 32 -jerseyans 32 -hertling 32 -cinatl 32 -s1 32 -beemer 32 -vawter 32 -maybury 32 -lhuillier 32 -tadcaster 32 -anti-tobacco 32 -malamute 32 -evo-stik 32 -decommission 32 -hodskins 32 -better-than-expected 32 -jori 32 -@rupertmurdoch 32 -bianconeri 32 -zidan 32 -hamas-run 32 -esmeralda 32 -westwards 32 -naida 32 -polaroids 32 -nahmad 32 -woolhouse 32 -macrophages 32 -sadow 32 -disenfranchisement 32 -wcs 32 -milanese 32 -holebas 32 -housh 32 -thatcherism 32 -orloff 32 -iligan 32 -plantlife 32 -241,000 32 -plopped 32 -ssp 32 -pingpong 32 -3.00 32 -aref 32 -11p 32 -farazmand 32 -amusements 32 -mongrels 32 -hanham 32 -fonterra 32 -masham 32 -mizoram 32 -paunch 32 -meeker 32 -end-of-summer 32 -mom-of-two 32 -segura 32 -frederique 32 -19-hour 32 -sannino 32 -pegman 32 -sloshed 32 -fom 32 -djedje 32 -silla 32 -arpad 32 -soule 32 -negron 32 -tsouli 32 -odd-looking 32 -nuremburg 32 -pharma-quick 32 -badham 32 -bulli 32 -ukrainian-born 32 -sainsburys 32 -invulnerable 32 -1744 32 -italo 32 -horticulturalist 32 -dorr 32 -morial 32 -bouckaert 32 -piran 32 -bagshaw 32 -25-mile 32 -shehadeh 32 -clownfish 32 -whacky 32 -dor 32 -alterman 32 -bedspread 32 -villans 32 -perriello 32 -bergh 32 -popo 32 -d-louisiana 32 -impish 32 -30-years-old 32 -growcoot 32 -prescription-only 32 -wect 32 -experimentally 32 -kuwaitis 32 -fox59 32 -sheindlin 32 -blas 32 -hollands 32 -refilling 32 -dorling 32 -optimising 32 -somatoform 32 -dtm 32 -utz 32 -vanna 32 -4-3-2-1 32 -hypertrichosis 32 -68million 32 -sanjeev 32 -goldieblox 32 -lilla 32 -chainz 32 -boggling 32 -cleggs 32 -cova 32 -hauer 32 -p.i. 
32 -300-400 32 -buttercups 31 -waterboard 31 -multi-sensory 31 -pre-dinner 31 -then-deputy 31 -smartwater 31 -brahim 31 -populating 31 -thesaurus 31 -kessy 31 -omelet 31 -khalida 31 -keynsham 31 -glendon 31 -amata 31 -inscrutable 31 -wentworth-stanley 31 -post-presidential 31 -army-navy 31 -bellinger 31 -aseem 31 -subpar 31 -tonalist 31 -coster-waldau 31 -giverny 31 -counceller 31 -hasi 31 -petticoat 31 -priddis 31 -tskhinvali 31 -abete 31 -orval 31 -afghanis 31 -5:40 31 -snoods 31 -lefroy 31 -pequeno 31 -eight-person 31 -dearington 31 -dakhil 31 -pawning 31 -proffered 31 -todenhoefer 31 -off-target 31 -city-dwellers 31 -seasick 31 -al-bayda 31 -elham 31 -sterne 31 -marula 31 -lokpal 31 -'20s 31 -hogwash 31 -intouch 31 -gujarati 31 -01:03 31 -01:09 31 -kuan 31 -heeney 31 -owasso 31 -sea-ice 31 -dukinfield 31 -alberici 31 -akio 31 -nesbit 31 -stuf 31 -roof-top 31 -dambuster 31 -farren 31 -77f 31 -fall-back 31 -rehashing 31 -wyly 31 -delegating 31 -heatherton 31 -hairstylists 31 -disuse 31 -pym 31 -cispa 31 -orszag 31 -waistbands 31 -gamekeepers 31 -tizzard 31 -delauro 31 -11ins 31 -sixth-place 31 -4:25 31 -contemporaneous 31 -bowditch 31 -delong 31 -intercession 31 -hoshide 31 -689 31 -delaughter 31 -menuhin 31 -rayman 31 -tatar 31 -1.54 31 -10,300 31 -18.99 31 -flav 31 -quokkas 31 -vogue.com 31 -thuy 31 -glenarthur 31 -vara 31 -01:27 31 -wulf 31 -brutes 31 -855 31 -852 31 -super-charged 31 -fool-proof 31 -boisei 31 -ponsford 31 -letter-writing 31 -birtles 31 -breeches 31 -sheiks 31 -discouragement 31 -surfside 31 -hicheur 31 -ramones 31 -seung 31 -desuze 31 -whant 31 -wood-fired 31 -rowboat 31 -brunger 31 -megatron 31 -over-confident 31 -makinson 31 -cranmer 31 -dushevina 31 -sinan 31 -roki 31 -4-mi 31 -landrum 31 -withnail 31 -steatoda 31 -9-9-9 31 -gucht 31 -63m 31 -harpy 31 -ilse 31 -gaborone 31 -heinkel 31 -mosely 31 -starla 31 -katelynn 31 -micrograph 31 -reddit.com 31 -al-nafjan 31 -meggings 31 -stacker 31 -ninth-grader 31 -nouha 31 -paperweight 31 -de-extinction 31 -kraska 31 -28.9 31 -high-spirited 31 -either/or 31 -rafati 31 -belzer 31 -miyake 31 -sailsman 31 -dagan 31 -panettone 31 -mauer 31 -nearside 31 -dupnik 31 -succumbs 31 -test-fire 31 -leonards-on-sea 31 -1,000-plus 31 -maadi 31 -destabilised 31 -northerner 31 -pakistani-born 31 -refreshes 31 -rispler 31 -anti-choice 31 -broxbourne 31 -interloper 31 -lasha 31 -oliwier 31 -hoagland 31 -non-aligned 31 -fatalistic 31 -idolize 31 -disperses 31 -adonia 31 -visakhapatnam 31 -koresh 31 -ever-popular 31 -lifelines 31 -benincasa 31 -al-kuwaiti 31 -liesheng 31 -exaro 31 -1987a 31 -plimsolls 31 -hahahaha 31 -filmer 31 -alienates 31 -callis 31 -jagerbombs 31 -underachieving 31 -dubbo 31 -ef-4 31 -mccullagh 31 -decelerate 31 -mid-sentence 31 -130th 31 -plake 31 -gemalto 31 -dyncorp 31 -debunks 31 -lapshyn 31 -spools 31 -majoras 31 -cooed 31 -bottega 31 -non-football 31 -imore 31 -edwarda 31 -hardest-working 31 -moonen 31 -merci 31 -cleburne 31 -zanzi 31 -al-daher 31 -motorcades 31 -poplawski 31 -penne 31 -thynn 31 -zadroga 31 -10.00 31 -three-room 31 -arromanches 31 -hagens 31 -maden 31 -hermits 31 -threateningly 31 -rohrer 31 -faw 31 -fae 31 -300-page 31 -pooped 31 -tehreek-e-taliban 31 -indicts 31 -allemand 31 -bit.ly 31 -frenemies 31 -memri 31 -heyerdahl 31 -fact-based 31 -banwell 31 -arnason 31 -cronan 31 -lenon 31 -bruner 31 -redditors 31 -robbi 31 -taptic 31 -liveatc.net 31 -tiona 31 -8kg 31 -lastpass 31 -peretti 31 -findmypast.co.uk 31 -inheritances 31 -mostar 31 -vaio 31 -hijras 31 -peepers 31 -5,853 31 -andree 31 -eco-systems 
31 -ackman 31 -tansy 31 -willott 31 -tick-borne 31 -targaryen 31 -669 31 -vegas-style 31 -essences 31 -astin 31 -guthmiller 31 -petkov 31 -warby 31 -qaeda-backed 31 -ghattas 31 -soupy 31 -rmc 31 -primus 31 -landscapers 31 -bandy 31 -normington 31 -gru 31 -indescribably 31 -wjw 31 -toulson 31 -slatten 31 -tatu 31 -z06 31 -yuka 31 -coloration 31 -turkington 31 -paletta 31 -statesmanlike 31 -gaucho 31 -daykin 31 -kats 31 -midriffs 31 -diags 31 -haarde 31 -consultancies 31 -mahopac 31 -balian 31 -git 31 -cillian 31 -saffioti 31 -landslips 31 -toilette 31 -50.1 31 -dhabi-based 31 -wrappings 31 -lifeway 31 -tanja 31 -non-defense 31 -second-minute 31 -bagosora 31 -lie-flat 31 -darriean 31 -brann 31 -sapozhnikov 31 -obliteration 31 -castigating 31 -megabytes 31 -rumpus 31 -creak 31 -off-spin 31 -ofa 31 -taffy 31 -00:50 31 -off-loaded 31 -boc 31 -re-tweeting 31 -482 31 -chisels 31 -sepulchre 31 -flit 31 -hemoglobin 31 -ashburton 31 -ashen-faced 31 -outstandingly 31 -nitride 31 -exponents 31 -ketner 31 -servetas 31 -molo 31 -pervaiz 31 -fainthearted 31 -driskill 31 -firkins 31 -nijjer 31 -tinny 31 -cron 31 -27.8 31 -chrisman 31 -feuded 31 -thayer 31 -carolinians 31 -1766 31 -rooneys 31 -parvovirus 31 -opentable 31 -undecideds 31 -oompa 31 -0.11 31 -picower 31 -ftse100 31 -prurient 31 -115-year-old 31 -kalay 31 -20-15 31 -birth-control 31 -pupa 31 -pedaled 31 -tejma 31 -alabaster 31 -218,000 31 -spearmint 31 -pdp 31 -1.48 31 -curbelo 31 -majority-muslim 31 -lasered 31 -philduncanf1 31 -621 31 -626 31 -1,000-acre 31 -sixpence 31 -1,180 31 -12.00 31 -22lbs 31 -marwa 31 -brutsche 31 -demirel 31 -suey 31 -wholefood 31 -fedoras 31 -zappa 31 -iron-fisted 31 -masdar 31 -barbershops 31 -shackerley 31 -joffe 31 -ji-hyun 31 -collegial 31 -inundate 31 -45-0 31 -bloodier 31 -phoenicians 31 -illegitimately 31 -safaei 31 -800-meter 31 -modlesky 31 -kehazaei 31 -counterclaims 31 -0.37 31 -essie 31 -one-twos 31 -terracini 31 -tupou 31 -a-z 31 -qatari-owned 31 -puro 31 -hypotheticals 31 -said.the 31 -chessie 31 -fieger 31 -wis. 
31 -wisn 31 -strafing 31 -anp 31 -tonys 31 -refinements 31 -third-ranking 31 -costanzo 31 -bajaur 31 -63.5 31 -hotwire 31 -bano 31 -druitt 31 -suspender 31 -laboriously 31 -egmont 31 -7:25 31 -hypochondriac 31 -wonton 31 -rayyan 31 -canongate 31 -gettin 31 -mayka 31 -cottoned 31 -falzon 31 -willan 31 -rookery 31 -tripods 31 -temptress 31 -pirker 31 -emporio 31 -re-learning 31 -bowd 31 -non-competitive 31 -seven-story 31 -zeno 31 -50-something 31 -marazion 31 -sexual-assault 31 -liberators 31 -80/20 31 -gracia 31 -ex-pupil 31 -cally-jo 31 -parakeet 31 -teigan 31 -diveroli 31 -traum 31 -kamau 31 -care-free 31 -16-ounce 31 -chae 31 -pidgin 31 -buffering 31 -977 31 -kidsgrove 31 -xcom 31 -under-19s 31 -overpaying 31 -thongchai 31 -lendal 31 -kazran 31 -shad 31 -avni 31 -saharkhiz 31 -okhotsk 31 -abagnale 31 -103-year-old 31 -copilot 31 -saanvi 31 -@waynerooney 31 -daylesford 31 -kk 31 -itsy 31 -leylandii 31 -rousteing 31 -treo 31 -retooled 31 -audiologist 31 -torrentfreak 31 -plod 31 -cuticle 31 -mitrice 31 -callisto 31 -magdaleno 31 -kumamoto 31 -breault 31 -#sotu 31 -roxburgh 31 -dwelled 31 -fazel 31 -lorenzi 31 -sevier 31 -costings 31 -gat 31 -araya 31 -cullinane 31 -yani 31 -fugelsang 31 -cuban-born 31 -6.95 31 -york-bound 31 -hallowe'en 31 -md-80 31 -osamu 31 -tunica 31 -bayard 31 -anding 31 -gumm 31 -mireille 31 -mcparland 31 -droll 31 -grexit 31 -swedish-born 31 -doe-eyed 31 -quilting 31 -mwangura 31 -suraya 31 -kemper 31 -walney 31 -hallucinated 31 -data-mining 31 -mekhissi-benabbad 31 -jerod 31 -crossbenchers 31 -magnolias 31 -cume 31 -1,129 31 -awang 31 -antrobus 31 -arqiva 31 -al-yami 31 -knbc 31 -sweet-toothed 31 -scurr 31 -nicolo 31 -al-deen 31 -lillard 31 -laidler 31 -dikembe 31 -15-acre 31 -muskets 31 -bathgate 31 -masum 31 -mesereau 31 -g&t 31 -shipton 31 -slacking 31 -15-time 31 -cantina 31 -doings 31 -thibodeaux 31 -op-eds 31 -weisman 31 -globules 31 -hellcat 31 -malawians 31 -freemium 31 -saskatoon 31 -corsair 31 -speakman 31 -fomented 31 -espace 31 -00:57 31 -amorth 31 -campinas 31 -must-do 31 -cavallo 31 -nobly 31 -millipedes 31 -williamsport 31 -704 31 -701 31 -kingsdown 31 -chukotka 31 -castes 31 -tollner 31 -unchartered 31 -gelber 31 -apl 31 -consiglio 31 -modiano 31 -siciliano 31 -harping 31 -al-mauretani 31 -distill 31 -clemo 31 -medea 31 -jaynes 31 -bruma 31 -16.50 31 -mercurio 31 -poppet 31 -malmanche 31 -bagpipers 31 -bunning 31 -550million 31 -rifkin 31 -tremseh 31 -colindale 31 -lody 31 -hellawell 31 -faraji 31 -crack-cocaine 31 -rezai 31 -holtom 31 -lembongan 31 -abend 31 -ekland 31 -enhancers 31 -ogston 31 -rapide 31 -topps 31 -sexpert 31 -loaders 31 -web-enabled 31 -jeyapaul 31 -inveterate 31 -makepeace 31 -trouliotis 31 -dishonour 31 -okenka 31 -32-foot 31 -feelunique.com 31 -internalized 31 -spoelstra 31 -contusion 31 -exomars 31 -yersinia 31 -krypton 31 -government-mandated 31 -anti-illegal 31 -michy 31 -byer 31 -quebecois 31 -panhard 31 -two-wheel 31 -ex-employees 31 -schuringa 31 -retrofitting 31 -arlanda 31 -disillusion 31 -1,870 31 -cinemagoers 31 -long-acting 31 -vincenti 31 -hansson 31 -pavillion 31 -stumpy 31 -39.7 31 -aisleyne 31 -matrixyl 31 -co-pay 31 -sa-7 31 -microelectronics 31 -distended 31 -synthesised 31 -crispi 31 -abdul-haq 31 -madagascan 31 -asinine 31 -ardrossan 31 -fasts 31 -yoel 31 -salk 31 -iconia 31 -trapattoni 31 -healthbook 31 -ct. 
31 -englaro 31 -sameh 31 -schoolkids 31 -cheznye 31 -raub 31 -junkets 31 -hoots 31 -zaidi 31 -r-illinois 31 -spiel 31 -xoxo 31 -teatro 31 -tancock 31 -kokontis 31 -jaydon 31 -hussainy 31 -microfinance 31 -nuria 31 -sews 31 -geosynchronous 31 -low-rise 31 -2011-2013 31 -silvano 31 -hypothalamus 31 -upper-middle 31 -bowker 31 -74million 31 -camblos 31 -mcclintic 31 -goldblatt 31 -over-40s 31 -antakya 31 -glamorised 31 -margaery 31 -shootdown 31 -744 31 -blackspots 31 -modigliani 31 -lydney 31 -gorgon 31 -jac 31 -anselmo 31 -ticos 31 -worst-dressed 31 -extraneous 31 -highly-publicized 31 -almahri 31 -disquieting 31 -elks 31 -ex-boxer 31 -ver 31 -benwell 31 -grabowski 31 -middle-east 31 -desousa 31 -badzak 31 -wacker 31 -robed 31 -overshare 31 -blakeney 31 -34.9 31 -charlemagne 31 -chickpea 31 -brussels-based 31 -self-consciousness 31 -planing 31 -bewitching 31 -shiroki 31 -sundress 31 -pendlebury 31 -vnukovo 31 -vivisection 31 -trick-or-treat 31 -good-humoured 31 -red-meat 31 -sloat 31 -christiano 31 -liquidmetal 31 -titillation 31 -naden 31 -drooped 31 -125g 31 -pretensions 31 -british-owned 31 -shebab 31 -stoni 31 -manigat 31 -hmpo 31 -corporals 31 -sabahi 31 -cross-platform 31 -talcum 31 -cooperman 31 -verbessem 31 -ivry 31 -culberson 31 -jills 31 -consonants 31 -pajero 31 -french-made 31 -7.65 31 -hammer-wielding 31 -danang 31 -cream-coloured 31 -pro-putin 31 -royces 31 -retest 31 -fixed-odds 31 -hagrid 31 -arles 31 -boivin 31 -hau 31 -perfumer 31 -gyrus 31 -02:30 31 -02:32 31 -zarifi 31 -vapourised 31 -crocodile-infested 31 -lahiri 31 -triessl 31 -macfarlane-barrow 31 -13-foot 31 -pruett 31 -connaughton 31 -d-wave 31 -sino-japanese 31 -prieta 31 -8-4 31 -lell 31 -.308 31 -philipps 31 -gfa 31 -land-locked 31 -aleksey 31 -plasterwork 31 -crackpot 31 -antunes 31 -1,240 31 -isu 31 -dusautoir 31 -pro-anorexia 31 -fernandez-castano 31 -chelsie 31 -piet 31 -ministership 31 -andreja 31 -farfetched 31 -back-to-basics 31 -petrella 31 -829 31 -cincinatti 31 -lisbeth 31 -mandibles 31 -patt 31 -week-and-a-half 31 -windblown 31 -buller 31 -emmonds 31 -p.config.width 31 -quinten 31 -perseus 31 -injury-ravaged 31 -wb 31 -chios 31 -exponent 31 -deutschland 31 -drug-addled 31 -titusville 31 -zakopalova 31 -luzhniki 31 -corriann 31 -stebic 31 -lochner 31 -cupertino-based 31 -berchtesgaden 31 -gazan 31 -kahler 31 -conversed 31 -ducky 31 -15-mile 31 -shipstone 31 -costuming 31 -c-max 31 -disley 31 -suture 31 -s-class 31 -spouts 31 -shellard 31 -salvesen 31 -lusting 31 -o'fallon 31 -crosley 31 -morillo 31 -matchy 31 -pil 31 -hipkiss 31 -tria 31 -kabylie 31 -nekounam 31 -alister 31 -al-ain 31 -balsam 31 -tvn 31 -ingber 31 -croutons 31 -liaquat 31 -roesler 31 -pyfrom 31 -turn-around 31 -tolley 31 -srivaddhanaprabha 31 -klug 31 -hailwood 31 -intentioned 31 -bataar 31 -strong-minded 31 -nicoll 31 -14,700 31 -stopovers 31 -mckie 31 -heikki 31 -exotics 31 -abdurabu 31 -boohoo 31 -minimises 31 -puking 31 -taxidermists 31 -uhd 31 -rubbish-strewn 31 -petrovich 31 -non-nuclear 31 -cse 31 -tanis 31 -canales-gomez 31 -thorens 31 -redick 31 -contentedly 31 -unwto 31 -hincapie 31 -ruhter 31 -swoboda 31 -tannery 31 -prefaced 31 -hollywood.com 31 -abcnews 31 -10000 31 -murtagh 31 -missier 31 -twitchell 31 -underwire 31 -laziest 31 -mark-viverito 31 -hoodwink 31 -saputra 31 -ercolino 31 -christel 31 -golf.com 31 -petplan 31 -yoovidhya 31 -lipgloss 31 -mysore 31 -laneway 31 -monifa 31 -heracleion 31 -ramy 31 -ainu 31 -72.5 31 -siddall 31 -pickston 31 -antibes 31 -cackle 31 -evana 31 -kabaddi 31 -reddin 31 -dyspraxia 31 -clerc 
31 -tigre 31 -5,250 31 -headings 31 -moviegoing 31 -cayo 31 -turnage 31 -bricklaying 31 -scaffolds 31 -bunkerville 31 -irby 31 -eltahawy 31 -thornaby 31 -search-engine 31 -retro-style 31 -wenman 31 -khans 31 -brek 31 -lindon 31 -jewish-owned 31 -motion-sensing 31 -gilliver 31 -cambell 31 -card-carrying 31 -fasted 31 -kya 31 -rancour 31 -camperdown 31 -pahomova 31 -relational 31 -docu-series 31 -médecins 31 -investigational 31 -pettersson 31 -wojtyla 31 -tough-tackling 31 -parslow 31 -rockdale 31 -visualisations 31 -interbrand 31 -secc 31 -deliverable 31 -elegans 31 -nuwan 31 -lapointe 31 -thorogood 31 -zero-emission 31 -sibary 31 -spritely 31 -martzen 31 -eyeful 31 -heiko 31 -mawgan 31 -villers-farrow 31 -nyiragongo 31 -minifigures 31 -wepner 31 -contessa 31 -jentsch 31 -millimeter/submillimeter 31 -35c 31 -milinkovic 31 -goodlad 31 -37.3 31 -maxing 31 -mbas 31 -ticehurst 31 -sociopaths 31 -choos 31 -c130 31 -sheepshead 31 -re-using 31 -hofer 31 -weatherley 31 -ossetian 31 -discerned 31 -foresters 31 -inventiveness 31 -tribalism 31 -liquidators 31 -sprayer 31 -lockner 31 -bolten 31 -zarein 31 -vova 31 -aposhian 31 -tibbetts 31 -keltner 31 -alyona 31 -ugur 31 -rock-hard 31 -fairpo 31 -341st 31 -chauvinist 31 -ulm 31 -ula 31 -munk 31 -muna 31 -firechat 31 -stewarding 31 -18-point 31 -aoun 31 -gentrifying 31 -mid-sixties 31 -prophylaxis 31 -vervet 31 -6:25 31 -murayama 31 -rb 31 -westword 31 -sea-surface 31 -compassionately 31 -51m 31 -sickie 31 -popova 31 -tiktaalik 31 -matchless 31 -hundred-foot 31 -woodring 31 -anti-north 31 -wranglings 31 -hippocrates 31 -wrestlemania 31 -three-tiered 31 -wetzel 31 -kesh 31 -amarjit 31 -584 31 -fritter 31 -talanova 31 -jetstream 31 -1580 31 -heckle 31 -oymyakon 31 -tottering 31 -scheck 31 -bawled 31 -woz 31 -bequests 31 -liverpoolfc.com 31 -dep 31 -zeit 31 -saez 31 -lochhead 31 -tsukiji 31 -45.8 31 -15g 31 -ullswater 31 -t4 31 -contrails 31 -seismological 31 -almeda 31 -top-two 31 -belviq 31 -crothers 31 -nicotero 31 -misstatement 31 -a330-200 31 -videoconferencing 31 -sproles 31 -68.5 31 -anise 31 -muggles 31 -costilla 31 -invitees 31 -filey 31 -originator 31 -smits 31 -seasickness 31 -teleport 31 -canâ 31 -never-before-heard 31 -960,000 31 -brumback 31 -catadores 31 -killam 31 -66.7 31 -laybourn 31 -okc 31 -acolyte 31 -wastefulness 31 -nastia 31 -palillo 31 -suet 31 -nitty-gritty 31 -cfl 31 -florio 31 -popularise 31 -spirulina 31 -super-human 31 -2k12 31 -mager 31 -nowra 31 -guv 31 -leya 31 -bochy 31 -robohand 31 -langur 31 -well-paying 31 -kranz 31 -re-interviewed 31 -p.config.height 31 -1809 31 -coal-mining 31 -cased 31 -2,350 31 -transactional 31 -onoda 31 -tapsell 31 -ojai 31 -teagle 31 -ashtiaq 31 -swidorsky 31 -elaborates 31 -platell 31 -teriyaki 31 -naposki 31 -must-pass 31 -goonies 31 -rockport 31 -luoyang 31 -disdainful 31 -marlie 31 -somethin' 31 -salvos 31 -shakey 31 -goodeve-docker 31 -165million 31 -stanziano 31 -screeches 31 -loasby 31 -porthole 31 -borgye 31 -nemphos 31 -griffith-jones 31 -wangari 31 -dreyer 31 -901 31 -diosdado 31 -fiendishly 31 -oonagh 31 -seaward 31 -unmasking 31 -biffa 31 -fromage 31 -polyana 31 -balde 31 -adelina 31 -genoese 31 -poppe 31 -wisden 31 -ratzenberger 31 -zanab 31 -levina 31 -kuridrani 31 -dunkels 31 -hannelore 31 -league-leading 31 -wayt 31 -branko 31 -marie-chantal 31 -roseanna 31 -abduwali 31 -gfk 31 -look-in 31 -sixty-seven 31 -heretical 31 -cakewalk 31 -29.6 31 -utilises 31 -superintelligence 31 -thymus 31 -hued 31 -siegenthaler 31 -p.config 31 -earthenware 31 -ex-mp 31 -pom-poms 31 -segue 31 -49.5 31 
-731 31 -tillett 31 -franklyn 31 -phonetically 31 -nebuchadnezzar 31 -frankston 31 -baraka 31 -reminisces 31 -oswaldtwistle 31 -dervite 31 -calero 31 -wg 31 -encke 31 -servicewoman 31 -trabant 31 -transposed 31 -mazar 31 -ifthekar 31 -timidity 31 -leitte 31 -digoxin 31 -elum 31 -bresciano 31 -kupchak 31 -jawaharlal 31 -doria 31 -rubble-strewn 31 -zabihullah 31 -commoners 31 -bursar 31 -defers 31 -udine 31 -hypothesised 31 -barykina 31 -reforestation 31 -manssor 31 -poundworld 31 -maksym 31 -bolton-born 31 -schwyzer 31 -nanyuki 31 -tibor 31 -espada 31 -hair-hanging 31 -kaag 31 -taboada 31 -disease-causing 31 -lacombe 31 -tannins 31 -manitou 31 -freniere 31 -vallee 31 -98ft 31 -lipnitskaia 31 -half-billion 30 -upsides 30 -videoid 30 -rouwhorst 30 -peppy 30 -antagonized 30 -fauzi 30 -593 30 -597 30 -alloush 30 -sleuthing 30 -hélène 30 -180-day 30 -anti-latino 30 -granbury 30 -rood 30 -kitzmiller 30 -pro-nazi 30 -bow-tie 30 -cellulitis 30 -reconvicted 30 -even-handed 30 -unrefined 30 -re-joining 30 -bolt-hole 30 -assimilating 30 -c64 30 -ingatestone 30 -lounged 30 -hamdy 30 -storks 30 -dzong 30 -1,230 30 -close-by 30 -talke 30 -rx 30 -everage 30 -potholed 30 -courtly 30 -rappard 30 -43f 30 -pickwick 30 -haglund 30 -prozone 30 -avinash 30 -chandlers 30 -yide 30 -mendonca 30 -saayman 30 -nine-to-five 30 -multi-media 30 -al-din 30 -cayden 30 -harvie 30 -arbitrators 30 -anderegg 30 -full-court 30 -burbage 30 -praet 30 -staved 30 -dudi 30 -grouchy 30 -sault 30 -janne 30 -2017/18 30 -haugh 30 -abbotsford 30 -pygmies 30 -batam 30 -duron 30 -stonings 30 -55ft 30 -klu 30 -chillin 30 -evry 30 -wabash 30 -beeley 30 -long-tailed 30 -clanger 30 -2:10 30 -davidsen 30 -sweety 30 -cellucci 30 -qandil 30 -sevigny 30 -rankine 30 -fethiye 30 -nazer 30 -barnstable 30 -osmary 30 -dullest 30 -blare 30 -wishbone 30 -cingulate 30 -countertop 30 -trawls 30 -2016-19 30 -frito 30 -rucks 30 -jah 30 -allott 30 -wagering 30 -detaches 30 -british-led 30 -kopf 30 -unsmoked 30 -saxony-anhalt 30 -masry 30 -szarek 30 -65.7 30 -intra-party 30 -1,215 30 -heptonstall 30 -rousan 30 -gajdosova 30 -hyper-partisanship 30 -neuropsychiatric 30 -scoundrels 30 -adryan 30 -gruffalo 30 -coulston 30 -biryukova 30 -salgueiro 30 -longmire 30 -iron-clad 30 -unsexy 30 -fassett 30 -ecuadorians 30 -highest-scoring 30 -privatizing 30 -p.loadvideoexpressv3 30 -once-secret 30 -unmoving 30 -kadare 30 -20in 30 -70-year 30 -zamzam 30 -legault 30 -uninvolved 30 -21:12 30 -21:16 30 -globalised 30 -cancer-related 30 -moats 30 -75f 30 -underboss 30 -feg 30 -pre-charge 30 -nitrates 30 -fishnets 30 -7.95 30 -refile 30 -brannstrom 30 -amboseli 30 -bantam 30 -lucidity 30 -higher-resolution 30 -christ-like 30 -kvesic 30 -walsgrave 30 -mbokani 30 -robinsons 30 -racquets 30 -karthik 30 -pelke 30 -two-second 30 -ergo 30 -similiar 30 -two-years 30 -brickman 30 -pampas 30 -thibodeau 30 -sais 30 -tyrer 30 -delisle 30 -bil 30 -22:03 30 -22:00 30 -unavailability 30 -preyen 30 -gopperth 30 -wilnelia 30 -suzann 30 -rahall 30 -832 30 -190million 30 -tonk 30 -mushrooming 30 -compean 30 -bushels 30 -groper 30 -ivens 30 -visualising 30 -limbic 30 -shandy 30 -york-born 30 -embarkation 30 -desensitised 30 -ishak 30 -six-nation 30 -14-page 30 -non-users 30 -shortlived 30 -knockers 30 -six-seater 30 -tonnage 30 -livened 30 -logbooks 30 -tanna 30 -pratibha 30 -villers 30 -chit-chat 30 -coan 30 -haemangioma 30 -phthalate 30 -nightwatchman 30 -tarraf 30 -polluter 30 -todner 30 -potshots 30 -budgies 30 -calamari 30 -leeuw 30 -dosed 30 -0.50 30 -9-to-5 30 -thulani 30 -eia 30 -00:19 30 
-minstrels 30 -chantix 30 -villano 30 -vehemence 30 -mcglinchey 30 -symphorien 30 -relocations 30 -three-line 30 -laferrari 30 -shedd 30 -agbeko 30 -llewelyn 30 -bootsma 30 -mountstevens 30 -diskin 30 -r.k. 30 -griffiss 30 -webby 30 -mohd 30 -slugfest 30 -wantage 30 -neophyte 30 -plews 30 -14-mile 30 -third-set 30 -single-shot 30 -orrey 30 -wealth-x 30 -purfleet 30 -davita 30 -chc 30 -nason 30 -jaffrey 30 -nonpolitical 30 -pix11 30 -point-scoring 30 -shanghaiist 30 -chautauqua 30 -tomi 30 -khairullozhon 30 -lorance 30 -disqualifications 30 -lalezary 30 -go-getter 30 -wardley 30 -czars 30 -frankness 30 -helensburgh 30 -zhiqiang 30 -misuari 30 -kornienko 30 -schoeman 30 -cordier 30 -fogs 30 -medusa 30 -print-out 30 -zillion 30 -parnassus 30 -anatoliy 30 -legazpi 30 -leggero 30 -668 30 -667 30 -shelburne 30 -sedaka 30 --10:00 30 -cascadia 30 -frontières 30 -unfpa 30 -sheff 30 -entrusting 30 -franà 30 -mcfalls 30 -guccione 30 -lahey 30 -ganguzza 30 -falsies 30 -carbonell 30 -monkeying 30 -meraz 30 -propranolol 30 -cocke 30 -katc 30 -luau 30 -benattia 30 -18p 30 -kleinman 30 -bartending 30 -mcwhorter 30 -calmest 30 -attesting 30 -ballets 30 -tyrannosaurs 30 -225mph 30 -sturges 30 -evergreens 30 -hassle-free 30 -baylis 30 -recollect 30 -secretion 30 -cheesman 30 -outvoted 30 -slouched 30 -aviano 30 -laramy 30 -fairlife 30 -partially-sighted 30 -laboratory-confirmed 30 -liev 30 -mou 30 -pottsville 30 -nihilistic 30 -misaligned 30 -ten-week-old 30 -buisson 30 -abc15 30 -spawns 30 -devastates 30 -ingeniously 30 -melding 30 -standoffish 30 -alphabetically 30 -3-series 30 -pawar 30 -dailey 30 -bpay 30 -understate 30 -pronoun 30 -sarkisian 30 -mioduski 30 -pella 30 -trichet 30 -cité 30 -hopscotch 30 -pre-fall 30 -bioterrorism 30 -choate 30 -howdy 30 -decryption 30 -aleema 30 -joakim 30 -hideo 30 -stratocaster 30 -anglo-saxons 30 -stomps 30 -attash 30 -wrung 30 -berne 30 -karo 30 -tiltman 30 -gaurav 30 -3,000-mile 30 -inbetween 30 -belfi 30 -bouzaglo 30 -flowerbed 30 -bareilles 30 -shelbyville 30 -dandelions 30 -neshek 30 -nureyev 30 -cranwell 30 -sportier 30 -competently 30 -kaktovik 30 -theofanis 30 -sharad 30 -slovenly 30 -talkshow 30 -coalfields 30 -fluctuation 30 -haight 30 -hamideh 30 -leese 30 -kahne 30 -kabiru 30 -lefcourt 30 -shardlow 30 -sammobile 30 -bahr 30 -despatches 30 -faith-healing 30 -megafauna 30 -amistad 30 -calcio 30 -isler 30 -bandavad 30 -sacristy 30 -stereotypically 30 -also-ran 30 -scougall 30 -tongariro 30 -milllion 30 -nataliya 30 -zakho 30 -12.0 30 -32-page 30 -patchett 30 -guillain-barré 30 -driller 30 -southernliving.com 30 -caddo 30 -safrit 30 -punctuate 30 -25mm 30 -mokpo 30 -robonaut 30 -nedbank 30 -sourdough 30 -mountaintops 30 -closed-off 30 -keightley 30 -jeaneen 30 -abarth 30 -friesen 30 -lackenby 30 -supplanting 30 -seven-term 30 -d'oliveira 30 -armen 30 -get-away 30 -pawlowichz 30 -l0 30 -apologetically 30 -re-written 30 -wingard 30 -maaloula 30 -feisal 30 -66/1 30 -nessun 30 -bayda 30 -joepa 30 -kelvingrove 30 -buggie 30 -sachsenhausen 30 -tighe 30 -omnivorous 30 -colligan 30 -stop-and-go 30 -raindrop 30 -lfb 30 -ommy 30 -fadlallah 30 -ne'er 30 -sahan 30 -buratti 30 -sotelo 30 -kmbc 30 -libeskind 30 -neighborly 30 -sagnier 30 -backwaters 30 -krichbaum 30 -untie 30 -anti-polio 30 -genera 30 -limbers 30 -yarborough 30 -craftspeople 30 -jejoen 30 -delbonis 30 -alleviation 30 -9-year 30 -riopelle 30 -drumstick 30 -rogelio 30 -rottingdean 30 -ill-afford 30 -glos 30 -wellhead 30 -chairez 30 -usborne 30 -centerville 30 -do-gooders 30 -one-cent 30 -circelli 30 -teardown 
30 -tolan 30 -bronner 30 -scania 30 -west-central 30 -hudhud 30 -certifies 30 -flavorful 30 -football-loving 30 -songz 30 -bugaighis 30 -esopenko 30 -mapperley 30 -marring 30 -computer-aided 30 -frenkel 30 -cockatoos 30 -147th 30 -loveday 30 -acquisitive 30 -54f 30 -t.d. 30 -rademaker 30 -9:10 30 -triple-0 30 -stedmon 30 -cleverness 30 -dobbie 30 -149,000 30 -samina 30 -nish 30 -ground-to-air 30 -conflict-free 30 -yardage 30 -wittams 30 -taormina 30 -boomeroo 30 -unpleasantness 30 -clickable 30 -concertmaster 30 -pet-friendly 30 -picoult 30 -beautification 30 -erez 30 -greenhithe 30 -tamarind 30 -hormel 30 -capos 30 -jetski 30 -foot-and-mouth 30 -dahlstrom 30 -headhunter 30 -reposting 30 -rosemont 30 -pivots 30 -chafed 30 -upper-income 30 -thousandths 30 -fitzgibbons 30 -hitchhikers 30 -pgmo 30 -employer-sponsored 30 -christman 30 -retaliates 30 -21,600 30 -embankments 30 -zygier 30 -acme 30 -vavrinyuk 30 -once-thriving 30 -valter 30 -phraya 30 -inconsistently 30 -beechwood 30 -redux 30 -73million 30 -frets 30 -land-use 30 -paulino 30 -569 30 -calorie-counting 30 -cremin 30 -goch 30 -paddlers 30 -detrick 30 -dostum 30 -egyptair 30 -pontarelli 30 -seffner 30 -al-zahrani 30 -kalispell 30 -anti-coup 30 -buddi 30 -overstone 30 -blares 30 -meech 30 -agincourt 30 -sennett 30 -peppard 30 -kiss-and-tell 30 -tilt-rotor 30 -post-sandy 30 -mafias 30 -42.4 30 -mauthausen 30 -4-8 30 -shehu 30 -giedroyc 30 -reproduces 30 -30st 30 -penfield 30 -lucich 30 -renita 30 -macrobiotic 30 -dabs 30 -free-to-play 30 -lung-busting 30 -rajvir 30 -jocular 30 -basha 30 -fiorano 30 -chutneys 30 -marijuana-growing 30 -35.7 30 -lobos 30 -ncr 30 -rusholme 30 -deconstruct 30 -trivialize 30 -proboscis 30 -932 30 -159th 30 -witless 30 -conejero 30 -crusher 30 -umenyiora 30 -eiger 30 -razer 30 -al-hamid 30 -500kg 30 -yeomanry 30 -near-impossible 30 -skynyrd 30 -eero 30 -1.22 30 -rauch 30 -aecom 30 -eaza 30 -500-mile 30 -vote-buying 30 -forstater 30 -megacity 30 -human-sized 30 -kubo 30 -2.43 30 -aquilla 30 -elgort 30 -20-6 30 -faustini 30 -top-20 30 -ristorante 30 -halawi 30 -facial-recognition 30 -kalapana 30 -tropika 30 -latorre 30 -bloomingdales 30 -romer 30 -banfi 30 -glimmering 30 -rhona 30 -a-space 30 -anabel 30 -4.3-inch 30 -aran 30 -richters 30 -maysan 30 -funicular 30 -seven-months 30 -buttressed 30 -hulsey 30 -aliquippa 30 -802 30 -geminid 30 -parsonses 30 -hoaxers 30 -grabois 30 -mums-to-be 30 -dione 30 -kersey 30 -21-year-olds 30 -under-13s 30 -acmd 30 -jinling 30 -guttenberg 30 -five-term 30 -much-improved 30 -graffitied 30 -mercosur 30 -rhinehart 30 -auberge 30 -alprazolam 30 -stirrups 30 -papageorge 30 -i-connecticut 30 -ayo 30 -reflector 30 -d.b. 
30 -soper 30 -lenora 30 -parke 30 -greenagel 30 -10,700 30 -bevins 30 -pov 30 -kretzmer 30 -clobbering 30 -borna 30 -hetter 30 -gaddesden 30 -mifflin 30 -palmira 30 -hageman 30 -ss13 30 -olusegun 30 -forelimbs 30 -01:16 30 -vaud 30 -rahway 30 -mahjong 30 -janee 30 -gatt 30 -shaukat 30 -ambler 30 -newstead 30 -reprimanding 30 -dreadnoughtus 30 -westinghouse 30 -church-goer 30 -crumpet 30 -kimes 30 -3300 30 -agadir 30 -rajput 30 -barrales 30 -brocken 30 -damaris 30 -petronella 30 -tafoya 30 -muhairi 30 -braque 30 -southbridge 30 -googles 30 -manilla 30 -sanitiser 30 -charalambous 30 -extreme-right 30 -shanice 30 -rivkin 30 -armento 30 -hradecka 30 -carin 30 -concha 30 -senile 30 -cineplex 30 -hadnott 30 -34.3 30 -facile 30 -bodleian 30 -pinker 30 -2041 30 -oatcakes 30 -tarnower 30 -longboat 30 -tykes 30 -froese 30 -headwaters 30 -crusted 30 -incriminated 30 -fanciest 30 -mayar 30 -gtb/4 30 -spigot 30 -94.9 30 -indivisible 30 -thurgarland 30 -delport 30 -crispr 30 -chuy 30 -5,000-square-foot 30 -no-ball 30 -howitzers 30 -nimbly 30 -deaner 30 -luminary 30 -anabelle 30 -castleton 30 -bookworm 30 -7s 30 -goldfields 30 -forsake 30 -visible-light 30 -cross-dresser 30 -huan 30 -mulsanne 30 -gacacas 30 -genet 30 -amedeo 30 -duller 30 -re-tweets 30 -costlier 30 -phytophthora 30 -maan 30 -seefeld 30 -saunderson-smith 30 -quivers 30 -markup 30 -cubed 30 -bayernlb 30 -vive 30 -gwangju 30 -galvanising 30 -mraps 30 -voynich 30 -selfe 30 -laroche 30 -schlessinger 30 -autocracy 30 -lepchenko 30 -pow/mia 30 -helming 30 -rammasun 30 -hesitating 30 -estado 30 -meeko 30 -debruin 30 -kadiza 30 -tinney 30 -luscombe 30 -wunderle 30 -glaciologist 30 -mccants 30 -televising 30 -calciopoli 30 -tactless 30 -653 30 -self-serve 30 -stender 30 -925,000 30 -cpd 30 -tpa 30 -jingles 30 -detracted 30 -winstead 30 -ilja 30 -gurgaon 30 -florentijn 30 -delahanty 30 -petits 30 -penfold 30 -weissman 30 -admissibility 30 -scrivner 30 -wspa 30 -preseli 30 -geraniums 30 -alljoyn 30 -germany-based 30 -schaft 30 -cinnabon 30 -clichéd 30 -electroshock 30 -salou 30 -five-door 30 -brae 30 -cohost 30 -kluge 30 -flat-chested 30 -kaleem 30 -news/wall 30 -undigested 30 -humanists 30 -gris 30 -u.s.-pakistani 30 -6a 30 -manfully 30 -canada-based 30 -hor 30 -decoutere 30 -wasar 30 -veendam 30 -muneeb 30 -divya 30 -9-inch 30 -badoo 30 -shellshocked 30 -pml-n 30 -deepsea 30 -woollard 30 -farida 30 -tovin 30 -bathrobes 30 -legalities 30 -upperclassmen 30 -assisted-living 30 -16-inch 30 -milion 30 -eurobonds 30 -ultralight 30 -arbaeen 30 -beutel 30 -676 30 -677 30 -ejaculate 30 -hit-man 30 -peekskill 30 -one-shoulder 30 -1765 30 -41,450 30 -zaria 30 -tactfully 30 -bombast 30 -maccallum 30 -loughnane 30 -waterspouts 30 -bierhoff 30 -anti-air 30 -breeana 30 -asiata 30 -aperribay 30 -postboxes 30 -kuzmina 30 -fly-out 30 -moos 30 -aflac 30 -stutz 30 -tce 30 -folger 30 -mucha 30 -smokestack 30 -zite 30 -birt 30 -agate 30 -trobe 30 -fantasyland 30 -vladamir 30 -chagaev 30 -second-fastest 30 -lamanno 30 -inflows 30 -bobsledding 30 -unacknowledged 30 -lamberty 30 -1-inch 30 -informality 30 -sachtleben 30 -clube 30 -dwomoh 30 -backpedal 30 -plagne 30 -short-pitched 30 -zakuani 30 -jarnet 30 -reclaims 30 -chitwan 30 -rescinding 30 -alromisse 30 -pied-a-terre 30 -lehane 30 -rsl 30 -rsd 30 -overwater 30 -peten 30 -chonburi 30 -24k 30 -milad 30 -rohrs 30 -knox-johnston 30 -t54 30 -japan-based 30 -shortwave 30 -llywelyn 30 -eugen 30 -tautou 30 -vipers 30 -readjusted 30 -unhealthiest 30 -dclg 30 -darkens 30 -well-trodden 30 -26st 30 -bacho 30 -emmet 30 
-litigating 30 -al-lahim 30 -benyettou 30 -niedringhaus 30 -tavener 30 -countersuit 30 -lonkhuyzen 30 -1230 30 -al-nadhari 30 -keehan 30 -first-come 30 -joynes 30 -tulalip 30 -belem 30 -nose-dived 30 -trikes 30 -90per 30 -26-years-old 30 -hedgepeth 30 -sleng 30 -almodovar 30 -well-written 30 -calcification 30 -kadri 30 -zambians 30 -sereny 30 -kokenes 30 -break-neck 30 -cigna 30 -tanko 30 -touch-up 30 -nrg 30 -4.00 30 -a27 30 -scarab 30 -starkest 30 -izvestia 30 -dahlias 30 -jenna-louise 30 -austin-based 30 -asr 30 -earthbound 30 -housman 30 -berlinger 30 -classier 30 -anti-nausea 30 -ex-u.s. 30 -ozyakup 30 -gumption 30 -itcz 30 -pez 30 -stanikzai 30 -farmworkers 30 -scheduler 30 -diedrick 30 -shoeburyness 30 -her2 30 -functionaries 30 -never-seen-before 30 -refugio 30 -meitiv 30 -davor 30 -starburst 30 -inexact 30 -duncanville 30 -strummed 30 -boyah 30 -holyroodhouse 30 -regurgitating 30 -then-cia 30 -papered 30 -shopfront 30 -'47 30 -mundell 30 -broyhill 30 -tiernan-locke 30 -haixun 30 -jacaranda 30 -rougerie 30 -katyusha 30 -hindson 30 -moline 30 -branford 30 -amplifiers 30 -leather-clad 30 -sloe 30 -waddilove 30 -tarsier 30 -inconveniencing 30 -aww 30 -halbritter 30 -climate-related 30 -vendome 30 -paet 30 -50lb 30 -2013-16 30 -sussan 30 -aiport 30 -53.5 30 -23,250 30 -fazakerley 30 -datta 30 -outmuscled 30 -sharpener 30 -noguera 30 -622 30 -thickest 30 -uzzell 30 -starker 30 -harney 30 -resettling 30 -holmquist 30 -khristine 30 -swiveled 30 -weluree 30 -azzedine 30 -noida 30 -katawal 30 -angelos 30 -firebox.com 30 -nunez-figueroa 30 -shek 30 -anti-women 30 -catrin 30 -imparting 30 -+39 30 -al-majid 30 -down-time 30 -mail-in 30 -hyperpartisan 30 -apsley 30 -four-bathroom 30 -force-feed 30 -judicially 30 -222,000 30 -goias 30 -cleve 30 -11.00 30 -brasse 30 -post-doctoral 30 -kandinsky 30 -carbon-neutral 30 -kockott 30 -good-paying 30 -troitino 30 -al-mutawa 30 -120km 30 -reema 30 -rohilla 30 -follieri 30 -stand-ins 30 -5.56 30 -gosk 30 -premised 30 -sinckler 30 -preventers 30 -mashaei 30 -salopek 30 -akeem 30 -zenawi 30 -outgrew 30 -panahi 30 -colunga 30 -pleat 30 -100mm 30 -pfleger 30 -lengthier 30 -alstrom 30 -brach 30 -metzner 30 -rosling 30 -bordainick 30 -envelops 30 -25lb 30 -40.7 30 -itaquerao 30 -kolles 30 -80-foot 30 -single-vehicle 30 -13.40 30 -putte 30 -968 30 -meisner 30 -bateau 30 -magdelena 30 -uncounted 30 -wilander 30 -kowalczyk 30 -mishit 30 -dido 30 -whitgift 30 -loudness 30 -neo-classical 30 -frill 30 -leyonhjelm 30 -amine 30 -trepanation 30 -auvinen 30 -rococo 30 -phonebloks 30 -daintree 30 -dk2 30 -new-style 30 -rasoul 30 -footholds 30 -chickasha 30 -pawnee 30 -bartek 30 -benstead 30 -tearoom 30 -heldt 30 -mollah 30 -kenia 30 -eoghan 30 -vujicic 30 -akhbar 30 -326,000 30 -marshy 30 -one-in-five 30 -1769 30 -upi 30 -bopping 30 -saturate 30 -tyrelle 30 -teasley 30 -uwem 30 -alcorcon 30 -persecutions 30 -corrode 30 -charice 30 -entourages 30 -lushniak 30 -tyrion 30 -typography 30 -gotterba 30 -babydoll 30 -554 30 -philippoussis 30 -rensselaer 30 -tanilla 30 -fasteners 30 -arkadiusz 30 -grilles 30 -perovskite 30 -bamboozle 30 -munari 30 -greymans 30 -co-writing 30 -serginson 30 -slackers 30 -115th 30 -chenais 30 -tims 30 -open-access 30 -mottaki 30 -m2m 30 -infrasound 30 -epilogue 30 -odeh 30 -hostetler 30 -football-sized 30 -ots 30 -dry-cleaning 30 -softener 30 -non-gamers 30 -buckhorn 30 -payrolls 30 -skiier 30 -pascual 30 -vouchercodes.co.uk 30 -pir 30 -reprogramming 30 -ssi 30 -susman 30 -grasse 30 -dallas-area 30 -perryman 30 -set-ups 30 -risk-taker 30 -58million 
30 -albay 30 -mawr 30 -bewick 30 -stabilisers 30 -parkview 30 -betz 30 -corpin 30 -nutkins 30 -coty 30 -holodeck 30 -montparnasse 30 -evaluators 30 -beanies 30 -windfarm 30 -askari 30 -viscerally 30 -strathmore 30 -mellado 30 -optimizing 30 -queenslander 30 -osmun 30 -1690 30 -momager 30 -him/her 30 -shipp 30 -amalaha 30 -sky-blue 30 -auer 30 -saboor 30 -stoch 30 -thirith 30 -nought 30 -czech-born 30 -greechan 30 -testolini 30 -tiler 30 -25-54 30 -baxam 30 -67.5 30 -deraa 30 -airpark 30 -pietsch 30 -rosedale 30 -shroff 30 -zip-tied 30 -3.69 30 -harassers 30 -lowson 30 -120-day 30 -decoster 30 -campervans 30 -flatlined 30 -busacca 30 -searls 30 -woelk 30 -storm-damaged 30 -glozell 30 -post-cold 30 -sweady 30 -aimee-rose 30 -pleasantville 30 -myasthenia 30 -engweiler 30 -belajonas 30 -spluttered 30 -aswan 30 -drame 30 -sdp 30 -meditated 30 -diatchenko 29 -schoolteachers 29 -jornal 29 -livings 29 -59f 29 -current-gen 29 -sexwale 29 -369million 29 -saadiyat 29 -replenishment 29 -manda 29 -foti 29 -crilly 29 -scurries 29 -r-minnesota 29 -whatton 29 -callard 29 -quincey 29 -non-confrontational 29 -herridge 29 -sumeet 29 -dekeyzer 29 -lawford 29 -tarrytown 29 -midpoint 29 -54-year 29 -scocco 29 -satnavs 29 -boniadi 29 -giovanditto 29 -300-strong 29 -two-bathroom 29 -three-strong 29 -mitte 29 -e-cig 29 -madmen 29 -brazoria 29 -rankle 29 -metrorail 29 -cioffi-petrakis 29 -22-year-olds 29 -longman 29 -akhras 29 -ericson 29 -simoes 29 -belkacem 29 -cherubic 29 -202,000 29 -lisburn 29 -espargaro 29 -antiwar 29 -1,070 29 -undershirt 29 -sistema 29 -21:31 29 -khairiah 29 -pheromone 29 -für 29 -second-set 29 -unfocused 29 -u.s.-israel 29 -chorionic 29 -kleenex 29 -glisten 29 -gulzar 29 -doormats 29 -cheesegrater 29 -north/south 29 -nots 29 -46.7 29 -market-leading 29 -kenyan-born 29 -fujimura 29 -,18 29 -frostie 29 -cookinglight.com 29 -nabbach 29 -supersonics 29 -336,000 29 -jarvie 29 -80per 29 -proofs 29 -fadhli 29 -evison 29 -erfan 29 -entrails 29 -comte 29 -re-engagement 29 -appreciable 29 -keya 29 -gawkers 29 -moneypenny 29 -#nbcfail 29 -puffa 29 -farnan 29 -smudges 29 -burkha 29 -karpov 29 -concretion 29 -elmira 29 -oneness 29 -kuster 29 -archetypes 29 -gearhart 29 -shlomo 29 -metrohealth 29 -mailroom 29 -18kg 29 -sulforaphane 29 -arulanandam 29 -kopi 29 -686 29 -682 29 -gargoyle 29 -passarella 29 -rhinebeck 29 -r-missouri 29 -radzyminski 29 -tso 29 -tranches 29 -3:10 29 -pba 29 -houseguests 29 -half-court 29 -abdulmolah 29 -26-man 29 -rajo 29 -flay 29 -ingo 29 -ouya 29 -riel 29 -savin 29 -85m 29 -urthecast 29 -blackshades 29 -castile 29 -kenenisa 29 -hawija 29 -20,000-a-year 29 -boning 29 -kelechi 29 -ejecta 29 -getter 29 -mohegan 29 -al-obeidi 29 -ex-deputy 29 -21:11 29 -studley 29 -telephoning 29 -hanifa 29 -vestal 29 -ulceration 29 -raque 29 -stammering 29 -coalmine 29 -nonchalance 29 -sharelink 29 -lundell 29 -zeke 29 -scythed 29 -thurs 29 -wijaya 29 -mashudur 29 -avoca 29 -five-gallon 29 -wrangled 29 -ucs 29 -fareshare 29 -muskingum 29 -mackail-smith 29 -klann 29 -985 29 -ulverston 29 -vinkovci 29 -madder 29 -mitchinson 29 -sheboygan 29 -commbank 29 -nahuatl 29 -sawmill 29 -ellar 29 -griem 29 -sharlene 29 -horning 29 -loomba 29 -duminy 29 -wrinkling 29 -low-frequency 29 -boberg 29 -mexicali 29 -18billion 29 -unequally 29 -st-germain 29 -mcgahey 29 -diomede 29 -marikina 29 -wing-backs 29 -mutai 29 -outmaneuvered 29 -khanjar 29 -mariya 29 -litzman 29 -capstone 29 -gold-standard 29 -mind-numbingly 29 -gerace 29 -bogert 29 -bia 29 -46p 29 -isolates 29 -mentawai 29 -swierski 29 -sunkissed 29 
-16-20 29 -arrowheads 29 -reclassification 29 -nassan 29 -microbiological 29 -lowercase 29 -workmanlike 29 -833 29 -plumed 29 -neuronal 29 -sweatt 29 -mq-9 29 -preliminaries 29 -breadmaker 29 -dogo 29 -wetterling 29 -splotches 29 -niels 29 -bronchiolitis 29 -quanta 29 -tatro 29 -atalay 29 -soft-tissue 29 -ryoo 29 -newtons 29 -echevarria 29 -sonoran 29 -raigmore 29 -agard 29 -warranting 29 -20.30 29 -viler 29 -zuck 29 -camcorders 29 -compaq 29 -savanah 29 -antisemitism 29 -kron 29 -ungovernable 29 -well-orchestrated 29 -gilliard 29 -sandel 29 -dursley 29 -haymore 29 -effendi 29 -replant 29 -monkton 29 -broyles 29 -60-40 29 -simulcast 29 -tornado-ravaged 29 -norlaila 29 -paranaense 29 -killearn 29 -tawfik 29 -flashcards 29 -wahhabi 29 -faulting 29 -roboticists 29 -centerpieces 29 -0530 29 -unionization 29 -mossel 29 -devantier 29 -oppositional 29 -trost 29 -rivka 29 -guzzle 29 -herdsmen 29 -insite 29 -eis 29 -dahir 29 -derails 29 -caleo 29 -ayat 29 -torpor 29 -tsunis 29 -stencilled 29 -vicary 29 -tilbrook 29 -ex-serviceman 29 -paulos 29 -58.5 29 -non-believer 29 -tetrapods 29 -condron 29 -delhi-based 29 -time-bomb 29 -burn-out 29 -innovated 29 -gasket 29 -tari 29 -bedeviled 29 -université 29 -tolu 29 -outlays 29 -handoff 29 -aslamshoyeva 29 -bhaskar 29 -penna 29 -missileers 29 -plaintive 29 -slo 29 -istanbul-based 29 -bartle 29 -21:59 29 -capacitor 29 -geishas 29 -bulkhead 29 -samasko 29 -wilsey 29 -seductress 29 -year-to-date 29 -cluedo 29 -fromm 29 -55-minute 29 -methicillin-resistant 29 -khaleesi 29 -kyriacou 29 -most-liked 29 -normandie 29 -snowboards 29 -bayi 29 -approvingly 29 -bailes 29 -ivories 29 -zylberberg 29 -lotts 29 -intimidates 29 -chn 29 -batra 29 -heart-related 29 -starsky 29 -waisted 29 -lowlife 29 -parasail 29 -zofeya 29 -decrypted 29 -retooling 29 -co-head 29 -filibustering 29 -cherokees 29 -cudgel 29 -corcovado 29 -+61 29 -unwary 29 -restrepo 29 -rankles 29 -np 29 -semi-autobiographical 29 -limoges 29 -panmure 29 -sieg 29 -mthatha 29 -deadspin.com 29 -gauche 29 -o'brian 29 -turnabout 29 -intemperate 29 -bish 29 -guillemots 29 -haitien 29 -stone-built 29 -mosey 29 -kendzior 29 -earth-moving 29 -happenstance 29 -senora 29 -i-40 29 -schrivjer 29 -bewkes 29 -crystal-encrusted 29 -300-mile 29 -cash-flow 29 -decamp 29 -djau 29 -cfr 29 -berates 29 -aedt 29 -beatbox 29 -someway 29 -maxted 29 -fotuali'i 29 -tactful 29 -solid-state 29 -atakan 29 -smooth-talking 29 -hastag 29 -incase 29 -18ct 29 -ofi 29 -plenum 29 -f4 29 -00:53 29 -zealously 29 -606 29 -60c 29 -glycogen 29 -altria 29 -mcdaniels 29 -byte 29 -dalkeith 29 -bacani 29 -zhi 29 -rabe 29 -space-related 29 -oodles 29 -woodpeckers 29 -destruct 29 -lauber 29 -anti-harassment 29 -kindergarteners 29 -daday 29 -grandfather-of-two 29 -juicers 29 -35km 29 -shonan 29 -multitudes 29 -pro-family 29 -57million 29 -mohair 29 -1714 29 -duenez 29 -probable-cause 29 -mayville 29 -alexanders 29 -co-anchored 29 -henricks 29 -mn 29 -gou 29 -biliary 29 -narcissists 29 -decapitations 29 -3700 29 -dog-walking 29 -hilco 29 -drug-sniffing 29 -poon 29 -lgbti 29 -co-production 29 -salmeron 29 -2.03 29 -psychotherapists 29 -shrem 29 -trivedi 29 -784 29 -wardlaw 29 -setiawan 29 -bradish 29 -godstone 29 -anti-virals 29 -nikolaus 29 -kazin 29 -emboldening 29 -wdrb 29 -walk-off 29 -meb 29 -nalepa 29 -627 29 -bousted 29 -8-megapixel 29 -four-team 29 -hayati 29 -orbach 29 -bugbear 29 -thrasher 29 -afshar 29 -tomaselli 29 -brauer 29 -bartfield 29 -straight-laced 29 -risc 29 -6-month 29 -animal-loving 29 -chng 29 -divinely 29 -donelan 29 -maitua 29 
-cisterns 29 -zalmay 29 -bedlington 29 -33billion 29 -prudently 29 -house-passed 29 -sexualization 29 -damiao 29 -yow 29 -vacaville 29 -memantine 29 -burstein 29 -hirschfeld 29 -alliyah 29 -adventuring 29 -anti-thatcher 29 -absolut 29 -yoani 29 -decrypt 29 -finbarr 29 -concierges 29 -ring-fencing 29 -takedowns 29 -motivators 29 -over-running 29 -weevils 29 -earlobes 29 -yanni 29 -skull-like 29 -al-saffar 29 -creepypasta 29 -300-acre 29 -mourino 29 -team-talk 29 -lx 29 -wachowski 29 -waynesboro 29 -anu 29 -gameau 29 -glades 29 -loungewear 29 -330million 29 -trivialises 29 -numerology 29 -17p 29 -eike 29 -two-handed 29 -euphemistically 29 -168-year-old 29 -kjaer 29 -parmigiani 29 -breastbone 29 -894 29 -macapagal-arroyo 29 -then-14-year-old 29 -poopy 29 -friended 29 -herrings 29 -lucked 29 -ellerin 29 -2080s 29 -medsker 29 -reedie 29 -cellulosic 29 -watchable 29 -post-presidency 29 -gripper 29 -rollerblading 29 -pankey 29 -875,000 29 -streit 29 -derisively 29 -high-life 29 -lutteropp 29 -convery 29 -31.1 29 -quotable 29 -detainer 29 -samosas 29 -lnp 29 -htoo 29 -festus 29 -dominque 29 -d-list 29 -upper-level 29 -money-maker 29 -motrin 29 -fazl 29 -peele 29 -googoosh 29 -33.1 29 -zizi 29 -shai 29 -leat 29 -post-independence 29 -cuvee 29 -semmons 29 -gioia 29 -persimmon 29 -trevis 29 -news10 29 -sibat 29 -m&t 29 -kazmi 29 -disinclined 29 -strangeness 29 -whataburger 29 -00:28 29 -medhi 29 -678 29 -break-down 29 -rodolph 29 -sing-a-long 29 -rappel 29 -peirong 29 -sampford 29 -chaffee 29 -beefeater 29 -caxton 29 -asem 29 -right-to-buy 29 -arthurian 29 -behrang 29 -50-page 29 -dieu 29 -ackley 29 -housemaids 29 -hyam 29 -d.l. 29 -gaydar 29 -analgesic 29 -catcalling 29 -aconcagua 29 -orlov 29 -lethally 29 -expressen 29 -yust 29 -300k 29 -fertilise 29 -odense 29 -knuckling 29 -millwood 29 -ratepayers 29 -phillippa 29 -vucic 29 -schama 29 -shelf-life 29 -256,000 29 -fixings 29 -civets 29 -mauls 29 -rampling 29 -dudik 29 -caldeira 29 -sakai 29 -2hrs 29 -50.4 29 -uncluttered 29 -vectors 29 -tooling 29 -most-used 29 -haimy 29 -stears 29 -ksla 29 -fourth-ranked 29 -252,000 29 -binyam 29 -pre-packed 29 -hunnewell 29 -shakuri 29 -undelivered 29 -60f 29 -jayhawks 29 -14km 29 -detracting 29 -proto 29 -young-adult 29 -100km/h 29 -lecherous 29 -dunmow 29 -ellixson 29 -overindulge 29 -gray-haired 29 -cec 29 -brandlin 29 -wigley 29 -gilts 29 -cake-making 29 -hosking 29 -violinists 29 -990,000 29 -hirata 29 -pasqualone 29 -sassa 29 -bird-like 29 -vintners 29 -miri 29 -chichen 29 -1,035 29 -aqueducts 29 -envigado 29 -243,000 29 -crosswhite 29 -hawa 29 -westropp 29 -42.8 29 -bpd 29 -kika 29 -montanes 29 -baalbek 29 -murrays 29 -asides 29 -schoolbag 29 -people-smuggling 29 -military-to-military 29 -frankham 29 -lindelof 29 -khattalah 29 -spindle 29 -grindle 29 -straightens 29 -streetwear 29 -zenjov 29 -hixson 29 -phone-ins 29 -z3 29 -cosmetology 29 -corah 29 -z1 29 -newness 29 -703 29 -12per 29 -cubism 29 -invents 29 -panhandler 29 -welborn 29 -free-thinking 29 -wolfed 29 -80cm 29 -maltesers 29 -sylar 29 -22-point 29 -disablement 29 -akshaya 29 -cephalopod 29 -14in 29 -shobna 29 -manxman 29 -cluttering 29 -ngan 29 -935 29 -conscientiously 29 -bleeped 29 -katidis 29 -humberstone 29 -patagonian 29 -mtr 29 -write-up 29 -.10 29 -asan 29 -ifpi 29 -bog-standard 29 -serdyukov 29 -purkis 29 -30-35 29 -fettes 29 -roxane 29 -abol 29 -oheka 29 -cambrai 29 -gunslinger 29 -conder 29 -hamas-ruled 29 -abbots 29 -mcwilliam 29 -19-minute 29 -morl 29 -osotimehin 29 -emplacements 29 -abdusalamov 29 -brw 29 -bandele 29 -orjoux 29 -ind. 
29 -antisemitic 29 -kaela 29 -anthropogenic 29 -discredits 29 -non-citizen 29 -pre-islamic 29 -soon-to-be-released 29 -pegues 29 -petermann 29 -dalits 29 -holtzberg 29 -hands-down 29 -pro-irish 29 -job-killing 29 -rosella 29 -feijen 29 -finkbiner 29 -failsafe 29 -20lb 29 -shirva 29 -goldring 29 -murmuring 29 -image-conscious 29 -aral 29 -tagger 29 -fourie 29 -loftis 29 -stragglers 29 -gunduz 29 -gyro 29 -chawla 29 -ap-3c 29 -slicked-back 29 -moynan 29 -absolving 29 -greuther 29 -arpels 29 -ardently 29 -gorrie 29 -n'dour 29 -impermeable 29 -haarlem 29 -barbee 29 -crumpling 29 -goggle 29 -ruderman 29 -long-anticipated 29 -yg 29 -8bn 29 -egret 29 -sarum 29 -timeliness 29 -cryan 29 -indentation 29 -riyo 29 -al-alam 29 -cobus 29 -picture-postcard 29 -prahran 29 -maisel 29 -czerkawski 29 -dlt 29 -brownlie 29 -addicting 29 -prising 29 -estemirova 29 -animating 29 -cornice 29 -tyers 29 -regrown 29 -eldredge 29 -aiff 29 -kerzhakov 29 -01:10 29 -molko 29 -sunbather 29 -r-louisiana 29 -barbeques 29 -kirshner 29 -menifee 29 -lfw 29 -pick-ups 29 -paralyses 29 -stotts 29 -cockatiel 29 -tames 29 -counter-measures 29 -czeisler 29 -poppleton 29 -hastert 29 -francois-henri 29 -erol 29 -goude 29 -durkan 29 -simplot 29 -oberstar 29 -carreiro 29 -nolberto 29 -jerrod 29 -cassino 29 -crocodile-like 29 -rhein 29 -tunnelled 29 -tidbit 29 -six-sided 29 -tomos 29 -vee 29 -yowell 29 -corra 29 -coda 29 -well-managed 29 -yuppies 29 -conca 29 -lamkin 29 -behan 29 -alamogordo 29 -hallucinatory 29 -11-week 29 -spey 29 -pharaonic 29 -brownish 29 -hurdling 29 -wackiest 29 -spreckels 29 -tridents 29 -salton 29 -bracketed 29 -noblemen 29 -ssafa 29 -theres 29 -amniocentesis 29 -canas 29 -anti-cop 29 -parnham 29 -union-backed 29 -kaufmann 29 -gharib 29 -sigler 29 -seidman 29 -7500 29 -rekha 29 -ifop 29 -lackova 29 -pincher 29 -x2 29 -ultrafast 29 -expansionism 29 -biskupic 29 -neugebauer 29 -vibrated 29 -playmobil 29 -suna 29 -mcbeal 29 -bresson 29 -bankier 29 -boxster 29 -ghimire 29 -re-creating 29 -middlebury 29 -chows 29 -nadel 29 -andris 29 -camomile 29 -baskeyfield 29 -penryn 29 -imperceptible 29 -headwind 29 -stansfeld 29 -devonian 29 -lancastrian 29 -armie 29 -fdc 29 -pitied 29 -neet 29 -knopf 29 -sikkim 29 -standardization 29 -boll 29 -mayne-nicholls 29 -paveway 29 -quitters 29 -crystallise 29 -super-secret 29 -deluding 29 -re-sell 29 -comedy-drama 29 -micronesia 29 -kublai 29 -lipnitskaya 29 -stackpole 29 -self-important 29 -heavy-water 29 -williamstown 29 -ovett 29 -micaelo 29 -job-creating 29 -kizer 29 -aum 29 -disrespects 29 -malissa 29 -third-minute 29 -914 29 -tarkanian 29 -vucelic 29 -kindercare 29 -ebbs 29 -greenwashing 29 -nine-year-olds 29 -purrfect 29 -veto-wielding 29 -bentz 29 -transportable 29 -laser-like 29 -iron-rich 29 -daeng 29 -taka 29 -hothead 29 -leafleting 29 -soden 29 -22:32 29 -man-hunt 29 -al-shibli 29 -necking 29 -squamous 29 -mitchel 29 -donenfeld 29 -elano 29 -dfb-pokal 29 -softly-spoken 29 -podolak 29 -bullen 29 -lapdancers 29 -sonnex 29 -castan 29 -wind-powered 29 -migrates 29 -yaojie 29 -guerrido 29 -rodrigue 29 -recreationally 29 -kolodziej 29 -21:41 29 -iva 29 -bajaj 29 -meet-ups 29 -metabolite 29 -runkeeper 29 -5-foot-7 29 -ione 29 -chane 29 -briant 29 -purnima 29 -auto-tune 29 -ibraimi 29 -gevaudan 29 -trine 29 -khufu 29 -non-sporting 29 -talmadge 29 -t-dm1 29 -corne 29 -eddison 29 -02:19 29 -carriger 29 -haberman 29 -detoured 29 -self-medicate 29 -full-day 29 -three-over 29 -aws 29 -hspa 29 -6.55 29 -wallander 29 -costcutter 29 -dmanisi 29 -sex-obsessed 29 -living-room 29 
-disinherited 29 -wakeboard 29 -addario 29 -tv3 29 -well-struck 29 -nurse-in 29 -s/s14 29 -edgardo 29 -much-fancied 29 -jolley 29 -peleliu 29 -kohei 29 -274,000 29 -193,000 29 -ahhh 29 -outran 29 -kwadwo 29 -govs. 29 -otzi 29 -white-hot 29 -frittering 29 -quails 29 -akanksha 29 -amaury 29 -775,000 29 -shamrakova 29 -novick 29 -choirboy 29 -fifth-graders 29 -hewetson 29 -wicb 29 -re-establishment 29 -6-week-old 29 -shula 29 -cause-and-effect 29 -donachie 29 -carousing 29 -chatfield 29 -fishbowl 29 -0.45 29 -chlorinated 29 -cedillo 29 -2.37 29 -lay-offs 29 -9500 29 -cantilevered 29 -commonalities 29 -fulgencio 29 -naldo 29 -stiffly 29 -braehead 29 -voyeurs 29 -speedwagon 29 -lawal 29 -buffing 29 -protegee 29 -geotagged 29 -11-page 29 -1,000-strong 29 -readability 29 -maliackal 29 -jetset 29 -nanyang 29 -eln 29 -codepink 29 -dahle 29 -premenstrual 29 -maajid 29 -dimple 29 -ancon 29 -sooam 29 -specious 29 -shmuley 29 -mannan 29 -goretzka 29 -scheherazade 29 -hadwin 29 -maitre 29 -clery 29 -manchester-born 29 -multi-platinum 29 -comary 29 -camisole 29 -eighth-minute 29 -canahuati 29 -luba 29 -braunau 29 -martineau 29 -ex-secretary 29 -heimans 29 -trophyless 29 -bowles-simpson 29 -c/2013 29 -quickening 29 -kes 29 -caffari 29 -caixa 29 -re-take 29 -fabiani 29 -flat-panel 29 -ostrow 29 -launderette 29 -hkt 29 -demagoguery 29 -co-ords 29 -weyrich 29 -horwill 29 -murdo 29 -one-drug 29 -fairhurst 29 -coder 29 -e1 29 -ul-haq 29 -0.17 29 -timesheet 29 -trebling 29 -melati 29 -burkitt 29 -amsterdam-based 29 -buswell 29 -montalban 29 -winterton 29 -waide 29 -ant-man 29 -snowballing 29 -8:10 29 -weimer 29 -keystroke 29 -strongbow 29 -shittu 29 -leeann 29 -kyden 29 -806 29 -jehan 29 -shallots 29 -tubers 29 -anti-euthanasia 29 -maghoma 29 -irreverence 29 -casara 29 -proof-of-concept 29 -7-year-olds 29 -demilitarised 29 -lichen 29 -pared-back 29 -neurotoxin 29 -chemin 29 -self-deprecation 29 -botti 29 -charite 29 -unmotivated 29 -demurely 29 -kleine-ahlbrandt 29 -cecora 29 -bookworms 29 -riggle 29 -pitifully 29 -vins 29 -franky 29 -wallner 29 -pshe 29 -becquerels 29 -yasuo 29 -sleepwalk 29 -highlining 29 -refracts 29 -sampler 29 -meridien 29 -bara 29 -shoebat 29 -murcielago 29 -notations 29 -n'diaye 29 -ranson 29 -armor-piercing 29 -hanneman 29 -519 29 -wheatgrass 29 -bronagh 29 -towles 29 -perjeta 29 -sumba 29 -mahons 29 -whole-grain 29 -candela 29 -house-building 29 -caddell 29 -karsa 29 -misner 29 -alanne 29 -patdown 29 -amateurism 29 -one-hit 29 -charlatans 29 -outtake 29 -jago 29 -943 29 -olajide 29 -colenso 29 -lugged 29 -hmong 29 -cranleigh 29 -turl 29 -sheardown 29 -gribbin 29 -collen 29 -mushaimaa 29 -vacuum-packed 29 -fryman 29 -1oak 29 -kemble 29 -silverdome 29 -50-strong 29 -league-high 29 -lykken 29 -staniforth 29 -junger 29 -edric 29 -romulo 29 -olivo 29 -foxnews 29 -alsop 29 -1590 29 -downfield 29 -6500 29 -reems 29 -buzby 29 -steeled 29 -supply-side 29 -blogpost 29 -hydrangea 29 -kyong-hui 29 -baathist 29 -tr 29 -conceicao 29 -n-bomb 29 -saccharine 29 -tumblers 29 -hogsmeade 29 -lewisville 29 -komo-tv 29 -kar 29 -harbage 29 -dost 29 -story-telling 29 -lavinder 29 -proffitt 29 -chijindu 29 -flagrante 29 -ross-on-wye 29 -unzipping 29 -kolmanskop 29 -stammered 29 -counter-claims 29 -spaceshipone 29 -a.r. 
29 -rubino 29 -16km 29 -counter-protesters 29 -faroes 29 -statcounter 29 -visa-free 29 -chalara 29 -chronologically 29 -drunkard 29 -twix 29 -wyles 29 -westling 29 -rappelled 29 -guangbiao 29 -lepers 29 -aural 29 -penmanship 29 -daiquiri 29 -pellegrino 29 -gold-colored 29 -kermeliotis 29 -scheepers 29 -reiser 29 -mullick 29 -inswinging 29 -under-rated 29 -11.11 29 -gourjian 29 -dawber 29 -lulls 29 -capital-journal 29 -saco 29 -pskov 29 -normcore 29 -fagen 29 -brillo 29 -brubeck 29 -scuffs 29 -black-eyed 29 -gurria 29 -forsaken 29 -porsha 29 -tannenbaum 29 -flossing 29 -jong-nam 29 -berwick-upon-tweed 29 -gilardino 29 -southwood 29 -blemish-free 29 -822 29 -second-busiest 29 -counter-narcotics 29 -umaro 29 -beddows 29 -soghoian 29 -garçons 29 -perce 29 -criticsed 29 -gunk 29 -anselm 29 -neurodevelopmental 29 -tick-box 29 -16mp 29 -moping 29 -speediest 29 -hallcup 29 -uchida 29 -armorgroup 29 -prohibition-era 29 -tamarod 29 -race-neutral 29 -photobucket 29 -steinfurth 29 -weathergirl 29 -camouflaging 29 -vereen 29 -909 29 -conceit 29 -prearranged 29 -sashayed 29 -birthrates 29 -electability 29 -fledging 29 -satyanarayan 29 -luzio 29 -mincing 29 -tidswell 29 -smoot 29 -petionville 29 -back-dated 29 -hawai'i 29 -lip-synching 29 -repays 29 -weddle 29 -chabad-lubavitch 29 -megatons 29 -steeds 29 -simcity 29 -nth 29 -115million 29 -brinkman 29 -t-boz 29 -tik 29 -dannielynn 29 -ladybug 29 -3.02 29 -mid-18th 29 -benioff 29 -newent 29 -digbeth 29 -corniche 29 -seethed 29 -dammit 29 -athanasiadis 29 -marijuana-infused 29 -amri 29 -inactivated 29 -benjy 29 -flailed 29 -basim 29 -lowri 29 -stallard 29 -seguro 29 -witsell 29 -neame 29 -yul 29 -160th 29 -top-scoring 29 -mooning 29 -cyan 29 -konig 29 -+971 29 -murle 29 -one-day-old 29 -739 29 -kuilan 29 -side-stepped 29 -year-to-year 29 -flanigan 29 -wingless 29 -mayawati 29 -12-step 29 -keven 29 -type-2 29 -amagansett 29 -10-yard 29 -pluripotent 29 -okamoto 29 -chamomile 29 -w4 29 -glennon 29 -tapie 29 -hayes-white 29 -joachin 29 -petrochemicals 29 -bierman 29 -bloomers 29 -clic 29 -warfighter 29 -schon 29 -llandrindod 29 -r2 29 -run-scorer 29 -sireau 29 -characterisation 29 -steck 29 -eades 29 -race-hate 29 -diorama 29 -reality-tv 29 -attanasio 29 -gp-led 29 -4mm 29 -lodz 29 -dahal 29 -2,717 29 -u.s.-brokered 29 -buffington 29 -61.5 29 -lifeinvader 29 -harbor-hickam 29 -gaudino 29 -abdule 29 -lemigova 29 -3.60 29 -oddy 29 -siham 29 -lilliput 29 -one-metre 29 -gérard 29 -parfum 29 -solly 29 -lasry 29 -venclovas 29 -herriot 29 -hobbles 29 -paynter 29 -luthi 29 -boggles 29 -deephaven 29 -thill 29 -0.85 29 -lilli 29 -shaliza 29 -peppercorn 29 -vaclik 29 -ouistreham 29 -brittani 29 -wayan 29 -self-acceptance 29 -713 29 -pileggi 28 -gilberton 28 -peaceably 28 -+82 28 -omeri 28 -krist 28 -msika 28 -alyeska 28 -iacovou 28 -milian 28 -marsfield 28 -hazzah 28 -bleating 28 -houk 28 -safet 28 -tomasi 28 -colburn 28 -greeter 28 -midshipmen 28 -uncorked 28 -jacobo 28 -74.6 28 -74.5 28 -weisel 28 -lópez 28 -1,170 28 -cubo 28 -reforma 28 -belugas 28 -spader 28 -pal-v 28 -sophomoric 28 -relaunching 28 -bassel 28 -critically-ill 28 -tirado 28 -ecstatically 28 -colnbrook 28 -ambrosini 28 -199.99 28 -kovr 28 -centric 28 -then-leader 28 -kailee 28 -falvey 28 -relievers 28 -graciela 28 -longmore 28 -hirohito 28 -gigaom 28 -22-0 28 -groupe 28 -3.48 28 -wirathu 28 -newborough 28 -mulhouse 28 -seventh-place 28 -rath 28 -ynn 28 -slym 28 -boxtrolls 28 -q4 28 -nandgaon 28 -pigsty 28 -gibbard 28 -deaver 28 -seaquarium 28 -accessorizing 28 -thayne 28 -cigarillos 28 -ardoyne 28 
-gosia 28 -geospatial 28 -defiling 28 -pawlett 28 -87th-minute 28 -armfield 28 -moin 28 -douses 28 -small-screen 28 -21:30 28 -eszterhas 28 -backhouse 28 -jeerh 28 -ghaemi 28 -779 28 -misek 28 -fka 28 -rationalise 28 -throw-away 28 -46.4 28 -,11 28 -jack-knifed 28 -lebanon-based 28 -crilley 28 -maximillian 28 -locked-up 28 -cleave 28 -apparitions 28 -madina 28 -headey 28 -mislabeling 28 -schaibles 28 -okehampton 28 -heleno 28 -kleargear.com 28 -anti-fascists 28 -goblets 28 -xfinity 28 -zainabou 28 -two-test 28 -g650 28 -boracay 28 -0730 28 -jugglers 28 -mycelium 28 -70-foot 28 -steadfastness 28 -rudman 28 -chisnall 28 -mangyongdae 28 -kornegay 28 -ilic 28 -holsters 28 -foodstuff 28 -legalistic 28 -tosun 28 -rusk 28 -cham 28 -chav 28 -jaz 28 -189733b 28 -megalomaniac 28 -68m 28 -scooting 28 -sentinels 28 -re-enactor 28 -foran 28 -reapplied 28 -worrier 28 -flash-flooding 28 -#putoutyourbats 28 -groomsman 28 -beki 28 -petrochina 28 -caboose 28 -warlingham 28 -rekik 28 -sociability 28 -shaunna 28 -senate-passed 28 -swatter 28 -assizes 28 -waywire 28 -aftergood 28 -volz 28 -banyan 28 -01:25 28 -leptin 28 -bidens 28 -splatters 28 -impressionism 28 -venegas 28 -espressos 28 -sugarpova 28 -vinton 28 -straughair 28 -hikind 28 -completeness 28 -bonten 28 -krasojevic 28 -low-impact 28 -owensboro 28 -niccolo 28 -seattle-area 28 -1,095 28 -krai 28 -ascendant 28 -two-level 28 -suddons 28 -eleni 28 -aam 28 -kuomintang 28 -smoltz 28 -lom 28 -llandovery 28 -moisturised 28 -michiko 28 -backpedaling 28 -solvang 28 -nello 28 -bianculli 28 -gouges 28 -cemetary 28 -abbotts 28 -guillem 28 -byrum 28 -liow 28 -galston 28 -rheumatology 28 -nsu 28 -oca 28 -landless 28 -gono 28 -rochus 28 -burry 28 -joists 28 -lillis 28 -bardstown 28 -polley 28 -junichiro 28 -grimsey 28 -palosz 28 -mothballs 28 -aydin 28 -snowmelt 28 -mahout 28 -siii 28 -endeavouring 28 -indah 28 -sauropod 28 -severest 28 -najar 28 -four-wheeler 28 -yehudi 28 -pozniak 28 -steels 28 -pacos 28 -clegg-gibson 28 -1.77 28 -ktvb 28 -bim 28 -robinson-pierre 28 -degenkolb 28 -step-grandmother 28 -millom 28 -wicket-keeper 28 -Álvaro 28 -hughley 28 -rottenberg 28 -lassa 28 -834 28 -post-revolutionary 28 -aldred 28 -colonisers 28 -sweatshops 28 -mattock 28 -over-riding 28 -uggs 28 -posits 28 -pork-barrel 28 -huget 28 -yeam 28 -cloister 28 -donepezil 28 -822,000 28 -identikit 28 -money-spinner 28 -tayyab 28 -ferrelle 28 -janssens 28 -fiercer 28 -torvosaurus 28 -caryatids 28 -salvadorans 28 -kepplinger 28 -demarcated 28 -bergendorff 28 -krop 28 -tedesco 28 -coton 28 -tvert 28 -piqué 28 -burd 28 -mcaleer 28 -putra 28 -pre-party 28 -actor-director 28 -arstechnica 28 -post-tropical 28 -66-year 28 -freediver 28 -gyles 28 -black-ish 28 -embellishing 28 -jes 28 -trumpeters 28 -iestyn 28 -deviates 28 -hashima 28 -g'day 28 -irrationality 28 -record-equalling 28 -doggone 28 -imaarl 28 -hunger-free 28 -open-mindedness 28 -kasha 28 -button-up 28 -smarmy 28 -187,000 28 -riis 28 -top-ten 28 -812 28 -gopaul 28 -lampoons 28 -dao 28 -octave 28 -restating 28 -tole 28 -30.7 28 -bergamasco 28 -whitemoor 28 -multi-sport 28 -incompatibility 28 -motioning 28 -kapil 28 -dodgeball 28 -berryman 28 -matherly 28 -mairi 28 -water.org 28 -heterosexuality 28 -ultra-violet 28 -tulio 28 -steenkamps 28 -dorris 28 -bedbound 28 -dswt 28 -tushar 28 -fluro 28 -barret 28 -marquet 28 -overestimating 28 -gap-year 28 -non-aggression 28 -trion 28 -super-skinny 28 -axe-wielding 28 -arana 28 -gita 28 -copters 28 -ogled 28 -pieau 28 -kiprotich 28 -beaudet 28 -biswas 28 -angelenos 28 -salihovic 28 -agutter 
28 -lojack 28 -pedal-powered 28 -chaves 28 -resits 28 -00:35 28 -underlies 28 -trobaugh 28 -12-bedroom 28 -14-bedroom 28 -matchups 28 -tuz 28 -bmg 28 -geissler 28 -climatologists 28 -hailstorms 28 -puppeteers 28 -compensations 28 -farhadi 28 -australia-based 28 -hesitates 28 -adductor 28 -'11 28 -pocked 28 -kolodziejczak 28 -giffin 28 -mathie 28 -uncrewed 28 -dual-fuel 28 -takeshi 28 -unhappiest 28 -aesop 28 -mojitos 28 -ex-leader 28 -reconfirmed 28 -blockading 28 -hoss 28 -ready-meals 28 -macaws 28 -home-run 28 -saturating 28 -cackling 28 -waals 28 -poor-quality 28 -mladen 28 -yacoub 28 -reconditioned 28 -leale 28 -burnage 28 -infesting 28 -1704 28 -bolan 28 -h.l. 28 -bartali 28 -dothan 28 -scornful 28 -mudstone 28 -tevel 28 -1214b 28 -37c 28 -earache 28 -five-division 28 -garrigan 28 -celestina 28 -prange 28 -two-up 28 -aburas 28 -mog 28 -berlant 28 -redefines 28 -lamson 28 -father-of-eight 28 -spa-francorchamps 28 -hollier 28 -dews 28 -snatchers 28 -heckmondwike 28 -water-skiing 28 -d-wisconsin 28 -povey 28 -cheerfulness 28 -rifan 28 -budget-friendly 28 -inequitable 28 -leniata 28 -1609 28 -wolfinger 28 -underdevelopment 28 -lavalle 28 -repulse 28 -confino 28 -maryborough 28 -kaslow 28 -fifty-seven 28 -flippantly 28 -mullaittivu 28 -award-winner 28 -milledgeville 28 -nobilis 28 -frigo 28 -vaporised 28 -rayat 28 -soundstage 28 -pinchen 28 -under-estimate 28 -ainscough 28 -gandossy 28 -battams 28 -velupillai 28 -traykov 28 -uppal 28 -karm 28 -citron 28 -lifesize 28 -sherrif 28 -calmes 28 -hooley 28 -miniaturist 28 -eichner 28 -bowring 28 -desertions 28 -khoisan 28 -manzarek 28 -stanwell 28 -non-functioning 28 -adaptor 28 -agadez 28 -punky 28 -887 28 -dufek 28 -ultima 28 -al-adly 28 -pianists 28 -1143 28 -bodypainting 28 -greenhous 28 -788 28 -barings 28 -powley 28 -12,400 28 -john-henry 28 -unsatisfying 28 -goddiva 28 -majority-owned 28 -aliya 28 -ocho 28 -teabag 28 -paris-born 28 -kamryn 28 -pre-launch 28 -comin 28 -patsey 28 -disqualifies 28 -mardirossian 28 -quiller 28 -ministering 28 -bric-a-brac 28 -at-bat 28 -kukri 28 -u18s 28 -two-weight 28 -boudou 28 -ghai 28 -vesna 28 -computation 28 -jeida 28 -subtler 28 -denholm 28 -kerns 28 -980,000 28 -zandi 28 -pipework 28 -imbibing 28 -bussandri 28 -landen 28 -11.55 28 -horoscope 28 -litigator 28 -two-tenths 28 -tybee 28 -dsb 28 -totobiegosode 28 -curmudgeon 28 -druzin 28 -wls-tv 28 -tremonti 28 -beaverbrook 28 -idealists 28 -faktor 28 -encoding 28 -lobel 28 -father-of 28 -1,595 28 -fleet-footed 28 -wilkey 28 -co-researcher 28 -pleasanton 28 -downhearted 28 -kanarikov 28 -callao 28 -rvi 28 -soppy 28 -murtala 28 -electrify 28 -institutionalize 28 -tinkers 28 -lesbianism 28 -criterium 28 -ginnetti 28 -wilhelmina 28 -autonomic 28 -bitty 28 -mclemire 28 -pikey 28 -kanesaki 28 -pro-thaksin 28 -bronken 28 -baathists 28 -kanazawa 28 -honorific 28 -gtb 28 -gts 28 -siddal 28 -2,160 28 -mpumalanga 28 -berkery 28 -mial 28 -fornication 28 -shockley 28 -yeganeh 28 -safdar 28 -gapes 28 -flinches 28 -mordor 28 -nikitta 28 -goyer 28 -balsillie 28 -room-mate 28 -mhairi 28 -gorr 28 -hommes 28 -cumulatively 28 -phaser 28 -amity 28 -1493 28 -maund 28 -albacete 28 -meechan 28 -carbonation 28 -233,000 28 -visitations 28 -okun 28 -secretes 28 -shaista 28 -elmander 28 -golub 28 -halliche 28 -thatcham 28 -930,000 28 -encyclopedic 28 -africom 28 -multi-camera 28 -today/gallup 28 -intelligencer 28 -padmore 28 -cedarville 28 -antron 28 -symphonies 28 -nardone 28 -picayune 28 -cermeno 28 -mongomo 28 -nilson 28 -fawkner 28 -langerhans 28 -pre-baby 28 -episcopalian 28 
-roubini 28 -rothblatt 28 -previdi 28 -franzen 28 -sansing 28 -kh 28 -kl 28 -marange 28 -montemayor 28 -taliban-style 28 -al-hadi 28 -subwing 28 -scorchers 28 -procrastinating 28 -wrens 28 -deckard 28 -36.3 28 -workstation 28 -mainframe 28 -al-badri 28 -chippings 28 -hollings 28 -silveira 28 -70-80 28 -purse-friendly 28 -socio-political 28 -bopanna 28 -cheops 28 -artyom 28 -flisher 28 -henge 28 -:2 28 -pedis 28 -toulalan 28 -soldotna 28 -saitama 28 -no-kill 28 -styal 28 -726 28 -1million-plus 28 -chaput 28 -impersonates 28 -al-adawiya 28 -mastromarino 28 -campana 28 -cathey 28 -trulia 28 -60-yard 28 -claas 28 -zaheem 28 -off-loading 28 -moldea 28 -algorithmic 28 -29443 28 -ding-dong 28 -pedroia 28 -1939-45 28 -down-and-out 28 -palenque 28 -1685 28 -burchfield 28 -polgar 28 -lesya 28 -jovovich 28 -sansern 28 -115mph 28 -scituate 28 -gaokao 28 -leymah 28 -telemark 28 -blain 28 -single-payer 28 -flatulent 28 -tink 28 -waste4fuel 28 -zags 28 -jalawla 28 -rapoport 28 -zealand-based 28 -shorrock 28 -siegert 28 -ioannou 28 -Élysée 28 -cammy 28 -haverigg 28 -saipan 28 -1998-1999 28 -pallais 28 -dokic 28 -providenciales 28 -ecotricity 28 -follett 28 -wisbey 28 -briny 28 -most-followed 28 -12s 28 -buskirk 28 -basmati 28 -gutteridge 28 -reznor 28 -punggye-ri 28 -assani 28 -regress 28 -newsdesk 28 -16-strong 28 -paternalistic 28 -ask-don 28 -nuzzles 28 -pennsville 28 -zatopek 28 -pretension 28 -catalyze 28 -balderas 28 -moobs 28 -umbria 28 -antic 28 -antin 28 -french-language 28 -oration 28 -loera 28 -jakeman 28 -2.88 28 -hollandaise 28 -deluise 28 -35.8 28 -orange-red 28 -winspear 28 -castanada 28 -eminence 28 -keelung 28 -krdo 28 -pre-9 28 -indore 28 -hagler 28 -confessor 28 -whitsunday 28 -treese 28 -re-writing 28 -subhreet 28 -dgse 28 -jaidon 28 -castergine 28 -destrehan 28 -richelieu-drouot 28 -bizley 28 -arnav 28 -barkat 28 -heneghan 28 -cookie-cutter 28 -9.00 28 -pull-down 28 -crucifying 28 -noffke 28 -ebt 28 -abdulwahab 28 -dicken 28 -bifengxia 28 -arkin 28 -dibrani 28 -bi-plane 28 -allegro 28 -well-reviewed 28 -mazzara 28 -cvd 28 -tahiri 28 -tns 28 -tnf 28 -encase 28 -babak 28 -dcfs 28 -luuk 28 -cherub 28 -zigzagging 28 -linke 28 -melded 28 -traverses 28 -sweltered 28 -mullarkey 28 -1,004 28 -shuck 28 -relin 28 -shildon 28 -mccully 28 -inopportune 28 -plumlee 28 -carbon-rich 28 -intransigent 28 -setraco 28 -carolee 28 -volcker 28 -unicredit 28 -sed 28 -41.4 28 -gavriel 28 -ricin-tainted 28 -ypj 28 -mid-terrace 28 -fraisse 28 -zell 28 -weaponized 28 -cuty 28 -cutz 28 -staffs. 
28 -afi 28 -windsurfers 28 -pinkish 28 -inclusions 28 -tsao 28 -fallible 28 -wlky 28 -karpinski 28 -limetrees 28 -bankside 28 -intelligentsia 28 -agonies 28 -non-combatants 28 -capuchins 28 -conservative-leaning 28 -neace 28 -shoker 28 -4:35 28 -giddins 28 -slims 28 -hege 28 -traitorous 28 -niersbach 28 -2-mile 28 -pos 28 -directioners 28 -2050s 28 -400mg 28 -wgsn 28 -subtracting 28 -petrol-powered 28 -697 28 -swaledale 28 -go-round 28 -gohil 28 -bto 28 -hulanicki 28 -hillbillies 28 -intensities 28 -asmat 28 -obstructs 28 -17-member 28 -mesko 28 -tarbell 28 -entrées 28 -werntz 28 -gini 28 -mcglaughlin 28 -buglione 28 -oran 28 -alcohol-induced 28 -glade 28 -ball-playing 28 -nikolaos 28 -broni 28 -gooners 28 -viel 28 -steeling 28 -kookmin 28 -sauerland 28 -brizuela 28 -below-inflation 28 -bulot 28 -preclearance 28 -sked 28 -sanchez-ramirez 28 -universality 28 -full-skirted 28 -wiktoria 28 -petroleum-based 28 -krona 28 -bifouma 28 -lasko 28 -baratheon 28 -baysinger 28 -macky 28 -sweepers 28 -redoubling 28 -opelika 28 -hermon 28 -tain 28 -d-minnesota 28 -barkway 28 -mobberley 28 -gossamer 28 -72f 28 -175th 28 -plas 28 -buttermere 28 -cleveleys 28 -bellessa 28 -34.6 28 -prinze 28 -nonagenarian 28 -on-ramp 28 -kartheiser 28 -25-years 28 -balco 28 -marnick 28 -richfield 28 -doms 28 -kaylyn 28 -g-mac 28 -goldsboro 28 -dardis 28 -24,500 28 -favorited 28 -sandercock 28 -trt 28 -myfitnesspal 28 -complexo 28 -severs 28 -silbermann 28 -sunninghill 28 -carrigan 28 -craw 28 -traversie 28 -mcdade 28 -847 28 -848 28 -pibor 28 -54.5 28 -feiz 28 -apoe 28 -wreck-it 28 -off-ramp 28 -eight-figure 28 -70per 28 -writtle 28 -trouper 28 -7d 28 -karson 28 -rodionova 28 -wansink 28 -langevin 28 -unfurnished 28 -pre-fabricated 28 -32.4 28 -kowtowing 28 -interventional 28 -eighty-six 28 -venna 28 -indoctrinating 28 -liphook 28 -fables 28 -heavily-guarded 28 -rifqa 28 -eamer 28 -pottstown 28 -weather-beaten 28 -harjinder 28 -guffaws 28 -2.73 28 -honc 28 -m-class 28 -beaman 28 -santosh 28 -moss-covered 28 -countertops 28 -non-metallic 28 -tuleta 28 -cavalryman 28 -vialli 28 -airings 28 -tea-party 28 -highly-prized 28 -malatino 28 -balms 28 -preble 28 -woolies 28 -intricacy 28 -sub-freezing 28 -655 28 -zohra 28 -athol 28 -labour-controlled 28 -22:31 28 -hartland 28 -hsiao 28 -greedily 28 -text-messaging 28 -heiland 28 -caringbridge 28 -rho 28 -game-day 28 -winslade 28 -kudo 28 -grownups 28 -kentwood 28 -war-zone 28 -scripting 28 -re-building 28 -amre 28 -farwell 28 -heracles 28 -delassus 28 -clamoured 28 -wayman 28 -fully-functional 28 -naweed 28 -slinkard 28 -light-rail 28 -israel-palestinian 28 -21:42 28 -camelback 28 -annastacia 28 -hallatt 28 -penske 28 -middle-of-the-road 28 -archimedes 28 -disbursement 28 -adib 28 -maltin 28 -900m 28 -wristwatches 28 -willi 28 -gissing 28 -moorthy 28 -finchem 28 -performance-based 28 -mid-market 28 -notarized 28 -taran 28 -grassed 28 -table-top 28 -abysmally 28 -underutilized 28 -spooking 28 -sudden-death 28 -mcgonagle 28 -no-contract 28 -mla 28 -cannisters 28 -mcdormand 28 -dovetails 28 -henrich 28 -self-assurance 28 -three-seater 28 -spradling 28 -chasms 28 -mailers 28 -200k 28 -r-north 28 -salway 28 -considine 28 -greff 28 -celebrity-filled 28 -stewart-haas 28 -00:09 28 -glossing 28 -soucie 28 -rain-sodden 28 -gowans 28 -671 28 -500-meter 28 -brisbane-based 28 -noddings 28 -3,050 28 -toulouse-lautrec 28 -shepherdess 28 -katsav 28 -putro 28 -yaasmeen 28 -true-life 28 -treynor 28 -boudina 28 -alivia 28 -bedclothes 28 -ceasefires 28 -dungy 28 -tuite 28 -unsaid 28 -fuhrman 28 
-silliest 28 -abertay 28 -tcm 28 -10-kilometer 28 -ployers 28 -dad-of-one 28 -reexamine 28 -leatherneck 28 -liturgical 28 -17.00 28 -pinscher 28 -lande 28 -dribbler 28 -mccorquodale 28 -well-regulated 28 -bristol-born 28 -commiserations 28 -bretherick 28 -reunified 28 -boeheim 28 -oakdale 28 -1130 28 -girdles 28 -mesopotamian 28 -synchro 28 -diyas 28 -parent-child 28 -esra 28 -zdanowicz 28 -plascencia 28 -11,400 28 -p.e. 28 -mommas 28 -cashflow 28 -meziane 28 -jinny 28 -ballplayers 28 -14,600 28 -m/v 28 -stojkovic 28 -14lbs 28 -féin 28 -helgen 28 -gymnasiums 28 -ungodly 28 -mceveley 28 -tandridge 28 -00:22 28 -stavanger 28 -ardoin 28 -pagnell 28 -mangueira 28 -yorkshire-born 28 -masip 28 -zhan 28 -match-fixer 28 -auto-injector 28 -sedgemoor 28 -2018/19 28 -vocs 28 -culverwell 28 -twentysomething 28 -wkt 28 -aventura 28 -mccain-palin 28 -zdnet 28 -kimmy 28 -tweeds 28 -wellingtons 28 -moule 28 -subramanian 28 -zanuck 28 -nibbs 28 -zr1 28 -trappist 28 -gnashing 28 -corvus 28 -lightbourn 28 -famers 28 -vandegrift 28 -giudecca 28 -valérie 28 -francia 28 -rattigan 28 -colonic 28 -broadhead 28 -anatabloc 28 -pro-mubarak 28 -overfed 28 -bridenstine 28 -combusted 28 -citalopram 28 -scrivo 28 -ppq 28 -22-foot 28 -ventricles 28 -mactaggart 28 -optometrists 28 -creekside 28 -khadra 28 -pronunciations 28 -unzip 28 -haystacks 28 -fact-checkers 28 -kureishi 28 -p&p 28 -mamamia 28 -croupier 28 -sombreros 28 -ghul 28 -ante-natal 28 -dehumanize 28 -100-1 28 -ragsdale 28 -courey 28 -wittman 28 -birss 28 -footbridges 28 -gigabyte 28 -naqvi 28 -00:49 28 -haldon 28 -employer-provided 28 -repulsion 28 -sebastián 28 -mavs 28 -kuljian 28 -horticulturalists 28 -hatha 28 -fitsteps 28 -joongang 28 -davon 28 -suveges 28 -cryptography 28 -habenula 28 -sounder 28 -serpico 28 -rear-wheel 28 -petticoats 28 -37.8 28 -comeagain 28 -meurice 28 -hurring 28 -lawee 28 -azkaban 28 -4x 28 -zlaticanin 28 -begu 28 -trunki 28 -seduces 28 -reni 28 -hunches 28 -guiuan 28 -fuehrer 28 -auc 28 -aleshin 28 -mentalist 28 -standen 28 -naguib 28 -linkages 28 -non-islamist 28 -razzmatazz 28 -lig 28 -thyroxine 28 -self-build 28 -fouche 28 -finan 28 -eves 28 -reacquainted 28 -globus 28 -cost-efficient 28 -inoculations 28 -franke 28 -32b 28 -phanthavong 28 -dammion 28 -rockfall 28 -norberto 28 -30-kilometer 28 -sailfish 28 -manado 28 -sohae 28 -zarina 28 -reformatory 28 -okocha 28 -youngor 28 -clsa 28 -redelfs 28 -melaniuk 28 -stop-over 28 -marchena 28 -locators 28 -leafless 28 -second-lowest 28 -anarae 28 -pettiness 28 -boykoff 28 -0615 28 -lugger 28 -j-j 28 -oakville 28 -danford 28 -syria-turkey 28 -ariella 28 -facsimile 28 -moggie 28 -jusuf 28 -gateau 28 -bikey 28 -gnashers 28 -gowanus 28 -krejcir 28 -dregs 28 -industrial-scale 28 -inu 28 -haranguing 28 -cross-referenced 28 -rosettes 28 -pollara 28 -youku 28 -broll 28 -chynna 28 -spens 28 -cached 28 -heliospheric 28 -varese 28 -archdeacon 28 -misdirection 28 -schlep 28 -!!!!!!! 
28 -tey 28 -sharrod 28 -t-72 28 -arshid 28 -yfz 28 -re-attach 28 -scoreboards 28 -tune-up 28 -bare-breasted 28 -topix 28 -phonics 28 -prioritization 28 -coleman-farrow 28 -soviet-made 28 -dosh 28 -two-over 28 -ice-cool 28 -mommies 28 -habituated 28 -bses 28 -hazrat 28 -0.23 28 -langella 28 -steadies 28 -deafened 28 -enteritidis 28 -welsh-born 28 -lubov 28 -4.49 28 -hinde 28 -nda 28 -nouwarah 28 -snapple 28 -asadullah 28 -supercells 28 -cuss 28 -hydrology 28 -vivanco 28 -pain-killing 28 -monetise 28 -safety-conscious 28 -woolcott 28 -rosenhaus 28 -nazanin 28 -gabeira 28 -iberostar 28 -commercialize 28 -westview 28 -cabra 28 -rom-coms 28 -komarov 28 -spongiform 28 -knifes 28 -150lbs 28 -launderers 28 -pussell 28 -lower-priced 28 -imidacloprid 28 -dancevic 28 -disposes 28 -florid 28 -austria-hungary 28 -winnipesaukee 28 -densely-populated 28 -ryszard 28 -claudine 28 -generalizations 28 -actionaid 28 -data-sharing 28 -berms 28 -o'mahoney 28 -ganadi 28 -b-24 28 -abase 28 -clatters 28 -lbds 28 -montages 28 -moskvin 28 -tbd 28 -car-crash 28 -cordelia 28 -180m 28 -clumped 28 -wrongheaded 28 -three-step 28 -magdala 28 -shipbuilder 28 -crackles 28 -immingham 28 -qm2 28 -c1 28 -expensive-looking 28 -43,500 28 -human-made 28 -cloying 28 -gass 28 -ximena 28 -va-va-voom 28 -drozdz 28 -restate 28 -wind-swept 28 -colander 28 -alagiah 28 -harith 28 -amuses 28 -epecuen 28 -scoped 28 -sangay 28 -mavididi 28 -ring-fence 28 -derring-do 28 -larrivey 28 -unguided 28 -bullmastiff 28 -youthfulness 28 -necktie 28 -2004-2005 28 -ais 28 -sidearm 28 -redskin 28 -simunic 28 -linor 28 -ehsanullah 28 -hilfenhaus 28 -roch 28 -subplot 28 -anti-monarchy 28 -610,000 28 -syriac 28 -904 28 -2b 28 -1670 28 -tharanga 28 -kamangar 28 -levis 28 -metzker-madsen 28 -b29 28 -sloan-kettering 28 -30-6 28 -fedexcup 28 -younghusband 28 -khader 28 -cdt 28 -52.4 28 -hatra 28 -dashiell 28 -khera 28 -sauron 28 -high-purity 28 -snotty 28 -tangipahoa 28 -cuda 28 -moët 28 -langman 28 -lasorda 28 -giornale 28 -4-3-1-2 28 -blobby 28 -donerson 28 -ashurst 28 -burdett 28 -sittin 28 -hanrahan 28 -vurnon 28 -sundaes 28 -52-week 28 -kaveh 28 -ex-lib 28 -melitzer 28 -n'koulou 28 -invitingly 28 -80,000-a-year 28 -60-page 28 -pechey 28 -memin 28 -bird-watching 28 -simpson-daniel 28 -okonjo-iweala 28 -wreckers 28 -backboard 28 -consistory 28 -gesticulated 28 -disrespectfully 28 -durations 28 -bruyn 28 -jetties 28 -9:05 28 -n`t 28 -bulimic 28 -mahmod 28 -faints 28 -zoricic 28 -jammie 28 -unheard-of 28 -svein 28 -artesia 28 -wellman-smith 28 -human-caused 28 -skinbreeze 28 -fredric 28 -canipe 28 -gilding 28 -worcs 28 -gangway 28 -safe-keeping 28 -hang-glider 28 -troves 28 -elfin 28 -airwheel 28 -petcube 28 -checkbooks 28 -oxyelite 28 -cocozza 28 -aissami 28 -rl 28 -36,500 28 -llosa 28 -hilario 28 -gurpreet 28 -heda 28 -silbert 28 -katzenbach 28 -gaisford 28 -dudman 28 -charmian 28 -ryeley 28 -antagonising 28 -fomo 28 -storm-battered 28 -dol 28 -pestis 28 -trinidadian 28 -weissberg 28 -tater 28 -kstu 28 -undisciplined 28 -tok 28 -mezzo-soprano 28 -cosied 28 -californication 28 -baggs 28 -fresnel 28 -lantau 28 -dried-up 28 -decaffeinated 28 -dharda 28 -sequoias 28 -race-related 28 -mcgrew 28 -lak 28 -demolishes 28 -anastasiades 28 -woolston 28 -supplant 28 -crais 28 -bhullar 28 -berner 28 -duck-shaped 28 -quranic 28 -replanted 28 -superimpose 28 -cost-conscious 28 -inflexibility 28 -portwood 28 -everman 28 -gawping 28 -711 28 -hodgins 27 -pervading 27 -diskerud 27 -robothespian 27 -lyrique 27 -591 27 -tankleff 27 -seligman 27 -watchlists 27 -helter-skelter 27 
-bennell 27 -mando 27 -ells 27 -mutambara 27 -essayist 27 -herz-sommer 27 -prerogatives 27 -gladden 27 -tauxe 27 -bedworth 27 -multi-racial 27 -skyrim 27 -lolling 27 -burridge 27 -subscribes 27 -dicarlo 27 -ellipse 27 -1509 27 -peacemakers 27 -ownfone 27 -hathloul 27 -kaleigh 27 -kismet 27 -870,000 27 -opp 27 -campi 27 -jeggings 27 -cofidis 27 -whoppers 27 -squeaks 27 -gieves 27 -merlo 27 -mobilizes 27 -madderson 27 -24-foot 27 -nicklen 27 -peddlers 27 -spoonfuls 27 -jamaat-ud-dawa 27 -mardini 27 -al-huwaider 27 -louche 27 -multi-disciplinary 27 -isaak 27 -scheffer 27 -01:07 27 -1,400-square-foot 27 -fruitland 27 -kavan 27 -1,010 27 -240mph 27 -bhupinder 27 -parenteau 27 -kallen 27 -fazul 27 -doyenne 27 -free-wheeling 27 -cheval 27 -end-of-term 27 -p.k. 27 -llyn 27 -stoical 27 -specially-built 27 -birmingham-born 27 -carolinian 27 -bloodcurdling 27 -chillis 27 -horia 27 -saro-wiwa 27 -basecamp 27 -repucom 27 -ankle-length 27 -eth 27 -9-foot 27 -sendoff 27 -singe 27 -smerdon 27 -underfire 27 -biggio 27 -tankini 27 -tersely 27 -brearley 27 -cartier-bresson 27 -avett 27 -gunsmoke 27 -over-hit 27 -aza 27 -coconino 27 -zahn 27 -much-discussed 27 -islami 27 -fule 27 -ryang 27 -xcx 27 -baresi 27 -villafane 27 -song-and-dance 27 -vaser 27 -nilmar 27 -ogunlesi 27 -breier 27 -lumpar 27 -eosinophilic 27 -430million 27 -rain-lashed 27 -mcgarrigle 27 -unfeeling 27 -tyhurst 27 -phippen 27 -tailwind 27 -radstock 27 -ketut 27 -adulteration 27 -65.4 27 -voom 27 -furthers 27 -xerez 27 -lalique 27 -sucker-punched 27 -inarticulate 27 -yucky 27 -46billion 27 -ries 27 -veness 27 -inter-religious 27 -faggot 27 -01:28 27 -01:23 27 -her2-positive 27 -geospatial-intelligence 27 -albasha 27 -2007-09 27 -858 27 -adeogba 27 -hochul 27 -gardendale 27 -antonini 27 -light-filled 27 -woolfe 27 -weatherall 27 -relocates 27 -hit-and-miss 27 -jenin 27 -tatts 27 -astronautics 27 -shama 27 -z-2 27 -meenakshi 27 -commutations 27 -lewins 27 -guled 27 -interludes 27 -fatma 27 -beat-up 27 -ibbotson 27 -leeuwin 27 -yifrah 27 -tush 27 -corporan 27 -simas 27 -natariga 27 -frostbitten 27 -fatimah 27 -traumatize 27 -aller 27 -honourably 27 -terrifically 27 -rossee 27 -betta 27 -footmen 27 -scampers 27 -deliciousness 27 -billion-pound 27 -bacsinszky 27 -barta 27 -barty 27 -under-10s 27 -peony 27 -schimer 27 -gyrated 27 -scher 27 -baljit 27 -coalville 27 -10,000-square-foot 27 -ructions 27 -georgy 27 -atg 27 -magoo 27 -taras 27 -bonito 27 -bridgeton 27 -backbones 27 -kaneria 27 -aasif 27 -autopsied 27 -koro 27 -two-and-a-half-year-old 27 -ex-west 27 -invalidating 27 -sameness 27 -csv 27 -120-mile 27 -svenska 27 -mjukuu 27 -ribbon-cutting 27 -rickles 27 -sylwia 27 -holbert 27 -rie 27 -nightstick 27 -kusadasi 27 -high-traffic 27 -grass-fed 27 -wickedly 27 -heenan 27 -vp113 27 -diphtheria 27 -satay 27 -fish-eye 27 -mukwege 27 -drug-crazed 27 -alcor 27 -locker-room 27 -borodowski 27 -derdiyok 27 -schrock 27 -erturk 27 -o'higgins 27 -articulates 27 -szegedi 27 -sessums 27 -hamelin 27 -delightedly 27 -nbclp.vidframe.height 27 -subverting 27 -subsist 27 -bettany 27 -pua 27 -outfoxed 27 -networker 27 -wein 27 -elida 27 -itemised 27 -toddle 27 -out-of-bounds 27 -deviants 27 -ventilate 27 -maestas 27 -scarrott 27 -guiliano 27 -mediacity 27 -accident-prone 27 -matchmakers 27 -wenatchee 27 -quintavalle 27 -aeronautic 27 -barat 27 -mckendry 27 -chandimal 27 -superfund 27 -padova 27 -cop-out 27 -brauchler 27 -siller 27 -zachery 27 -nation-states 27 -gmoser 27 -parkersburg 27 -palomar 27 -72-hole 27 -extractor 27 -beninati 27 -erekat 27 -sakhir 27 -wth 
27 -reenacted 27 -collarless 27 -242,000 27 -aikens 27 -occuring 27 -mclarty 27 -africanized 27 -eto 27 -lowman 27 -30.9 27 -lahj 27 -romell 27 -ailun 27 -strop 27 -cancan 27 -slk 27 -martinsburg 27 -lloret 27 -diarmuid 27 -janzen 27 -kalloo 27 -brackett 27 -juvenal 27 -renna 27 -boof 27 -hessle 27 -rainbow-coloured 27 -kget 27 -theorizes 27 -malorie 27 -porcaro 27 -well-financed 27 -suppository 27 -nanograms 27 -flintstones 27 -overruling 27 -pso 27 -woodfield 27 -al-husseini 27 -damselfly 27 -nottingham-based 27 -wmar 27 -kubrat 27 -karnezis 27 -blisteringly 27 -bastidas 27 -allington 27 -eilean 27 -attwater 27 -off-year 27 -brier 27 -france-klm 27 -sheaths 27 -10-9 27 -yaman 27 -slava 27 -realsense 27 -taree 27 -karun 27 -paddleboarding 27 -doosra 27 -2036 27 -bloodlines 27 -witherow 27 -theropods 27 -matthewman 27 -necessitating 27 -stringy 27 -mallis 27 -jaynie 27 -gondoliers 27 -deputised 27 -energizer 27 -flints 27 -kwan-jin 27 -encamped 27 -soane 27 -gusev 27 -reignites 27 -infuses 27 -malphrus 27 -dever 27 -warneke 27 -simplifies 27 -graciousness 27 -festa 27 -indignantly 27 -lemley 27 -tychon 27 -counter-offensive 27 -raven-haired 27 -inseminate 27 -aguas 27 -ho-hum 27 -toba 27 -chadians 27 -sturrock 27 -fadiga 27 -teheran 27 -whippy 27 -saget 27 -denney 27 -tentacle 27 -syme 27 -covets 27 -hozier 27 -jeremain 27 -parolo 27 -strength-to-strength 27 -whence 27 -zephyrhills 27 -die-ins 27 -loxley 27 -newly-installed 27 -missie 27 -skittle 27 -phuketwan 27 -bloomsburg 27 -clee 27 -lvad 27 -instated 27 -henshall 27 -ivania 27 -verlander 27 -50.8 27 -50.7 27 -shonn 27 -26-foot 27 -insula 27 -exhibitionists 27 -left-over 27 -knaeble 27 -marshaled 27 -mcintee 27 -massapequa 27 -beljan 27 -lackner 27 -kamaleswaran 27 -alberti 27 -spreadable 27 -mohel 27 -huayra 27 -riddlesdown 27 -post-thanksgiving 27 -hungriest 27 -hosptial 27 -apophis 27 -peyron 27 -belisle 27 -nunnery 27 -90cm 27 -14-15 27 -mathewson 27 -exasperating 27 -chapelle 27 -18cm 27 -melling 27 -264,000 27 -trichen 27 -reread 27 -courtesies 27 -00:51 27 -gun-smuggling 27 -aptamil 27 -nicoletti 27 -jagan 27 -teasdale 27 -microcredit 27 -11,800 27 -thomassey 27 -flir 27 -rampton 27 -rou 27 -lined-up 27 -romeike 27 -gpc 27 -teva 27 -spaceman 27 -herbalist 27 -ruffier 27 -45-second 27 -petitgout 27 -jernigan 27 -fantine 27 -piñera 27 -fiala 27 -snowshoe 27 -ziploc 27 -francisco-oakland 27 -handspike 27 -indianola 27 -mine-resistant 27 -taranto 27 -inline 27 -11/10 27 -1.84 27 -geometrical 27 -telegenic 27 -vukovar 27 -forgivable 27 -derk 27 -deniliquin 27 -major-league 27 -slooh 27 -brix 27 -determinant 27 -puccini 27 -jarno 27 -scratcher 27 -7bn 27 -108th 27 -muon 27 -telework 27 -alkins 27 -2040s 27 -cherubs 27 -0.13 27 -padron 27 -presale 27 -15-yard 27 -voluble 27 -gracey 27 -a35 27 -faker 27 -de-ice 27 -katra 27 -34-man 27 -786 27 -boere 27 -swatman 27 -akhenaten 27 -synchrotron 27 -garters 27 -invidious 27 -synchronize 27 -noora 27 -bagless 27 -gaba 27 -coveting 27 -tema 27 -hern 27 -xiu 27 -borderers 27 -arosa 27 -inauthentic 27 -burrata 27 -kellet 27 -persil 27 -gipper 27 -govia 27 -rendon 27 -tax-and-spend 27 -allitt 27 -sixth-minute 27 -60,000-a-week 27 -cascaded 27 -13-page 27 -staffy 27 -portor 27 -dutch-led 27 -laboeuf 27 -lenard 27 -altmire 27 -telstar 27 -lawley-wakelin 27 -damarcus 27 -windham 27 -3-pointer 27 -jewry 27 --9:00 27 -jossa 27 -meydan 27 -super-hot 27 -esselborn 27 -exaltation 27 -thauvin 27 -6-foot-1 27 -^ 27 -backhoes 27 -wookey 27 -unsmiling 27 -cyborgs 27 -byles 27 -byler 27 -senator-elect 27 
-ksl.com 27 -uhac 27 -elgindy 27 -kolbert 27 -wolfman 27 -precision-guided 27 -churcher 27 -grekos 27 -double-fronted 27 -remixes 27 -astonish 27 -black-out 27 -prance 27 -konami 27 -3.29 27 -blaney 27 -bedpan 27 -pompei 27 -blaxland 27 -sawada 27 -filho 27 -81.5 27 -lucaj 27 -trembles 27 -brownell 27 -smokestacks 27 -fromelles 27 -peltier 27 -stavrou 27 -sofija 27 -underfunding 27 -glamorizing 27 -ex-barcelona 27 -semi-precious 27 -marcey 27 -character-driven 27 -dismally 27 -multilayered 27 -trollstation 27 -meltzer 27 -magliozzi 27 -busson 27 -eddleston 27 -gtc 27 -bernadine 27 -informatics 27 -mallinson 27 -habiba 27 -15ml 27 -dollhouses 27 -6,250 27 -dreyfuss 27 -artesian 27 -hanagan 27 -griffis 27 -abutting 27 -recession-proof 27 -mansoura 27 -55-year 27 -ourl 27 -pyles 27 -konno 27 -trundled 27 -renfe 27 -maung 27 -dudko 27 -pilings 27 -offsite 27 -pamir 27 -kazmierczak 27 -spider-like 27 -kitkats 27 -23cm 27 -centrality 27 -karakoram 27 -combat-related 27 -kaler 27 -huws 27 -bonheur 27 -caran 27 -swadlincote 27 -holloman 27 -stovell 27 -ningaloo 27 -clarice 27 -700km 27 -sadeghi 27 -cold-water 27 -keep-fit 27 -sisulu 27 -rheas 27 -126million 27 -high-demand 27 -spaceflights 27 -fandango 27 -qamishli 27 -potito 27 -comms 27 -zwick 27 -backsliding 27 -thigh-skimming 27 -7-year 27 -anticlimactic 27 -brelade 27 -monopods 27 -dimitrovska 27 -al-mutlaq 27 -devere 27 -ozkan 27 -riptide 27 -jairzinho 27 -aiton 27 -audriana 27 -mobutu 27 -24-man 27 -ahmar 27 -returner 27 -souffle 27 -understating 27 -window.location.host 27 -70cl 27 -palpably 27 -noncommercial 27 -reallocate 27 -hitchins 27 -yaw 27 -jayda 27 -mayoress 27 -metabolised 27 -humam 27 -flat-lining 27 -mid-south 27 -jokin 27 -corroding 27 -zakynthos 27 -bruijn 27 -multi-use 27 -slippy 27 -hwange 27 -christus 27 -nbclp.vidframe.width 27 -pemble 27 -maryellen 27 -380million 27 -nanos 27 -adaptors 27 -nishida 27 -leadbeater 27 -math.random 27 -kneier 27 -reprocessed 27 -hsv-1 27 -minkoff 27 -cour 27 -patchouli 27 -723 27 -plunked 27 -filatov 27 -563 27 -kcpq 27 -four-nation 27 -re-route 27 -vilakazi 27 -twito 27 -dozes 27 -delonas 27 -malisse 27 -originators 27 -voa 27 -lawday 27 -gunboats 27 -garcia-pellon 27 -icelanders 27 -debs 27 -wwbt 27 -urbaniak 27 -leyburn 27 -acsu 27 -biscotti 27 -sarandos 27 -lower-tier 27 -sultans 27 -zoomed-in 27 -mountain-top 27 -veiovis 27 -turreted 27 -grooving 27 -tambourine 27 -borakove 27 -waveney 27 -westmont 27 -lipkin 27 -weckerly 27 -slingo 27 -c.i.a. 
27 -pobitora 27 -dabre 27 -sentinel-1a 27 -bpm 27 -chippewa 27 -sracic 27 -sahra 27 -collum 27 -3.13 27 -buzzards 27 -jh 27 -abington 27 -fairfueluk 27 -unmiss 27 -arab-americans 27 -yachtsmen 27 -alicea 27 -caul 27 -krajicek 27 -side-on 27 -lotterer 27 -tamas 27 -playstations 27 -longshore 27 -fenders 27 -jalaluddin 27 -ichthyosaurs 27 -congregational 27 -baur 27 -slaw 27 -shadwell 27 -spine-chilling 27 -42-inch 27 -appetit 27 -ki-suck 27 -lightning-quick 27 -sea-tac 27 -five-piece 27 -darabont 27 -30-year-olds 27 -large-capacity 27 -wardy 27 -bennington 27 -tyger 27 -remis 27 -musky 27 -200-metre 27 -manel 27 -witte 27 -ningxia 27 -nikolaevo 27 -cube-shaped 27 -mtn 27 -furtively 27 -1519 27 -arnau 27 -post-saddam 27 -leadenhall 27 -frozen-themed 27 -wagtail 27 -anti-sickness 27 -wadham 27 -cleavages 27 -vivendi 27 -vainest 27 -temperaments 27 -kaczowka 27 -veiszadeh 27 -woode 27 -bbqs 27 -grenadiers 27 -tule 27 -inwardly 27 -tableaux 27 -venturebeat 27 -emenalo 27 -tookey 27 -kiis 27 -valkenberg 27 -mannheim 27 -legler 27 -beate 27 -jalopnik 27 -fazal 27 -checkerboard 27 -motte 27 -disrobed 27 -boucek 27 -mcclane 27 -sally-ann 27 -top-six 27 -dog-eared 27 -roomed 27 -fluoridated 27 -dozy 27 -halvorsen 27 -bourgeoisie 27 -klatten 27 -glanton 27 -chaumont 27 -shunts 27 -kick-ups 27 -h1 27 -kravit 27 -kvoa 27 -behenna 27 -boreholes 27 -strasberg 27 -higinbotham 27 -non-chinese 27 -jalen 27 -lanesborough 27 -margam 27 -masayoshi 27 -oberg 27 -world-title 27 -40-pound 27 -rehiring 27 -fifty-nine 27 -name-dropping 27 -uwire 27 -feige 27 -6abc 27 -645,000 27 -6.17 27 -corbitt 27 -non-official 27 -sergeyev 27 -suchy 27 -heu 27 -hel 27 -krispies 27 -cuadra 27 -slagging 27 -mitten 27 -most-loved 27 -polio-like 27 -ca. 27 -fisht 27 -circumnavigating 27 -stumpf 27 -ainge 27 -39.8 27 -lozenge 27 -luckless 27 -ambassadorship 27 -imaginatively 27 -monville 27 -lept 27 -megalomania 27 -freshened 27 -decontaminating 27 -swati 27 -lenzi 27 -microcephaly 27 -skydrive 27 -tequesta 27 -salsbury 27 -ciampino 27 -wixom 27 -aldawsari 27 -nipper 27 -ravitz 27 -jumanji 27 -tippers 27 -16-19 27 -01:13 27 -01:12 27 -loye 27 -decelerator 27 -aquabounty 27 -keren 27 -tempura 27 -hand-raised 27 -moncef 27 -luwak 27 -budleigh 27 -osim 27 -odey 27 -face-covering 27 -pausch 27 -aujali 27 -tinier 27 -backwoods 27 -zoolander 27 -raffled 27 -desimone 27 -baltimore-based 27 -88.5 27 -abdennour 27 -holla 27 -holli 27 -brownwood 27 -kasdan 27 -jabar 27 -lokey 27 -news-press 27 -rooming 27 -bjoerndalen 27 -jadoon 27 -4-door 27 -yerba 27 -1.62 27 -maturey 27 -andries 27 -doohen 27 -shashank 27 -pre-1967 27 -coi 27 -hiri 27 -neri 27 -tite 27 -hw 27 -wollover 27 -heir-apparent 27 -primes 27 -rupe 27 -seymore 27 -activations 27 -fictionalised 27 -selebi 27 -initiator 27 -robitille 27 -nasties 27 -mciiroy 27 -keesey 27 -sashaying 27 -sexualizing 27 -uprated 27 -swart 27 -teitelbaum 27 -puggle 27 -unlisted 27 -yallop 27 -multiplatinum 27 -sipri 27 -cerussi 27 -brothers-in-law 27 -buenaventura 27 -anti-state 27 -oana 27 -maenza 27 -co-sponsoring 27 -sekhon 27 -41-gun 27 -yogurts 27 -doorbells 27 -heathman 27 -warmups 27 -oilman 27 -sing-song 27 -poch 27 -844 27 -wymondham 27 -mcbean 27 -cartland 27 -infiltrator 27 -haart 27 -beseler 27 -halong 27 -mindlessly 27 -do-able 27 -lale 27 -bokova 27 -nierop-reading 27 -officiant 27 -jack-o 27 -kkr 27 -wildschut 27 -liorancas 27 -bahar 27 -scald 27 -arseny 27 -pervasiveness 27 -barwick 27 -perfectly-timed 27 -amoruso 27 -reddick 27 -mid-eighties 27 -oswegatchie 27 -redfield 27 -fernand 27 
-khumalo 27 -998 27 -video-on-demand 27 -al-dabi 27 -crossbencher 27 -815,000 27 -juggins 27 -saltash 27 -doster 27 -extol 27 -bunkhouse 27 -pokerstars 27 -asc 27 -teghan 27 -villoldo 27 -aud 27 -sub-atomic 27 -immensity 27 -lele 27 -giffard 27 -caritas 27 -speers 27 -offrink 27 -trackpad 27 -lagomarsino 27 -moonscape 27 -basher 27 -november/december 27 -ultra-light 27 -hydrologist 27 -tomasson 27 -chinaman 27 -self-love 27 -sexology 27 -22:37 27 -22:35 27 -shabbat 27 -tavarez 27 -britpop 27 -cargoes 27 -machine-gunned 27 -remittance 27 -clarksburg 27 -resentenced 27 -life-cycle 27 -gossage 27 -air-quality 27 -nbclp.vidframe.src 27 -ipen 27 -fantasists 27 -desarae 27 -goodling 27 -40-tonne 27 -peterhof 27 -fretful 27 -arenal 27 -teman 27 -arraignments 27 -redolent 27 -mohali 27 -giese 27 -metodiev 27 -bd 27 -horlick 27 -deus 27 -5-foot-3 27 -chelsee 27 -preez 27 -80kg 27 -hammacher 27 -vettori 27 -veda 27 -bergrin 27 -tesoro 27 -regular-sized 27 -skijoring 27 -threapleton 27 -chinooks 27 -lunch-time 27 -areesha 27 -shires 27 -vad 27 -doukas 27 -ouest 27 -netscape 27 -silken 27 -shehata 27 -nordsjaelland 27 -shechtman 27 -altars 27 -carotene 27 -20,000-acre 27 -boneheaded 27 -no-hitter 27 -wormed 27 -supervolcanoes 27 -adamek 27 -ovum 27 -wedgie 27 -awesomely 27 -13/5 27 -aramco 27 -bye-bye 27 -prattsville 27 -sproul 27 -ticketus 27 -fully-stocked 27 -legalese 27 -akbaruddin 27 -lima-marin 27 -tuta 27 -mergea 27 -burmis 27 -50-acre 27 -vedra 27 -diggles 27 -cooppen 27 -mironov 27 -dharma 27 -'09 27 -kharkov 27 -eight-strong 27 -wallflowers 27 -steiff 27 -beeld 27 -janow 27 -kinzinger 27 -steller 27 -housecleaning 27 -thumbs-down 27 -marios 27 -mbes 27 -musallam 27 -shoulder-launched 27 -goofball 27 -keston 27 -misplace 27 -larrieu 27 -gula 27 -extell 27 -cagney 27 -trendsetting 27 -supermarine 27 -quavis 27 -ludmer 27 -feder 27 -hmtd 27 -twinning 27 -fs 27 -mirabal 27 -beram 27 -whiskeys 27 -munger 27 -nbclp 27 -forgan 27 -millipede 27 -lando 27 -founds 27 -rohypnol 27 -pre-dating 27 -hickmott 27 -0.14 27 -tesche 27 -angeline 27 -governorates 27 -back-channel 27 -buderim 27 -stilley 27 -musculature 27 -skimmer 27 -ashkan 27 -denihan 27 -rtÉ 27 -brunker 27 -cic 27 -basista 27 -super-fight 27 -imbecile 27 -leweb 27 -half-backs 27 -16-acre 27 -smokies 27 -llona 27 -wnem 27 -colucci 27 -sobrr 27 -vietnam-era 27 -peà 27 -cremating 27 -farrand 27 -clarkston 27 -triche 27 -wgcl 27 -cicciaro 27 -loutish 27 -manha 27 -fox5 27 -tranquillisers 27 -00:26 27 -roseman 27 -wigton 27 -barstool 27 -hard-man 27 -={ 27 -cerritos 27 -tashi 27 -obliquely 27 -gagandip 27 -male-to-female 27 -top-shelf 27 -troposphere 27 -gaylor 27 -subtype 27 -10,000-member 27 -décolletage 27 -42.1 27 -vandewalle 27 -urea 27 -harpviken 27 -downworth 27 -tantalisingly 27 -punisher 27 -dangote 27 -evd 27 -gillam 27 -lindop 27 -leroux 27 -orma 27 -kanda 27 -baptista 27 -1772 27 -excised 27 -cubesats 27 -islamization 27 -grandmas 27 -hinrichs 27 -26000 27 -bodner 27 -rafaelle 27 -calver 27 -valois 27 -lifx 27 -booze-fueled 27 -bresnik 27 -penge 27 -semi-aquatic 27 -takhar 27 -shayna 27 -ruc 27 -minneapolis-based 27 -wemyss 27 -murrison 27 -porbeagle 27 -diandra 27 -kipping 27 -leuchars 27 -b'tselem 27 -gilford 27 -out-of-competition 27 -lavoro 27 -emsworth 27 -sports-mad 27 -icecube 27 -kilwa 27 -lakmal 27 -mccorkle 27 -berrien 27 -second-longest 27 -37.7 27 -pre-owned 27 -nbclp.vidframe 27 -degan 27 -dongs 27 -edelweiss 27 -mihaela 27 -hangu 27 -steady-state 27 -walczak 27 -be-all 27 -sitton 27 -anacortes 27 -brazenness 27 -chanko 
27 -lambretta 27 -gatherer 27 -mcquoid 27 -wides 27 -couldn 27 -tearjerker 27 -paladin 27 -al-essawi 27 -sedna 27 -sanam 27 -scowls 27 -bloomston 27 -muhammadu 27 -ulf 27 -wadlow 27 -kazim-richards 27 -debenture 27 -uncontroversial 27 -baylee 27 -manchin-toomey 27 -crerand 27 -duggins 27 -off-and-on 27 -surdeanu 27 -r3 27 -uploader 27 -self-congratulatory 27 -smulders 27 -lloyd-jones 27 -baumrucker 27 -gardephe 27 -falque 27 -toeing 27 -instagrammers 27 -hajek-richardson 27 -pictet 27 -varia 27 -francoeur 27 -jarratt 27 -conveyance 27 -cohabitees 27 -madelyn 27 -pisco 27 -arboreal 27 -slovenians 27 -hillarycare 27 -carrasquillo 27 -kalaupapa 27 -snively 27 -405,000 27 -aksyonov 27 -orrell 27 -guestlist 27 -goal-kick 27 -hard-edged 27 -doba 27 -detoxing 27 -lautzenheiser 27 -sixty-eight 27 -y-word 27 -wensleydale 27 -lome 27 -mish-mash 27 -speciale 27 -deyan 27 -brew-bevan 27 -pastiche 27 -punchlines 27 -unshackled 27 -pengilly 27 -tiempo 27 -sucrose 27 -dither 27 -chirchir 27 -1827 27 -math.floor 27 -incumbency 27 -17,200 27 -renmin 27 -pitiless 27 -stickland 27 -dunhams 27 -agdal 27 -yaqub 27 -goalpost 27 -dalhousie 27 -beese 27 -leacy 27 -aquascutum 27 -ndume 27 -dog-lover 27 -idolise 27 -oglala 27 -internecine 27 -tasali 27 -1,675 27 -22-day 27 -litem 27 -knick-knacks 27 -neurosciences 27 -churchgoing 27 -malu 27 -women2drive 27 -tioga 27 -orakzai 27 -domin 27 -overlays 27 -gatecrasher 27 -canoeists 27 -abdisamad 27 -transgenic 27 -songbird 27 -storehouse 27 -johnsonville 27 -vogue.co.uk 27 -crathorne 27 -ma'lik 27 -innocent-looking 27 -12-sided 27 -37mph 27 -lexis 27 -kneeing 27 -sectional 27 -rastafarians 27 -homophobes 27 -timmerman 27 -suellen 27 -neoconservative 27 -mell 27 -entsch 27 -noblewoman 27 -eyres 27 -halswell 27 -naby 27 -drover 27 -126th 27 -heaviness 27 -mirela 27 -six-ton 27 -atha 27 -stritch 27 -clapboard 27 -alsip 27 -400-page 27 -skool 27 -pandered 27 -tithing 27 -180s 27 -match-making 27 -terrebonne 27 -re-engineered 27 -rapidly-spreading 27 -hadlow 27 -tinky 27 -khatun 27 -poster-boy 27 -nbclp.vidframe.scrolling 27 -bedchamber 27 -rengo 27 -middlebrook 27 -raheel 27 -nagpaul 27 -bwindi 27 -paar 27 -and-a-half 27 -notre-dame 27 -pornographers 27 -disbarment 27 -three-fifths 27 -tvrdon 27 -semitrailer 27 -henney 27 -high-flier 27 -icp 27 -volkers 27 -unfreeze 27 -puentes 27 -friedberg 27 -40,181 27 -vovchik 27 -ordon 27 -devane 27 -subsec 27 -abdelmalek 27 -dacha 27 -window.location.href 27 -curation 27 -rube 27 -hadjipateras 27 -powerlifter 27 -virile 27 -ramat 27 -montel 27 -sorcerers 27 -travi 27 -c3po 27 -rosat 27 -bramma 27 -cantered 27 -mpshadow 27 -simmers 27 -masochism 27 -2300 27 -exorcised 27 -astrophotography 27 -furtick 27 -167th 27 -lasker 27 -lowrie 27 -ghalib 27 -tansey 27 -5.49 27 -broga 27 -englishwoman 27 -double-life 27 -easkey 27 -airstrips 27 -kodirov 27 -mtc 27 -757s 27 -nbclp.vidframe.style.border 27 -a.m.-6 27 -605,000 27 -47.6 27 -skittled 27 -dfds 27 -stabilises 27 -samuragochi 27 -flash-flood 27 -abrar 27 -adwords 27 -swaffham 27 -elaraby 27 -fair-trade 27 -tv-am 27 -ims 27 -gesticulates 27 -hsr 27 -30-inch 27 -11-0 27 -midwicket 27 -eimers 27 -bucknell 27 -9/12 27 -etsy.com 27 -kaupthing 27 -571 27 -576 27 -mautner 27 -bunking 27 -watsons 27 -grindon 27 -tevin 27 -fourth-year 27 -glamorized 27 -ktvu-tv 27 -roatan 27 -document.getelementbyid 27 -jair 27 -regelbrugge 27 -1694 27 -schmaler 27 -kiplagat 27 -izzedine 27 -graziani 27 -re-energized 27 -bilirubin 27 -conrade 27 -kilroy 27 -hand-knitted 27 -cleaves 27 -79ad 27 -agema 27 
-much-younger 27 -4mph 27 -mahmoudiya 27 -rehma 27 -non-responsive 27 -preps 27 -hutches 27 -ergonomics 27 -liberalize 27 -topically 27 -dispirited 27 -nieuw 27 -rightward 27 -seafield 27 -hoess 27 -recieve 27 -weggemann 27 -gruppioni 27 -luth 27 -hertforshire 27 -lubbers 27 -sce 27 -cross-over 27 -up-and-comer 27 -13p 27 -kens 27 -berkoff 27 -match-winners 27 -blag 27 -single-season 27 -bramwell 27 -casemiro 27 -ldsd 27 -51million 27 -unavoidably 27 -chaand 27 -catz 27 -heaves 27 -izquierdo 27 -tempore 27 -vasileva 27 -charalambopoulos 27 -kundra 27 -wkrn 27 -unluckily 27 -vanpelt 27 -wunderlich 27 -cantore 27 -brushy 27 -dedieu 27 -lacey-marie 27 -719 27 -leaded 27 -borstal 26 -age-progression 26 -kostelic 26 -zakharova 26 -sletten 26 -covered-up 26 -gravitationally 26 -realigned 26 -standing-room-only 26 -dilworth 26 -lifers 26 -kemeny 26 -galeana 26 -shipmate 26 -butyric 26 -fourth-biggest 26 -volkov 26 -twitterers 26 -4.56 26 -elrod 26 -cook-morrissey 26 -shizuoka 26 -colao 26 -gualtieri 26 -pollan 26 -reprieved 26 -straitened 26 -troubleshoot 26 -layden 26 -sidonie 26 -reith 26 -kydd 26 -neocortex 26 -lattakia 26 -carbajal 26 -nathanson 26 -74th-minute 26 -bellied 26 -weaponise 26 -aix-en-provence 26 -tanzanians 26 -immerses 26 -rpf 26 -castellon 26 -cnpc 26 -debney 26 -docherty-puncheon 26 -al-faleh 26 -scarlett-rose 26 -time-stamped 26 -abersoch 26 -yuanyuan 26 -kmph 26 -jassem 26 -yids 26 -'96 26 -tiberi 26 -jentzsch 26 -salama 26 -alila 26 -sustainment 26 -f16 26 -upwind 26 -spontana 26 -38c 26 -oolong 26 -high-growth 26 -pub-goers 26 -sbc 26 -laban 26 -ksaz 26 -sharna 26 -pasichuke 26 -keli 26 -permeable 26 -gazers 26 -manochat 26 -engenders 26 -15-page 26 -mampuru 26 -freye 26 -noto 26 -boak 26 -,16 26 -achane 26 -icsr 26 -14,800 26 -foisted 26 -roil 26 -nuptial 26 -overhear 26 -zagar 26 -rdio 26 -hither 26 -off-roading 26 -airboat 26 -backlogged 26 -corso 26 -pushbike 26 -15-0 26 -pachyderm 26 -puri 26 -t-cell 26 -off-label 26 -pliocene 26 -britches 26 -siku 26 -wherry 26 -hosko 26 -maccas 26 -craps 26 -mid-fifties 26 -hawkers 26 -kanlica 26 -rsupal 26 -deactivation 26 -cul 26 -well-guarded 26 -petzschner 26 -musharaf 26 -scobey 26 -directorship 26 -kittery 26 -free-roaming 26 -pleban 26 -sydow 26 -galecki 26 -hounshell 26 -leatherbacks 26 -bhubaneswar 26 -eye-gouging 26 -videoconference 26 -deviance 26 -baryons 26 -best-equipped 26 -donne 26 -mods 26 -godbold 26 -hurayra 26 -podgy 26 -friess 26 -bigglesworth 26 -inova 26 -dimitrios 26 -itchiness 26 -blacktip 26 -nurul 26 -14-0 26 -englefield 26 -touch-and-go 26 -spell-binding 26 -comport 26 -pala 26 -10.05 26 -bryn-y-gog 26 -verhelst 26 -statman 26 -cold-related 26 -counterattacks 26 -encircles 26 -15.00 26 -kinski 26 -leta 26 -mio 26 -cap-haitien 26 -grieg 26 -lozick 26 -voronin 26 -selznick 26 -ten-years-old 26 -cash-in 26 -caluori 26 -naturalistic 26 -convocation 26 -beta-amyloid 26 -scafell 26 -scissor-kick 26 -centrepieces 26 -popsicles 26 -6.00 26 -glaswegians 26 -alaia 26 -beryllium 26 -tikker 26 -v.stiviano 26 -aurelia 26 -optimisation 26 -kountouris 26 -pontoons 26 -cleckheaton 26 -taliban-led 26 -dehydrate 26 -in-elevator 26 -zhangjiajie 26 -wynton 26 -babygrow 26 -nonwhites 26 -22:05 26 -22:08 26 -kito 26 -re-assess 26 -mpofu 26 -abeer 26 -saenuri 26 -netiquette 26 -deschutes 26 -frc 26 -after-work 26 -noms 26 -duper 26 -70lbs 26 -adler-jensen 26 -valets 26 -sexologist 26 -straightener 26 -reflectivity 26 -newberg 26 -lessard 26 -mckone 26 -prouvost 26 -kuvin 26 -kovach 26 -lobb 26 -13-man 26 -ndaba 26 -aceng 26 
-kristinn 26 -berwyn 26 -lennoxtown 26 -lpg 26 -40.3 26 -pakistan-born 26 -tonja 26 -lithographs 26 -7.75 26 -sara-pod 26 -hart-moxon 26 -rescission 26 -woricker 26 -grandly 26 -10x10 26 -#blacklivesmatter 26 -hessel 26 -bleill 26 -goslett 26 -vapid 26 -mcgirr 26 -top-seed 26 -export-import 26 -yoovidhaya 26 -gardaí 26 -tedder 26 -red-soled 26 -sligh 26 -pirillo 26 -gannet 26 -23p 26 -marucci 26 -moonshot 26 -11-14 26 -delord 26 -marron 26 -old-age 26 -mies 26 -kishida 26 -hiscutt 26 -cureton 26 -zubieta 26 -lipoprotein 26 -rhododendrons 26 -vindicating 26 -trinder 26 -mugla 26 -landale 26 -pluses 26 -agulla 26 -t-bar 26 -shon 26 -reddened 26 -abdulsalam 26 -sappers 26 -allsorts 26 -enos 26 -b-1 26 -sherbini 26 -lustful 26 -bostonian 26 -doggies 26 -truckee 26 -albon 26 -all-wheel 26 -ayodhya 26 -zihuatanejo 26 -cryptocurrency 26 -hand-fed 26 -kowtow 26 -charades 26 -garofalo 26 -anything-goes 26 -jeanie 26 -gaillard 26 -superimposing 26 -50:50 26 -sharkeisha 26 -piaget 26 -marcial 26 -21:58 26 -portnow 26 -chokeholds 26 -gelbart 26 -kjetil 26 -fini 26 -bengtson 26 -hughesy 26 -romanovs 26 -claudiu 26 -judice 26 -waypoint 26 -kant 26 -capoeira 26 -kuching 26 -stayt 26 -evened 26 -1720 26 -tel-aviv 26 -staite 26 -then-fiance 26 -regale 26 -kashyap 26 -papillary 26 -petoskey 26 -fibbing 26 -goodsir 26 -brunet 26 -tittle 26 -clumping 26 -schuerrle 26 -summly 26 -braly 26 -photonic 26 -mcgahan 26 -chocoholic 26 -barbash 26 -10-7 26 -currin 26 -vaid 26 -menotti 26 -emaar 26 -croup 26 -kfvs 26 -speller 26 -manningham-buller 26 -tuller 26 -tasca 26 -browned 26 -magnetised 26 -microbrewery 26 -plotnikov 26 -woodlief 26 -tuv 26 -teratoma 26 -+66 26 -birand 26 -17-13 26 -categorization 26 -al-jubeir 26 -second-in-line 26 -lowest-paid 26 -ec135 26 -ratting 26 -strange-looking 26 -evp 26 -lesniak 26 -well-attended 26 -low-life 26 -second-division 26 -hoewedes 26 -infernal 26 -nailsea 26 -bioethicist 26 -wari 26 -belying 26 -brathwaite 26 -1625 26 -18k 26 -pedigrees 26 -monegan 26 -dementias 26 -subsonic 26 -eakin 26 -muscovites 26 -quicksilver 26 -ordonez 26 -readjustment 26 -clematis 26 -rolene 26 -tonny 26 -intermountain 26 -lenfest 26 -hegwood 26 -wels 26 -ajantha 26 -nasar 26 -zarifmo 26 -sasheer 26 -corbyn 26 -sittwe 26 -hinoi 26 -3tv 26 -satchwell 26 -unsellable 26 -visconti 26 -brana 26 -misstated 26 -neue 26 -brigg 26 -zulfikar 26 -al-watan 26 -first-past-the-post 26 -eight-months-old 26 -tumen 26 -sexually-charged 26 -prosciutto 26 -hussar 26 -barbecuing 26 -mirdjaja 26 -droning 26 -1607 26 -petering 26 -14-week-old 26 -murry 26 -blaylock 26 -megabyte 26 -kahrizak 26 -humberts 26 -cosseted 26 -baulk 26 -possession-based 26 -rox 26 -6.3-magnitude 26 -senden 26 -ruses 26 -greenstone 26 -sukenik 26 -braunschweig 26 -convalescence 26 -geoffroy 26 -fly-halves 26 -hackford 26 -karg 26 -shaer 26 -italian-made 26 -howser 26 -kempsey 26 -wilbekin 26 -marnix 26 -all-too-familiar 26 -wild-caught 26 -anushika 26 -brdc 26 -bishopsgate 26 -cryptolocker 26 -iinet 26 -misdiagnosing 26 -argentino 26 -7.1.1 26 -zip-line 26 -talkies 26 -aggie 26 -o'byrne 26 -prang 26 -naturals 26 -sessa 26 -stegosaurus 26 -lobsang 26 -sharan 26 -2.02 26 -stéphanie 26 -zebaida 26 -78m 26 -cuspert 26 -improvisational 26 -dearer 26 -reciprocating 26 -stang 26 -oche 26 -virulence 26 -step-sister 26 -ex-everton 26 -lucci 26 -lucca 26 -ove 26 -pasteurized 26 -surin 26 -peloquin 26 -rives 26 -columbine-style 26 -fridge-freezer 26 -vahidipour 26 -brieske 26 -buxbaum 26 -shep 26 -monied 26 -instituto 26 -d'alpuget 26 -aspies 26 
-northlandz 26 -braeden 26 -2.32 26 -wisley 26 -hardcourt 26 -competencies 26 -brockmann 26 -amnesties 26 -ghazala 26 -three-tier 26 -slash-and-burn 26 -unreconstructed 26 -loitered 26 -plunk 26 -emme 26 -dublin-born 26 -ehrc 26 -welsch 26 -disempowered 26 -sukamaran 26 -house-made 26 -murphy-o'connor 26 -edreams 26 -hoyal 26 -well-tailored 26 -landes 26 -doublet 26 -re-sold 26 -biomechanical 26 -gairloch 26 -doane 26 -vigen 26 -1808 26 -120lbs 26 -fisker 26 -dso 26 -dsa 26 -molesey 26 -worvell 26 -azelle 26 -laconic 26 -ruehli 26 -0.39 26 -0.33 26 -0.31 26 -super-rocket 26 -jolokia 26 -complutense 26 -commerical 26 -soeda 26 -dovey 26 -falah 26 -mcl 26 -ciprian 26 -6:10 26 -jovanovic 26 -lc 26 -simples 26 -galarza 26 -anodyne 26 -edenbridge 26 -ayoob 26 -stockford 26 -creel 26 -226,000 26 -roydon 26 -debary 26 -2009-11 26 -airdropped 26 -dryly 26 -adult-only 26 -superfly 26 -al-banna 26 -koda 26 -southam 26 -robotically 26 -pacer 26 -8:25 26 -next.co.uk 26 -has-been 26 -murfitt 26 -pannell 26 -svendsen 26 -curnock 26 -rent-controlled 26 -whetted 26 -post-mortems 26 -zeitz 26 -rcs 26 -christabelle 26 -jangling 26 -tbh 26 -nanetti 26 -astrakhan 26 -wanchai 26 -faina 26 -low-carbohydrate 26 -ruparelia 26 -merengue 26 -crays 26 -bombmaker 26 -domi 26 -saltsman 26 -chevelle 26 -refloating 26 -mazzeo 26 -carey-jones 26 -wmc-tv 26 -portaledge 26 -galerie 26 -worboys 26 -reintegrating 26 -surranna 26 -corticosteroids 26 -boesch 26 -lofoten 26 -nosair 26 -helge 26 -half-human 26 -gedbrand10 26 -breaux 26 -marigolds 26 -219,000 26 -yegor 26 -jionni 26 -raney 26 -kratz 26 -igarashi 26 -poinsettias 26 -kodachrome 26 -newlyn 26 -car-makers 26 -papering 26 -fingerless 26 -danai 26 -interscope 26 -eelgrass 26 -medi-cal 26 -lorcan 26 -jadon 26 -vergina 26 -timperley 26 -75-year 26 -vidinhar 26 -pterosaurs 26 -lolong 26 -1664 26 -instinctual 26 -unreadable 26 -overstates 26 -mccullin 26 -inductee 26 -anon 26 -saucer-shaped 26 -gacaca 26 -hemmingway 26 -biddlecombe 26 -defray 26 -match-points 26 -yetis 26 -chakra 26 -quarrying 26 -scutt 26 -swanning 26 -veles 26 -inlay 26 -ex-husbands 26 -mcelynn 26 -qadhi 26 -sedensky 26 -air-raid 26 -ruxton 26 -47-year 26 -charbel 26 -dogtv 26 -nesat 26 -bustier 26 -croppa 26 -magisterial 26 -dossena 26 -martland 26 -djimon 26 -aldon 26 -motorhomes 26 -c-diff 26 -qasr 26 -scandal-ridden 26 -orth 26 -monthslong 26 -carmeli 26 -carmela 26 -hayleigh 26 -etra 26 -grandmother-of-eight 26 -phuong 26 -bogenberger 26 -rendlesham 26 -beefburgers 26 -splish 26 -23:07 26 -versini 26 -averil 26 -langtry 26 -blenkinsopp 26 -almere 26 -sarina 26 -5-megapixel 26 -postgate 26 -hashing 26 -adelphi 26 -angley 26 -eighth-largest 26 -shooing 26 -desirae 26 -windier 26 -bhambri 26 -torments 26 -peul 26 -lausd 26 -pterosaur 26 -almaleki 26 -under-17s 26 -hard-to-find 26 -drink-drivers 26 -okonkwo 26 -keepy-uppy 26 -rayment 26 -dontre 26 -multi-tool 26 -savva 26 -near-empty 26 -butragueno 26 -sarcevic 26 -enca 26 -documentary-maker 26 -corridon 26 -5:10 26 -farhana 26 -waddingham 26 -livelier 26 -multifunctional 26 -simelane 26 -bjelke-petersen 26 -folt 26 -morrisey 26 -legitimized 26 -balking 26 -laindon 26 -cengiz 26 -off-beat 26 -szafranski 26 -dasani 26 -thx 26 -nakhon 26 -melonie 26 -:--lrb- 26 -altamira 26 -slavish 26 -chris_cutmore 26 -megamouth 26 -pas-de-calais 26 -sureties 26 -decentralised 26 -figueres 26 -stromberg 26 -clarkes 26 -13-14 26 -beta-blockers 26 -bosc 26 -aouffir 26 -chipchase 26 -iranian-made 26 -naiman 26 -cassesso 26 -phythian 26 -euclides 26 -threlkeld 26 
-schmoozing 26 -139,000 26 -nacion 26 -finster 26 -z4 26 -marton 26 -70g 26 -tavurvur 26 -50bn 26 -podiatrist 26 -realclearpolitics 26 -limpet 26 -adt 26 -tamira 26 -reorganizing 26 -whitmire 26 -mccloy 26 -rotherhithe 26 -white-minority 26 -kossove 26 -rasha 26 -sisco 26 -terziev 26 -bourbons 26 -colonialist 26 -moscone 26 -100metres 26 -siteman 26 -wisps 26 -grammy-award 26 -mortuaries 26 -kidder 26 -keyworth 26 -naranjo 26 -boitano 26 -mytablet 26 -papazian 26 -unfamiliarity 26 -labrie 26 -scholten 26 -gabryszak 26 -lachaux 26 -9.05 26 -keysar 26 -synergies 26 -uneasily 26 -margrave 26 -pearsall 26 -23-month 26 -spectrometers 26 -mcdiarmid 26 -1:50 26 -kilday 26 -gastroenterology 26 -phat 26 -langurs 26 -discernment 26 -djerba 26 -seventeenth 26 -pleadings 26 -sunshine.co.uk 26 -helan 26 -tangential 26 -brrr 26 -domhnall 26 -newsnet5 26 -waites 26 -shuvalov 26 -lydd 26 -'80 26 -seabob 26 -1,005 26 -eye-level 26 -snarked 26 -28-27 26 -widener 26 -jamilah 26 -dekkers 26 -savoia 26 -dysmorphia 26 -hipwood 26 -escarpment 26 -braying 26 -gunung 26 -zululand 26 -crumpton 26 -okafor 26 -sei 26 -stabler 26 -mcnugget 26 -21:22 26 -ihh 26 -lunesta 26 -metalwala 26 -monoliths 26 -prugh 26 -agustawestland 26 -lactobacillus 26 -albasman 26 -under-privileged 26 -goodis 26 -kassel 26 -loompa 26 -9kg 26 -ib 26 -i8 26 -nosh 26 -sexless 26 -helmet-to-helmet 26 -belfiore 26 -afb 26 -swalberg 26 -isight 26 -aggro 26 -alani 26 -rafols 26 -joanie 26 -l'osservatore 26 -short-tempered 26 -autosport 26 -250-year-old 26 -hep 26 -surround-sound 26 -bedley 26 -tudor-style 26 -.32 26 -merten 26 -shiming 26 -wellons 26 -legarreta 26 -keitel 26 -muddying 26 -wiay 26 -astill 26 -french-canadian 26 -marnier 26 -kapaun 26 -risoldi 26 -anti-narcotics 26 -ede 26 -succesful 26 -jokesters 26 -zakary 26 -broods 26 -ruder 26 -jobe 26 -modifiable 26 -brogue 26 -galashiels 26 -metzer 26 -easington 26 -elizaveta 26 -reinstalled 26 -slim-fitting 26 -1989-90 26 -3.57 26 -eade 26 -krychowiak 26 -bloco 26 -ragusa 26 -01:19 26 -schreck 26 -prozer 26 -supposition 26 -tharsis 26 -famagusta 26 -865 26 -23:30 26 -painterly 26 -stear 26 -inversely 26 -kerem 26 -indexing 26 -officals 26 -floorboard 26 -dishcloths 26 -chater 26 -marial 26 -biabiany 26 -windass 26 -stanzel 26 -timber-framed 26 -swaraj 26 -glassey 26 -ismaili 26 -arco 26 -mv-22 26 -hampel 26 -tattooists 26 -43.6 26 -43.3 26 -achim 26 -el-araj 26 -vear 26 -unearned 26 -nutcrackers 26 -turntables 26 -saida 26 -amash 26 -burder 26 -mini-submarine 26 -cleves 26 -rison 26 -dzagoev 26 -malmgren 26 -scotton 26 -wind-whipped 26 -applique 26 -manesh 26 -cessnock 26 -strand-1 26 -souq 26 -lovatt 26 -mech 26 -healthy-eating 26 -2.13 26 -non-uniform 26 -interconnecting 26 -cronk 26 -oresund 26 -eulalia 26 -ikaika 26 -2043 26 -lmao 26 -falinge 26 -nakayama 26 -ever-closer 26 -underemployment 26 -mother-of-nine 26 -air-defense 26 -concubines 26 -crv 26 -1.63 26 -kovalchuk 26 -manisha 26 -474 26 -lamentably 26 -stableford 26 -pauling 26 -greenall 26 -gurning 26 -recommence 26 -syngenta 26 -marinello 26 -hoedown 26 -chuan 26 -kagin 26 -anchovy 26 -politican 26 -crepes 26 -sayeed 26 -antsy 26 -wkyt 26 -angelis 26 -danks 26 -ostrava 26 -six-match 26 -cristianos 26 -lifters 26 -salt-and-pepper 26 -derulo 26 -ingleby 26 -dwindles 26 -gamlem 26 -skermer 26 -attractively 26 -cunnington 26 -umaniec 26 -karaca 26 -hobbycraft 26 -gaenswein 26 -rials 26 -i-90 26 -ninoy 26 -punctuating 26 -horribilis 26 -precepts 26 -31billion 26 -heforshe 26 -tippit 26 -gnat 26 -three-putt 26 -money-off 26 -unclog 26 
-habra 26 -pavlovic 26 -imre 26 -alesana 26 -nrf 26 -hartridge 26 -;--rrb- 26 -blowdry 26 -repetitively 26 -berlack 26 -zahoor 26 -digitization 26 -transcribe 26 -renzaho 26 -itskov 26 -meem 26 -sumpter 26 -facemasks 26 -now-discredited 26 -mcnear 26 -rebelo 26 -myopathy 26 -landforms 26 -camus 26 -paszkowski 26 -8-5 26 -al-nour 26 -fragmenting 26 -hydroxycut 26 -retief 26 -azevedo 26 -65p 26 -shoot-down 26 -melet 26 -summarise 26 -grampians 26 -1:37 26 -ransome 26 -isr 26 -bone-marrow 26 -die-off 26 -abben 26 -onomah 26 -party-planning 26 -skorzewski 26 -smmt 26 -wofford 26 -squelched 26 -ghigliotty 26 -dalhuisen 26 -otash 26 -hanaa 26 -kunal 26 -stolz 26 -morzine 26 -bajan 26 -cartersville 26 -w5 26 -wr 26 -deliberates 26 -taccetta 26 -maci 26 -wufra 26 -conditionally 26 -wine-tasting 26 -adenauer 26 -boni 26 -40-0 26 -ladonna 26 -system-wide 26 -dvb 26 -intimating 26 -hairston 26 -hamidur 26 -deliriously 26 -geico 26 -raees 26 -hsus 26 -150km 26 -expedia.com 26 -boatloads 26 -better-equipped 26 -favero 26 -2.51 26 -free-press 26 -flashdance 26 -ataui 26 -filipowicz 26 -three-months 26 -151,000 26 -tinley 26 -anxiang 26 -post-mubarak 26 -colonize 26 -hughes-smith 26 -corsham 26 -adélie 26 -birling 26 -sabers 26 -non-molestation 26 -ustream 26 -spars 26 -bosoms 26 -lene 26 -leni 26 -scrimp 26 -manju 26 -terns 26 -musteata 26 -dualshock 26 -aztecas 26 -namesakes 26 -00:06 26 -misjudging 26 -flammability 26 -mind-body 26 -dahlberg 26 -rudders 26 -masoe 26 -enliven 26 -cartman 26 -unrealized 26 -hans-peter 26 -koppelman 26 -gardena 26 -two-tiered 26 -sania 26 -libdems 26 -roner 26 -unprofessionally 26 -gun-running 26 -choroideremia 26 -drifters 26 -lockdowns 26 -nice-looking 26 -mealworm 26 -baixada 26 -neverending 26 -60-seat 26 -midcentury 26 -font-family 26 -exhumations 26 -mcdavid 26 -tremlett 26 -mandala 26 -nika 26 -liveliest 26 -hs3 26 -muhaned 26 -escherichia 26 -kindergartners 26 -judkins 26 -aldeanos 26 -frassinelli 26 -homemakers 26 -glenister 26 -tapirs 26 -belch 26 -petrescu 26 -spacewalking 26 -cukierman 26 -riling 26 -u.s.-educated 26 -lum 26 -oldco 26 -mateus 26 -workstations 26 -gros 26 -pember 26 -dildo 26 -busskohl 26 -claptrap 26 -465,000 26 -kelson 26 -kress 26 -lorrie 26 -video.foxnews.com 26 -nazim 26 -vaginally 26 -2.39 26 -vcr 26 -palestinian-american 26 -aftereffects 26 -mefloquine 26 -scodelario 26 -2ue 26 -vk 26 -bilaspur 26 -selsey 26 -jcvi 26 -leonean 26 -yishai 26 -totting 26 -hanafi 26 -milam 26 -longden 26 -vice-chancellors 26 -28ft 26 -seebohm 26 -plugin 26 -namdeo 26 -postelection 26 -44.6 26 -hyrons 26 -humanoids 26 -hawtin 26 -sizzurp 26 -spool 26 -jareen 26 -zhai 26 -jianlin 26 -naha 26 -outcries 26 -bottalico 26 -solanki 26 -germination 26 -florentina 26 -qaeda-aligned 26 -contingents 26 -rokeby 26 -f-5 26 -f-15e 26 -surfin 26 -pedestals 26 -melnick 26 -communist-era 26 -6:35 26 -papp 26 -84mph 26 -foxton 26 -tas 26 -headline-making 26 -fat-freezing 26 -ecorse 26 -kristofferson 26 -centurions 26 -complainers 26 -buyens 26 -soldering 26 -commodores 26 -10.75 26 -waldock 26 -mini-strokes 26 -cheektowaga 26 -giallorossi 26 -degraffenreid 26 -staton 26 -argiro 26 -deloney-cain 26 -neos 26 -long-gone 26 -gainful 26 -mariani 26 -malays 26 -admonish 26 -maharishi 26 -culverts 26 -835 26 -batziana 26 -medica 26 -50/1 26 -scambos 26 -r-pennsylvania 26 -crumlin 26 -14-man 26 -snatcher 26 -temperance 26 -ostrom 26 -knaresborough 26 -artifice 26 -re-interred 26 -gutt 26 -ravinder 26 -misrepresents 26 -boreal 26 -lehmkuhl 26 -deedy 26 -871 26 -arment 26 
-ebola-related 26 -samcam 26 -placido 26 -raikes 26 -issaquah 26 -5/6 26 -small-caliber 26 -ybor 26 -stockham 26 -wtkr 26 -olek 26 -marinara 26 -herrman 26 -misty-eyed 26 -jaan 26 -qaraqosh 26 -1612 26 -alter-egos 26 -twiddling 26 -patronages 26 -hiccuping 26 -niederbrach 26 -iliad 26 -2012-2014 26 -guanxi 26 -voloshin 26 -late-running 26 -aicha 26 -al-kidd 26 -luxottica 26 -parsed 26 -ronin 26 -jamaliah 26 -sophy 26 -pacifiers 26 -fadil 26 -kunz 26 -mason-dixon 26 -tipsforjesus 26 -'40 26 -firetrucks 26 -janka 26 -42ft 26 -elahi 26 -tangent 26 -l'enfant 26 -chandrayaan-1 26 -batshuayi 26 -beauprez 26 -visually-impaired 26 -kucharczyk 26 -ngong 26 -4a 26 -greenacres 26 -exhibitionism 26 -auditoriums 26 -abassi 26 -grafitti 26 -primeira 26 -atoned 26 -near-collapse 26 -r-mississippi 26 -air-filled 26 -timeslot 26 -20-metre 26 -keffer 26 -mcbeth 26 -buffeting 26 -downe 26 -chilmark 26 -steelworker 26 -bathtime 26 -kaysville 26 -glowlight 26 -axiom 26 -ailey 26 -robuchon 26 -petric 26 -leafield 26 -ex-officer 26 -jabara 26 -axelle 26 -bumi 26 -wcbs-tv 26 -0830 26 -speyside 26 -textgate 26 -fiefdom 26 -drakeford 26 -crosswinds 26 -coupé 26 -fatness 26 -wsvn-tv 26 -5,000-year-old 26 -koryta 26 -highly-publicised 26 -carromero 26 -shimmers 26 -tianjiao 26 -23:13 26 -tadashi 26 -ashy 26 -sundhage 26 -neate 26 -390million 26 -superconducting 26 -topside 26 -elnazir 26 -al-ansari 26 -dismantles 26 -biked 26 -intolerances 26 -colonnaded 26 -oliseh 26 -100-page 26 -oldwage 26 -11.05 26 -arlidge 26 -semi-transparent 26 -statehouses 26 -turn-on 26 -egocentric 26 -k'nex 26 -hypponen 26 -doesnt 26 -48.6 26 -tes 26 -staycations 26 -thembu 26 -45.3 26 -45.2 26 -calvello 26 -two-run 26 -paris-bound 26 -tg 26 -paolucci 26 -escargot 26 -flecked 26 -laze 26 -razzano 26 -eastin 26 -geike 26 -chabrol 26 -jinxed 26 -0.22 26 -keitany 26 -veryfirstto.com 26 -durrington 26 -gamsbart 26 -methotrexate 26 -rakonczay 26 -fmln 26 -johnsbury 26 -malo 26 -renounces 26 -daymond 26 -garvan 26 -betzig 26 -thrice 26 -beiber 26 -aoc 26 -plymouth-based 26 -headshots 26 -stepan 26 -wiggo 26 -hekmatyar 26 -moonless 26 -scrapers 26 -recksiedler 26 -parmenter 26 -clampdowns 26 -mashayekhi 26 -965 26 -800-year-old 26 -barger 26 -swatches 26 -torpedoing 26 -casablancas 26 -ponomarev 26 -solenoid 26 -permanency 26 -kyneton 26 -left-backs 26 -d-arizona 26 -klagenfurt 26 -stodghill 26 -bootie 26 -basson 26 -8.95 26 -kepler-62e 26 -disassemble 26 -tallon 26 -rensburg 26 --44 26 -pathan 26 -parfait 26 -gallimore 26 -dobrev 26 -non-jews 26 -shenzen 26 -ouachita 26 -redecoration 26 -119th 26 -katella 26 -pornstar 26 -post-modern 26 -reggaeton 26 -lank 26 -hard-headed 26 -israel-based 26 -queensbury 26 -disclaimers 26 -biggers 26 -tika 26 -nessa 26 -photocopier 26 -olshansky 26 -sun-worshippers 26 -lapsley 26 -wastelands 26 -brignoni 26 -disfigurements 26 -gazpacho 26 -enthuse 26 -tuppence 26 -proximate 26 -@jonjensen 26 -shumaker 26 -liveleak.com 26 -14,200 26 -kaupang 26 -simonyan 26 -mosse 26 -ligambi 26 -rostock 26 -harnik 26 -frydrych 26 -dollies 26 -ail 26 -jingoism 26 -mazare 26 -tokenism 26 -mediaite 26 -vessey 26 -calthorpe 26 -lakehurst 26 -filkin 26 -bonomi 26 -koffi 26 -toothpicks 26 -pre-internet 26 -dustbins 26 -cdh 26 -peloponnese 26 -smash-hit 26 -sunwing 26 -mapstone 26 -44.99 26 -man-marking 26 -penistone 26 -embarassing 26 -diametrically 26 -souttar 26 -300-year 26 -lanark 26 -Özil 26 -reynald 26 -axelberg 26 -taheri 26 -geoglyph 26 -ludemann 26 -whites-only 26 -brackish 26 -bukavu 26 -tyseley 26 -3.06 26 -nederland 26 
-feock 26 -rumeysa 26 -chocolatiers 26 -modis 26 -mafioso 26 -wojdan 26 -heilemann 26 -shawkat 26 -synaesthesia 26 -wiggy 26 -apollon 26 -first-run 26 -holyoke 26 -zachariah 26 -marraccini 26 -27.50 26 -leifman 26 -9/1 26 -taskmaster 26 -gulliksen 26 -11-mile 26 -sarath 26 -mapes 26 -canyonlands 26 -chalfant 26 -delp 26 -1-10 26 -al-quds 26 -anti-politics 26 -masseuses 26 -romualdez 26 -bf 26 -yorkist 26 -unverifiable 26 -hambleden 26 -meerut 26 -snitched 26 -kinan 26 -roll-ups 26 -agim 26 -health-giving 26 -fala 26 -christl 26 -stampeded 26 -dervishaj 26 -mst 26 -souleiman 26 -headbutts 26 -bfg 26 -stereos 26 -marvelling 26 -nymphs 26 -kaushal 26 -rizk 26 -matthieu 26 -rfi 26 -18-rated 26 -tisbury 26 -methuen 26 -revitalising 26 -seven-acre 26 -dob 26 -erging 26 -self-promoting 26 -overindulged 26 -641 26 -wa'el 26 -he-man 26 -takeru 26 -1.11 26 -daffy 26 -beaux 26 -nkandla 26 -trac 26 -segatore 26 -russia-backed 26 -mid-wicket 26 -fachie 26 -liautaud 26 -boombox 26 -derryn 26 -guestroom 26 -mcavennie 26 -heart-stealer 26 -littleborough 26 -endara 26 -dt 26 -io9 26 -chaina 26 -fire-fighting 26 -eartha 26 -fombu 26 -0430 26 -5150 26 -immunities 25 -cloyne 25 -sese 25 -hydrophone 25 -bhavna 25 -cartography 25 -magdeburg 25 -sohale 25 -mossadegh 25 -vagueness 25 -teardrops 25 -luchkiw 25 -levitra 25 -31-17 25 -skaf 25 -895,000 25 -bhuiyan 25 -jiggers 25 -300-seat 25 -brune 25 -bitsy 25 -nwa 25 -macrumors 25 -morgenpost 25 -doswell 25 -194,000 25 -bafflingly 25 -minetta 25 -montoro 25 -74.3 25 -nose-to-nose 25 -turbaned 25 -esfandmozd 25 -meloni 25 -reprieves 25 -egremont 25 -foot-dragging 25 -338,000 25 -sime 25 -sfgate.com 25 -7:55 25 -lorri 25 -giaquinta 25 -frankii 25 -t.j 25 -aspirants 25 -mcgeehan 25 -helium-3 25 -bolcer 25 -dendritic 25 -debased 25 -205mph 25 -bul 25 -22:41 25 -22:48 25 -odette 25 -turkish-born 25 -bockhampton 25 -soltesz 25 -hamidovic 25 -avarice 25 -arlotti 25 -high-rollers 25 -qf 25 -reprises 25 -disavowing 25 -churchmen 25 -streetlight 25 -scudo 25 -gholston 25 -'94 25 -pathos 25 -olimpia 25 -awfulness 25 -benni 25 -eight-team 25 -road-legal 25 -ewell 25 -creepiest 25 -coverciano 25 -celler 25 -trounce 25 -48.9 25 -30-piece 25 -burgon 25 -amati 25 -fighter-bombers 25 -2,450 25 -saunters 25 -773 25 -772 25 -klaassen 25 -pacitti 25 -panks 25 -scentee 25 -mid-autumn 25 -dramani 25 -irishmen 25 -jeannine 25 -williston 25 -spunk 25 -chavanel 25 -bookkeeping 25 -coignard 25 -jeavons 25 -crushingly 25 -skylon 25 -relearning 25 -raghav 25 -umbra 25 -j.s. 
25 -dalyan 25 -disrobe 25 -mersiades 25 -quon 25 -hdr 25 -sub-plot 25 -aveiro 25 -mwh 25 -hackleburg 25 -flightglobal 25 -comber 25 -tisdall 25 -gpas 25 -republica 25 -hedegaard 25 -kustes 25 -stairlift 25 -midges 25 -8.12 25 -56.6 25 -crapo 25 -colonialists 25 -pontins 25 -sahadi 25 -computations 25 -loners 25 -crunk 25 -glasses-free 25 -vionnet 25 -tamale 25 -cattlemen 25 -norrman 25 -mia-grace 25 -poovey 25 -65.6 25 -garrigues 25 -monumentally 25 -281,000 25 -cavusoglu 25 -fluker 25 -wbir 25 -chessboard 25 -aspell 25 -kulwant 25 -barbury 25 -cristie 25 -odenkirk 25 -mcgehee 25 -balut 25 -heftier 25 -gilley 25 -lamin 25 -al-halqi 25 -kadioglu 25 -kitzbuhel 25 -unnervingly 25 -obokata 25 -landeros 25 -osho 25 -lactating 25 -eight-member 25 -dhanda 25 -21:17 25 -best-laid 25 -no-fire 25 -blencathra 25 -peace-building 25 -arantxa 25 -post-nuptial 25 -quadriceps 25 -browett 25 -smallish 25 -753 25 -752 25 -annacone 25 -canizares 25 -dpr 25 -al-jamal 25 -smitten-downes 25 -birchwood 25 -biosafety 25 -stravinsky 25 -bartholdi 25 -mp4-12c 25 -mouadamiya 25 -stoupin 25 -tregothnan 25 -tradespeople 25 -taggers 25 -japanese-owned 25 -soundscape 25 -xichang 25 -rajpal 25 -eutopia 25 -wringer 25 -heartsick 25 -hemorrhages 25 -demining 25 -boseley 25 -skorpios 25 -vesper 25 -rehire 25 -kumgang 25 -dint 25 -kling 25 -apparatuses 25 -freelee 25 -tambor 25 -cowart 25 -iguazu 25 -kampl 25 -multiethnic 25 -jamshed 25 -maire 25 -robarge 25 -800-pound 25 -old-growth 25 -mobbing 25 -padge 25 -transitory 25 -january/february 25 -kayte 25 -slipknot 25 -sain 25 -guarnere 25 -sculpts 25 -filochowski 25 -calfskin 25 -clean-living 25 -ypf 25 -22:02 25 -46m 25 -broking 25 -reformulate 25 -maze-like 25 -beard-cutting 25 -conibeer 25 -searles 25 -fechtel 25 -cannister 25 -anti-german 25 -poeta 25 -suwanee 25 -airlifts 25 -mile-high 25 -yeo-thomas 25 -wharfe 25 -45-foot 25 -ladywood 25 -aolani 25 -reinvents 25 -?!? 25 -gunawan 25 -viglen 25 -spotswood 25 -pejeta 25 -chinedu 25 -utecht 25 -kuratas 25 -fatou 25 -tasmanians 25 -philbrick 25 -shigeru 25 -worshipful 25 -strangeway 25 -ollanta 25 -codebreaking 25 -brittin 25 -mukul 25 -bansley 25 -morphological 25 -aircrews 25 -trailhead 25 -pitrora 25 -bumblis 25 -shoji 25 -antiquarian 25 -over-consumption 25 -niteroi 25 -unsteadily 25 -2.64 25 -hammerschlag 25 -tommies 25 -disincentives 25 -yantai 25 -kathi 25 -convair 25 -mkr 25 -misting 25 -weisbrot 25 -one-tonne 25 -nimbus 25 -cloak-and-dagger 25 -once-proud 25 -sencion 25 -over-reliant 25 -pyro 25 -bourdin 25 -250-mile 25 -pulverised 25 -avm 25 -nizzar 25 -vetri 25 -billion-plus 25 -pullin 25 -jedward 25 -hogmo 25 -dissects 25 -cherishing 25 -echos 25 -muertos 25 -cardinal-electors 25 -mda 25 -bizos 25 -arapaho 25 -vasil 25 -subgroups 25 -front-and-center 25 -64m 25 -643 25 -harkess 25 -assocation 25 -maccabees 25 -kick-boxing 25 -marijuana-related 25 -fagin 25 -italic 25 -raffi 25 -borowski 25 -sarker 25 -hoffer 25 -non-latino 25 -meckler 25 -limani 25 -seccuro 25 -champion-morin 25 -mooned 25 -fetishist 25 -vierkant 25 -namaste 25 -duelling 25 -summarises 25 -ratigan 25 -supersedes 25 -manteo 25 -husein 25 -now-familiar 25 -nicastro 25 -macinnes 25 -darek 25 -reclassifying 25 -inter-continental 25 -t.s. 
25 -1727 25 -artemyev 25 -6bn 25 -flannagan 25 -hôtel 25 -tamarama 25 -27billion 25 -stunk 25 -houndstooth 25 -mbolombo 25 -osiris-rex 25 -pallbearer 25 -onepoll 25 -no-notice 25 -three-year-deal 25 -tascha 25 -whipple 25 -aubergines 25 -hedonic 25 -cottrez 25 -5-mile 25 -spetic 25 -hopsital 25 -barraged 25 -879 25 -e.u. 25 -belamouadden 25 -rebus 25 -strategically-placed 25 -6.49 25 -feelin 25 -chappie 25 -björn 25 -laureano 25 -sideboard 25 -surma 25 -lowdon 25 -large-caliber 25 -nihilism 25 -fijian-born 25 -winmalee 25 -perl 25 -toucan 25 -ambulance-chasing 25 -bandung 25 -parahawking 25 -navarre 25 -chadlington 25 -konietzky 25 -exhales 25 -birdsall 25 -blingy 25 -recreativo 25 -cavuto 25 -racetracks 25 -nafusa 25 -vishnu 25 -144th 25 -124mph 25 -british-trained 25 -40-odd 25 -nh 25 -bone-dry 25 -islas 25 -catchpole 25 -cipriano 25 -schenk 25 -'14 25 -tyias 25 -lorene 25 -decaf 25 -testar 25 -hydroponics 25 -layun 25 -medicins 25 -kinnaman 25 -bio-hazard 25 -solway 25 -rescreened 25 -bosse 25 -groundstaff 25 -82ft 25 -pagnac 25 -fila 25 -ogbonna 25 -libidos 25 -shute 25 -litchmore-dunbar 25 -sohr 25 -top-rate 25 -eunuchs 25 -rolodex 25 -62.6 25 -vullo 25 -charmouth 25 -tricolour 25 -wonderwall 25 -keenness 25 -parviz 25 -heirens 25 -pus-filled 25 -reppert 25 -2.26 25 -princesa 25 -bircham 25 -backowski 25 -uemura 25 -preordained 25 -6:50 25 -24.50 25 -ophel 25 -korie 25 -kowalczik 25 -burle 25 -crafters 25 -caymans 25 -co-found 25 -muerte 25 -suhaib 25 -wheelbarrows 25 -agumbi 25 -katahdin 25 -sica 25 -humbles 25 -foreign-made 25 -snowmobiler 25 -nfib 25 -nikolaj 25 -forren 25 -buccheri 25 -networkers 25 -time-keeping 25 -salicylic 25 -ellie-may 25 -oversights 25 -nativists 25 -awnings 25 -dividers 25 -in-ground 25 -fingering 25 -boj 25 -otte 25 -branksome 25 -basic-rate 25 -3.43 25 -updo 25 -covell 25 -narciso 25 -pavlova 25 -erlanger 25 -anneka 25 -pro-palestine 25 -haukass 25 -gauk-roger 25 -test-firing 25 -italiano 25 -vanguardia 25 -six-cylinder 25 -za'atari 25 -1.83 25 -1.87 25 -kilinochchi 25 -adjoins 25 -silversea 25 -kisko 25 -vallis 25 -child-proof 25 -mangino 25 -oltz 25 -donbas 25 -helsingborg 25 -escher 25 -lubel 25 -flightpath 25 -chung-hee 25 -unelectable 25 -30-metre 25 -carribean 25 -duqu 25 -multiple-choice 25 -raybould 25 -tarzana 25 -strokeplay 25 -on-the-record 25 -1,680 25 -kalam 25 -hajjar 25 -bellingcat 25 -mingze 25 -783 25 -n.m. 
25 -nf 25 -english-based 25 -dacey 25 -gorden 25 -hera 25 -outscore 25 -galilei 25 -winchman 25 -industrial-sized 25 -65,738 25 -user-submitted 25 -arlit 25 -20,500 25 -mazar-e 25 -oireachtas 25 -second-guessed 25 -onorato 25 -burkhalter 25 -twin-engined 25 -kariuki 25 -195million 25 -c'est 25 -glenis 25 -23:50 25 -mehra 25 -guntown 25 -harriotte 25 -nabokov 25 -governor-elect 25 -hopoate 25 -dongshigu 25 -frias 25 -jiuquan 25 -hadfield-hyde 25 -ceremoniously 25 -contrail 25 -plummy 25 -slap-up 25 -leguin 25 -solander 25 -laysan 25 -comayagua 25 -bearup 25 -wipprecht 25 -newhall 25 -steamers 25 -shankland 25 -’92 25 -denys 25 -sterga 25 -dulverton 25 -cinderford 25 -teasingly 25 -zimbabwe-born 25 -witcher 25 -chugged 25 -payslips 25 -klinghoffer 25 -moisturizing 25 -self-pitying 25 -bouterse 25 -dhkp-c 25 -jelani 25 -offical 25 -bour 25 -one-earner 25 -nafees 25 -hamilton-smith 25 -sixth-former 25 -antipodean 25 -prelates 25 -76.5 25 -dalton-in-furness 25 -dog-like 25 -samba-panza 25 -finegan 25 -showrooming 25 -ninh 25 -mislabelling 25 -elector 25 -3-pointers 25 -hangers-on 25 -neo-nazism 25 -bovenizer 25 -then-15-year-old 25 -kumano 25 -bushney 25 -cannabidiol 25 -rustage 25 -chiao 25 -hotpoint 25 -cathleen 25 -52f 25 -diamond-studded 25 -re-worked 25 -crayford 25 -melodie 25 -degarmo 25 -daughters-in-law 25 -posties 25 -sadhus 25 -dismounted 25 -tomar 25 -finales 25 -guilin 25 -faltskog 25 -hora 25 -bissett 25 -dudeney 25 -tendring 25 -tryptophan 25 -kidner 25 -csun 25 -pared-down 25 -maestri 25 -subcultures 25 -farm-raised 25 -flitted 25 -enviously 25 -bino 25 -hatters 25 -capitalisation 25 -unsighted 25 -ducharme 25 -stabilizes 25 -violeta 25 -gennifer 25 -ladsous 25 -denner 25 -nob 25 -castaways 25 -bulova 25 -doucette 25 -slating 25 -ramelli 25 -risenburg 25 -jayasuriya 25 -mid-2008 25 -abdurrahim 25 -penciled 25 -5.60 25 -flightradar24 25 -warnes 25 -fye 25 -mesrine 25 -quanzhou 25 -al-jaafari 25 -apethorpe 25 -gigatonnes 25 -nhfa 25 -crematoria 25 -tedworth 25 -dany 25 -taek 25 -chokers 25 -narinesingh 25 -quadcopters 25 -pgd 25 -barnyard 25 -danah 25 -undersold 25 -10th-placed 25 -doublespeak 25 -twitters 25 -mey 25 -let-down 25 -pecks 25 -microprocessor 25 -aphrodisiacs 25 -rossdale 25 -mckinstry 25 -gordan 25 -rued 25 -quarless 25 -1950s-style 25 -leafcutter 25 -fold-down 25 -envisat 25 -alysia 25 -zetec 25 -misplacing 25 -monounsaturated 25 -shofar 25 -repairer 25 -seaplanes 25 -distemper 25 -londolozi 25 -six-bed 25 -22,400 25 -atlético 25 -modulated 25 -nepean 25 -14-week 25 -liberalized 25 -unsay 25 -mpd 25 -webmaster 25 -callison 25 -sypt 25 -robledo 25 -infection-fighting 25 -57.5 25 -kaili 25 -taranaki 25 -solicitations 25 -taiping 25 -1970s-era 25 -nace 25 -kv 25 -swishing 25 -electrocute 25 -garma 25 -ghee 25 -shikarpur 25 -back-seat 25 -towell 25 -interlopers 25 -yéle 25 -cold-like 25 -millionths 25 -htut 25 -mother-child 25 -klaaskids 25 -canis 25 -moodie 25 -eu-funded 25 -kalgoorlie 25 -hippisley 25 -clapped-out 25 -srb 25 -norell 25 -frohman 25 -puke 25 -École 25 -navcam 25 -agustina 25 -flutters 25 -sciatic 25 -thatâ 25 -milliliter 25 -unjustifiably 25 -all-but-certain 25 -blacksmiths 25 -push-back 25 -sbihi 25 -erdmann 25 -viki 25 -fibulas 25 -300kg 25 -campden 25 -jailbroken 25 -trung 25 -mutism 25 -pinney 25 -nonreligious 25 -akbari 25 -douetil 25 -close-season 25 -puntoriero 25 -borkowski 25 -sort-of 25 -cricinfo 25 -myrick 25 -seascape 25 -prequels 25 -tarsiers 25 -heretic 25 -salpingidis 25 -wetherill 25 -anagram 25 -comely 25 -kenrick 25 -jobim 25 
-ill-disciplined 25 -sharps 25 -untangled 25 -cremona 25 -robesky 25 -bartolomeo 25 -roraima 25 -cea 25 -invocations 25 -aransas 25 -jaune 25 -venezia 25 -gome 25 -speke 25 -woodroof 25 -foots 25 -lett 25 -over-indulging 25 -amas 25 -blurt 25 -touray 25 -twal 25 -machan 25 -visualised 25 -topo 25 -skojo 25 -kaji 25 -iversen 25 -non-melanoma 25 -sandaza 25 -smeltzer 25 -trixie 25 -sharia4belgium 25 -22-man 25 -fue 25 -extra-long 25 -scrounged 25 -kayaked 25 -randon 25 -wheelock 25 -takeshima 25 -multi-dimensional 25 -sedgley 25 -#debate 25 -mrap 25 -first-served 25 -zebre 25 -loughlin 25 -gaydon 25 -plutarch 25 -ashlynn 25 -re-educate 25 -kampong 25 -283,000 25 -cervelli 25 -wessely 25 -silverdale 25 -65billion 25 -tagicakibau 25 -zipwire 25 -scholarism 25 -zosia 25 -bechard 25 -17-inch 25 -formalize 25 -luxuriant 25 -quarreling 25 -codling 25 -9s 25 -risch 25 -lacerda 25 -compulsorily 25 -raegan 25 -nts 25 -durazza 25 -no-smoking 25 -hollioake 25 -al-basha 25 -42,500 25 -basravi 25 -lorenza 25 -piñata 25 -late-afternoon 25 -bulk-billing 25 -duman 25 -front-man 25 -akilah 25 -claflin 25 -13-years 25 -sloaney 25 -griffey 25 -wsaz 25 -hardaway 25 -lade 25 -capdevila 25 -milewski 25 -22:50 25 -gomoll 25 -infotainment 25 -undergrads 25 -maoris 25 -pliskova 25 -ecoboost 25 -@schamscnn 25 -sautéed 25 -cross-checked 25 -bean-bag 25 -56.7 25 -sterilizing 25 -pre-med 25 -caniggia 25 -goleta 25 -hamblen 25 -sudetenland 25 -york-style 25 -dovish 25 -gawking 25 -amodio 25 -41.7 25 -41.1 25 -lanzer 25 -dissociate 25 -nose-dive 25 -chainmail 25 -prugo 25 -carinae 25 -loftin 25 -krakoff 25 -bettye 25 -piercy 25 -juxtaposes 25 -carvers 25 -emf 25 -high-def 25 -gold-winning 25 -feigenbaum 25 -chennaiyin 25 -rayonier 25 -crawlers 25 -fame-hungry 25 -pehrsson 25 -eick 25 -flannels 25 -hes 25 -matwyuk 25 -off-the-books 25 -defunded 25 -sougarret 25 -meltz 25 -highliners 25 -prudhomme 25 -tracon 25 -warps 25 -supernanny 25 -nifong 25 -duoduo 25 -heimel 25 -winsford 25 -pestilence 25 -mccrae 25 -pennants 25 -picture-sharing 25 -hendra 25 -sabar 25 -cleavage-baring 25 -eduction 25 -fitful 25 -bejo 25 -nystrom 25 -extraterritorial 25 -leyzaola 25 -napo 25 -crazes 25 -shaari 25 -29p 25 -berghaus 25 -photo-ops 25 -867 25 -pakzad 25 -smid 25 -pishevar 25 -schermerhorn 25 -wgno 25 -t-boned 25 -couto 25 -fact-check 25 -grunshaw 25 -aberrant 25 -790,000 25 -moute 25 -progenitor 25 -djabou 25 -devoe 25 -scb 25 -ziering 25 -jacka 25 -baxendale-walker 25 -blown-up 25 -30bn 25 -hejazi 25 -culbert 25 -rosenkranz 25 -iban 25 -aboriginals 25 -fraiche 25 -byerly 25 -lazzara 25 -hadramout 25 -43.7 25 -liguria 25 -prettily 25 -gym-goers 25 -patrician 25 -dielna 25 -cosmologists 25 -wcsh 25 -piara 25 -missenden 25 -lavallee 25 -off-hand 25 -inquisitor 25 -8mph 25 -ballymena 25 -hcg 25 -balavil 25 -hulse 25 -jamar 25 -scrappers 25 -carden 25 -riyal 25 -antecedents 25 -34.2 25 -schell 25 -high-concept 25 -mammary 25 -to-go 25 -kateri 25 -kingsnorth 25 -iftekhar 25 -ayhan 25 -1990-91 25 -century-long 25 -amoa 25 -hard-wearing 25 -heartlessly 25 -outward-looking 25 -udell 25 -thunderclap 25 -jantzen 25 -ef4 25 -condi 25 -barranquilla 25 -reel-to-reel 25 -behm 25 -knies 25 -trice 25 -1:12 25 -91.5 25 -cri 25 -stabber 25 -14-under 25 -two-leg 25 -prong 25 -isme.com 25 -vasa 25 -naral 25 -minta 25 -fsc 25 -fsg 25 -kookoothe 25 -843 25 -animal-lover 25 -lock-ups 25 -intoned 25 -fishponds 25 -chieftains 25 -oilseed 25 -dippers 25 -show-cause 25 -yiannis 25 -cleasby 25 -mbah 25 -rosdeep 25 -big-match 25 -n.y 25 -lorello 25 -24-inch 25 
-itandje 25 -mizell 25 -filipovic 25 -24-20 25 -magowan 25 -demmellash 25 -april-june 25 -madhu 25 -deka 25 -tarasov 25 -pristina 25 -neel 25 -muhumed 25 -sea-change 25 -ashers 25 -byam 25 -porkka 25 -nothings 25 -per-theater 25 -bolli 25 -protrusions 25 -keesling 25 -fujairah 25 -13.99 25 -three-putted 25 -al-byati 25 -991 25 -hearing-impaired 25 -pranced 25 -homebody 25 -dampness 25 -scoopon 25 -over-indulgence 25 -kompa 25 -a46 25 -hashman 25 -mjm 25 -snow-making 25 -herbstreit 25 -n.d. 25 -on-again-off-again 25 -forfar 25 -relais 25 -ponchis 25 -tanana 25 -abyad 25 -rajee 25 -visualized 25 -beira-rio 25 -learmonth 25 -cahokia 25 -kiawah 25 -sucuzhanay 25 -1910s 25 -juilliard 25 -rustie 25 -whiling 25 -videogames 25 -mavens 25 -methodists 25 -struthers 25 -1:35 25 -xkeyscore 25 -katusha 25 -re-engaged 25 -heale 25 -rosy-cheeked 25 -drag-racing 25 -marica 25 -egalitarianism 25 -1,460 25 -70.5 25 -juxtapose 25 -chernyshenko 25 -118th 25 -bonaduce 25 -ployees 25 -hourican 25 -opacity 25 -fruiting 25 -military-type 25 -38.7 25 -autodesk 25 -qishan 25 -isolde 25 -zaziwe 25 -constantino 25 -fast-talking 25 -moak 25 -buckhurst 25 -song-wol 25 -lieutenant-general 25 -wtmj 25 -howitt 25 -seguin 25 -daan 25 -2009-2013 25 -verso 25 -bozell 25 -then-boss 25 -abdukhadir 25 -wm 25 -maca 25 -sexualising 25 -eeyore 25 -predating 25 -streetcars 25 -extraditions 25 -tali 25 -20-time 25 -tonka 25 -sangeang 25 -interceded 25 -siqi 25 -vampy 25 -july-september 25 -crumley 25 -1993-94 25 -hazanavicius 25 -petros 25 -clear-headed 25 -cordially 25 -perin 25 -frit 25 -shur 25 -redvers 25 -horrigan 25 -279,000 25 -choke-hold 25 -neild 25 -intertwine 25 -goodship 25 -deadliness 25 -alcide 25 -gwyther 25 -geostrategic 25 -osbi 25 -kindling 25 -disbrey 25 -bugti 25 -00:02 25 -special-interest 25 -vizconde 25 -athan 25 -antonakos 25 -jewett 25 -lazaridis 25 -7 1/2 25 -ponomaryov 25 -stoicescu 25 -grenville 25 -peacehaven 25 -803 25 -politan 25 -bentsen 25 -lasik 25 -overby 25 -anti-clotting 25 -nkenka 25 -b&m 25 -banzai 25 -wedging 25 -nieland 25 -multi-role 25 -tch 25 -westermann 25 -peace-keeping 25 -glasson 25 -edge-sorting 25 -schade 25 -6.19 25 -rooks 25 -f5 25 -artaban 25 -a.a. 
25 -pantoja 25 -luk 25 -crackberry 25 -shull 25 -villasenor 25 -energie 25 -capitulating 25 -man-hours 25 -zulberti 25 -0.12 25 -osolase 25 -ground-up 25 -breathometer 25 -prd 25 -pikmin 25 -73f 25 -heckathorn 25 -fortier 25 -ogenyi 25 -headbanging 25 -solmonese 25 -geenty 25 -midtjylland 25 -civ 25 -counter-suing 25 -knapke 25 -army-backed 25 -transylvanian 25 -rst 25 -delancey 25 -abrahamsen 25 -berkani 25 -intracoastal 25 -pre-planning 25 -5a 25 -irascible 25 -vts 25 -schare 25 -doughnut-shaped 25 -palmera 25 -bedtimes 25 -assistive 25 -cirincione 25 -erraid 25 -bufton 25 -matti 25 -gyarmati 25 -clarifications 25 -braylon 25 -mery 25 -sublett 25 -soufflé 25 -sitwell 25 -provodnikov 25 -pichushkin 25 -makarenkov 25 -18-30 25 -kinmartin 25 -then-chairman 25 -33-man 25 -tigra 25 -bretton 25 -poniewozik 25 -dag 25 -daa 25 -gargano 25 -long-stalled 25 -dechert 25 -hershman 25 -wallack 25 -binyon 25 -prosor 25 -june/july 25 -downfalls 25 -pawed 25 -pinta 25 -photoelectric 25 -reingold 25 -manteghi 25 -feinerman 25 -gannets 25 -2.76 25 -exonerations 25 -volta 25 -dissing 25 -puds 25 -maracaibo 25 -5-feet 25 -soviet-backed 25 -prokudin-gorsky 25 -swigs 25 -nabuguzi 25 -house-hunters 25 -dail 25 -race-conscious 25 -1774 25 -college-bound 25 -muldowney 25 -nanda 25 -cottesmore 25 -sodano 25 -e-paper 25 -litigant 25 -well-honed 25 -minehart 25 -cowshed 25 -rieti 25 -birgfeld 25 -a23 25 -germinate 25 -industry-leading 25 -walling 25 -woodlock 25 -al-kurdi 25 -codey 25 -alvechurch 25 -months-old 25 -babbel 25 -totted 25 -totten 25 -motion-sensor 25 -#gaza 25 -akihabara 25 -mother-of-pearl 25 -low-value 25 -internalize 25 -begets 25 -maurico 25 -ruh 25 -promos 25 -doshi 25 -selin 25 -ethyl 25 -neilly 25 -oeuvre 25 -ped 25 -censuses 25 -brilliant-cut 25 -fetcham 25 -lostwithiel 25 -pacha 25 -giudices 25 -janvier 25 -naghemeh 25 -wella 25 -200-page 25 -23:46 25 -brittoni 25 -agafya 25 -xxxl 25 -edinho 25 -abdullaev 25 -cryogenically 25 -guzmán 25 -lopera 25 -oru 25 -americanized 25 -megantic 25 -tesseneer 25 -doni 25 -vickerage 25 -yuto 25 -lamy 25 -choon 25 -fallers 25 -moki 25 -1.97 25 -1.93 25 -annis 25 -jacenko 25 -sio 25 -johal 25 -second-to-last 25 -chiera 25 -adegbile 25 -lissimore 25 -sawka 25 -housebreaking 25 -sierre 25 -zorc 25 -aubert 25 -meeke 25 -capstick 25 -indigestible 25 -cryptographic 25 -non-discriminatory 25 -dissenter 25 -linhart 25 -stand-down 25 -pooping 25 -1680 25 -ewers 25 -sirgany 25 -feelers 25 -fluty 25 -ding-a-ling 25 -rumpled 25 -outbox 25 -nanchang 25 -gulum 25 -general-purpose 25 -33-1 25 -homolka 25 -vocabularies 25 -serialisation 25 -vloggers 25 -mateljan 25 -sakhalin 25 -mcclory 25 -792 25 -roquefort 25 -morose 25 -qais 25 -chakvetadze 25 -fourniret 25 -ionescu 25 -huggett 25 -supercomputing 25 -saracen 25 -meunier 25 -souped 25 -alcatel 25 -wisecracks 25 -melange 25 -carstens 25 -galimberti 25 -croquette 25 -nayyar 25 -proselytize 25 -peete 25 -baia 25 -rinder 25 -lintel 25 -turd 25 -kleine-levin 25 -prunella 25 -reedus 25 -kilogrammes 25 -trevillion 25 -end-of-course 25 -sanha 25 -hoebel 25 -trend-setting 25 -briones 25 -causative 25 -fatalism 25 -kula 25 -dong-won 25 -wob 25 -morel 25 -high-seas 25 -epithelial 25 -elexis 25 -2520 25 -warlock 25 -lachimia 25 -barina 25 -clucas 25 -riendeau 25 -vlog 25 -mallika 25 -sotherton 25 -penev 25 -darned 25 -ewok 25 -tenorio 25 -magritte 25 -israeli-egyptian 25 -honeymooner 25 -2700 25 -adac 25 -air-lifted 25 -mcclair 25 -athor 25 -trappes 25 -loe 25 -rehearing 25 -1,760 25 -poofter 25 -selters 25 -ramalingam 25 
-triglyceride 25 -anti-black 25 -shadbolt 25 -michalis 25 -ammerman 25 -strykul 25 -ivery 25 -whas 25 -nantel 25 -suffocates 25 -nickolas 25 -divan 25 -heavies 25 -flello 25 -northeastward 25 -flatpack 25 -dreweatts 25 -newsagency 25 -f.e. 25 -niarchos 25 -himba 25 -lulic 25 -quadrini 25 -aktar 25 -eiji 25 -k'naan 25 -redheaded 25 -beausejour 25 -mentmore 25 -non-playing 25 -bbl 25 -hecksen 25 -slimbridge 25 -356,000 25 -maiziere 25 -lopez-soto 25 -12.35 25 -well-done 25 -boyes 25 -mandir 25 -sunbury-on-thames 25 -doidge 25 -band-aids 25 -al-sayed 25 -raffaello 25 -berrer 25 -kc-135 25 -dockyards 25 -wolverton 25 -berki 25 -offaly 25 -nonfatal 25 -luken 25 -schorr 25 -mid-2010 25 -postojna 25 -kupala 25 -semipro 25 -armagan 25 -bron 25 -rag-tag 25 -tiramisu 25 -lute 25 -deplaning 25 -gruezo 25 -scarpered 25 -rawling 25 --16 25 -chatroulette 25 -cowherd 25 -euthanise 25 -doesn 25 -15,000-square-foot 25 -dpj 25 -second-deadliest 25 -splurges 25 -fredricka 25 -nolita 25 -tricep 25 -onsen 25 -vanja 25 -co-manager 25 -ryno 25 -magary 25 -pheu 25 -friezes 25 -modelo 25 -hyperplasia 25 -holey 25 -kanawa 25 -cojones 25 -brisley 25 -a350-900 25 -presumptions 25 -ulcerated 25 -afghanaid 25 -proletariat 25 -lorenzana 25 -24-years-old 25 -moviegoer 25 -tubingen 25 -checked-in 25 -2r 25 -2s 25 -muscle-flexing 25 -louiselle 25 -stufflebeem 25 -300billion 25 -challinor 25 -game-changers 25 -clerked 25 -huizhou 25 -doogan 25 -pocklington 25 -tesca 25 -ghoulam 25 -wale 25 -opa-locka 25 -wukan 25 -w.i.p. 25 -fionnuala 25 -bomb-disposal 25 -shimmying 25 -speirs 25 -coolangatta 25 -exhorts 25 -kryzie 25 -mousey 25 -baldi 25 -madoffs 25 -henrichsen 25 -excoriating 25 -finicky 25 -slut-shaming 25 -kinzer 25 -dohoney 25 -joust 25 -phds 25 -tio 25 -mid-2030s 25 -ss2 25 -vocalise 25 -cringes 25 -powers-that-be 25 -10-cent 25 -47.9 25 -jobbing 25 -jowers 25 -nowruz 25 -democrat-led 25 -chatswood 25 -8.8-magnitude 25 -sumida 25 -galton 25 -laymen 25 -drummond-baxter 25 -forgacs 25 -stara 25 -h.g. 25 -fidesz 25 -vpl 25 -sates 25 -off-key 25 -narconon 25 -mezidor 25 -seriously-ill 25 -baton-wielding 25 -devonte 25 -newly-born 25 -del. 
25 -tagliabue 25 -ghost-like 25 -violence-related 25 -calzetta 25 -nopd 25 -taramov 25 -leukodystrophy 25 -quolls 25 -transgenders 25 -horus 25 -loddon 25 -ischgl 25 -conboy 25 -dring 25 -piermario 25 -ghibli 25 -animosities 25 -wkrc 25 -spearheads 25 -hennig 25 -sibery 25 -ambassador-at-large 25 -steins 25 -reinterpretation 25 -upbringings 25 -r4 25 -lyndoe-tavistock 25 -didga 25 -bhattacharya 25 -gallopin 25 -familes 25 -ceglia 25 -zada 25 -c-class 25 -bombproof 25 -word-for-word 25 -120kg 25 -dispositions 25 -j.lo 25 -back-rower 25 -ratchford 25 -fahrmann 25 -yorktown 25 -rusli 25 -daveon 25 -fifth-minute 25 -50-state 25 -emmy-award 25 -mosa 25 -first-teamers 25 -rouses 25 -six-litre 25 -toc 25 -bsb 25 -poutine 25 -al-qa 25 -auchterlonie 25 -pajtim 25 -campbellsville 25 -hallstatt 25 -shele 25 -bignone 25 -1750s 25 -adelegan 25 -cheapens 25 -trenchcoat 25 -vava 25 -stouffer 25 -8.46 25 -recaps 25 -salaheddine 25 -kozen 25 -parkwood 25 -fuente 25 -microcontroller 25 -fortuitously 25 -chinks 25 -sufficiency 25 -aplastic 25 -scourges 25 -wohlschlaeger 25 -luddites 25 -hains 25 -cablevision 25 -burlakoff 25 -moriah 25 -batgirl 25 -top-half 25 -smithies 25 -276,000 25 -malghan 25 -callens 25 -re-book 25 -all-spanish 25 -12-time 25 -demis 25 -chondrules 25 -benthall 25 -gazebos 25 -grimsvotn 25 -ex-fbi 24 -fiu 24 -shacked 24 -california-born 24 -waialae 24 -patois 24 -affix 24 -ettinger 24 -vernazza 24 -596 24 -emmanuel-thomas 24 -snorts 24 -cockings 24 -overstretch 24 -2:35 24 -broadsword 24 -ulrike 24 -quarreled 24 -sear 24 -back-post 24 -hout 24 -ottman 24 -marsala 24 -chesty 24 -unscrewed 24 -fischbacher 24 -greenlandic 24 -post-tax 24 -coahoma 24 -t-pims 24 -drug-driving 24 -pagenstecher 24 -swanwick 24 -johnathon 24 -creationist 24 -rain-affected 24 -jennice 24 -goldenberg 24 -keaney 24 -co-creators 24 -uremic 24 -klimkin 24 -wilburn 24 -squiggle 24 -reitz 24 -rolla 24 -reinterred 24 -gemayel 24 -lynyrd 24 -dmg 24 -saviola 24 -combust 24 -neverwet 24 -crescent-shaped 24 -picassos 24 -pierre-mauroy 24 -pelaez 24 -setton 24 -1.31 24 -methuselah 24 -entices 24 -22:44 24 -contentions 24 -287,000 24 -afrique 24 -2.57 24 -roger-vasselin 24 -sancha 24 -rooftopping 24 -suffixes 24 -dolon 24 -fv2 24 -saundra 24 -r.a. 
24 -jordan-barber 24 -eirian 24 -oher 24 -flamborough 24 -inter-country 24 -twaddle 24 -cbs46 24 -slip-on 24 -deisy 24 -21:38 24 -21:35 24 -pjaca 24 -ticona 24 -mars-like 24 -energy-intensive 24 -court-side 24 -cruse 24 -771 24 -helzer 24 -aitazaz 24 -skank 24 -85billion 24 -flesh-and-blood 24 -al-golani 24 -post-trial 24 -workrate 24 -innotab 24 -alcon 24 -,15 24 -arbenz 24 -agu 24 -icsi 24 -endorser 24 -cheapening 24 -magill 24 -raeburn 24 -hobey 24 -flapjack 24 -lewallen 24 -ezeagwula 24 -armadale 24 -godrich 24 -ostracism 24 -protà 24 -cns 24 -ephemera 24 -beauregard 24 -voronoi 24 -kalydeco 24 -perused 24 -repossessions 24 -gluttonous 24 -unnerve 24 -spratt 24 -rasmusson 24 -zagora 24 -retraces 24 -vizsla 24 -microchipping 24 -cappuccini 24 -15k 24 -bouba 24 -barrell 24 -gurira 24 -1.51 24 -tathra 24 -yaxley 24 -157th 24 -ketosis 24 -platten 24 -kecil 24 -nannying 24 -ramsdell 24 -garate 24 -unzueta 24 -calment 24 -weigel 24 -mazloumsaki 24 -1648 24 -olmedo 24 -lumberjacks 24 -tensile 24 -shiro 24 -hore 24 -niue 24 -carousels 24 -wushu 24 -vegas-based 24 -recessionary 24 -pagodas 24 -vestas 24 -unpolished 24 -759 24 -remedios 24 -braiden 24 -kaleena 24 -sixty-one 24 -contaminates 24 -sputter 24 -bellosguardo 24 -beadell 24 -charmers 24 -hession 24 -kajaki 24 -565,000 24 -smithville 24 -shiller 24 -crowthorne 24 -besiege 24 -quantifying 24 -halinski 24 -marciniak 24 -re-bailed 24 -convulsion 24 -slapdash 24 -c-3po 24 -fava 24 -olden 24 -gummi 24 -small-sided 24 -rosin 24 -blurting 24 -ncmec 24 -bemba 24 -ashtead 24 -bidean 24 -braha 24 -scheckter 24 -essaouira 24 -stand-offs 24 -cost-free 24 -depuy 24 -cintron 24 -classing 24 -coming-out 24 -interchangeably 24 -luddite 24 -00:01 24 -merkur 24 -jakosky 24 -fraxinea 24 -maslen 24 -barnfather 24 -heselden 24 -criscito 24 -kori 24 -knatalye 24 -yellow-bellied 24 -phonograph 24 -red-top 24 -wasters 24 -bigley 24 -strongmen 24 -korth 24 -mother-of-ten 24 -tutted 24 -agusta 24 -baklava 24 -two-bedroomed 24 -strutton 24 -miklos 24 -695,000 24 -blixseth 24 -moton 24 -albin 24 -51.7 24 -veltins-arena 24 -noisier 24 -lamoureux 24 -kaminski 24 -herivel 24 -katabarwa 24 -re-shape 24 -chakalos 24 -venetia 24 -bhupathi 24 -ere 24 -freelanced 24 -orsos 24 -284million 24 -cabinetry 24 -unasur 24 -raekwon 24 -banu 24 -gleick 24 -somani 24 -maiti 24 -niceness 24 -kellers 24 -gilets 24 -california-irvine 24 -gruodis 24 -stabilizers 24 -coursier 24 -aco 24 -witold 24 -montrouge 24 -foreign-based 24 -borscht 24 -symmetric 24 -haemorrhaged 24 -jet-pack 24 -spankings 24 -2018/2022 24 -castroneves 24 -six-lane 24 -compunction 24 -norilsk 24 -venkatesh 24 -potentials 24 -peroni 24 -150-strong 24 -collins-faunce 24 -amorebieta 24 -luxembourg-based 24 -tippecanoe 24 -safe-haven 24 -fugees 24 -wombwell 24 -nightgowns 24 -208,000 24 -bubb 24 -maillot 24 -riverbeds 24 -out-of-wedlock 24 -abusir 24 -rosaura 24 -model-actress 24 -kassab 24 -etzion 24 -huizenga 24 -stucker 24 -00:11 24 -bronckhorst 24 -androgens 24 -dinnerware 24 -anti-islamist 24 -mccreadie 24 -shatz 24 -minami 24 -22:21 24 -piaggio 24 -kashi 24 -hammarberg 24 -mcerlean 24 -tittle-tattle 24 -anti-feminist 24 -rodhouse 24 -sirajuddin 24 -mette 24 -telegraphed 24 -3.42 24 -radiative 24 -clouse 24 -linsky 24 -fairplay 24 -heart-pounding 24 -jet-setters 24 -orsoni 24 -smashed-up 24 -etu 24 -webbe 24 -etf 24 -mig-21 24 -rusal 24 -nematode 24 -fyfield 24 -madrassas 24 -bequelin 24 -wegman 24 -rademacher 24 -lessin 24 -21:50 24 -griselda 24 -gulet 24 -waveguide 24 -24-16 24 -shariff 24 -halston 24 
-collectives 24 -ibrahima 24 -vestry 24 -abaco 24 -faf 24 -vaught 24 -capelouto 24 -al-rubaie 24 -mewing 24 -yada 24 -tenses 24 -pooper 24 -fredricksen 24 -marveaux 24 -dubstep 24 -dwekh 24 -uge 24 -lasd 24 -repossess 24 -million-to-one 24 -talulah 24 -roasters 24 -fundacion 24 -hileman 24 -cassia 24 -urbanized 24 -turchinov 24 -lefranc 24 -rasch 24 -terra-cotta 24 -atdhe 24 -inferring 24 -linsley 24 -ganging 24 -follmer 24 -bhogal 24 -furth 24 -rockwall 24 -rip-offs 24 -russian-american 24 -dissuading 24 -tiong 24 -petula 24 -schone 24 -celebrity-studded 24 -take-aways 24 -manliest 24 -andersons 24 -shoukry 24 -ashmolean 24 -9,100 24 -straggly 24 -al-sheitaat 24 -alcohols 24 -mctier 24 -madewell 24 -o'melia 24 -00:38 24 -one-dayers 24 -66f 24 -shoehorn 24 -ex-council 24 -gerbic 24 -kal-el 24 -uncredited 24 -rehabilitates 24 -movoto 24 -dioncounda 24 -aerodynamically 24 -re-introduction 24 -u.s.-run 24 -confidences 24 -scroggs 24 -dilys 24 -niblett 24 -shakenhurst 24 -sullivans 24 -molls 24 -church-run 24 -shaddick 24 -100,000-plus 24 -canadian-based 24 -boukari 24 -nodaway 24 -hodgdon 24 -thermidor 24 -1659 24 -venti 24 -mellifluous 24 -scherzer 24 -2112 24 -kata 24 -27-inch 24 -dramatize 24 -cross-court 24 -quintillion 24 -over-use 24 -239,000 24 -bekker 24 -military-industrial 24 -pellegrin 24 -towsey 24 -gid 24 -catoosa 24 -skeeter 24 -moneysupermarket.com 24 -cine 24 -1703 24 -nanga 24 -snetro 24 -churchdown 24 -truth-telling 24 -cretu 24 -foston 24 -bestows 24 -haimoudi 24 -nystagmus 24 -yiambilis 24 -caliskan 24 -altiplano 24 -luxemburg 24 -bantering 24 -bus-sized 24 -mckamey 24 -flick-on 24 -porterhouse 24 -retouch 24 -39ft 24 -a11 24 -schneck 24 -gamula 24 -zakk 24 -cross-code 24 -hollies 24 -hidehiko 24 -melandri 24 -whiteboards 24 -traffic-related 24 -mangos 24 -dunstone 24 -rehan 24 -heartthrobs 24 -marcellus 24 -risca 24 -bandaging 24 -wyong 24 -lesbos 24 -pastes 24 -aland 24 -607 24 -al-moussawi 24 -tagesspiegel 24 -8-year-olds 24 -h5n8 24 -ichiro 24 -seventh-graders 24 -gioconda 24 -patriarchs 24 -hirono 24 -marjoribanks 24 -cheapoair 24 -ardeatine 24 -safaricom 24 -amies 24 -15-17 24 -rapson 24 -fijians 24 -connar 24 -13-0 24 -godinez 24 -tavi 24 -third-in-line 24 -mayol 24 -houzz 24 -gameiro 24 -schtick 24 -keppel 24 -moby-dick 24 -monday-friday 24 -coronaviruses 24 -earlobe 24 -jiffy 24 -11-2 24 -odder 24 -1710 24 -polin 24 -assia 24 -dietze 24 -raya 24 -jaida 24 -o'donohue 24 -schipper 24 -drammen 24 -layabout 24 -panem 24 -carnation 24 -halfhide 24 -riveter 24 -chirp 24 -nicoleta 24 -three-meter 24 -acsi 24 -zippo 24 -prioritises 24 -preschooler 24 -cuties 24 -47-17 24 -sarria 24 -mutterings 24 -karrada 24 -sali 24 -bowdon 24 -ambling 24 -entendres 24 -shigeta 24 -veyrons 24 -sinterklaas 24 -exhorting 24 -alya 24 -nine-story 24 -chirps 24 -89.99 24 -kosawa 24 -christakis 24 -niwa 24 -hand-sewn 24 -dugong 24 -629 24 -bonventre 24 -strainer 24 -cheikh 24 -lineouts 24 -borrowdale 24 -greek-born 24 -matawalu 24 -high-paid 24 -ghilas 24 -arijit 24 -mcsherry 24 -dahdaleh 24 -pedometers 24 -sciver 24 -re-ignite 24 -yaquina 24 -bihi 24 -bdnf 24 -goga 24 -garriola 24 -strobes 24 -rcaf 24 -blackthorn 24 -3.53 24 -gyorgy 24 -riesling 24 -badly-damaged 24 -gossiped 24 -patrik 24 -well-aware 24 -12,700 24 -twin-turbocharged 24 -korbut 24 -infirmity 24 -side-foot 24 -linkletter 24 -lavabit 24 -pre-vma 24 -mevagissey 24 -trajan 24 -cup-winners 24 -serdar 24 -14p 24 -satyananda 24 -ill-discipline 24 -helio 24 -ulsan 24 -bed-wetting 24 -1,084 24 -dyana 24 -rebeika 24 -crittercam 24 
-chump 24 -multiverse 24 -helmholtz 24 -atholl 24 -seedling 24 -bundu 24 -hipper 24 -bolide 24 -lawrences 24 -thier 24 -0.32 24 -ramshaw 24 -animist 24 -southie 24 -gaddis 24 -k.s. 24 -self-motivated 24 -multi-pronged 24 -aspirant 24 -dolours 24 -lawther 24 -mayefsky 24 -diable 24 -baliutaviciene 24 -breathy 24 -1hr 24 -disinterred 24 -naumann 24 -santeria 24 -hagerstown 24 -malakia 24 -hackemer 24 -grantland 24 -raese 24 -corrina 24 -stanozolol 24 -13.50 24 -63.2 24 -shrigley 24 -galtier 24 -3gb 24 -libra 24 -roxas 24 -venetians 24 -t-junction 24 -homies 24 -linzy 24 -harlech 24 -trophy-winning 24 -tough-as-nails 24 -rayel 24 -800-577-tips 24 -drbohlavova 24 -nationalizing 24 -voraciously 24 -vereniki 24 -17-0 24 -centralisation 24 -cossman 24 -leak-proof 24 -indystar.com 24 -groundskeepers 24 -shaya 24 -score-settling 24 -transdniestria 24 -omdurman 24 -babos 24 -passel 24 -conceited 24 -dulko 24 -job-related 24 -transpacific 24 -castrillo 24 -jamesandrew 24 -erinn 24 -starkweather 24 -leeuwen 24 -flood-related 24 -post-earthquake 24 -prenton 24 -paraty 24 -flood-stricken 24 -unprincipled 24 -salting 24 -oklahoma-based 24 -acog 24 -meddled 24 -zahed 24 -birdwatching 24 -pedroza 24 -warblers 24 -squinted 24 -germanwings 24 -syria-iraq 24 -guenot 24 -triple-bogey 24 -penetrator 24 -shas 24 -re-enlist 24 -swiftkey 24 -oppenneer 24 -scarmardo 24 -wyke 24 -redeemable 24 -withybush 24 -wofl 24 -fenghuang 24 -bexhill-on-sea 24 -sussams 24 -mcmeekin 24 -slawomir 24 -h-1b 24 -21-mile 24 -deleterious 24 -big-government 24 -saltier 24 -ubhey 24 -remee 24 -dumpy 24 -upp 24 -lega 24 -icebox 24 -jet-propelled 24 -spatucci 24 -974 24 -faal 24 -ecuele 24 -steegar 24 -octaves 24 -electorally 24 -8:46 24 -anglophile 24 -mosher 24 -ber 24 -drop-offs 24 -mashid 24 -drizzling 24 -jaroslaw 24 -vespers 24 -newswire 24 -k1 24 -semi-retirement 24 -three-wheel 24 -4:50 24 -factor-style 24 -lemonheigh 24 -asimov 24 -heidemann 24 -zsalynn 24 -roughead 24 -wernet 24 -hsv-2 24 -brigette 24 -moulting 24 -adovasio 24 -icbms 24 -cross-town 24 -advisement 24 -svensson 24 -congdon 24 -mcgeoghean 24 -torr 24 -fahrer 24 -citrine 24 -70cm 24 -waldrop 24 -housam 24 -sra 24 -sru 24 -3.38 24 -ste. 
24 -rugg-easey 24 -stringently 24 -three-over-par 24 -pertinently 24 -spliff 24 -yau 24 -ionised 24 -arkani-hamed 24 -12kg 24 -olivine 24 -outweighing 24 -non-nato 24 -seaforth 24 -gellman 24 -ruthie 24 -wyk 24 -3-mile 24 -wroughton 24 -medicate 24 -inter-racial 24 -pawlowski 24 -foragers 24 -38-21 24 -saisons 24 -boodles 24 -nationhood 24 -neodymium 24 -harahap 24 -mahamadou 24 -individualised 24 -vueling 24 -260ft 24 -pre-empting 24 -recommenced 24 -25,000-a-year 24 -qaly 24 -economou 24 -fruitlessly 24 -flexi 24 -she-devil 24 -bier 24 -batterer 24 -goncharenko 24 -prediabetes 24 -earphone 24 -teignbridge 24 -fiddles 24 -steeples 24 -volcanologist 24 -tilton 24 -diagnosable 24 -guinevere 24 -rolo 24 -much-admired 24 -pricking 24 -unconcious 24 -fudged 24 -foundering 24 -mre 24 -athanasios 24 -josette 24 -rosenburg 24 -easement 24 -jitendra 24 -balbirnie 24 -5-foot-10 24 -b&n 24 -spotsylvania 24 -outcropping 24 -lipschis 24 -johny 24 -valley-based 24 -marquezine 24 -sneider 24 -technicolour 24 -luckwell 24 -halter-neck 24 -pronto 24 -gularte 24 -4.16 24 -spc 24 -kreamer 24 -argilla 24 -3.19 24 -pulford 24 -eppley 24 -16-point 24 -delos 24 -appraise 24 -ilc 24 -wynkoop 24 -commonest 24 -pulau 24 -kabban 24 -gobbato 24 -duce 24 -guilhermina 24 -heriot 24 -post-birth 24 -yussman 24 -ogoni 24 -apel 24 -two-speed 24 -kishan 24 -concubine 24 -infarction 24 -squish 24 -herald-tribune 24 -pictish 24 -iksil 24 -wilmar 24 -venal 24 -taman 24 -4-1-2-1-2 24 -barnetta 24 -supersport 24 -jiaxing 24 -agyei-kodie 24 -schonfield 24 -loansharking 24 -anti-oxidants 24 -1,609 24 -2.87 24 -coray 24 -chu-young 24 -ball-carrying 24 -ilminster 24 -sub-surface 24 -audiobook 24 -adegoke 24 -multi-platform 24 -whole-body 24 -zi 24 -lechin 24 -axeing 24 -kellum 24 -mislabelled 24 -fna 24 -extra-terrestrials 24 -re-directed 24 -wauters 24 -bochud 24 -sangha 24 -febuary 24 -woodworth 24 -primesense 24 -vanwagner 24 -workshy 24 -isandlwana 24 -medvedevas 24 -1682 24 -9p 24 -four-over 24 -raymondo-felton 24 -afrobeat 24 -dijck 24 -brighton-based 24 -vil 24 -pathologically 24 -m.a. 
24 -foggin 24 -1512 24 -pyke 24 -agricole 24 -jetpacks 24 -sdlp 24 -badiuzzaman 24 -garin 24 -strategize 24 -rainieri 24 -pgad 24 -swooned 24 -dive-bombing 24 -whitt 24 -jean-max 24 -fearfully 24 -sks 24 -pallid 24 -ague 24 -g-rated 24 -kepler-62f 24 -aigles 24 -emea 24 -suppressant 24 -ride-along 24 -wazza 24 -damir 24 -rivington 24 -darusman 24 -haltingly 24 -salamon 24 -cook-off 24 -dujiangyan 24 -1-2-3 24 -14.30 24 -internals 24 -116-year-old 24 -s-21 24 -palani 24 -278,000 24 -waypoints 24 -adamo 24 -recode 24 -halawa 24 -petacchi 24 -konrardy 24 -politkovskaya 24 -mediclinic 24 -footnotes 24 -mogo 24 -besh 24 -fanelli 24 -spacecrafts 24 -beefeaters 24 -handwashing 24 -taung 24 -sound-proof 24 -screengrabs 24 -scadding 24 -kunwar 24 -yipeng 24 -hoodlums 24 -star-advertiser 24 -nuala 24 -dolson 24 -serova 24 -proton-m 24 -manjhi 24 -italics 24 -hugger 24 -fitzwilliams 24 -2:25 24 -jetsetter 24 -walmington-on-sea 24 -chyna 24 -warmer-than-average 24 -dantes 24 -ex-gay 24 -pawnshop 24 -as-yet-unnamed 24 -cybercrimes 24 -toomer 24 -affadavit 24 -rocero 24 -long-legged 24 -ristaino 24 -alagoas 24 -50-100 24 -wachtel 24 -1530 24 -tirr 24 -relaxants 24 -mckillen 24 -39.4 24 -difficultly 24 -pukka 24 -20-gauge 24 -1,700-mile 24 -montreal-based 24 -newschannel 24 -niemann-pick 24 -beckie 24 -adders 24 -degraw 24 -cuddy 24 -oversupply 24 -record-low 24 -diaphragmatic 24 -melber 24 -phiyega 24 -cannibalize 24 -viall 24 -ribeye 24 -gujrat 24 -rhineland 24 -bouillard 24 -fonder 24 -mallissa 24 -12am 24 -ciani 24 -el-gohary 24 -chemise 24 -co-payments 24 -longshot 24 -boogeyman 24 -fring 24 -xolair 24 -1,023 24 -1,025 24 -nellis 24 -portends 24 -vocations 24 -freckle-faced 24 -dyk 24 -comediennes 24 -anoraks 24 -4x200m 24 -fretwell 24 -dupuis 24 -sigonella 24 -barnicle 24 -untangling 24 -arabica 24 -mondragon 24 -dauda 24 -aphorisms 24 -utca 24 -balbuena 24 -bugliosi 24 -77th-minute 24 -marmoset 24 -preserver 24 -slathering 24 -obtuse 24 -baktuns 24 -extenders 24 -omerta 24 -macgillivray 24 -isayah 24 -registrants 24 -wilmshurst 24 -rok 24 -linfen 24 -araki 24 -readmission 24 -queensboro 24 -spitbank 24 -self-tanning 24 -kent-based 24 -fujiyama 24 -mywaitrose 24 -400-acre 24 -victimhood 24 -fryar 24 -blase 24 -rober 24 -thievy 24 -dima 24 -titi 24 -götze 24 -kendall-bryan 24 -34.1 24 -meck 24 -triple-double 24 -cotai 24 -riverdance 24 -quadrillion 24 -editorially 24 -siskiyou 24 -iffrig 24 -mcghie 24 -long-extinct 24 -koontz 24 -25lbs 24 -ginn 24 -infanti 24 -tointon 24 -el-beblawi 24 -sarsen 24 -hoovered 24 -masqueraded 24 -corpulent 24 -disparagingly 24 -#selfie 24 -state-licensed 24 -ebro 24 -smallman 24 -near-universal 24 -lamborn 24 -85th-minute 24 -yik 24 -pneumococcal 24 -poulet 24 -higher-ranking 24 -rebuking 24 -observateur 24 -baroin 24 -fsm 24 -bejing 24 -fraime 24 -thrillingly 24 -pepper-spraying 24 -overfilled 24 -hohenlohe 24 -needling 24 -aerialist 24 -oberoi-trident 24 -11-bedroom 24 -clewes 24 -goodreads 24 -aping 24 -sinnett 24 -saf 24 -a113 24 -borman 24 -beltane 24 -uncoupled 24 -kavkaz 24 -michener 24 -oropharyngeal 24 -panna 24 -cheapskates 24 -verbinski 24 -davis-monthan 24 -dunkel 24 -devilishly 24 -somerford 24 -reckitt 24 -brannagan 24 -kacee 24 -52ft 24 -incoherence 24 -sadistically 24 -ill-thought 24 -gridley 24 -dustmann 24 -myocardial 24 -anoop 24 -wasco 24 -wixson 24 -chilcott 24 -mastocytosis 24 -barefaced 24 -imrg 24 -fourth-tier 24 -tornambe 24 -51-year 24 -lahiru 24 -coogle 24 -@rimamaktabi 24 -beman 24 -cromie 24 -alvey 24 -mudbath 24 -wright-patterson 24 
-unesco-listed 24 -290m 24 -sartre 24 -henningsgaard 24 -commack 24 -company-owned 24 -orthodontic 24 -fuca 24 -clouseau 24 -toners 24 -ayovi 24 -rosewater 24 -balme 24 -amia 24 -two-vehicle 24 -ingleton 24 -igneous 24 -freycinet 24 -toyotas 24 -spacek 24 -mccomas 24 -pardalis 24 -procrastinate 24 -pattrick 24 -sisto 24 -cooksey 24 -manors 24 -ruark 24 -pre-schoolers 24 -hooter 24 -helayel 24 -organized-crime 24 -samit 24 -agnese 24 -hauschka 24 -medispa 24 -raib 24 -kirkos 24 -volochkova 24 -meadowhead 24 -tereza 24 -brinsolaro 24 -krissi 24 -gh 24 -sweeting 24 -2-7 24 -burdette 24 -three-parent 24 -tankard 24 -junichi 24 -82f 24 -sule 24 -low-skill 24 -nong 24 -faithless 24 -argentina-born 24 -garita 24 -freire 24 -sapeurs 24 -bycatch 24 -coronato 24 -petters 24 -lage 24 -vian 24 -berklee 24 -vibram 24 -aydelott 24 -two-under-par 24 -wioletta 24 -milstein 24 -wetness 24 -21:44 24 -direct-mail 24 -gaizka 24 -carrell 24 -dovecote 24 -bertolini 24 -otuam 24 -speedskater 24 -chastises 24 -visitscotland 24 -pipette 24 -engadin 24 -kreuzberg 24 -zeitlin 24 -his-and-hers 24 -dammed 24 -sidley 24 -delury 24 -light-heartedly 24 -shingler 24 -laro 24 -dalepak 24 -alif 24 -alit 24 -leiseth 24 -dells 24 -150kg 24 -yogis 24 -ub 24 -zinnel 24 -aquaria 24 -#is 24 -alphonso 24 -fraserburgh 24 -2.54 24 -hedlund 24 -koji 24 -ao.com 24 -wzzm 24 -erlich 24 -jordanna 24 -salto 24 -transients 24 -pradhan 24 -andiola 24 -bromhead 24 -desantis 24 -roseburg 24 -bayyah 24 -gerashchenko 24 -endearingly 24 -doman 24 -marga 24 -billinghurst 24 -kirimoto 24 -kurumi 24 -faruq 24 -irisin 24 -parag 24 -volumising 24 -listerine 24 -posession 24 -129th 24 -non-conforming 24 -sated 24 -kellock 24 -delton 24 -gradwell 24 -weimin 24 -zero-carbon 24 -ramras 24 -paneled 24 -cromitie 24 -moncayo 24 -dysfunctions 24 -pingo 24 -zhouqu 24 -youcaring 24 -enstone 24 -1035 24 -387,000 24 -harcombe 24 -kabwela 24 -radtke 24 -keflavik 24 -350-pound 24 -thudding 24 -breeann 24 -autosomal 24 -vacuum-sealed 24 -wilpon 24 -dovetailed 24 -overusing 24 -little-noticed 24 -ribosome 24 -justyn 24 -kiff 24 -rudine 24 -microwaved 24 -kahlil 24 -quick-step 24 -hornett 24 -sondhi 24 -bulawayo 24 -cockerels 24 -deonta 24 -ukad 24 -maisy 24 -14-18 24 -fourth-graders 24 -hartstein 24 -zedd 24 -yage 24 -jigokudani 24 -runions 24 -lua 24 -#uel 24 -irrigated 24 -dstl 24 -brooksville 24 -manalich 24 -dyfi 24 -receptacle 24 -canterbury-bankstown 24 -out-patient 24 -erakovic 24 -concho 24 -frater 24 -961 24 -flag-bearer 24 -737-700 24 -harrassed 24 -lightbox 24 -reenter 24 -dabrowski 24 -refractive 24 -secours 24 -octopi 24 -harlingen 24 -sebold 24 -stollak 24 -chappaquiddick 24 -labor-oriented 24 -painful-looking 24 -gawande 24 -groupie 24 -abreau 24 -sanclemente 24 -cobo 24 -cio 24 -baedeker 24 -18,700 24 -durcan 24 -shezanne 24 -armendariz 24 -merlino 24 -floorplan 24 -hammersley 24 -eranga 24 -hersheson 24 -wolkoff 24 -neb. 
24 -xl1 24 -tooke 24 -snored 24 -lucychoilondon.com 24 -neon-lit 24 -levan 24 -driouch 24 -immortalise 24 -pinatas 24 -liesl 24 -1,285 24 -bynum 24 -mimms 24 -asli 24 -bls 24 -gunna 24 -23:28 24 -dilutes 24 -impactor 24 -heatmap 24 -addenbrookes 24 -annise 24 -gellatly 24 -miftakhov 24 -gilberthorpe 24 -rigell 24 -5.8-magnitude 24 -three-masted 24 -work-release 24 -stimon 24 -buitenboys 24 -murphys 24 -awaking 24 -little-understood 24 -saraceno 24 -kamsler 24 -grendon 24 -foxhound 24 -sulphurous 24 -narcos 24 -gorrostieta 24 -benet 24 -bleary 24 -mohamoud 24 -trentin 24 -vecchiotti 24 -lifetiles 24 -carabiner 24 -risley 24 -taw 24 -colagiovanni 24 -mufid 24 -hamnett 24 -51.5 24 -atala 24 -meet-and-greets 24 -licciardi 24 -2744 24 -michael-martinez 24 -sasson 24 -cruze 24 -meshad 24 -uniondale 24 -ker 24 -dirhams 24 -first-name 24 -giurgiu 24 -domesek 24 -7.05 24 -vipr 24 -12-6 24 -aker 24 -leptospirosis 24 -themself 24 -anti-europe 24 -f-14 24 -nata 24 -ex-beatle 24 -ameliorate 24 -1150 24 -cattistock 24 -62,500 24 -soundwaves 24 -conflating 24 -roly-poly 24 -midlevel 24 -lipson 24 -beakers 24 -kissable 24 -sumsion 24 -fluffing 24 -imperfection 24 -tawakkul 24 -kumakawa 24 -kcbd 24 -halevy 24 -selim 24 -capillary 24 -godspeed 24 -igbokwe 24 -citadels 24 -olivera 24 -heart-lung 24 -jeannard 24 -tawadros 24 -tiarna 24 -out-there 24 -shipper 24 -jeffpowell_mail 24 -comancheros 24 -20,000-square-foot 24 -metu 24 -rejoins 24 -bigwig 24 -ravshan 24 -pragmatists 24 -dentine 24 -white-gloved 24 -helu 24 -water-rich 24 -mylar 24 -galena 24 -bilaterally 24 -ors 24 -orn 24 -aeroshot 24 -eloping 24 -dga 24 -annular 24 -schechter 24 -r_rai 24 -crandell 24 -eleventh-hour 24 -joon 24 -shadsworth 24 -woodforde 24 -humanized 24 -hi-res 24 -mathiesen 24 -retributive 24 -screenwash 24 -muslimah 24 -bilawal 24 -lyke 24 -study-abroad 24 -saratova 24 -nottage 24 -missourians 24 -logger 24 -tagalog 24 -rugby-playing 24 -chögyam 24 -memon 24 -vinh 24 -prager 24 -32g 24 -155million 24 -arnell 24 -name-checked 24 -backheeled 24 -coppack 24 -cohabit 24 -shirazi 24 -ollantaytambo 24 -baby-sit 24 -ramdin 24 -judiciously 24 -airmail 24 -8-point 24 -huss 24 -mysko 24 -harned 24 -berenstain 24 -fitzsimmonds 24 -jamelia 24 -wearn 24 -six-legged 24 -dubin 24 -mcelligott 24 -unpardonable 24 -namias 24 -lapsing 24 -waifs 24 -honister 24 -touraine 24 -navsarka 24 -blindfolding 24 -fuller-figured 24 -kneecaps 24 -harangued 24 -ultrabook 24 -storehouses 24 -rafaa 24 -call-taker 24 -orinoco 24 -hansen-bartel 24 -dried-out 24 -wilcke 24 -wo1 24 -enacts 24 -spurted 24 -kerim 24 -malecon 24 -encroaches 24 -tows 24 -schellpfeffer 24 -48.7 24 -1000011 24 -brokovich 24 -beiji 24 -endpoint 24 -playtex 24 -ex-nba 24 -ovalle 24 -frictionless 24 -ponteland 24 -ashely 24 -zoll 24 -goalies 24 -shuja 24 -baleen 24 -difford 24 -almond-shaped 24 -bessemer 24 -feigley 24 -ballgowns 24 -xabier 24 -hedwall 24 -filleted 24 -garderen 24 -mckell 24 -orduno 24 -business-minded 24 -haymon 24 -kicked-off 24 -muzak 24 -nerveless 24 -filer 24 -plebiscite 24 -stooke 24 -irmatov 24 -bowhead 24 -davegun 24 -scheinberg 24 -one-nil 24 -triumphalism 24 -gabbiadini 24 -larios 24 -rashaida 24 -highly-sensitive 24 -antetokounmpo 24 -chillax 24 -non-communicable 24 -ayliffe 24 -freedivers 24 -unkindly 24 -cetron 24 -prophesied 24 -pentridge 24 -geauga 24 -mularski 24 -birdwatch 24 -alcides 24 -wolfing 24 -peeve 24 -1 24 -30-odd 24 -brundage 24 -bargain-hunters 24 -in-ear 24 -granade 24 -unsupportive 24 -sheinkopf 24 -mandie 24 -885 24 -mogilevich 24 -stroble 24 
-argentinosaurus 24 -stepsisters 24 -velzen 24 -disassembly 24 -apso 24 -246,000 24 -lcross 24 -transducer 24 -ejaculated 24 -demasi 24 -becs 24 -spenny 24 -fully-furnished 24 -bostwick 24 -interlock 24 -figi 24 -thameside 24 -chart-toppers 24 -comprehensible 24 -vote-getters 24 -brod 24 -lengthens 24 -zakieya 24 -avuncular 24 -relevancy 24 -140ft 24 -golton 24 -cavell 24 -maur 24 -ilyse 24 -acquitting 24 -angelopoulos 24 -vestibule 24 -12-pack 24 -benediction 24 -epperly 24 -miniaturized 24 -simonds 24 -marunouchi 24 -fatties 24 -bonia 24 -interbreed 24 -36.6 24 -punahou 24 -straight-faced 24 -sunnies 24 -galpin 24 -child-killer 24 -katha 24 -simcock 24 -flushable 24 -sg 24 -wiesner 24 -seamons 24 -easterby 24 -rony 24 -demoura 24 -slow-burning 24 -sutopo 24 -squawks 24 -dernbach 24 -chik-fil-a 24 -nanuq 24 -deville 24 -freeloaders 24 -pistol-whipping 24 -verveer 24 -1673 24 -no-shows 24 -motorboats 24 -elmwood 24 -rockhopper 24 -niccol 24 -hotel-casino 24 -injector 24 -haneke 24 -algeciras 24 -cropp 24 -convertibles 24 -koubbi 24 -shalaine 24 -manically 24 -under-performance 24 -crystal-like 24 -norco 24 -one-offs 24 -messaggero 24 -7:10 24 -reihan 24 -sleighs 24 -cdre 24 -tuxes 24 -psychedelics 24 -15-nation 24 -ntt 24 -skinnies 24 -matar 24 -choreograph 24 -underwired 24 -hing 24 -gyrate 24 -catechism 24 -47.1 24 -47.7 24 -missile-defense 24 -over-estimated 24 -procedurally 24 -niyonshuti 24 -acocks 24 -keijzer 24 -mulvaney 24 -ifans 24 -filiti 24 -harlesden 24 -ornery 24 -500billion 24 -katana 24 -kitesurfer 24 -clavicle 24 -inter-city 24 -visia 24 -ornithologists 24 -one-wheeled 24 -dorsch 24 -randazza 24 -duffie 24 -deregulated 24 -frinton-on-sea 24 -canape 24 -imu 24 -imbeciles 24 -modafinil 24 -okah 24 -milan-based 24 -wilson-fletcher 24 -49.7 24 -49.8 24 -adelle 24 -caryl 24 -afic 24 -ravana 24 -woessner 24 -palacin 24 -mosshart 24 -garavaglia 24 -teruel 24 -longdon 24 -64.5 24 -32-hour 24 -titanfall 24 -bansal 24 -gilbey 24 -sleep-walking 24 -headroom 24 -petrasso 24 -hows 24 -69.6 24 -lily-mae 24 -pacifism 24 -towner 24 -anti-extremism 24 -rhoney 24 -yazdi 24 -lookbook 24 -reynders 24 -nathi 24 -toi 24 -rz 24 -oooh 24 -craning 24 -wide-bodied 24 -scape 24 -adeokun 24 -6 1/2 24 -nonemergency 24 -churchwarden 24 -headmistresses 24 -gulley 24 -outspokenness 24 -reinsch 24 -0930 24 -farlow 24 -cadeaux 24 -underbrush 24 -kaunda 24 -trego 24 -deducting 24 -voice-mail 24 -rylands 24 -maskers 24 -wsfa 24 -nonstarter 24 -divvied 24 -discreditable 24 -churchillian 24 -botting 24 -songhua 24 -khokhar 24 -back-three 24 -uncommonly 24 -imparts 24 -accomodate 24 -mcnicholas 24 -nicknaming 24 -udder 24 -cheapskate 24 -coventry-based 24 -groupama 24 -gdr 24 -sayin 24 -chablis 24 -ortiz-rodriguez 24 -self-medicating 24 -higher-profile 24 -personality-wise 24 -greenoe 24 -wallechinsky 24 -3-point 24 -leathem 24 -eichelberger 24 -shurtleff 24 -tawakkol 24 -foleys 24 -lambert-st 24 -negar 24 -ionized 24 -jakir 24 -camarena 24 -gainsville 24 -luon 24 -nps.gov 24 -scarbrough 24 -vestey 24 -velli 24 -ten-acre 24 -kornberg 24 -leaden 24 -lopicola 24 -portmanteau 23 -suwon 23 -yitzy 23 -saint-tropez 23 -woolton 23 -staab 23 -fudan 23 -beshore 23 -post-fight 23 -hypo 23 -microbreweries 23 -cerza 23 -sines 23 -seabridge 23 -wmar-tv 23 -asptt 23 -vhf 23 -under-14s 23 -re-listed 23 -plausibility 23 -reaps 23 -evonne 23 -asheton 23 -jump-started 23 -aneesh 23 -absolutes 23 -sechin 23 -topographical 23 -ruddell 23 -lambrechts 23 -fery 23 -hollow-point 23 -condemnable 23 -sangeen 23 -dannelly 23 -binational 
23 -ronde 23 -crisply 23 -sports-loving 23 -ttip 23 -parameter 23 -ingvar 23 -nestel 23 -sheathed 23 -hartmut 23 -simm 23 -vanderklok 23 -pogroms 23 -self-sustainable 23 -lewi 23 -9.14 23 -groce 23 -freundel 23 -neck-deep 23 -jarecki 23 -dmd 23 -wieser 23 -megastars 23 -take-over 23 -22.99 23 -twinges 23 -145million 23 -iquique 23 -stermer 23 -induct 23 -wingham 23 -grabove 23 -thespians 23 -gaskins 23 -kearl 23 -2,000-a-month 23 -v838 23 -mediacityuk 23 -alginate 23 -slingbox 23 -erb 23 -70-minute 23 -christina-taylor 23 -henbury 23 -one-season 23 -500-strong 23 -moodys 23 -albaugh 23 -favipiravir 23 -vethavanam 23 -hazaras 23 -sarr 23 -huertas 23 -inter-bank 23 -glycerine 23 -frinton 23 -beslow 23 -staver 23 -phua 23 -375billion 23 -21:37 23 -48.8 23 -mcgrail 23 -kws 23 -henryk 23 -whistlestop 23 -a&r 23 -colebrook 23 -www.orionbooks.co.uk 23 -croydon-born 23 -undertow 23 -misaki 23 -libbie 23 -busk 23 -schmooze 23 -heming 23 -tottie 23 -alcoa 23 -utomo 23 -impermissibly 23 -never-before 23 -beevor 23 -matera 23 -lostutter 23 -toughens 23 -adminstration 23 -rokita 23 -impressionistic 23 -shot-stopping 23 -harasta 23 -corrado 23 -multi-buy 23 -312,000 23 -muybridge 23 -intersects 23 -cherry-pick 23 -payerne 23 -khder 23 -coat-dress 23 -arbitrate 23 -brandywine 23 -2,050 23 -libertad 23 -idolising 23 -canoville 23 -wbrz 23 -mexicana 23 -museka 23 -iasonides 23 -eskdale 23 -vainly 23 -workdays 23 -zahi 23 -127million 23 -sithole 23 -much-coveted 23 -199,000 23 -1348 23 -unblocking 23 -nursey 23 -fehily 23 -masterly 23 -buraida 23 -ittihad 23 -née 23 -tindale 23 -girls-only 23 -al-nujaifi 23 -demoed 23 -equis 23 -shortchanged 23 -digests 23 -noack 23 -snuggly 23 -mumbrella 23 -wack 23 -namdaemun 23 -chilis 23 -cross-trainer 23 -wiznitzer 23 -kohno 23 -knockoffs 23 -alhaji 23 -fotoh 23 -leijerstam 23 -leiter 23 -basbug 23 -dulgheru 23 -palestinian-israeli 23 -abdirahman 23 -yes/no 23 -carolla 23 -redacting 23 -valera 23 -picador 23 -previa 23 -holohan 23 -w12 23 -binocular 23 -poelten 23 -roberston 23 -21:15 23 -sentosa 23 -kavlak 23 -oddbins 23 -marlan 23 -reemerged 23 -coupes 23 -keun-ho 23 -toren 23 -killie 23 -ezaldein 23 -loquacious 23 -transistor 23 -fallacious 23 -ismini 23 -maizen 23 -ge235 23 -cael 23 -nouakchott 23 -berkus 23 -brahms 23 -bouton 23 -sharfstein 23 -bledel 23 -meninas 23 -rueben 23 -padilha 23 -tree-planting 23 -286,000 23 -kalin 23 -windrush 23 -brunati 23 -scheiner 23 -mij 23 -cahall 23 -clovelly 23 -different-sized 23 -unita 23 -incentivising 23 -cornflour 23 -dietetics 23 -zana 23 -zant 23 -zdenek 23 -wingdam 23 -donnison 23 -indexation 23 -interconnection 23 -off-kilter 23 -21:20 23 -hosea 23 -9.59 23 -178,000 23 -bluml 23 -ragging 23 -second-from-bottom 23 -seventh-floor 23 -tabaka 23 -lurgan 23 -hendo 23 -shieff 23 -romijn 23 -light-touch 23 -tweety 23 -c-130j 23 -stolichnaya 23 -csb 23 -painswick 23 -two-floor 23 -ossevoort 23 -millon 23 -104f 23 -aweys 23 -tarvydas 23 -brun 23 -l'oeil 23 -levitated 23 -pierpont 23 -51.3 23 -eyers 23 -p45 23 -badmin 23 -re-enacts 23 -videophone 23 -licencing 23 -bozi 23 -skybet 23 -canonised 23 -lodhi 23 -dolittle 23 -savona 23 -galikowska 23 -marcangelo 23 -ceni 23 -iera 23 -swanscombe 23 -siats 23 -church-state 23 -tensed 23 -humiliatingly 23 -sub-contracted 23 -crotts 23 -appaloosa 23 -northern-most 23 -turay 23 -parvizi 23 -schwinn 23 -sixth-round 23 -cawood 23 -maribyrnong 23 -infuriatingly 23 -blocky 23 -beaux-arts 23 -vaporise 23 -tarifa 23 -besancon 23 -2.62 23 -whippets 23 -wcvb-tv 23 -jordie 23 -back-office 23 -poff 
23 -lambros 23 -imaginarium 23 -hinks 23 -telefónica 23 -257,000 23 -scowled 23 -gaiser 23 -pavao-pavaozinho 23 -florez 23 -partner-in-crime 23 -newly-renovated 23 -gresini 23 -kelud 23 -twitterati 23 -siga 23 -nyasasaurus 23 -admins 23 -farts 23 -zehnder 23 -malakal 23 -11-13 23 -0.55 23 -heart-melting 23 -trita 23 -frenemy 23 -woohoo 23 -dog-eat-dog 23 -mcso 23 -hatchling 23 -illaramendi 23 -trethewey 23 -00:18 23 -00:14 23 -toomua 23 -empathic 23 -ojukwu 23 -liquidator 23 -adlene 23 -casino-style 23 -belfast-born 23 -langerak 23 -nightcap 23 -paging 23 -schein 23 -3:35 23 -#yesallwomen 23 -rawness 23 -well-cut 23 -senders 23 -parti 23 -delila 23 -german-made 23 -chocoholics 23 -bernalillo 23 -813 23 -fothergill 23 -nidia 23 -lamp-post 23 -loureiro 23 -marnell 23 -magarief 23 -doel 23 -shoshanna 23 -humanise 23 -vermilion 23 -fifth-year 23 -fessey 23 -wholegrains 23 -seabiscuit 23 -revenue-generating 23 -mmorpg 23 -cackled 23 -baptize 23 -wagnon 23 -husen 23 -virginian-pilot 23 -marles 23 -seddiqi 23 -nonconsensual 23 -steiger 23 -33.4 23 -curio 23 -flat-lined 23 -room-service 23 -piazzas 23 -kurram 23 -washwood 23 -normanby 23 -parbat 23 -tase 23 -veen 23 -bitsko 23 -haitian-american 23 -bergara 23 -akufo-addo 23 -mircea 23 -privitera 23 -haleakala 23 -16,800 23 -pre-mixed 23 -espys 23 -co-inventor 23 -much-touted 23 -2010-12 23 -2.48 23 -59908 23 -cnnhealth.com 23 -chicago-born 23 -poges 23 -coch 23 -cross-city 23 -bowness 23 -el-khalifi 23 -shampooing 23 -fealty 23 -passos 23 -chiurai 23 -cnooc 23 -mackrell 23 -timelessness 23 -25f 23 -adventists 23 -unrepresented 23 -tangalle 23 -himes 23 -dirt-track 23 -low-mass 23 -garness 23 -astatke 23 -00:36 23 -rushdi 23 -varlamova 23 -21-20 23 -nigrelli 23 -cornton 23 -fuschia 23 -democratize 23 -gora 23 -boldrini 23 -nb 23 -jolanta 23 -boileau 23 -ytterdahl 23 -counterclockwise 23 -antoin 23 -millionvalue 23 -retch 23 -augmented-reality 23 -evs 23 -jazlyn 23 -omaha.com 23 -ibaka 23 -wildaid 23 -23-years-old 23 -eastgate 23 -by-laws 23 -microsurgery 23 -heâ 23 -turkle 23 -briley 23 -less-than-stellar 23 -kudryavtsev 23 -drapers 23 -nuisances 23 -cuylaerts 23 -bisk 23 -kevans 23 -montsho 23 -towan 23 -1547 23 -malts 23 -sohu 23 -zeev 23 -2006/7 23 -dahn 23 -recoils 23 -seaby 23 -anti-incumbent 23 -hayhurst 23 -honeyman 23 -uiw 23 -name-brand 23 -wallstrom 23 -littledean 23 -hauler 23 -cybart 23 -or-7 23 -sobekhotep 23 -emerton 23 -gorga 23 -aerin 23 -stipends 23 -linesmen 23 -2.21 23 -chives 23 -killjoy 23 -pursuer 23 -sassi 23 -joules 23 -scheppers 23 -chatters 23 -camerota 23 -bunt 23 -instagramming 23 -idolises 23 -ekg 23 -cromnibus 23 -2,560 23 -duffey 23 -humbler 23 -perisher 23 -diphenhydramine 23 -collinsville 23 -lameness 23 -kurochkin 23 -41.3 23 -41.2 23 -rodd 23 -feminisation 23 -kasten 23 -forres 23 -stilt 23 -enfarinats 23 -pieth 23 -ulukaya 23 -mcresource 23 -kher 23 -calin 23 -crescents 23 -transphobic 23 -fairhaven 23 -german-built 23 -niaid 23 -nikole 23 -monckton 23 -aquaman 23 -raby 23 -megabucks 23 -nrma 23 -adizero 23 -yearslong 23 -fulmer 23 -claughton 23 -mcflurry 23 -restrains 23 -oberhansley 23 -fourth-seeded 23 -marinade 23 -shivaji 23 -paffrath 23 -u.va 23 -mccleary 23 -savchenko 23 -shivery 23 -retell 23 -ascetic 23 -molt 23 -quasi-judicial 23 -steamrollered 23 -sweeper-keeper 23 -1.82 23 -280,000-a-week 23 -hemolytic 23 -cultivates 23 -pallial 23 -assiut 23 -labrinth 23 -20.50 23 -reddening 23 -rumbold 23 -monsoor 23 -five-years 23 -rozniakowski 23 -acott 23 -moccasins 23 -robina 23 -legget 23 -barmouth 23 -grieco 
23 -natcen 23 -petties 23 -travelator 23 -tailpipe 23 -third-seeded 23 -santimore 23 -surmount 23 -44-year 23 -shri 23 -quechua 23 -flatworm 23 -gonos 23 -pincer 23 -kritzer 23 -windstorm 23 -kerimov 23 -ovo 23 -pulped 23 -atoc 23 -northcott 23 -tsering 23 -clearout 23 -http://nbcnewyork.com 23 -1623 23 -ocasio 23 -whys 23 -pinata 23 -chornovol 23 -lire 23 -seasonings 23 -self-delusion 23 -mahlum 23 -lawbreaking 23 -hornsea 23 -23:58 23 -23:51 23 -oppositions 23 -schipol 23 -fitzhugh 23 -21-man 23 -wingsuits 23 -extortionist 23 -promisingly 23 -hook-ups 23 -4/10 23 -vcs 23 -hakkinen 23 -rovera 23 -rav 23 -brawlers 23 -bracciali 23 -cowgirls 23 -benlolo 23 -clancey 23 -breezing 23 -75per 23 -tove 23 -kopechne 23 -natarsha 23 -kallo 23 -aldhouse 23 -kempster 23 -300-foot 23 -cefn 23 -culbreath 23 -diarrassouba 23 -papay 23 -svt 23 -muffs 23 -chapstick 23 -odion 23 -6-foot-6 23 -al-fayed 23 -scerri 23 -nato-backed 23 -freckle 23 -crnobrnja 23 -laugh-out-loud 23 -kennaway 23 -o'quinn 23 -mercator 23 -haad 23 -journeymen 23 -malindi 23 -hartwig 23 -stiffens 23 -dork 23 -meighan 23 -umi 23 -hand-rolled 23 -twyman 23 -ravitch 23 -well-protected 23 -war-like 23 -picnickers 23 -egomaniac 23 -mubarek 23 -schantz 23 -mcfeely 23 -hansi 23 -fleshing 23 -wheely 23 -half-a-billion 23 -chipps 23 -begonias 23 -ratty 23 -scroogled 23 -fanti 23 -praiseworthy 23 -3.22 23 -grampus 23 -buzzers 23 -non-apple 23 -hackneyed 23 -kaylen 23 -footlong 23 -claes 23 -bosniaks 23 -polansky 23 -ottaviani 23 -zinkon 23 -holdups 23 -kirkgate 23 -aide-de-camp 23 -deathtrap 23 -omnivore 23 -951 23 -anti-hunt 23 -maunder 23 -chinkys 23 -naoki 23 -speckles 23 -dressed-down 23 -gourgeon 23 -kakad 23 -yandamuri 23 -rishton 23 -monici 23 -tree-dwelling 23 -foghorn 23 -wilzig 23 -mongooses 23 -delgatty 23 -flub 23 -grigoropoulos 23 -gamey 23 -nodine 23 -half-heartedly 23 -bareback 23 -merryman 23 -nismo 23 -witwatersrand 23 -jorgen 23 -colicchio 23 -hayes-bautista 23 -laparoscopy 23 -steinitz 23 -meldrew 23 -charlieskillen 23 -dhuhulow 23 -rockettes 23 -wisecrack 23 -gaped 23 -minallah 23 -celcius 23 -easby 23 -dressler 23 -dorothee 23 -tobogganing 23 -16p 23 -mediaeval 23 -anoxic 23 -pershore 23 -mistrusted 23 -navarette 23 -gasperini 23 -malfoy 23 -theraflu 23 -chivu 23 -euthanizing 23 -pain-relieving 23 -milliken-smith 23 -mohandas 23 -16-member 23 -na'alin 23 -labead 23 -encephalomyelitis 23 -crini 23 -prelate 23 -65th-minute 23 -moslehi 23 -re-selling 23 -grigoriev 23 -mex 23 -foulser 23 -roderic 23 -snoozed 23 -citroën 23 -bradl 23 -teems 23 -pantic 23 -limbal 23 -kui 23 -resounded 23 -d.o.b. 
23 -m'baye 23 -ahl 23 -ahs 23 -hainsworth 23 -cenote 23 -otunbayeva 23 -valerio 23 -munyenyezi 23 -00:23 23 -92.9 23 -sarchie 23 -genies 23 -stressor 23 -choucroun 23 -j.k 23 -tuileries 23 -glyphosate 23 -aggarwal 23 -drophead 23 -gusted 23 -horridge 23 -poliovirus 23 -national-security 23 -headhunting 23 -whitest 23 -quaye 23 -1086 23 -embarrasses 23 -easy-to-understand 23 -barkey 23 -4x100-meter 23 -senebkay 23 -erno 23 -football-themed 23 -columbo 23 -dudas 23 -silets 23 -181,000 23 -coveney 23 -panero 23 -ghrelin 23 -bouey 23 -dailies 23 -liverpudlians 23 -whirled 23 -toyah 23 -latza 23 -musudan 23 -bodyboarding 23 -gudgeon 23 -gel-like 23 -ebbing 23 -sansum 23 -then-presidential 23 -bristolian 23 -matz 23 -rossouw 23 -means-testing 23 -stationers 23 -ogaden 23 -crasher 23 -qaumi 23 -7:05 23 -azzaoui 23 -joyriding 23 -pasqual 23 -patrimony 23 -opperman 23 -pochter 23 -telematics 23 -harada 23 -liaqat 23 -kostin 23 -unicycles 23 -american-owned 23 -ared 23 -tynes 23 -fla 23 -stanbridge 23 -33,500 23 -przybyl 23 -tinting 23 -sobia 23 -korean-flagged 23 -al-khilifa 23 -pettitt 23 -pottermore 23 -dierks 23 -m.s. 23 -nerlinger 23 -mondo 23 -fistfights 23 -séance 23 -szabados 23 -1939-1945 23 -shrimpers 23 -family-style 23 -ducey 23 -215million 23 -uygur 23 -satyam 23 -burkett 23 -swinburne 23 -trebles 23 -rhossili 23 -tine 23 -under-15 23 -maracas 23 -bache 23 -tralee 23 -austrian-born 23 -at-sea 23 -rustam 23 -newly-launched 23 -news/new 23 -armful 23 -mote 23 -snuffles 23 -kolpak 23 -aneizi 23 -novakovic 23 -sissons 23 -29,500 23 -250mph 23 -eakley 23 -eppridge 23 -3:43 23 -3.18 23 -3.12 23 -grenell 23 -chronicler 23 -55-45 23 -putsch 23 -appathurai 23 -5.27 23 -javanese 23 -okanagan 23 -55f 23 -genotype 23 -jf 23 -michaelson 23 -diffusers 23 -anti-india 23 -110lbs 23 -underplay 23 -fredskov 23 -guava 23 -lmu 23 -80,000-a-week 23 -vulva 23 -skyteam 23 -usm 23 -afrikka 23 -palencia 23 -8,000-mile 23 -con-artist 23 -gornall 23 -bugattis 23 -lurssen 23 -maciejewski 23 -wetumpka 23 -kausman 23 -quirico 23 -esper 23 -emanuela 23 -nevadans 23 -almudena 23 -2.89 23 -slovan 23 -patronized 23 -pearlie 23 -unifies 23 -35.9 23 -nizar 23 -sixty-three 23 -moore-wilton 23 -rowbotham 23 -709 23 -eagleton 23 -knebworth 23 -3.37 23 -reawakening 23 -mis-hit 23 -ketchikan 23 -twohig 23 -854 23 -super-sensitive 23 -debt-free 23 -1,368 23 -laditan 23 -junek 23 -shimmered 23 -four-months 23 -slovaks 23 -vig 23 -sidewinder 23 -carteret 23 -bazard 23 -5.04 23 -creque 23 -cigar-chomping 23 -tranquilisers 23 -hang-gliding 23 -caging 23 -ibragimova 23 -iwicki 23 -spithill 23 -nechin 23 -romanee-conti 23 -hashid 23 -macula 23 -haematologist 23 -zenica 23 -whacks 23 -doneil 23 -mirzaei 23 -foord 23 -eps 23 -schavan 23 -formatted 23 -auchterarder 23 -8-ounce 23 -propeller-driven 23 -paralleled 23 -shirked 23 -dicker 23 -cross-breeding 23 -balled 23 -goodrem 23 -23-foot 23 -zappala 23 -vowles 23 -sarsak 23 -reuters/ipsos 23 -expensively-assembled 23 -sheffield-based 23 -disneyworld 23 -monotheism 23 -gnaws 23 -giroux 23 -volcanos 23 -22:55 23 -okayama 23 -underpayment 23 -pigeonholed 23 -spider-woman 23 -fancying 23 -avios 23 -now-banned 23 -frebble 23 -20-3 23 -1430 23 -1,008 23 -56.4 23 -ifoghas 23 -provable 23 -toei 23 -293,000 23 -audaciously 23 -three-wicket 23 -catcalled 23 -dimly-lit 23 -samphire 23 -bittman 23 -three-second 23 -tanganyika 23 -sel 23 -unusual-looking 23 -jacir 23 -prijedor 23 -co-presented 23 -gap-toothed 23 -tip-toeing 23 -post-retirement 23 -arango 23 -tool-making 23 -macdonagh 23 -gourley 23 
-bomb-makers 23 -navidad 23 -wartorn 23 -election-related 23 -gorakhpur 23 -cocoa-producing 23 -retta 23 -a.i. 23 -i5 23 -hintze 23 -unphased 23 -gyre 23 -dowsing 23 -goal-oriented 23 -kacicova 23 -dystopia 23 -caifa 23 -emp 23 -t-pim 23 -combated 23 -postponements 23 -high-temperature 23 -quietened 23 -mccreary 23 -koll 23 -heb 23 -hoppers 23 -actuaries 23 -lilt 23 -weylandt 23 -gloversville 23 -1,133 23 -derya 23 -tie-breaker 23 -stickleback 23 -adjudicators 23 -vincents 23 -denims 23 -spyropoulos 23 -scalds 23 -al-majeed 23 -univeristy 23 -unappetizing 23 -crewmate 23 -5:50 23 -watkin 23 -suazo 23 -m74 23 -holguin 23 -kaohe 23 -ottavio 23 -daigneault 23 -trusties 23 -leguizamo 23 -knutson 23 -km/s 23 -ex-british 23 -colquitt 23 -interchanges 23 -pito 23 -peruggia 23 -latta 23 -e-bikes 23 -facetious 23 -lusail 23 -1.41 23 -pallett 23 -cosco 23 -yingzeng 23 -meeking 23 -numberplates 23 -macquarrie 23 -samel 23 -malabar 23 -kojo-smith 23 -shiveluch 23 -kenni 23 -17cm 23 -afanador 23 -celski 23 -bondarenko 23 -by-pass 23 -minister-designate 23 -esure 23 -duck-billed 23 -sunbathes 23 -puccio 23 -863 23 -bodyform 23 -isuppli 23 -#iamsorry 23 -ambles 23 -dowse 23 -tazia 23 -hoppe 23 -wonga.com 23 -abdollahian 23 -selcuk 23 -cassi 23 -magnani 23 -nordman 23 -raqqah 23 -bowes-lyon 23 -enfants 23 -coss 23 -milroy-sloan 23 -a.k.a 23 -nok 23 -disallowing 23 -ndambuki 23 -110-mile 23 -deif 23 -gastroschisis 23 -martín 23 -nitish 23 -santangelo 23 -houseplants 23 -bissau 23 -berthia 23 -embalmers 23 -democratized 23 -chikhani 23 -beecher 23 -multi-lingual 23 -alamitos 23 -mgs 23 -yotam 23 -coyte 23 -#askjose 23 -morisi 23 -5-year-olds 23 -half-pipe 23 -railgun 23 -magaly 23 -glass-bottomed 23 -rindt 23 -cut-glass 23 -salpa 23 -pro-celebrity 23 -todds 23 -263,000 23 -haddington 23 -1.6-litre 23 -inattentiveness 23 -carder 23 -throaty 23 -cacia 23 -pogues 23 -3,350 23 -hh 23 -268,000 23 -politicising 23 -cerne 23 -ndma 23 -zaim 23 -bismillah 23 -steff 23 -forstmann 23 -balch 23 -batallion 23 -hullabaloo 23 -wartner 23 -bozic 23 -nahal 23 -lowbrow 23 -on-the-field 23 -byndloss 23 -unblocked 23 -fanged 23 -belanglo 23 -liya 23 -sorento 23 -laforty 23 -kofaviv 23 -safiya 23 -rechter 23 -22:11 23 -22:13 23 -spee 23 -nigger 23 -kingscote 23 -cawsey 23 -schaub 23 -perfringens 23 -rimini 23 -twitty 23 -venera 23 -2.5-mile 23 -pansy 23 -millenia 23 -al-qaeda-affiliated 23 -blacked-up 23 -billittier 23 -ahli 23 -1,040 23 -losse 23 -medicals 23 -sport-utility 23 -disreputable 23 -machover 23 -frys.com 23 -ciobotaru 23 -exe 23 -new-york 23 -kirani 23 -garfinkle 23 -haji-ioannou 23 -unlabeled 23 -guek 23 -four-bedroomed 23 -sakirin 23 -wuhayshi 23 -suddenness 23 -seechurn 23 -past-time 23 -7km 23 -udo 23 -subjugate 23 -caspersen 23 -footsie 23 -necas 23 -1,046 23 -wordy 23 -gbbo 23 -920,000 23 -mutawa 23 -wagenhoffer 23 -metairie 23 -dingemans 23 -salvi 23 -hashmat 23 -35-years-old 23 -ranstorp 23 -spotland 23 -cmu 23 -nsfw 23 -lower-calorie 23 -matijevic 23 -saudi-born 23 -helvenston 23 -kolasinac 23 -sobelman 23 -popoola 23 -insulza 23 -valerian 23 -wotton-under-edge 23 -auv 23 -unbound 23 -anti-microbial 23 -dawdling 23 -massow 23 -knatchbull 23 -yo-jong 23 -bacteriology 23 -barsby 23 -warlow 23 -monopolized 23 -biteback 23 -must-watch 23 -refiners 23 -swapp 23 -crossovers 23 -mid-off 23 -turkish-american 23 -conflated 23 -654 23 -faà 23 -high-technology 23 -pincers 23 -sehgal 23 -juara 23 -trialists 23 -cortney 23 -siddiqi 23 -landauer 23 -emmie 23 -'25 23 -joris 23 -vitalii 23 -hartline 23 -38.4 23 -kohver 23 
-doda 23 -toot 23 -josi 23 -lemma 23 -1.5-inch 23 -fmcsa 23 -kval 23 -live-streamed 23 -redesigns 23 -puckering 23 -irfu 23 -housatonic 23 -metabolically 23 -playdates 23 -reenactments 23 -laforet 23 -atlases 23 -tizzy 23 -soudani 23 -lombaerts 23 -cersei 23 -bawsey 23 -munyai 23 -450th 23 -adornments 23 -tamagotchi 23 -pontin 23 -sorbonne 23 -majeure 23 -prees 23 -uprights 23 -anti-fur 23 -elber 23 -orit 23 -saniewska 23 -artpop 23 -masada 23 -free-agent 23 -1737 23 -maazel 23 -7.49 23 -beacham 23 -ferrol 23 -tranquilize 23 -ballarin 23 -alphonsi 23 -tomsk 23 -mollman 23 -16lbs 23 -pontificating 23 -36-hole 23 -40-metre 23 -cobalts 23 -noughts 23 -farfetch 23 -super-intelligent 23 -awb 23 -shaldon 23 -poptech 23 -conagra 23 -hamadi 23 -kirti 23 -bogle 23 -bekdash 23 -spain-portugal 23 -athenian 23 -water-boarding 23 -much-talked-about 23 -bedingfield 23 -bickel 23 -mohammadzai 23 -nif 23 -1,500-year-old 23 -maroochydore 23 -kacper 23 -mandron 23 -shopbop 23 -679 23 -cubitt 23 -emeryville 23 -uriah 23 -daydreams 23 -dowdle 23 -adoptable 23 -bigs 23 -dajana 23 -804 23 -vasilis 23 -cammarano 23 -choudhrie 23 -shoehorned 23 -dca 23 -docter 23 -sudhir 23 -ex-spy 23 -egoista 23 -osako 23 -thiessen 23 -ecpat 23 -legitimizes 23 -gaddist 23 -sme 23 -faryab 23 -liebermann 23 -puzzler 23 -guar 23 -atlassian 23 -10.12 23 -bozena 23 -stowage 23 -towery 23 -coundoul 23 -14-17 23 -bight 23 -treehotel 23 -mississauga 23 -u.s.-turkish 23 -lower-ranked 23 -celebrity-obsessed 23 -1711 23 -heike 23 -septuplets 23 -277,000 23 -hajduk 23 -virk 23 -vinciguerra 23 -2.33 23 -niel 23 -hmg 23 -zilina 23 -diaz-ramos 23 -salesi 23 -narotam 23 -henick 23 -nooyi 23 -makani 23 -coveralls 23 -cib 23 -medved 23 -allinson 23 -powerbroker 23 -brame 23 -5-8 23 -saddiq 23 -colegate 23 -hyogo 23 -m/s 23 -sartore 23 -xmm-newton 23 -dhankar 23 -leather-look 23 -manservant 23 -iconoclastic 23 -kahan 23 -sabia 23 -nuncio 23 -xiaofeng 23 -business-as-usual 23 -americanos 23 -texas-born 23 -00:24 23 -tammin 23 -44.1 23 -underplayed 23 -takada 23 -awakes 23 -nevsky 23 -micromanaging 23 -ttm 23 -glassholes 23 -sceptre 23 -dildos 23 -23:20 23 -23:25 23 -a31 23 -mercede 23 -brummies 23 -irshenko 23 -news-leader 23 -borawski 23 -carlow 23 -mammy 23 -skullcaps 23 -cobby 23 -1300s 23 -sang-moon 23 -voegele 23 -chatterton 23 -deluges 23 -rewalk 23 -sorokin 23 -anahlia 23 -distillation 23 -nargund 23 -swordy 23 -pavone 23 -fabia 23 -aijalon 23 -ragnarok 23 -luby 23 -irl 23 -koyasan 23 -sylt 23 -teepee 23 -conundrums 23 -191,000 23 -ensuites 23 -yowie 23 -caravanning 23 -knickknacks 23 -redshift 23 -playhouses 23 -thilan 23 -gangrenous 23 -deicing 23 -out-do 23 -bresnahan 23 -garnica 23 -broadlands 23 -hillah 23 -ferndown 23 -insha'allah 23 -trelawny 23 -spearman 23 -follow-ups 23 -selden 23 -tight-fisted 23 -a20 23 -sakurajima 23 -jovian 23 -free-spending 23 -haberfeld 23 -packman 23 -garrity 23 -micro-blog 23 -coola 23 -cunniff 23 -assiniboine 23 -cfcb 23 -healthwatch 23 -teneriffe 23 -orchestrator 23 -jakobsen 23 -chaldean 23 -crèche 23 -tormenters 23 -humiliates 23 -earmarking 23 -meat-eaters 23 -matrons 23 -swim-up 23 -hellraiser 23 -windbreaker 23 -v-shape 23 -spelthorne 23 -tymchuk 23 -500-foot 23 -cashes 23 -‰ 23 -161m 23 -region-wide 23 -ejaz 23 -getz 23 -gatenby 23 -opening-round 23 -obes 23 -garcons 23 -munches 23 -fawns 23 -arriaga 23 -23:48 23 -unviable 23 -screw-up 23 -1,190 23 -wouter 23 -hostler 23 -6.5-litre 23 -mumsy 23 -cesspool 23 -convulse 23 -cabdriver 23 -oliwia 23 -legatum 23 -maisha 23 -glanz 23 -colorist 23 
-lanchester 23 -hudspith 23 -chaleo 23 -nominet 23 -yeni 23 -culloden 23 -2009-12 23 -ferriby 23 -veach 23 -vote-counting 23 -peddler 23 -logisticians 23 -1.94 23 -tay-sachs 23 -strychnine 23 -financials 23 -naplan 23 -serialized 23 -hazleton 23 -furton 23 -pop/rock 23 -mowlam 23 -quesadillas 23 -expels 23 -triumphal 23 -skomer 23 -chadd 23 -valin 23 -zorb 23 -danzig 23 -re-designed 23 -sibneft 23 -reitnauer 23 -mid-thigh 23 -gersh 23 -hatchfield 23 -makena 23 -pillinger 23 -beyoglu 23 -barq 23 -viloude 23 -post-arab 23 -one-storey 23 -bostic 23 -sex-crazed 23 -savino 23 -toontown 23 -matada 23 -manutd.com 23 -midden 23 -pettman 23 -antagonised 23 -st.tropez 23 -barangay 23 -cesium-137 23 -mavuba 23 -mandvi 23 -wende 23 -microfilm 23 -faiza 23 -20-man 23 -rotheram 23 -roomier 23 -pomfret 23 -post-operation 23 -1630 23 -10,000-a-month 23 -tradecraft 23 -mid-1930s 23 -six-times 23 -lisk 23 -crematoriums 23 -pro-romney 23 -all-caps 23 -60-plus 23 -tacheny 23 -huett 23 -wickrematunga 23 -itawamba 23 -insolence 23 -cr-v 23 -rushcliffe 23 -umpteenth 23 -etymology 23 -gasparri 23 -sokoto 23 -picadilly 23 -touristic 23 -noroc 23 -cuticles 23 -seely 23 -capybaras 23 -lomonosov 23 -woy 23 -cybernats 23 -sobule 23 -muriwai 23 -waveguides 23 -perpetration 23 -jostles 23 -countersued 23 -unread 23 -collude 23 -sauceda 23 -72-foot 23 -infrastructural 23 -mineshaft 23 -amanat 23 -sw1 23 -sulayman 23 -krawcheck 23 -detainers 23 -wilms 23 -exocet 23 -dorough 23 -weasels 23 -copy-cat 23 -peanberg 23 -ricketson 23 -housewares 23 -shelford 23 -lycett 23 -sabaoon 23 -peay 23 -meshes 23 -incivility 23 -lupin 23 -klavan 23 -lovastatin 23 -get-ups 23 -koshik 23 -modine 23 -une 23 -lachman 23 -polygamists 23 -mastour 23 -quaalude 23 -merrimack 23 -salcombe 23 -cero 23 -newfangled 23 -luxton 23 -grottoes 23 -denes 23 -cronje 23 -chargrilled 23 -1,670 23 -perijoc 23 -dong-hyuk 23 -facialist 23 -watermarks 23 -180kg 23 -llantwit 23 -caillat 23 -mdf 23 -scousers 23 -10-14 23 -corsage 23 -moneea 23 -re-invent 23 -223,000 23 -alkhawaja 23 -sepulcher 23 -kapadia 23 -caven 23 -cartooning 23 -calise 23 -terminator-like 23 -alysha 23 -6-second 23 -nadolo 23 -unimpeachable 23 -farese 23 -66.5 23 -pera 23 -perv 23 -weich 23 -jelly-like 23 -titch 23 -get-out 23 -rheumatologist 23 -maraldi 23 -one-nation 23 -late-model 23 -#fbrape 23 -prolifically 23 -depew 23 -sufism 23 -arcore 23 -mechanicsburg 23 -bba 23 -brassard 23 -lilybelle 23 -jet-stream 23 -dwells 23 -violette 23 -stylistically 23 -curative 23 -sadik 23 -osetra 23 -imrie 23 -north-facing 23 -peiser 23 -marouf 23 -courtauld 23 -boyko 23 -barkingside 23 -wdiv-tv 23 -sansa 23 -roamio 23 -melford 23 -tous 23 -lingle 23 -pelorus 23 -1779 23 -su-27 23 -upwave.com 23 -red-nosed 23 -227,000 23 -caselli 23 -time-travel 23 -displacements 23 -bartel 23 -torfaen 23 -abellio 23 -plonker 23 -assante 23 -fordyce 23 -prosopagnosia 23 -semi-rural 23 -shenzhou-9 23 -maun 23 -reaves 23 -double-faulted 23 -tee-shirt 23 -bastos 23 -panther-like 23 -snakehead 23 -crams 23 -uproariously 23 -mikiewicz 23 -villainy 23 -ilana 23 -razzall 23 -unpasteurized 23 -shelbourne 23 -36.2 23 -re-hired 23 -trigueros 23 -interlaced 23 -draughts 23 -nfa 23 -headquarter 23 -non-western 23 -elkhorn 23 -carload 23 -katydid 23 -a/w13 23 -suneet 23 -endoskeleton 23 -middle-classes 23 -slumbers 23 -sotero 23 -2004-2006 23 -mi-17 23 -kirkup 23 -chamisa 23 -gravesham 23 -repurpose 23 -foward 23 -cronobacter 23 -roco 23 -conflict-affected 23 -splendora 23 -disembarks 23 -oic 23 -heiner 23 -yagoona 23 -huracan 23 
-occassions 23 -mankad 23 -davis-ball 23 -forteau 23 -knapton 23 -ocelot 23 -moveon 23 -cd4 23 -darrall 23 -mercati 23 -lanzhou 23 -44.95 23 -rda 23 -15-feet 23 -lizotte 23 -sanaghan 23 -horticulturist 23 -survivalists 23 -oberhausen 23 -bassim 23 -smoulder 23 -266,000 23 -neligan 23 -chalks 23 -dib 23 -mallue 23 -rockfalls 23 -step-mom 23 -qader 23 -basta 23 -cordoning 23 -crystallize 23 -4 23 -digitimes 23 -horsford 23 -segued 23 -11s 23 -inan 23 -ukok 23 -borgias 23 -detoxes 23 -rotolo 23 -touareg 23 -fashionista.com 23 -dustpan 23 -beggared 23 -tigres 23 -autodrom 23 -purdew 23 -1632 23 -concourses 23 -wyvern 23 -pickthall 23 -basie 23 -markland 23 -fiendish 23 -wise-cracking 23 -frangipani 23 -bookended 23 -hand-off 23 -semedo 23 -leviticus 23 -laser-cut 23 -siriraj 23 -2.97 23 -2.90 23 -al-sistani 23 -postoperative 23 -1515 23 -rollright 23 -tunick 23 -unreliability 23 -freakishly 23 -high-top 23 -732 23 -hamlisch 23 -mahn 23 -waterworth 23 -medine 23 -yahoo.com 23 -suffredini 23 -barros 23 -zawiyah 23 -wine-growing 23 -95mph 23 -glenday 23 -1,370 23 -reuven 23 -prize-ring 23 -lauro 23 -lamott 23 -company-wide 23 -genge 23 -re-convicted 23 -miffy 23 -brule 23 -rennoldson 23 -valleywag 23 -oxlade 23 -intangibles 23 -mojica 23 -senussi 23 -cumia 23 -limberios 23 -abdur-raheem 23 -slaughtermen 23 -macchio 23 -grandbaby 23 -600g 23 -mass-murderer 23 -searingly 23 -skrodzka 23 -scrutinises 23 -liqueurs 23 -reinterpreted 23 -zetland 23 -backtracks 23 -schollick 23 -bickle 23 -non-critical 23 -reveille 23 -13,400 23 -crossland 23 -orbea 23 -memoli 23 -sigal 23 -udaipur 23 -phoenix-based 23 -kait 23 -profiteers 23 -interject 23 -six-wicket 23 -sympathises 23 -edet 23 -hockeyroos 23 -single-aisle 23 -theia 23 -theis 23 -81million 23 -buchko 23 -onodera 23 -bons 23 -loza 23 -mielke 23 -methylphenidate 23 -nakita 23 -underachievers 23 -craic 23 -quieroz 23 -cata 23 -11.95 23 -shreeves 23 -valens 23 -famiglia 23 -sarabi 23 -ghedini 23 -maneuverable 23 -payphones 23 -garífuna 23 -mesurier 23 -non-conventional 23 -ribisi 23 -lacaze 23 -spatially 23 -generalisation 23 -cnil 23 -exportation 23 -98.5 23 -hattab 23 -rezwan 23 -trigonometry 23 -gisby 23 -9:25 23 -livingood 23 -libretto 23 -nonrefundable 23 -xiomara 23 -gillham 22 -waga 22 -343,000 22 -manjit 22 -hass 22 -hoyles 22 -spygate 22 -banus 22 -dweck 22 -tailgated 22 -battley 22 -1,312 22 -red-flag 22 -soft-landing 22 -nephra 22 -interrelated 22 -i-reporter 22 -dioramas 22 -casselton 22 -ferg 22 -gogol 22 -sure-footed 22 -most-expensive 22 -reimagine 22 -262,000 22 -kreger 22 -mossop 22 -takuma 22 -zubayda 22 -kooiman 22 -rothken 22 -buhrman 22 -windom 22 -itta 22 -afflelou 22 -pace-setters 22 -q13 22 -shriveled 22 -sherazi 22 -satirized 22 -multispectral 22 -saling 22 -undetonated 22 -forfeits 22 -vavuniya 22 -22:49 22 -basharat 22 -ex-cricketer 22 -dupré 22 -10,000-strong 22 -745,000 22 -flavin 22 -wayans 22 -derwin 22 -toluene 22 -climbie 22 -reinterpret 22 -220lb 22 -hefferan 22 -motka 22 -half-pound 22 -twentysomethings 22 -877 22 -kgalema 22 -overproduction 22 -wickliffe 22 -lgi 22 -samaris 22 -hearsum 22 -oceanographers 22 -brousseau 22 -afrikaburn 22 -overwritten 22 -treacher 22 -colfax 22 -wentworthville 22 -child-related 22 -acho 22 -prince-boateng 22 -left-side 22 -one-stroke 22 -chlumsky 22 -tashkent 22 -snogging 22 -dcri 22 -endocarditis 22 -zero-hour 22 -loewe 22 -wasendorf 22 -dishy 22 -kucharski 22 -cooling-off 22 -skillern 22 -grider 22 -aburto 22 -cowperthwaite 22 -waukegan 22 -eco-warrior 22 -matey 22 -connectedness 22 
-wesh-tv 22 -wijk 22 -cryosat 22 -46.3 22 -46.9 22 -bradford-on-avon 22 -nemiroff 22 -cornal 22 -,17 22 -50-pound 22 -agn 22 -compote 22 -uaw 22 -then-manager 22 -interwebs 22 -sandie 22 -pit-stops 22 -abolitionists 22 -14lb 22 -ignoble 22 -1,330 22 -leotta 22 -284,000 22 -under-12s 22 -skeen 22 -once-dominant 22 -intertrigo 22 -gonville 22 -honus 22 -marbling 22 -wakie 22 -wheeler-dealer 22 -39-0 22 -homogeneity 22 -emylee 22 -rocket-launching 22 -faden 22 -beardy 22 -lemming 22 -perihelion 22 -lavilla 22 -khairunisa 22 -bowood 22 -artiaga 22 -zoko 22 -+4 22 -ashtabula 22 -leonidas 22 -skin-care 22 -uncertainly 22 -greinke 22 -eec 22 -markiewicz 22 -freedom-loving 22 -autoworker 22 -dillion 22 -688 22 -683 22 -deshazo 22 -taxpayer-subsidised 22 -fiftieth 22 -tague 22 -amestoy 22 -murderball 22 -flukes 22 -mehlman 22 -undesired 22 -40f 22 -turbojet 22 -328,000 22 -ozgur 22 -tampax 22 -babbacombe 22 -elaboration 22 -reliquary 22 -kuegler 22 -shyanne 22 -ja-cheol 22 -01:29 22 -cerrito 22 -85ft 22 -iyad 22 -antofagasta 22 -straight-set 22 -1,030 22 -remortgaging 22 -geddie 22 -reefer 22 -shenouda 22 -efremi 22 -montacute 22 -cincotti 22 -castell 22 -ryland 22 -curates 22 -1999-2005 22 -lathe 22 -singlets 22 -hildebrandt 22 -raidy 22 -moate 22 -domene 22 -ondria 22 -binalshibh 22 -p2 22 -mumbai-based 22 -uffizi 22 -gravediggers 22 -hauteville 22 -camerawoman 22 -kuenssberg 22 -middleham 22 -monzon 22 -seat-back 22 -saudi-owned 22 -disgruntlement 22 -ucp 22 -second-time 22 -harryhausen 22 -kerkow 22 -whiny 22 -pws 22 -bartered 22 -elvish 22 -firestorms 22 -984 22 -500cc 22 -worksheets 22 -moscow-led 22 -houghton-le-spring 22 -five-shot 22 -tropfest 22 -queers 22 -scheper-hughes 22 -forsaking 22 -cullman 22 -erhard 22 -helvetica 22 -mubarak-era 22 -times-tribune 22 -mardan 22 -duchesses 22 -decongestant 22 -baltazar 22 -flubbed 22 -jackson-stops 22 -ironwork 22 -jiménez 22 -imaginings 22 -21:26 22 -katich 22 -poul 22 -cosmologist 22 -missoulian 22 -2070 22 -jean-bertrand 22 -ramblas 22 -flatbreads 22 -wacol 22 -branton 22 -139th 22 -pyramid-shaped 22 -insulator 22 -bornstein 22 -reportable 22 -varosha 22 -beyler 22 -rayhan 22 -lyson 22 -1:25 22 -much-awaited 22 -cubits 22 -ktvq 22 -22:01 22 -sandino 22 -waad 22 -friedkin 22 -al-qaim 22 -25-page 22 -taxpayer-owned 22 -opposition-controlled 22 -sverdlovsk 22 -purr-fect 22 -ginnifer 22 -fastens 22 -pedalo 22 -iredale 22 -lowest-ever 22 -berrill 22 -negating 22 -bike-friendly 22 -cankles 22 -meester 22 -pelecanos 22 -vinokurov 22 -eskil 22 -pommes 22 -vancouver-based 22 -burbach 22 -minocycline 22 -nonjudicial 22 -8/11 22 -re-test 22 -30-pin 22 -mcgilvray 22 -sno 22 -zhijun 22 -yorkshire-based 22 -hand-over 22 -shands 22 -pres 22 -hazor 22 -alshaya 22 -envies 22 -non-intrusive 22 -matsuo 22 -knaap 22 -repackage 22 -isham 22 -babacar 22 -flosi 22 -wino 22 -f-series 22 -dato 22 -farting 22 -double-height 22 -buzzfeed.com 22 -aci 22 -transexual 22 -#maccasfail 22 -vulliamy 22 -zwolle 22 -sanded 22 -punchbag 22 -pue 22 -hoque 22 -compacting 22 -wert 22 -woolsery 22 -shagging 22 -20-40 22 -duekoue 22 -allusions 22 -then-pregnant 22 -pin-striped 22 -baguley 22 -hilburn 22 -d'yquem 22 -pontecorvo 22 -anglo-irish 22 -shoeshine 22 -plantain 22 -cathie 22 -lubezki 22 -kashmiris 22 -manchuria 22 -crossbreeds 22 -gresley 22 -mosses 22 -murdoch-owned 22 -non-american 22 -sullins 22 -obo 22 -aborigine 22 -508,000 22 -assata 22 -646 22 -hasaka 22 -patriarchate 22 -set-backs 22 -mankins 22 -1,253 22 -22:23 22 -dieppe 22 -dcis 22 -oggi 22 -tickles 22 -cut-away 22 
-kristiansen 22 -lintott 22 -condenser 22 -sendero 22 -bestwood 22 -badcock 22 -malkoff 22 -trillium 22 -bebb-jones 22 -jaylin 22 -semeria 22 -all-too-common 22 -merlet 22 -818 22 -marrone 22 -magnetometers 22 -bocas 22 -debriefings 22 -darkes 22 -anette 22 -surratt 22 -aryeh 22 -100,000-per-week 22 -hotel-style 22 -dronfield 22 -conservatoire 22 -youk 22 -120g 22 -stalemates 22 -calcione 22 -adoptee 22 -jakab 22 -self-righteousness 22 -21:56 22 -21:52 22 -bober 22 -pedley 22 -10.03 22 -besties 22 -hotly-tipped 22 -dallin 22 -czornobaj 22 -tilson 22 -bissinger 22 -zarene 22 -wyff 22 -limply 22 -chine 22 -gangplank 22 -trioli 22 -detachments 22 -18-stone 22 -sarcelles 22 -kinane 22 -gimeno-traver 22 -macfadyen 22 -ardie 22 -cocula 22 -holistically 22 -xinran 22 -foodini 22 -less-expensive 22 -abusalha 22 -weizmann 22 -lachowicz 22 -sanitising 22 -special-effects 22 -co-guardian 22 -@andersoncooper 22 -clydesdales 22 -dependability 22 -knuckled 22 -qazvin 22 -hell-raising 22 -10-4 22 -better-looking 22 -one-car 22 -blaxploitation 22 -severinsen 22 -marray 22 -cognoscenti 22 -ganis 22 -skeete 22 -squirreled 22 -skinny-dipping 22 -jouett 22 -aspergers 22 -chapnick 22 -long-rumored 22 -machiavelli 22 -farmstead 22 -double-glazing 22 -whooshing 22 -gravedigger 22 -rajnath 22 -agoura 22 -inertial 22 -wolraich 22 -unimog 22 -wildenstein 22 -chang-jin 22 -oral-b 22 -tyrannosaur 22 -errington 22 -nieman 22 -hsing 22 -bogaard 22 -bandon 22 -cloisters 22 -mccalman 22 -competences 22 -pertained 22 -acoustical 22 -hedland 22 -vouchercodespro.co.uk 22 -tuk-tuk 22 -'16 22 -fappening 22 -jorda 22 -xiaoming 22 -suau 22 -clucking 22 -trade-based 22 -kerby 22 -keihanaikukauakahihuliheekahaunaele 22 -squids 22 -airbourne 22 -portholes 22 -piturca 22 -enrolls 22 -quinsey 22 -majed 22 -top-to-toe 22 -dillman 22 -bossi 22 -amphlett 22 -barnado 22 -sages 22 -hewitson 22 -promulgated 22 -non-striker 22 -kier 22 -qubeir 22 -phase-out 22 -90999 22 -5-point 22 -tayler 22 -highcliffe 22 -cockles 22 -shortcoming 22 -unforgiven 22 -roppongi 22 -sadrist 22 -panoply 22 -orienteering 22 -maralinga 22 -dethroning 22 -blackmon 22 -delanie 22 -sixty-six 22 -homemaking 22 -267,000 22 -spork 22 -ortelli 22 -glamorising 22 -anith 22 -moberly 22 -expounded 22 -cholili 22 -vaportini 22 -mirror-like 22 -scrine 22 -full-price 22 -midriff-baring 22 -wasson 22 -mcinally 22 -franzese 22 -al-somali 22 -pixellated 22 -mightier 22 -chest-deep 22 -35-foot 22 -rededicate 22 -silver-gilt 22 -thamer 22 -semel 22 -jiu 22 -bolshoy 22 -pennsburg 22 -rieke 22 -pfo 22 -manot 22 -dromedary 22 -schmiedlova 22 -swamy 22 -emm 22 -overfeeding 22 -eagletail 22 -00:52 22 -seamanship 22 -hargitay 22 -ten-strong 22 -chosin 22 -aspiritech 22 -soberly 22 -quieting 22 -roesgen 22 -amadeo 22 -joye 22 -ranville 22 -vimto 22 -drohan 22 -kuntar 22 -liposarcoma 22 -azeez 22 -adornment 22 -lafitte 22 -off-side 22 -short-circuited 22 -kepnes 22 -synthesiser 22 -doar 22 -32.9 22 -germania 22 -moly 22 -sameem 22 -perello 22 -russian-ukrainian 22 -776 22 -upcycling 22 -ftse-100 22 -desso 22 -patronise 22 -andreani 22 -bohrman 22 -subeir 22 -antiretrovirals 22 -60-acre 22 -wesh.com 22 -supercharger 22 -biola 22 -ezzour 22 -candlesticks 22 -miamisburg 22 -hutong 22 -needle-like 22 -game-playing 22 -mckinnell 22 -walker-smith 22 -fortner 22 -yeovilton 22 -humorless 22 -twitpic 22 -ron-robert 22 -laithwaite 22 -0.16 22 -dewey-hagborg 22 -manhatten 22 -wordsmith 22 -jerath 22 -whnt 22 -blakeman 22 -husi 22 -hebble 22 -23-inch 22 -ldcm 22 -al-issawi 22 -over-sharing 22 
-mennilli 22 -fens 22 -claypool 22 -p.f. 22 -bassons 22 -subservience 22 -giardini 22 -creepers 22 -chavan 22 -appledore 22 -disproven 22 -65-foot 22 -magnitude-9 22 -mq-8c 22 -lingurar 22 -coens 22 -bolivars 22 -pro-pot 22 -dalambert 22 -tolbachik 22 -rendille 22 -derailments 22 -archerd 22 -aygo 22 -pinion 22 -romeikes 22 -fatemeh 22 -1per 22 -mindi 22 -blagg 22 -hempfest 22 -17th-minute 22 -quantifiable 22 -applecare 22 -diar 22 -mclinden 22 -gigabits 22 -vonnegut 22 -inquisitr 22 -try-scorer 22 -hainault 22 -scirocco 22 -akpan 22 -tignes 22 -kernc 22 -cliff-side 22 -prisms 22 -cura 22 -super-prime 22 -thibou 22 -melancholia 22 -injectors 22 -arkan 22 -degette 22 -stoljar 22 -quartzite 22 -ekuan 22 -azria 22 -sadrists 22 -gartree 22 -al-raqqa 22 -text-to-speech 22 -wapakoneta 22 -babil 22 -kusama 22 -stas 22 -luzuriaga 22 -ghandi 22 -celestis 22 -summiting 22 -600km 22 -profitt 22 -insoluble 22 -megaro 22 -lazier 22 -16-14 22 -assads 22 -toseland 22 -sycophantic 22 -goulden 22 -suelo 22 -skydives 22 -razgui 22 -ibrar 22 -denmure 22 -grasham 22 -y&r 22 -rango 22 -polarity 22 -kylee 22 -tardar 22 -21:21 22 -tblisi 22 -dss 22 -lubricating 22 -bromell 22 -chimboza 22 -01:11 22 -voller 22 -barnie 22 -sindhurakshak 22 -hickok 22 -handre 22 -zinke 22 -danga 22 -deryck 22 -gaskamp 22 -1/8 22 -joltid 22 -sellwood 22 -equivocation 22 -nek 22 -kolawole 22 -martes 22 -zeljko 22 -leota 22 -hisar 22 -felten 22 -32-bit 22 -tintype 22 -wielinski 22 -twiglet 22 -makudi 22 -twttr 22 -systemwide 22 -gruel 22 -2,240 22 -95m 22 -rabbitoh 22 -wsbtv.com 22 -fisichella 22 -0.35 22 -perceptible 22 -algar 22 -aggregating 22 -shirahama 22 -mymaster 22 -masterwork 22 -interlocked 22 -limescale 22 -bohren 22 -wynwood 22 -sismore 22 -mazzone 22 -faine 22 -accompli 22 -misjudge 22 -festivity 22 -inabnitt 22 -lernstift 22 -cruciferous 22 -fire-rescue 22 -seagram 22 -25-21 22 -self-censor 22 -jigger 22 -spencers 22 -keilloh 22 -vienna-based 22 -pmt 22 -tsunami-like 22 -katskhi 22 -lichens 22 -sty 22 -twin-turbo 22 -56402 22 -lesia 22 -blogher 22 -watlington 22 -fairuz 22 -phasey 22 -gewirtz 22 -blunts 22 -lella 22 -yalu 22 -gantz 22 -drakensberg 22 -galt 22 -afkham 22 -caisson 22 -brawny 22 -229,000 22 -publics 22 -reticulated 22 -blunting 22 -remie 22 -cloke 22 -wjla-tv 22 -flasher 22 -grumpiness 22 -seifalian 22 -985,000 22 -kirlan 22 -foosball 22 -soza 22 -fairwater 22 -boyson 22 -hongi 22 -deech 22 -work-in-progress 22 -140-year-old 22 -akb48 22 -g-strings 22 -newsreels 22 -ella-louise 22 -kibaale 22 -concours 22 -mozick 22 -4013 22 -mooresville 22 -daya 22 -mantlepiece 22 -2012-now 22 -reconfiguration 22 -umbridge 22 -seabury 22 -hsin 22 -walloping 22 -larger-scale 22 -peri-peri 22 -manaf 22 -senk 22 -hexapus 22 -snorkeler 22 -norodom 22 -clairmont 22 -newtownards 22 -glenfiddich 22 -chicano 22 -eccentrics 22 -stoners 22 -banin 22 -stanch 22 -antihero 22 -solstices 22 -best-picture 22 -glitchy 22 -herdsman 22 -riyals 22 -six-wheel 22 -bartusiak 22 -910,000 22 -elrey 22 -good-sized 22 -m13 22 -kcrg 22 -neuro 22 -schillings 22 -disbeliever 22 -ferrel 22 -loney 22 -munadi 22 -24,000-a-year 22 -jukeboxes 22 -breeanna 22 -swazi 22 -returnee 22 -kucher 22 -doctorates 22 -now-estranged 22 -1981-2010 22 -9to5 22 -felpham 22 -marquardt 22 -newdow 22 -sre 22 -kordowski 22 -3.36 22 -41.9 22 -http://www.suicidepreventionlifeline.org/ 22 -moreish 22 -yap 22 -40-degree 22 -arendelle 22 -kallum 22 -barrelling 22 -cefaly 22 -clomid 22 -daron 22 -terkel 22 -amboy 22 -icl 22 -vukic 22 -intelligible 22 -stourton 22 -banias 22 
-beckner 22 -samsoe 22 -prosecutor-general 22 -right-sided 22 -hyperion 22 -lorik 22 -standardize 22 -innisfail 22 -overextended 22 -carmit 22 -330p 22 -23:05 22 -melmore 22 -heathcliff 22 -epilim 22 -pleck 22 -séraphine 22 -ossining 22 -fevre 22 -martic 22 -bennellick 22 -mcerlain 22 -fretz 22 -kosicky 22 -multilevel 22 -585,000 22 -swails 22 -mother-of-the-bride 22 -42-day 22 -hindlip 22 -dolgellau 22 -ibanez 22 -conformist 22 -falsetto 22 -bainimarama 22 -nine-figure 22 -alwyn 22 -skenazy 22 -humanitarians 22 -purwo 22 -mangy 22 -veneman 22 -4-year-olds 22 -majorcan 22 -beerbower 22 -hoferlin 22 -bakshi 22 -darion 22 -debo 22 -olympic-size 22 -ossett 22 -mersin 22 -jos. 22 -scare-mongering 22 -bango 22 -pastel-coloured 22 -nordby 22 -llanidloes 22 -kuda 22 -anan 22 -symbionese 22 -1,125 22 -cowdray 22 -golesworthy 22 -tyldum 22 -tamika 22 -video-chat 22 -junhui 22 -eletronica 22 -manoeuvrable 22 -indistinct 22 -elts 22 -shredder 22 -tie-ins 22 -poots 22 -sub-dealers 22 -15in 22 -kayetie 22 -synchronous 22 -mitig 22 -shannen 22 -near-naked 22 -faneuil 22 -unfriended 22 -prodigiously 22 -olbia 22 -kraków 22 -melnichenko 22 -berretta 22 -iulian 22 -aogo 22 -aaronson 22 -maman 22 -cha-cha 22 -i-reporters 22 -diuretics 22 -acomb 22 -spreader 22 -flat-topped 22 -zoie 22 -yelped 22 -wedbush 22 -toge 22 -toga 22 -entre 22 -ice-breaking 22 -raiderettes 22 -cordillera 22 -devon-born 22 -sashin 22 -scalped 22 -maddux 22 -browses 22 -glass-enclosed 22 -birtley 22 -nct 22 -wised 22 -schuh 22 -tachira 22 -coomber 22 -stef 22 -dalsey 22 -newsfeeds 22 -kavya 22 -esdaile 22 -schriro 22 -173rd 22 -paralyzes 22 -mwamba 22 -chapped 22 -robertsons 22 -futrell 22 -5,000-strong 22 -jsf 22 -cripples 22 -synchronization 22 -colima 22 -seamaster 22 -fix-it 22 -miami-area 22 -under-15s 22 -micahel 22 -beijing-bound 22 -bodegas 22 -npa 22 -khaliq 22 -stuarts 22 -bradstock 22 -hoeppner 22 -gilead 22 -nahr-e-saraj 22 -tabulated 22 -zotto 22 -okotie 22 -bembridge 22 -quintuple 22 -m55 22 -lainey 22 -humpage 22 -pournazarian 22 -gynaecomastia 22 -auto-rickshaw 22 -mega-hit 22 -stefon 22 -starves 22 -anti-homosexual 22 -postie 22 -seminyak 22 -accademia 22 -dnt 22 -sano 22 -croome 22 -ashgar 22 -2.5-inch 22 -non-prescription 22 -swalwell 22 -damiani 22 -3.79 22 -ringfence 22 -layovers 22 -kohnstamm 22 -bushranger 22 -xboxes 22 -disconcerted 22 -jayce 22 -aherne 22 -vacuumed 22 -re-usable 22 -bahri 22 -out-of 22 -auersperg 22 -re-set 22 -askar 22 -nadav 22 -21:29 22 -21:24 22 -muttahida 22 -ihg 22 -1660s 22 -pwllheli 22 -hpd 22 -offerman 22 -fastest-rising 22 -hums 22 -fomer 22 -970,000 22 -hothouse 22 -colbath 22 -cuckolded 22 -arbil 22 -chinamasa 22 -nickell 22 -tewantin 22 -kot 22 -chancellorsville 22 -engorged 22 -kitchee 22 -newsok.com 22 -off-topic 22 -multi-course 22 -manwin 22 -patent-pending 22 -nawsha 22 -pfeffer 22 -tenaciously 22 -eleuthera 22 -vagrancy 22 -pretorius 22 -trapster 22 -methoxetamine 22 -analgesics 22 -2013-now 22 -mitigates 22 -rosli 22 -marathoners 22 -bizzare 22 -blazquez 22 -françoise 22 -sandinista 22 -kirkwall 22 -eocene 22 -lienz 22 -cag 22 -reenacting 22 -zygielbojm 22 -desigual 22 -reyngoudt 22 -1,160 22 -travel-related 22 -effin 22 -believability 22 -ulema 22 -headspace 22 -bagherzadeh 22 -balan 22 -sagaponack 22 -carr-gregg 22 -kenya-based 22 -ringgit 22 -strother 22 -pon 22 -anti-washington 22 -simsbury 22 -news.com 22 -8s 22 -shiekh 22 -brasenose 22 -digitize 22 -lynd 22 -chip-maker 22 -josif 22 -ireland-based 22 -step-daughters 22 -slingbacks 22 -transphobia 22 -52-year 22 
-condensate 22 -ileostomy 22 -sub-basement 22 -scarr 22 -verhaegh 22 -picco 22 -reunify 22 -costantini 22 -holiday-themed 22 -hsien 22 -michelin-star 22 -kaena 22 -vrsajevic 22 -kubis 22 -holmberg 22 -muwonge 22 -substrates 22 -dragic 22 -leppard 22 -farmiga 22 -eardley 22 -holzwarth 22 -karmic 22 -1,027 22 -1,024 22 -reabsorbed 22 -snake-like 22 -2007-2011 22 -kanka 22 -pedestrian-only 22 -400billion 22 -gallivanting 22 -42-20 22 -ottoline 22 -stoles 22 -scrapper 22 -inclines 22 -blacktop 22 -point-of-sale 22 -mcchord 22 -musselburgh 22 -makhorov 22 -cowing 22 -ogunnoiki 22 -demotions 22 -mccranie 22 -noi 22 -26-page 22 -alvalade 22 -9-mm 22 -gt500 22 -iberville 22 -commonly-used 22 -yips 22 -omidyar 22 -soueid 22 -betway 22 -mid-seventies 22 -lso 22 -johran 22 -novello 22 -kmh 22 -autocorrect 22 -hamdani 22 -non-steroidal 22 -shehada 22 -schoolchild 22 -sylvana 22 -shemin 22 -tehrik-i-taliban 22 -tache 22 -knead 22 -barakzai 22 -lovick 22 -20-pound 22 -fully-loaded 22 -influence-peddling 22 -janis-norton 22 -emanuelson 22 -slaney 22 -non-islamic 22 -muggle 22 -medding 22 -22:17 22 -18-21 22 -soundness 22 -semicircle 22 -50,000-a-week 22 -toddy 22 -penrhyn 22 -coxswain 22 -allmusic.com 22 -neuropathic 22 -holmqvist 22 -pelagic 22 -feight 22 -moussambani 22 -disproving 22 -remnick 22 -352,000 22 -ayatollahs 22 -molfetta 22 -puzzlement 22 -proposers 22 -efa 22 -ef3 22 -celebrants 22 -brony 22 -68.2 22 -in-tray 22 -elektra 22 -promenades 22 -moselle 22 -saja 22 -uriel 22 -geminids 22 -crystallography 22 -crp 22 -cornetto 22 -1.61 22 -adenocarcinoma 22 -22:14 22 -chicanery 22 -cabangbang 22 -ronal 22 -jakobsson 22 -poca 22 -842 22 -54.1 22 -sokaluk 22 -furley 22 -valence 22 -krazy 22 -31-foot 22 -overconsumption 22 -chowk 22 -stealthgenie 22 -1,000-year 22 -romao 22 -986 22 -2004/05 22 -joaquín 22 -renaissance-style 22 -long-ruling 22 -devan 22 -k-9s 22 -oak-panelled 22 -yihaodian 22 -12-match 22 -30lb 22 -re-post 22 -nis 22 -ashrams 22 -hinault 22 -naltrexone 22 -reminiscences 22 -nonspecific 22 -shammarah 22 -barefooted 22 -noelia 22 -fausett 22 -flowerpots 22 -ethnography 22 -daragjati 22 -dillard-bothuell 22 -shastri 22 -windpipes 22 -love-making 22 -schlamowitz 22 -famished 22 -unlabelled 22 -crickett 22 -showgrounds 22 -lapo 22 -terrorizes 22 -ninos 22 -ridsdale 22 -habsburg 22 -lovingston 22 -metzelder 22 -conmebol 22 -11g 22 -keeble 22 -toxoplasmosis 22 -hermanos 22 -82mph 22 -1,500-meter 22 -adhikari 22 -bangla 22 -gundersen 22 -greek-owned 22 -herodotus 22 -shakin 22 -julita 22 -verusio 22 -bascoules 22 -longshoremen 22 -marcio 22 -unconvincingly 22 -cool-headed 22 -ttts 22 -quint 22 -weather-wise 22 -tamica 22 -footlights 22 -morejon 22 -eutawville 22 -back-flip 22 -gullberg 22 -refashioned 22 -rasher 22 -8-8 22 -vaquita 22 -extruder 22 -repudiating 22 -bombards 22 -ntcham 22 -cilluffo 22 -jetsam 22 -rolan 22 -koenigsegg 22 -liveries 22 -spring-loaded 22 -howman 22 -1ins 22 -s-shaped 22 -crayola 22 -shia-led 22 -dimethyl 22 -kurosawa 22 -scrimped 22 -benenson 22 -legitimising 22 -ex-member 22 -elbulli 22 -linnea 22 -thatchers 22 -nihad 22 -ascertaining 22 -elleanor 22 -harleigh 22 -wieme 22 -fear-bola 22 -pro-opposition 22 -70.8 22 -politicans 22 -radovic 22 -mujao 22 -sugababes 22 -muddling 22 -semi-circular 22 -criminalisation 22 -haytor 22 -ericka 22 -al-fares 22 -snoozebox 22 -nakano 22 -lauscha 22 -londinium 22 -garnishes 22 -10-story 22 -krinsky 22 -lifeforms 22 -rede 22 -kholo 22 -gumbinner 22 -pocketknife 22 -2009-2012 22 -kenmore 22 -21:40 22 -wiliams 22 -florida-alabama 
22 -bartram 22 -rojiblancos 22 -5-foot-9 22 -5-foot-6 22 -glenview 22 -ffa 22 -e-fan 22 -pricks 22 -dishwashing 22 -aneri 22 -coleraine 22 -heckman 22 -sidled 22 -48-year 22 -ankle-deep 22 -craiglist 22 -glaenzer 22 -talc 22 -feu 22 -chovanec 22 -grix 22 -champagne-fuelled 22 -pierre-michel 22 -kermadec 22 -pay-to-play 22 -hobin 22 -goolsbee 22 -slinking 22 -calumet 22 -caban 22 -loved-one 22 -huahua 22 -vanakorn 22 -dibbs 22 -6.05 22 -minkin 22 -elnabi 22 -profitably 22 -israelite 22 -corroborative 22 -700-acre 22 -prosecutable 22 -aigner-treworgy 22 -mcclung 22 -637 22 -saltz 22 -dufour 22 -vallecito 22 -gwadar 22 -self-analysis 22 -easingwold 22 -chiding 22 -barlaston 22 -15-a-side 22 -funai 22 -scarcer 22 -swarthmore 22 -kcnc 22 -makkah 22 -hibbett 22 -balon 22 -stoeser 22 -heartbreakers 22 -dingley 22 -paranavitana 22 -starcher 22 -starches 22 -ocs 22 -lkbennett.com 22 -botello 22 -rinko 22 -csic 22 -00:03 22 -eastcheap 22 -squints 22 -multi-instrumentalist 22 -eight-story 22 -esio 22 -cradley 22 -23:06 22 -mpongwana 22 -anti-morsi 22 -misreporting 22 -25-day 22 -axminster 22 -pre-book 22 -dander 22 -sahbaz 22 -m82 22 -pedraza 22 -reena 22 -dammers 22 -nugusse 22 -less-than 22 -magumba 22 -juddmonte 22 -2006-2008 22 -23:45 22 -hayride 22 -caxirola 22 -impetigo 22 -decesare 22 -kornfeld 22 -faile 22 -granulated 22 -eking 22 -stoutly 22 -jet-black 22 -rybarikova 22 -sportmail 22 -moov 22 -burdisso 22 -asomugha 22 -ramsbury 22 -calientes 22 -tci 22 -tennison 22 -jaimie 22 -200mg 22 -fritters 22 -500-page 22 -intermingled 22 -gearboxes 22 -mendiola-martinez 22 -motyl 22 -razzaq 22 -49ft 22 -drugs-related 22 -evertonian 22 -ashaninka 22 -ceremonially 22 -barkhurst 22 -pre-selection 22 -rayford 22 -tavss 22 -snelgrove 22 -bonaventure 22 -shoppertrak 22 -post-recession 22 -counteracts 22 -tani 22 -pinzon 22 -cannibalized 22 -mauboy 22 -embleton 22 -prp 22 -q4000 22 -733 22 -consumer-friendly 22 -riffle 22 -yoruba 22 -fora 22 -atherosclerotic 22 -bina48 22 -13-storey 22 -tollway 22 -sunter 22 -kawczynski 22 -bread-and-butter 22 -nativist 22 -oversteps 22 -snowblower 22 -k-mart 22 -government-subsidized 22 -tanners 22 -kafkaesque 22 -floella 22 -castree 22 -kouzaris 22 -tebas 22 -boys-only 22 -power-saving 22 -professionnel 22 -magsafe 22 -buccaneering 22 -sashay 22 -bayan 22 -brockwell 22 -plantings 22 -erlam 22 -cyber-criminals 22 -lecuyer 22 -vtb 22 -hickock 22 -nailbiting 22 -harris-lacewell 22 -witonski 22 -shreddies 22 -jamoye 22 -serama 22 -cannondale 22 -jingoistic 22 -carballido 22 -23:27 22 -23:29 22 -ceronne 22 -tetralogy 22 -seoul-based 22 -alcester 22 -trasylol 22 -once-a-decade 22 -outflank 22 -milkmaid 22 -ch-47 22 -drug-using 22 -dhuluiya 22 -braaten 22 -blackphone 22 -miocene 22 -corinium 22 -hanns 22 -goodwyn 22 -estiarte 22 -match-point 22 -skywalkers 22 -thorneloe 22 -tahseen 22 -j318.5-22 22 -236,000 22 -airdog 22 -warmongering 22 -underfed 22 -maladministration 22 -rawtenstall 22 -levallois 22 -2.74 22 -carping 22 -w196 22 -hay-adams 22 -sulabh 22 -alguersuari 22 -guzzlers 22 -gni 22 -songbook 22 -elvia 22 -bulo 22 -schwier 22 -depreciate 22 -radioing 22 -mariane 22 -tokelo 22 -overplay 22 -romana 22 -lavi 22 -medico 22 -49er 22 -al-sabbagh 22 -trivializes 22 -14,400 22 -anti-impotence 22 -arlow 22 -oleksander 22 -bequeathing 22 -deciders 22 -vellum 22 -aspin 22 -veldhuizen 22 -brigantine 22 -tropes 22 -offsides 22 -ysbyty 22 -one-set 22 -journaling 22 -2,013 22 -blackston 22 -melamed 22 -mohrer 22 -hanssen 22 -npt 22 -asl 22 -tomnod 22 -rotter 22 -kansans 22 -glenville 
22 -second-graders 22 -minnehaha 22 -929 22 -split-decision 22 -lethwei 22 -nutting 22 -absent-minded 22 -cohosh 22 -wenner 22 -frescos 22 -tsing 22 -ocampos 22 -burrill 22 -seku 22 -norristown 22 -convenor 22 -watch-list 22 -susaeta 22 -giff 22 -00:48 22 -ragonton 22 -greenglass 22 -djorkaeff 22 -mg/dl 22 -nonunion 22 -indymac 22 -catron 22 -all-too 22 -nussbaum 22 -whiteway 22 -23:42 22 -23:43 22 -high-fibre 22 -29-stone 22 -immigration-related 22 -supervillain 22 -longuet 22 -1,199 22 -top-right 22 -sliwa 22 -bika 22 -friedan 22 -rocko 22 -houghdahl 22 -18-11 22 -spamming 22 -1000m 22 -big-hitters 22 -nishi 22 -najor 22 -beauvais 22 -ex-vice 22 -regrowing 22 -choom 22 -submersion 22 -500mph 22 -czahor 22 -euribor 22 -millais 22 -garamba 22 -odesnik 22 -35-40 22 -phosphates 22 -botto 22 -vivica 22 -haredi 22 -militiaman 22 -sealy 22 -ammaria 22 -gojra 22 -yaks 22 -finfer 22 -zora 22 -gansa 22 -khaldiyeh 22 -cycleway 22 -duddy 22 -taba 22 -particularity 22 -1758 22 -throwbacks 22 -abrin 22 -monopolize 22 -sukuraman 22 -artrip 22 -mosquera 22 -tanjung 22 -pixley 22 -quilter 22 -double-deck 22 -water-related 22 -tattle 22 -igo 22 -mabrouk 22 -poonam 22 -unreturned 22 -drogo 22 -anti-satellite 22 -setad 22 -sarkis 22 -virologists 22 -amo 22 -dashwood 22 -varin 22 -codrington 22 -sterilising 22 -liberto 22 -vindskip 22 -bafta-nominated 22 -ramey 22 -866,000 22 -bansko 22 -eldo 22 -chastisement 22 -primroses 22 -muscle-wasting 22 -marie-paule 22 -contraventions 22 -markson 22 -curriculums 22 -lisp 22 -orania 22 -lefson 22 -shoebridge 22 -ebdon 22 -chuckie 22 -binno 22 -joseon 22 -nowlan 22 -ex-football 22 -mishka 22 -ashmeade 22 -nationalise 22 -18-24-year-olds 22 -ingress 22 -hoeben 22 -kembla 22 -super-soft 22 -guillotined 22 -resealed 22 -over-represented 22 -camhs 22 -ktxl 22 -583,000 22 -beefs 22 -benguit 22 -abodes 22 -alstom 22 -over-protective 22 -burlap 22 -pizzey 22 -medyk 22 -shalev 22 -conduits 22 -borel 22 -salame 22 -reawakened 22 -48.4 22 -fallot 22 -brainiest 22 -niesr 22 -andersonville 22 -tev 22 -casiano 22 -downstate 22 -four-engine 22 -interlocutor 22 -neeraj 22 -ogintz 22 -newstart 22 -meshed 22 -hewes 22 -mcelhinney 22 -mezrich 22 -udders 22 -bihl 22 -werzberger 22 -everette 22 -russa 22 -post-industrial 22 -dvd-by-mail 22 -two-down 22 -obergefell 22 -plop 22 -under-reporting 22 -gentil 22 -30f 22 -zig-zagging 22 -all-comers 22 -write-offs 22 -sensationalistic 22 -anorexics 22 -ias 22 -400-foot 22 -tpe 22 -porthmadog 22 -shifang 22 -dawsonville 22 -jedediah 22 -p.c. 
22 -251,000 22 -moser-proell 22 -strategizing 22 -optimization 22 -frittata 22 -mahurin 22 -tuckers 22 -sanduskys 22 -bleijie 22 -knysna 22 -busboy 22 -dormice 22 -triffitt 22 -payee 22 -pesetas 22 -re-purposed 22 -godmanchester 22 -lorely 22 -eclairs 22 -baylous 22 -cleanups 22 -embolo 22 -french-owned 22 -mellisa 22 -apoko 22 -teratomas 22 -seok 22 -22-game 22 -jaconelli 22 -vaillant 22 -berta 22 -melchett 22 -2ds 22 -bbg 22 -dispossession 22 -toleration 22 -etzebeth 22 -phagan 22 -captiva 22 -multi-player 22 -radziwill 22 -ehrlichman 22 -wyandotte 22 -spittle 22 -7:31 22 -koukalova 22 -sweet-natured 22 -hanscom 22 -889 22 -devillers 22 -conroy-taylor 22 -yanagawa 22 -marcelles 22 -dunson 22 -internationalist 22 -bruschetta 22 -juicier 22 -par-3 22 -sloppiness 22 -philistine 22 -gaskarth 22 -prophylactics 22 -kamp 22 -racecourses 22 -surmounted 22 -bensen 22 -isatu 22 -cronenberg 22 -borbon 22 -deepflight 22 -mcmenigall 22 -hellboy 22 -majar 22 -siegelman 22 -trowel 22 -kamer 22 -cribbs 22 -emmott 22 -ruggles 22 -tafe 22 -babble.com 22 -flounders 22 -engleitner 22 -searchlights 22 -orientals 22 -dpg 22 -stenberg 22 -savvides 22 -news9.com 22 -wisner 22 -rebutting 22 -949,000 22 -dalat 22 -jawans 22 -lousiana 22 -rose-colored 22 -sonders 22 -ilves 22 -high-heels 22 -dumanis 22 -whittall 22 -pre-eminence 22 -sweatbox 22 -sulcata 22 -heart-attack 22 -tube-fed 22 -tipoff 22 -top-edged 22 -tantalum 22 -hawi 22 -kapo 22 -three-toed 22 -castle-like 22 -journal-sentinel 22 -martial-arts 22 -helgegren 22 -skorjanc 22 -hedworth 22 -aip 22 -38.8 22 -neutralising 22 -mendacious 22 -shergar 22 -braunton 22 -text-message 22 -kailua-kona 22 -newlink 22 -pudil 22 -optically 22 -907 22 -905 22 -908 22 -2005/06 22 -canmore 22 -norther 22 -montesinos 22 -pails 22 -ridged 22 -unanswerable 22 -minow 22 -hackitt 22 -ellyn 22 -ayurveda 22 -gyrocopter 22 -ghadie 22 -saint-exupéry 22 -kamprad 22 -neigh 22 -honigstein 22 -hindhead 22 -zoleik 22 -rizer 22 -well-tended 22 -farquharson 22 -carotenoids 22 -snaith 22 -surya 22 -alien-like 22 -otp 22 -sheshan 22 -novia 22 -lae 22 -selsdon 22 -self-consciously 22 -malahide 22 -gravis 22 -day-and-a-half 22 -mayak 22 -ssn 22 -ssa 22 -weeknight 22 -deadbolt 22 -47.2 22 -diran 22 -shero 22 -41,865 22 -full-contact 22 -ascendance 22 -arouri 22 -interscholastic 22 -hewat 22 -rignot 22 -softy 22 -silvermans 22 -nosey 22 -cousy 22 -oecs 22 -46,500 22 -obaseki 22 -mashal 22 -21:45 22 -findmypast 22 -tanveer 22 -ebeling 22 -dupas 22 -repairable 22 -celica 22 -wrice 22 -verandas 22 -denia 22 -kowaleski 22 -discards 22 -hsn 22 -khyber-pakhtunkhwa 22 -jeptoo 22 -coherently 22 -karoshi 22 -masamba 22 -nations-backed 22 -cornfields 22 -4,750 22 -afiz 22 -ergas 22 -organists 22 -brattin 22 -inpatients 22 -kerwood 22 -code-breaker 22 -100/1 22 -department-issued 22 -high-earners 22 -dartington 22 -incubus 22 -colella 22 -alaei 22 -westway 22 -spilner 22 -ruthanne 22 -head-hunted 22 -vivant 22 -steuart 22 -decanted 22 -sibert 22 -langewiesche 22 -morimoto 22 -petreaus 22 -aardvark 22 -counter-protests 22 -kickstarting 22 -bcbg 22 -fox-udall 22 -recalibrated 22 -rk 22 -300mph 22 -scat 22 -5:25 22 -vaca 22 -deerstalker 22 -nairobi-based 22 -mutko 22 -hualalai 22 -wakelin 22 -eas 22 -d-type 22 -horseball 22 -traipse 22 -springhill 22 -gravener 22 -scofield 22 -hlavackova 22 -mcintire 22 -dinwiddie 22 -fahmi 22 -ma'ale 22 -cyprian 22 -rhoose 22 -foxglove 22 -diameters 22 -tinkoff 22 -lauretta 22 -kalakaua 22 -quique 22 -karoline 22 -soriot 22 -ftt 22 -titchfield 22 -detestable 22 
-overpopulated 22 -oxenford 22 -kittiwake 22 -olivet 22 -bitter-sweet 22 -azaleas 22 -pridmore 22 -sashes 22 -danakil 22 -broadley 22 -wkrg 22 -514,000 22 -unnoticeable 22 -bridezilla 22 -cahan 22 -in-country 22 -osoteku 22 -chisinau 22 -home-built 22 -facel 22 -shake-ups 22 -1,000-pound 22 -aulakh 22 -every-day 22 -ondrej 22 -muhtorov 22 -motorcar 22 -bastien 22 -absurdities 21 -jacquez 21 -lucic-baroni 21 -35lb 21 -denson 21 -lumet 21 -nemann 21 -whately 21 -lasses 21 -schaedler 21 -genser 21 -mading 21 -22,000-a-year 21 -admixture 21 -wasser 21 -17-foot 21 -gambari 21 -servat 21 -rolfes 21 -mua 21 -bullett 21 -biomimetic 21 -1507 21 -sabathia 21 -demeter 21 -gharafa 21 -lagasse 21 -rubell 21 -aq 21 -ridgecrest 21 -motorcars 21 -self-publish 21 -brazzaville 21 -ballparks 21 -30-23 21 -ptl 21 -165mph 21 -tuncay 21 -norcal 21 -gringo 21 -sugiyama 21 -10th-minute 21 -janitorial 21 -ex-cons 21 -karani 21 -withe 21 -womankind 21 -dmt 21 -bilingualism 21 -pre-exposure 21 -consigning 21 -orographic 21 -thule 21 -ganswein 21 -samp 21 -carpathian 21 -sieda 21 -dudgeon 21 -palacasi 21 -ramazan 21 -3.46 21 -sunland 21 -flyknit 21 -al-suri 21 -17-storey 21 -miocic 21 -mondiale 21 -qa 21 -blooding 21 -pichardo 21 -o'jays 21 -joesph 21 -exclamations 21 -bucher 21 -kucharek 21 -tiberius 21 -prashant 21 -kormondy 21 -42billion 21 -873 21 -female-to-male 21 -portchester 21 -hetch 21 -zaretsky 21 -civitavecchia 21 -nasa-funded 21 -28-30 21 -double-double 21 -concurring 21 -know-it-all 21 -closely-watched 21 -tamba 21 -bedspreads 21 -shangla 21 -akhilesh 21 -545,000 21 -undersigned 21 -dudu 21 -al-johari 21 -couture-rouleau 21 -lose-lose 21 -daihatsu 21 -21:36 21 -paulin-ramirez 21 -haggerston 21 -bruegel 21 -abhorred 21 -kroon 21 -ryugyong 21 -harris-beard 21 -dumfriesshire 21 -borjan 21 -miser 21 -lundqvist 21 -voiding 21 -dramatization 21 -mads 21 -obfuscate 21 -bresee 21 -left-handers 21 -chachawan 21 -mateu 21 -layvin 21 -y2k 21 -quandaries 21 -,10 21 -troubleshooting 21 -carmo 21 -2:12 21 -nodar 21 -metros 21 -stacho 21 -hyre 21 -manzo 21 -tincturebelle 21 -bulwell 21 -mncube 21 -caudalie 21 -corsi 21 -volatiles 21 -linville 21 -chevene 21 -carolwood 21 -graybeal 21 -@stephenathome 21 -11,000-a-year 21 -45.9 21 -zinfandel 21 -howse 21 -berenberg 21 -marle 21 -stadnyk 21 -nerve-shredding 21 -cauca 21 -22-years-old 21 -perennials 21 -climpson 21 -rinna 21 -kerwin 21 -long-handled 21 -lyell 21 -copthorne 21 -bbva 21 -bed-blocking 21 -reapplying 21 -c-47 21 -kilcoyne 21 -108million 21 -maass 21 -59.95 21 -usdaw 21 -orefice 21 -bides 21 -rumney 21 -westminister 21 -stoneking 21 -sportingly 21 -crudo 21 -dawda 21 -personify 21 -securicor 21 --11:00 21 -pernice 21 -candie 21 -eimear 21 -15-storey 21 -85p 21 -german-american 21 -frolka 21 -wailea 21 -inter-communal 21 -lockitron 21 -seminarians 21 -typefaces 21 -hdtvs 21 -phso 21 -22-inch 21 -tambling 21 -karilyn 21 -bsports 21 -peverley 21 -fatme 21 -self-regulating 21 -butorac 21 -jdi 21 -stoush 21 -tomljanovic 21 -kingdon 21 -crosier 21 -endzone 21 -14-6 21 -sheene 21 -preorders 21 -doesburg 21 -uplifts 21 -2010-2014 21 -reverential 21 -wiccan 21 -jovin 21 -ucu 21 -tippy 21 -delio 21 -serenbe 21 -rodriguez-chomat 21 -geochemist 21 -whiskas 21 -rollouts 21 -victoire 21 -bogans 21 -freston 21 -lemony 21 -bakkerud 21 -post-show 21 -short-stay 21 -juggalo 21 -ralstons 21 -aardvarks 21 -pental 21 -jolé 21 -karanja 21 -hingham 21 -shaneka 21 -reiman 21 -almaer 21 -sleep-inducing 21 -profit-sharing 21 -zonderland 21 -holbein 21 -counternarcotics 21 -21p 21 -1320 
21 -best-before 21 -cantin 21 -disease-carrying 21 -omnishambles 21 -barco 21 -elphinstone 21 -oerlemans 21 -al-iraqiya 21 -dijsselbloem 21 -powells 21 -sci-fi/fantasy 21 -babygros 21 -manoux 21 -˚c 21 -oswiecim 21 -46f 21 -bashi 21 -340million 21 -diya 21 -jasvir 21 -piña 21 -cornelissen 21 -1,453 21 -soltren 21 -alsatians 21 -ingolstadt 21 -ilovaysk 21 -liebeck 21 -vallaud-belkacem 21 -du'a 21 -insano 21 -mounoubai 21 -327,000 21 -boscolo 21 -zooniverse 21 -hertsmere 21 -olympic-style 21 -open-wheel 21 -knickerbockers 21 -koukash 21 -bartneck 21 -ruckle 21 -pettipierre 21 -fifth-seeded 21 -shop-window 21 -alzate 21 -shanshan 21 -catalonian 21 -caleigh 21 -castmate 21 -dextrose 21 -edalji 21 -noth 21 -chifundo 21 -tinfoil 21 -postelwait 21 -s/s 21 -crowd-surfing 21 -over-sensitive 21 -off-balance 21 -second-leading 21 -rep.-elect 21 -348,000 21 -fennville 21 -inelegant 21 -u.s.-iraq 21 -595,000 21 -postmodern 21 -schiffman 21 -mweene 21 -medicis 21 -pum 21 -pur 21 -five-months-old 21 -2.67 21 -replenishes 21 -apocryphal 21 -slavin 21 -partiality 21 -extraversion 21 -serenades 21 -widths 21 -garcinia 21 -terrigal 21 -breitner 21 -treuhaft 21 -westlands 21 -http://nbcphiladelphia.com 21 -etruscan 21 -bubu 21 -monteleone 21 -tiwi 21 -seditious 21 -sonagachi 21 -liason 21 -retakes 21 -note-taking 21 -blissett 21 -10/3 21 -kaguya 21 -uzair 21 -23f 21 -adorkable 21 -longest-tenured 21 -1030 21 -psu 21 -rattenbury 21 -crackerjack 21 -2002/03 21 -00:12 21 -ameen 21 -sleep-related 21 -magomed 21 -maximal 21 -unibody 21 -deric 21 -frings 21 -prisk 21 -verdugo 21 -twu 21 -premarket 21 -gurman 21 -kowal 21 -deciduous 21 -localytics 21 -9 1/2 21 -ja'afari 21 -cubesat 21 -amaretto 21 -valentyn 21 -monschein 21 -donervon 21 -slyly 21 -m60-ucd1 21 -workaday 21 -benesova 21 -doer 21 -mlb2k11 21 -coalescing 21 -batcave 21 -après 21 -hand-me-downs 21 -dvrs 21 -sub-editor 21 -cellos 21 -nusoor 21 -gas-filled 21 -nakagawa 21 -21:55 21 -pyros 21 -portnoy 21 -syon 21 -barbours 21 -somaly 21 -ramla 21 -furrows 21 -elohim 21 -neha 21 -unhappier 21 -chaat 21 -boondoggle 21 -wilk 21 -halina 21 -penpals 21 -app-based 21 -pale-skinned 21 -23-page 21 -diamondstein 21 -carbosiero 21 -monopolizing 21 -awwad 21 -evaluator 21 -runnin 21 -ischia 21 -60mins 21 -engrave 21 -non-appearance 21 -maddock 21 -horsehead 21 -clougherty 21 -pre-civil 21 -sexily 21 -coplin 21 -fara 21 -prions 21 -kospi 21 -selangor 21 -sioned 21 -crash-test 21 -mid-2007 21 -bensonhurst 21 -dominy 21 -dolenz 21 -rendall 21 -text-based 21 -ideation 21 -gigapixel 21 -wastefully 21 -durón 21 -mapleton 21 -nato-russia 21 -slamet 21 -karpiak 21 -500-plus 21 -hijrah 21 -frankenweenie 21 -jockeyed 21 -2037 21 -doyin 21 -send-up 21 -53-man 21 -brickyard 21 -roza 21 -rambles 21 -prize-winner 21 -brien 21 -snigger 21 -ekl 21 -cisak 21 -poetically 21 -00:34 21 -malthouse 21 -orhan 21 -kippers 21 -derocher 21 -siobhann 21 -azcentral 21 -nippers 21 -invesco 21 -meta-analysis 21 -gerbil 21 -bothwell 21 -atterberry 21 -multi-culturalism 21 -manzie 21 -liman 21 -23:15 21 -stockmarket 21 -lonczak 21 -biddeford 21 -bombala 21 -tightknit 21 -spacetime 21 -olajuwon 21 -slaloms 21 -intubation 21 -anna-marie 21 -post-flight 21 -ratchets 21 -pokomo 21 -10-piece 21 -aamodt 21 -root-and-branch 21 -shibly 21 -gro 21 -zoopla.co.uk 21 -all-nighters 21 -orifice 21 -disconcertingly 21 -sellick 21 -e-coli 21 -purdie 21 -fadavi 21 -choji 21 -flyboard 21 -jermyn 21 -maseru 21 -flash-bang 21 -snavely 21 -handa 21 -fully-qualified 21 -mungall 21 -vanessa-mae 21 -drapery 21 -rulemaking 
21 -photobombs 21 -scheuermann 21 -tonopah 21 -ettlin 21 -2.07 21 -emporia 21 -misprint 21 -gopal 21 -jezard 21 -ticketless 21 -blackhorse 21 -dalmeny 21 -passionata 21 -hoggart 21 -morus 21 -quagga 21 -cloninger 21 -opsahl 21 -photosynthetic 21 -fossedal 21 -hushing 21 -triona 21 -62.3 21 -1702 21 -chromatophores 21 -hard-luck 21 -summa 21 -lockport 21 -hypnotism 21 -friendless 21 -drily 21 -farcically 21 -soliah 21 -unquestioning 21 -maplebeck 21 -gravitation 21 -idk 21 -2.22 21 -2.24 21 -hla 21 -supertall 21 -nugents 21 -air-con 21 -gorry 21 -kerchove 21 -caius 21 -lier 21 -instills 21 -ignashevich 21 -luque 21 -basford 21 -semien 21 -reyhaneh 21 -ex-policeman 21 -nekrasov 21 -skimping 21 -racketeer 21 -vigilantly 21 -shynkarenko 21 -bojang 21 -hieroglyphic 21 -tyrrhenian 21 -485,000 21 -soloists 21 -castelluccio 21 -livingsocial 21 -prader-willi 21 -flink 21 -bellos 21 -throat-slitting 21 -recoiling 21 -360million 21 -adina 21 -glyndwr 21 -mcchicken 21 -penarol 21 -bayfront 21 -glistened 21 -mu'ath 21 -cyberespionage 21 -lib-dem 21 -jupiters 21 -banos 21 -playfair 21 -schlatter 21 -anis 21 -herbrich 21 -23:36 21 -beautified 21 -44lb 21 -residuals 21 -narco-trafficking 21 -higher-than-normal 21 -roofless 21 -langstone 21 -oleson 21 -nonsuch 21 -lamanna 21 -horrifyingly 21 -stratos 21 -kepner 21 -workarounds 21 -furbies 21 -40,000-a-week 21 -khatkar 21 -doak 21 -cheaptickets 21 -tunnock 21 -spray-paint 21 -chernyakova 21 -kalachi 21 -mcentire 21 -re-energise 21 -d'evelyn 21 -mash-ups 21 -adalat 21 -dressed-up 21 -entente 21 -r-ariz. 21 -banlieues 21 -autobahns 21 -kanchelskis 21 -neshoba 21 -fledglings 21 -overconfidence 21 -callegari 21 -yodok 21 -ukraine-russia 21 -lea-ann 21 -anucyia 21 -sumac 21 -paraglide 21 -fatefully 21 -mufasa 21 -sindall 21 -glad-handing 21 -1763 21 -strabane 21 -20,400 21 -over-heating 21 -pisces 21 -jackhammer 21 -keano 21 -villette 21 -carbines 21 -dogfighters 21 -face-recognition 21 -stepmothers 21 -jopling 21 -cruisecritic.com 21 -liberian-flagged 21 -maroua 21 -poxton 21 -884 21 -dhani 21 -onda 21 -ondo 21 -adiyiah 21 -20-17 21 -korostelev 21 -scarfe 21 -conchords 21 -honnold 21 -shalonda 21 -wedner 21 -peterman 21 -porta-potty 21 -vaporizers 21 -codenames 21 -wholefoods 21 -herpetologist 21 -huffpo 21 -kustok 21 -kasidiaris 21 -aruna 21 -kazim 21 -colour-changing 21 -tarin 21 -cross-fire 21 -brundle 21 -brutalize 21 -dansie 21 -bemidji 21 -preachy 21 -expunge 21 -bluer 21 -harrassing 21 -johanne 21 -brosowski 21 -impetuous 21 -everhart 21 -62m 21 -wasboonma 21 -swailes 21 -lay-up 21 -semi-annual 21 -basciano 21 -end-of-the-world 21 -23:59 21 -23:52 21 -23:55 21 -moskva 21 -dubrow 21 -cobbina 21 -diaw 21 -kernan 21 -dilek 21 -strobel 21 -isley 21 -loory 21 -vouching 21 -argys 21 -tasos 21 -telecasts 21 -hot-tempered 21 -midstokke 21 -dorice 21 -corrieri 21 -front-rower 21 -montanans 21 -joice 21 -color-blind 21 -torrie 21 -hospira 21 -woudenberg 21 -pervak 21 -800mhz 21 -sheffield-born 21 -anti-bribery 21 -führer 21 -wagenen 21 -72.50 21 -paerson 21 -hypersensitive 21 -kalakh 21 -msci 21 -frescoed 21 -kallin 21 -100-200 21 -tatlow 21 -delish 21 -escada 21 -doughy 21 -rentas 21 -game-by-game 21 -revelus 21 -extremis 21 -ormaechea 21 -daood 21 -chita 21 -uplink 21 -kanan 21 -gana 21 -bolognaise 21 -phenomenons 21 -once-promising 21 -origination 21 -baccarin 21 -steketee 21 -ehic 21 -sueddeutsche 21 -right-of-way 21 -maddeningly 21 -bagour 21 -hansa 21 -yarima 21 -immunise 21 -wragg 21 -rogers-seitz 21 -a-t 21 -brownhills 21 -term-limited 21 -cuvée 21 
-daywear 21 -callosum 21 -coweta 21 -mami 21 -9cm 21 -l/bdr 21 -high-threat 21 -taliban-controlled 21 -jasgur 21 -malbec 21 -copiah 21 -25-acre 21 -florets 21 -rockery 21 -trembley 21 -havisham 21 -arouses 21 -all-nighter 21 -955 21 -954 21 -18-years 21 -renesys 21 -ardolf 21 -torremolinos 21 -mooloolaba 21 -drucker 21 -foglietta 21 -djanogly 21 -semler 21 -edinburg 21 -zetsche 21 -inter-ethnic 21 -taylorsville 21 -8-12 21 -dayle 21 -service-connected 21 -lachey 21 -humoured 21 -binx 21 -150billion 21 -lyonnais 21 -weirather 21 -sternberg 21 -gtr 21 -nobbys 21 -d'andre 21 -neitzel 21 -schnuk 21 -892 21 -larocque 21 -rearden 21 -nachoum 21 -giménez 21 -waylaid 21 -urgings 21 -corey-ochoa 21 -1669 21 -smileys 21 -unresponsiveness 21 -near-field 21 -sadi 21 -degrafreed 21 -reboots 21 -second-ever 21 -playgirl 21 -lagonda 21 -clamshell 21 -planed 21 -perrywinkle 21 -meridia 21 -frogmen 21 -speakership 21 -nicos 21 -pilotto 21 -unbothered 21 -cochabamba 21 -trundles 21 -bekken 21 -metamaterials 21 -e-elt 21 -2-day 21 -superjumbos 21 -lijnen 21 -irsan 21 -2044 21 -paape 21 -champlin 21 -western-educated 21 -signifier 21 -democratic-held 21 -hyacinths 21 -1,799 21 -fundy 21 -altona 21 -nyala 21 -cowhide 21 -relaxers 21 -587.5 21 -pressel 21 -wishy-washy 21 -bergamine 21 -salli 21 -surgut 21 -blewett 21 -gibraltan 21 -dartey 21 -kaizer 21 -soetjipto 21 -9:53 21 -mbolhi 21 -skyping 21 -piasecki 21 -asps 21 -aaryn 21 -soelden 21 -sanglah 21 -kivalina 21 -kanto 21 -dacia 21 -sijsling 21 -counter-argument 21 -tanqueray 21 -sufficed 21 -joh 21 -climaxing 21 -92.3 21 -fire-resistant 21 -44.4 21 -cross-government 21 -kleberson 21 -3/10 21 -inter-war 21 -fordgate 21 -kofe 21 -tadworth 21 -us-made 21 -16.95 21 -barest 21 -ihab 21 -grotte 21 -179,000 21 -mountbatten-windsor 21 -all-glass 21 -cgf 21 -extortionists 21 -hardan 21 -scalpay 21 -alamance 21 -overdiagnosis 21 -blore 21 -muhsin 21 -hunger-striking 21 -navfor 21 -sign-in 21 -pinar 21 -weedon 21 -pola 21 -gunplay 21 -sinuiju 21 -o'barry 21 -carerra 21 -baptising 21 -fount 21 -roddis 21 -dinged 21 -demond 21 -quaternary 21 -mitin 21 -cropland 21 -the-then 21 -jaramana 21 -al-janabi 21 -srt 21 -monita 21 -biloba 21 -uncontaminated 21 -fida 21 -colliers 21 -5.00 21 -al-tawheed 21 -robbery-homicide 21 -abductees 21 -gaa 21 -pariseleti 21 -anti-hunting 21 -clairefontaine 21 -tricare 21 -less-traveled 21 -glowering 21 -elysia 21 -gara 21 -truck-mounted 21 -anti-drone 21 -brandford 21 -staverton 21 -75ft 21 -gothic-style 21 -romig 21 -flitwick 21 -mitchells 21 -vineland 21 -sariwee 21 -normalises 21 -drano 21 -plamen 21 -wisconsin-milwaukee 21 -schiano 21 -transpennine 21 -guyton 21 -22per 21 -mass-produce 21 -bakhit 21 -sinuous 21 -acclimate 21 -lefkowitz 21 -archaea 21 -horseworld 21 -faceoff 21 -pro-uk 21 -kirwans 21 -creditworthiness 21 -724 21 -capilano 21 -25-point 21 -humourless 21 -hones 21 -pro-ouattara 21 -maio 21 -daunt 21 -zocalo 21 -cottbus 21 -hysom 21 -dupain 21 -judiciaria 21 -ksm 21 -burstyn 21 -musketeer 21 -34,600 21 -ajo 21 -bansi 21 -kabeer 21 -lutfiah 21 -926 21 -crowborough 21 -peregrina 21 -greenup 21 -diarrheal 21 -sickles 21 -1,345 21 -1,340 21 -crated 21 -onw 21 -wingspans 21 -stanchion 21 -913 21 -karly 21 -face-up 21 -1689 21 -barlett 21 -bruning 21 -gonshaw 21 -silversmith 21 -dog-walkers 21 -coni 21 -andele 21 -marc-vivien 21 -island-based 21 -lynden 21 -naproxen 21 -off-set 21 -three-digit 21 -complained-about 21 -workspaces 21 -aerostats 21 -teken 21 -prescot 21 -ex-colleague 21 -moneymaking 21 -casselman 21 -scarisbrick 21 
-buik 21 -five-season 21 -beachgoer 21 -lamberti 21 -abmu 21 -streelman 21 -under-par 21 -ajla 21 -12.9-inch 21 -boycie 21 -beautyman 21 -deregulate 21 -halbreich 21 -reif 21 -mbodj 21 -merrell 21 -sunroom 21 -bumpkin 21 -pcns 21 -londell 21 -fibs 21 -beanbags 21 -worriers 21 -boyse 21 -haenow 21 -totoaba 21 -lalas 21 -u17s 21 -superyachtworld 21 -northrup 21 -doyley 21 -north-easterly 21 -500-a-night 21 -eliane 21 -obrycka 21 -outgrowing 21 -ratted 21 -mclauchlan 21 -redeploying 21 -129,000 21 -lechlade 21 -sexologists 21 -eruzione 21 -playrooms 21 -peonies 21 -godward 21 -higher-than-average 21 -doozy 21 -double-digits 21 -izumi 21 -nuckols 21 -parmer 21 -210million 21 -canavero 21 -seeberg 21 -zz 21 -barbie-themed 21 -baseball-sized 21 -76million 21 -zoabi 21 -coziness 21 -cyber-espionage 21 -friendster 21 -harn 21 -93million 21 -constrains 21 -coachman 21 -zingy 21 -volland 21 -feeley 21 -exuma 21 -cavallari 21 -pollos 21 -mizzy 21 -atpworldtour.com 21 -puddy 21 -tschogl 21 -all-singing 21 -four-seat 21 -philanthropies 21 -laminates 21 -digan 21 -perversions 21 -marianas 21 -spoty 21 -extortions 21 -sleepwalked 21 -straughan 21 -reassembling 21 -seismographs 21 -oguz 21 -littoral 21 -denisa 21 -1,105 21 -#israel 21 -uppity 21 -bittner 21 -beachcroft 21 -doureihi 21 -dynatac 21 -m56 21 -merrier 21 -side-step 21 -fraenkel 21 -100-minute 21 -sealed-off 21 -morlet 21 -yateley 21 -seurat 21 -shabalala 21 -pitying 21 -spaying 21 -exuberantly 21 -priors 21 -galatasary 21 -wsam 21 -weight-gain 21 -57,500 21 -appellants 21 -extra-wide 21 -gremlin 21 -1:55 21 -lattimore 21 -phas 21 -22:57 21 -five-way 21 -225million 21 -nowikiewicz 21 -antm 21 -kotelevskaya 21 -bretall 21 -town-hall 21 -u.s.-india 21 -zapf 21 -paranal 21 -less-than-perfect 21 -sneers 21 -taoist 21 -b53 21 -spick 21 -patient-centered 21 -dubanchet 21 -hamiltons 21 -tassie 21 -56.8 21 -coverdale 21 -synching 21 -lein 21 -benavidez 21 -self-assembly 21 -pervade 21 -webmd 21 -khalidiya 21 -arsalan 21 -godchildren 21 -toils 21 -sheils 21 -glassed-in 21 -kooning 21 -lemay 21 -tankersley 21 -thamsanqa 21 -blowfish 21 -dramatist 21 -1,995 21 -seh 21 -aude 21 -gross-out 21 -21:28 21 -máxima 21 -volte 21 -police-involved 21 -noncompliant 21 -u.k.-based 21 -aydemir 21 -beignets 21 -10.95 21 -licari 21 -9:35 21 -freeholder 21 -buenavista 21 -fede 21 -self-hatred 21 -kellan 21 -wrongfulness 21 -ferny 21 -vincelot 21 -braunstein 21 -goserelin 21 -transgressed 21 -gonen 21 -carvery 21 -sdsu 21 -suraev 21 -rzepka 21 -chimelong 21 -wd-40 21 -ellie-louise 21 -life-forms 21 -667c 21 -watenpaugh 21 -asshole 21 -yorath 21 -29ft 21 -melenchon 21 -newscasters 21 -koln 21 -hed 21 -hef 21 -perse 21 -martello 21 -kelly-ann 21 -volcan 21 -burgin 21 -transistors 21 -kasbah 21 -intermarried 21 -tennesse 21 -longchamps 21 -ex-members 21 -assiduous 21 -woodhull 21 -bispham 21 -rippy 21 -mercaptan 21 -earpods 21 -ghadiri 21 -eight-goal 21 -mayomi 21 -1538 21 -1,165 21 -concreting 21 -silbo 21 -resto 21 -39.2 21 -39.3 21 -tanden 21 -kirsopp 21 -tripit 21 -m75 21 -sicknesses 21 -scholesy 21 -kapila 21 -uncharged 21 -dubey 21 -deverdics 21 -preinstalled 21 -bazile 21 -carpetbagger 21 -unix 21 -698 21 -fat-cat 21 -salp 21 -shorted 21 -waldrum 21 -turnoff 21 -lewis-francis 21 -btk 21 -meckfessel 21 -elkann 21 -phlegmatic 21 -maro 21 -us-bound 21 -donlon 21 -vanderheiden 21 -zahran 21 -hitlers 21 -bertolucci 21 -redaction 21 -bisou 21 -pursglove 21 -pappert 21 -30-acre 21 -worldreader 21 -formentera 21 -39f 21 -snickered 21 -minnis 21 -ksbw 21 -clasicos 21 -pratillo 
21 -brining 21 -glasser 21 -hight 21 -stateswoman 21 -emelec 21 -delabole 21 -appraising 21 -moodiness 21 -748 21 -williamses 21 -43.2 21 -reddihough 21 -afghan-pakistani 21 -shallowness 21 -metropole 21 -aeromobile 21 -metamora 21 -superweeds 21 -rebombo 21 -self-importance 21 -glynne 21 -high-schooler 21 -karama 21 -13-story 21 -freemason 21 -loker 21 -2:05 21 -brophy 21 -grazer 21 -1,323 21 -wildness 21 -721 21 -442nd 21 -wangaratta 21 -68mph 21 -leberge 21 -lacey-mae 21 -matravers 21 -zohar 21 -trimethylamine 21 -cliffhangers 21 -shrager 21 -goalcontrol 21 -highly-decorated 21 -fudging 21 -carie 21 -guiyang 21 -nikam 21 -cross-shot 21 -strawn 21 -krieg 21 -neesham 21 -smicer 21 -kroell 21 -mollema 21 -harmoush 21 -eccleshall 21 -nica 21 -inordinately 21 -ultraman 21 -baskett 21 -thin-skinned 21 -romanesque 21 -cloche 21 -2048 21 -j.g. 21 -duo/group 21 -mullion 21 -arm-wrestling 21 -pmi 21 -derbys 21 -axions 21 -inappropriateness 21 -video-taped 21 -mxit 21 -fiebig 21 -allissa 21 -eshraghi 21 -near-normal 21 -suranga 21 -eeva 21 -khilafa 21 -1:10 21 -1,265 21 -kade 21 -jayasinghe 21 -bvb 21 -runabout 21 -shuman 21 -22:16 21 -evacuates 21 -silbury 21 -amyl 21 -maraachli 21 -buffoni 21 -xk 21 -domicile 21 -94.4 21 -metabolisms 21 -al-azdi 21 -city2surf 21 -nyclu 21 -withholds 21 -coover 21 -lazell 21 -macri 21 -846 21 -841 21 -kottak 21 -boyt 21 -seashells 21 -roskilde 21 -earthrise 21 -birthdate 21 -frisina 21 -asbestos-related 21 -mvezo 21 -11cm 21 -refn 21 -incinerating 21 -ophone 21 -al-houthi 21 -cherelle 21 -ohoud 21 -olcay 21 -whibley 21 -beith 21 -guen 21 -johnna 21 -spillages 21 -3-11 21 -salil 21 -pooprints 21 -akunyili 21 -hard-to-treat 21 -matriarchal 21 -shavitz 21 -3.04 21 -vernace 21 -deadman 21 -mahzamani 21 -oaksterdam 21 -abuts 21 -vintner 21 -half-marathons 21 -warroad 21 -24-0 21 -sumà 21 -sisk 21 -diamandis 21 -35-day 21 -enumerated 21 -photocopying 21 -wheatcroft 21 -lib-lab 21 -broadmeadows 21 -munshi 21 -japp 21 -pandodaily 21 -hav 21 -mis-matched 21 -10-and-a-half 21 -fenny 21 -sunderman 21 -christendom 21 -aeromobil 21 -alejandre 21 -preteens 21 -thisara 21 -polio-free 21 -aggressions 21 -ast 21 -somersaulting 21 -keitai 21 -meer 21 -labrot 21 -ossad 21 -shockey 21 -devvarman 21 -halta 21 -tipped-off 21 -davidian 21 -jbs 21 -stian 21 -buccaneer 21 -robespierre 21 -claw-like 21 -10/11 21 -shapley 21 -ioane 21 -ioana 21 -vancallis 21 -kalimba 21 -extroverts 21 -veldwijk 21 -alyssia 21 -vocalize 21 -leverages 21 -syverson 21 -theorizing 21 -vitolo 21 -flameout 21 -nyom 21 -passé 21 -vyntra 21 -ashleymadison.com 21 -pontyberem 21 -mallucci 21 -conquistadors 21 -roberton 21 -h211 21 -limousin 21 -guru-murthy 21 -zoah 21 -overhangs 21 -face.com 21 -eskridge 21 -alcudia 21 -massachussetts 21 -38.2 21 -grisaffi 21 -sleeman 21 -mailsport 21 -30-0 21 -dakin 21 -moai 21 -lisa-marie 21 -khalfan 21 -hayball 21 -duodenum 21 -darnley 21 -mesquita 21 -21:46 21 -cenotes 21 -billingses 21 -descoings 21 -maratus 21 -under-eye 21 -adia 21 -wd 21 -storm-ravaged 21 -rubbishing 21 -437,000 21 -invisibly 21 -shareable 21 -elven 21 -shinn 21 -middle-man 21 -995,000 21 -plosky 21 -daub 21 -schlafly 21 -sirga 21 -1735 21 -jaren 21 -yeun 21 -knotty 21 -mimosa 21 -10-wicket 21 -penston 21 -calehr 21 -capistrano 21 -mcelvaney 21 -levonorgestrel 21 -ninety-six 21 -2.58 21 -kennon 21 -holz 21 -rhododendron 21 -al-rai 21 -thingy 21 -separatist-controlled 21 -hallyday 21 -plein 21 -pecoraro 21 -symes 21 -circumnavigated 21 -rollovers 21 -hargeisa 21 -anthropoids 21 -characterises 21 -azariah 21 
-6.58 21 -khou11 21 -griping 21 -xna 21 -cotard 21 -warriewood 21 -airmiles 21 -rapamycin 21 -bozek 21 -gibe 21 -00:08 21 -sothebys 21 -calvesbert 21 -vosges 21 -keffiyeh 21 -al-gohary 21 -hermans 21 -six-piece 21 -pollyanna 21 -kidsandcars.org 21 -gunshon 21 -156million 21 -mariette 21 -rarities 21 -shipsides 21 -23:01 21 -spay 21 -lockable 21 -manouchehr 21 -comradeship 21 -6-12 21 -beltrao 21 -1033 21 -celeriac 21 -venerate 21 -race-goers 21 -menie 21 -martin_domin 21 -2006-08 21 -15-24 21 -markowitz 21 -807 21 -mockridge 21 -lambasts 21 -ostensible 21 -kinglake 21 -evelio 21 -withey 21 -kitajima 21 -climategate 21 -cheriton 21 -lf 21 -maltreated 21 -cudworth 21 -travelex 21 -down-ballot 21 -reassigning 21 -kayihura 21 -iarc 21 -super-cool 21 -anti-euro 21 -seabourn 21 -ravenstahl 21 -demi-leigh 21 -belding 21 -bad-ass 21 -spithead 21 -southfields 21 -superheros 21 -31,000-a-year 21 -navigon 21 -interest-rate 21 -dishonoring 21 -rapinoe 21 -14-12 21 -kneller 21 -coloma 21 -smitty 21 -shaela 21 -washrooms 21 -shortcake 21 -hailsham 21 -woodsby 21 -peretz 21 -swafford 21 -stallholders 21 -delfina 21 -voyles 21 -cross-reference 21 -capitalistic 21 -woodsman 21 -sielski 21 -manse 21 -sodexo 21 -arlia 21 -daohugou 21 -stop-and-search 21 -high-priority 21 -là 21 -she-ra 21 -20-20 21 -botnets 21 -netroots 21 -22:06 21 -corollary 21 -ducie 21 -stone-cold 21 -badia 21 -al-kanadi 21 -f-bombs 21 -tie-ups 21 -grottos 21 -hpv-related 21 -serban 21 -ballater 21 -monkfish 21 -hide-out 21 -relenting 21 -neutrally 21 -niederbrock 21 -egg-laying 21 -13-3 21 -ashars 21 -marjan 21 -lehi 21 -wadhwa 21 -eliasch 21 -diplo 21 -lincolns 21 -oag 21 -bonedigger 21 -collective-bargaining 21 -dislocates 21 -corsos 21 -reconfigure 21 -kalyn 21 -nutbrown 21 -jozi 21 -monegasque 21 -bandelier 21 -newnham 21 -leighanne 21 -michaelangelo 21 -gurinder 21 -23:22 21 -tug-of-love 21 -breast-cancer 21 -eshoo 21 -joellen 21 -sinker 21 -wimpole 21 -72.2 21 -eilish 21 -pugilist 21 -dragao 21 -laxalt 21 -babycentre 21 -11.44 21 -massport 21 -radclyffe 21 -paps 21 -toshio 21 -cerfontyne 21 -dac 21 -bjarne 21 -holyport 21 -saviors 21 -nijinsky 21 -signup 21 -neria 21 -rashawn 21 -izabel 21 -crimped 21 -dispersion 21 -prine 21 -shallotte 21 -arava 21 -gaiger 21 -trompe 21 -iri 21 -lonelier 21 -lapdancing 21 -hetchy 21 -bannisters 21 -leporatti 21 -spatters 21 -tax-raising 21 -brez 21 -arınç 21 -milligrammes 21 -sub-contractor 21 -48-hours 21 -moscicki 21 -immobilise 21 -verena 21 -blasios 21 -munis 21 -drug-tested 21 -dagestani 21 -toussie 21 -protestantism 21 -gulnara 21 -sucker-punch 21 -howett 21 -mathur 21 -hallum 21 -one-over-par 21 -barrelled 21 -eberl 21 -n95 21 -self-aggrandizing 21 -21-9 21 -brickhouse 21 -diego-area 21 -filleting 21 -anatole 21 -kalmadi 21 -merrillville 21 -drive-ins 21 -airbases 21 -2.11 21 -all-race 21 -detests 21 -soderstrom 21 -wasel 21 -maknojioa 21 -electree 21 -1,375 21 -saide 21 -sillitoe 21 -jafargholi 21 -mackoff 21 -gugick 21 -vautrey 21 -partywear 21 -cacau 21 -freephone 21 -crace 21 -adult-sized 21 -payal 21 -sizer 21 -echocardiogram 21 -stinker 21 -groll 21 -gosper 21 -leaderships 21 -manns 21 -whined 21 -outperforms 21 -00:43 21 -kosciuszko 21 -percussionist 21 -skywatchers 21 -kerris 21 -sukiyabashi 21 -mineo 21 -yeas 21 -paiute 21 -demoralize 21 -ravensthorpe 21 -dyers 21 -hepner 21 -british-run 21 -muoio 21 -keiller 21 -sarkeesian 21 -marsham 21 -glans 21 -80km/h 21 -shuwa 21 -jeffren 21 -talkeetna 21 -beccy 21 -manhattanhenge 21 -sago 21 -trivialized 21 -torro-flor 21 -1.91 21 -bushel 
21 -83.5 21 -e-borders 21 -bartik 21 -universo 21 -facially 21 -danso 21 -semenov 21 -venters 21 -splice 21 -mauley 21 -runoffs 21 -syrian-led 21 -f-secure 21 -22:58 21 -leyal 21 -sambas 21 -gla 21 -hand-cranked 21 -glutathione 21 -euroskeptic 21 -scything 21 -purifier 21 -crunchers 21 -barati 21 -low-brow 21 -ulu 21 -re-booked 21 -biodynamic 21 -50-1 21 -sriharikota 21 -jazira 21 -furukawa 21 -emlyn 21 -re-iterated 21 -4,950 21 -kulina 21 -32f 21 -kistler 21 -0.09 21 -1mrt 21 -three-feet 21 -sculptured 21 -road-tested 21 -truex 21 -utoeya 21 -cnnradio 21 -ragazzino 21 -janaya 21 -79f 21 -lupine 21 -lynmouth 21 -r7 21 -niños 21 -double-checked 21 -bevilacqua 21 -clairol 21 -high-calibre 21 -rikuzentakata 21 -sword-wielding 21 -radiometer 21 -benzine 21 -big-headed 21 -raetz 21 -adlai 21 -rain-delayed 21 -6-year-olds 21 -pcn 21 -shrewton 21 -newens 21 -seiber 21 -contrarian 21 -hakizimana 21 -seim 21 -bogdanovic 21 -blackshaw 21 -radionova 21 -pullan 21 -jadeveon 21 -piedad 21 -jamarcus 21 -essebsi 21 -1587 21 -2.53 21 -dachstein 21 -paroline 21 -quazi 21 -wingwalkers 21 -mop-up 21 -tonibeth 21 -dl 21 -dv 21 -scuderi 21 -over-active 21 -rycroft 21 -polias 21 -deepcut 21 -conscripting 21 -rationalization 21 -karaoglan 21 -ajami 21 -darch 21 -sánchez 21 -super-efficient 21 -38-game 21 -lualua 21 -g-7 21 -ciarán 21 -dutch-based 21 -zvi 21 -chisolm 21 -unfrozen 21 -tomislav 21 -sportswriters 21 -adak 21 -volpi 21 -continent-wide 21 -greitens 21 -jackley 21 -1,092 21 -tada 21 -baggaley 21 -pole-sitter 21 -then-director 21 -lannoy 21 -jeno 21 -zhaoxu 21 -mckirdy 21 -shoah 21 -wonks 21 -elkton 21 -unthinking 21 -dupes 21 -bizimana 21 -400-mile 21 -prelox 21 -m-4 21 -gheit 21 -iaa 21 -kurkowski 21 -iveri 21 -perak 21 -trolltunga 21 -re-reading 21 -dgca 21 -belta 21 -vlasenko 21 -marsel 21 -staggs 21 -shamdasani 21 -sabbota 21 -pernilla 21 -tepe 21 -durga 21 -starner 21 -keillor 21 -dieudonné 21 -cilicap 21 -d'urbervilles 21 -derrieres 21 -antisec 21 -chocks 21 -westcountry 21 -11per 21 -gebruers 21 -schrödinger 21 -sacraments 21 -19-page 21 -crawly 21 -aktan 21 -schauder 21 -thermoplastic 21 -santel 21 -mutters 21 -most-recent 21 -grand-children 21 -calluses 21 -countach 21 -5per 21 -ganem 21 -six-feet 21 -trias 21 -foxworthy 21 -tughan 21 -kalamata 21 -hunterdon 21 -bongos 21 -goodhind 21 -67.3 21 -pool-side 21 -zeckendorf 21 -249th 21 -janetzko 21 -l'occitane 21 -prora 21 -skyped 21 -chis 21 -kuro 21 -r-kansas 21 -voskerician 21 -toben 21 -710,000 21 -lindnord 21 -widdop 21 -stylings 21 -onedin 21 -wunder 21 --47 21 -maximised 21 -eurocrat 21 -kherson 21 -kspr 21 -hanko 21 -kina 21 -setae 21 -month-by-month 21 -woodmansey 21 -reconnection 21 -3:55 21 -caroling 21 -purplish 21 -htv-2 21 -ellis-petersen 21 -rasab 21 -noradrenaline 21 -malenchenko 21 -safety-first 21 -peristeri 21 -horobin 21 -dhammika 21 -o'bara 21 -chicky 21 -haute-savoie 21 -church-owned 21 -randell 21 -paal 21 -atocha 21 -dpi 21 -upc 21 -childproof 21 -homewrecker 21 -timeouts 21 -sickert 21 -iphoto 21 -oradour 21 -rackley 21 -oxygen-rich 21 -shopworker 21 -bandera 21 -papis 21 -a-class 21 -scathingly 21 -apportioned 21 -chulalongkorn 21 -incentivized 21 -fist-bump 21 -bereavements 21 -aadmi 21 -wibberley 21 -veazey 21 -ronk 21 -denilson 21 -castigate 21 -bilad 21 -558 21 -557 21 -551 21 -55p 21 -anontune 21 -conversationalist 21 -schick 21 -come-back 21 -viktoras 21 -knutt 21 -publicly-owned 21 -state-approved 21 -248,000 21 -lungescu 21 -sussex-based 21 -then-16-year-old 21 -gdf 21 -delavan 21 -oglethorpe 21 -recumbent 21 
-fontan 21 -nightingales 21 -mcquinn 21 -walz 21 -outspend 21 -ficks 21 -1,135 21 -consensually 21 -12.55 21 -strank 21 -democratisation 21 -eyeline 21 -stickiness 21 -bohnert 21 -cinthya 21 -frymann 21 -ghysels 21 -jemez 21 -787-8 21 -maggi 21 -83.7 21 -fanimo 21 -dronie 21 -christou 21 -diffuser 21 -mexborough 21 -nanshan 21 -disbelieve 21 -yura 21 -non-event 21 -hillard 21 -platzer 21 -morisset 21 -5:05 21 -negra 21 -selten 21 -quickflix 21 -craigellachie 21 -ganassi 21 -casco 21 -die-hards 21 -kitty-themed 21 -windchill 21 -newly-found 21 -3.09 21 -3.08 21 -3.01 21 -lapdog 21 -gopo 21 -herriman 21 -47.4 21 -murchison 21 -inah 21 -cystinosis 21 -arsala 21 -44,500 21 -renea 21 -scrimping 21 -trust-fund 21 -24lbs 21 -helliwell 21 -schwartlander 21 -rhames 21 -displaces 21 -nisshin 21 -courtesan 21 -21:48 21 -pro-israeli 21 -50-second 21 -binay 21 -2.96 21 -jungfrau 21 -soju 21 -azzouzi 21 -mattier 21 -beamon 21 -maroons 21 -11-9 21 -howarth-lees 21 -doubloon 21 -bosdet 21 -trishna 21 -seaver 21 -734 21 -dela 21 -al-khanssaa 21 -adrenaline-fuelled 21 -somchai 21 -1,083 21 -bl 21 -transavia 21 -tulse 21 -lotzia 21 -sayeeda 21 -smallville 21 -ksl-tv 21 -bayelsa 21 -vibrantly 21 -twice-a-day 21 -burkill 21 -khalife 21 -roscommon 21 -wildsmith 21 -hermetically 21 -tanweer 21 -bakara 21 -levete 21 -aspergillus 21 -clarey 21 -aboyne 21 -d-montana 21 -epcr 21 -bfc 21 -handicapping 21 -jaume 21 -coddle 21 -nephila 21 -hawkin 21 -luzhkov 21 -tow-truck 21 -farman 21 -riza 21 -asgard 21 -esquino 21 -180cm 21 -magid 21 -2:2 21 -dimpling 21 -captivates 21 -repartee 21 -toye 21 -cottee 21 -cotten 21 -mixup 21 -full-bodied 21 -eran 21 -grabham 21 -pop-ups 21 -once-a-day 21 -ella-paige 21 -marise 21 -emba 21 -19-12 21 -aldrete-davila 21 -2/10 21 -jenderseck 21 -beever 21 -rhinitis 21 -mohebbifar 21 -vadera 21 -450ft 21 -monguno 21 -burcham 21 -battambang 21 -lamair 21 -carioca 21 -old-timey 21 -bellflower 21 -easters 21 -almejo 21 -bilour 21 -mckeesport 21 -foster-burnell 21 -dawran 21 -fti 21 -enderle 21 -macgill 21 -perlstein 21 -90-year 21 -blagged 21 -axitinib 21 -quickened 21 -zoller 21 -feet-first 21 -mytheresa.com 21 -d-n.y. 21 -fonz 21 -h.a. 
21 -ostapchuk 21 -ux 21 -burinskas 21 -ingesson 21 -gassy 21 -l'hydroptere 21 -shirwa 21 -weei 21 -five-day-old 21 -realtor.com 21 -home-buyers 21 -ehrisman-mickle 21 -satoru 21 -crocodilians 21 -tsuneoka 21 -bluntness 21 -dreamtime 21 -sieves 21 -brittans 21 -temerko 21 -trabelsi 21 -hardeep 21 -riseborough 21 -stroebele 21 -liberalizing 20 -personalizing 20 -eynesbury 20 -busfield 20 -29-28 20 -krem 20 -allin 20 -beckmann 20 -sayaka 20 -59p 20 -karane 20 -exacto 20 -vargic 20 -stensrud 20 -sandon 20 -40-years-old 20 -trivializing 20 -pitard 20 -tantillo 20 -cymothoa 20 -tapsfield 20 -paektu 20 -hamrdla 20 -electrocuting 20 -24in 20 -chinese-style 20 -unum 20 -dronett 20 -seventy-six 20 -witz 20 -westtown 20 -daveyton 20 -hempel 20 -zepeda 20 -forefather 20 -water-cooler 20 -circulator 20 -fatcat 20 -colletti 20 -jinjiang 20 -sunreef 20 -48mph 20 -street-style 20 -tengesdal 20 -gantries 20 -corriveau 20 -wasila 20 -cubestormer 20 -rail-thin 20 -cellini 20 -al-lahem 20 -lichtman 20 -marris 20 -aphibarnrat 20 -najee 20 -credulity 20 -103-mile 20 -noncommunicable 20 -eck 20 -tortola 20 -dms 20 -beckeles 20 -non-story 20 -off-colour 20 -nomenclature 20 -wifey 20 -croons 20 -yamaguchi-gumi 20 -wife-beater 20 -kadlec 20 -888poker 20 -bux 20 -bua 20 -houseguest 20 -pommery 20 -3.44 20 -imbruglia 20 -sarmina 20 -gascoyne 20 -q5 20 -ultra-fast 20 -drewer 20 -zaccagnino 20 -rtv6 20 -redressing 20 -parles 20 -'95 20 -emerald-cut 20 -lavishes 20 -22lb 20 -876 20 -senath 20 -solna 20 -1,012 20 -whew 20 -pinchot 20 -varnadoe 20 -zamboni 20 -ecce 20 -81f 20 -alona 20 -izzo 20 -kirschner 20 -individualist 20 -zeenat 20 -nietzsche 20 -iannelli 20 -mixology 20 -profit-driven 20 -garton 20 -maikel 20 -dressy 20 -co-treasurer 20 -noncommissioned 20 -striani 20 -llana 20 -arm-twisting 20 -off-line 20 -amplitude 20 -third-rate 20 -qalandia 20 -glancee 20 -egham 20 -newly-single 20 -diesels 20 -applebaum 20 -sammons 20 -bulchenko 20 -juliane 20 -dagler 20 -dikgacoi 20 -bogarde 20 -vilas 20 -kld 20 -,12 20 -undervalue 20 -hatchett 20 -#cancelcolbert 20 -2:11 20 -calexico 20 -impertinent 20 -chincoteague 20 -subdivided 20 -isidore 20 -zajac 20 -humblebrag 20 -re-record 20 -spieker 20 -three-class 20 -unsweetened 20 -brotherston 20 -tuitel 20 -jami 20 -brackenbury 20 -quoc 20 -salivate 20 -quoi 20 -tennen 20 -winnick 20 -bage 20 -1950-1953 20 -appreciably 20 -testarossa 20 -triple-negative 20 -fetishism 20 -hellenistic 20 -cny 20 -24th-minute 20 -phillipsburg 20 -al-shaar 20 -finkbeiner 20 -138th 20 -hartshorn 20 -nyland 20 -wind-blown 20 -adeeb 20 -outsports 20 -double-standard 20 -goebel 20 -toystory 20 -puller 20 -corseted 20 -sika 20 -zimny 20 -embley 20 -imbroglio 20 -devaughn 20 -singita 20 -slogged 20 -mokrzanowski 20 -rodenberg 20 -roche-posay 20 -wanat 20 -piringer 20 -iquitos 20 -stringers 20 -sushma 20 -pummelling 20 -pre-judge 20 -1:09 20 -intersected 20 -gibraltarian 20 -tullamore 20 -aleksandrov 20 -beckton 20 -prabang 20 -shaye 20 -morningstar 20 -pullovers 20 -edar 20 -fero 20 -pa-28 20 -wester 20 -duthie 20 -12bn 20 -receptacles 20 -ayanbadejo 20 -wasley 20 -guineans 20 -coqui 20 -851 20 -pureview 20 -headrick 20 -parvati 20 -gunns 20 -streamer 20 -geochemistry 20 -28-10 20 -child-support 20 -polito 20 -ratheram 20 -59.8 20 -uglish 20 -gbm 20 -crabzilla 20 -217,000 20 -cnac 20 -oxblood 20 -westy 20 -parcelforce 20 -homebound 20 -korosec 20 -nuclear-related 20 -pliosaur 20 -chalayan 20 -ysr 20 -reframing 20 -burges 20 -selanne 20 -procop 20 -geo-political 20 -yellow-carded 20 -anesthetized 20 
-matovu 20 -dufnering 20 -kiev-based 20 -opportunistically 20 -biota 20 -al-issa 20 -dobrodumow 20 -samora 20 -dc10 20 -millburn 20 -14-3 20 -ongar 20 -defonseca 20 -progressiva 20 -toughing 20 -vaporetto 20 -rossen 20 -rossem 20 -fiorello 20 -lavillenie 20 -esseghaier 20 -mark-paul 20 -8.0.1 20 -nakba 20 -kirtsaeng 20 -sajjan 20 -cervi 20 -greengrocers 20 -job-seeking 20 -credentialing 20 -quibbles 20 -sharyl 20 -patinack 20 -woodbine 20 -abortionist 20 -chaudhari 20 -badeh 20 -radiumone 20 -wootten 20 -gona 20 -korkmaz 20 -objectifies 20 -ossuary 20 -84th-minute 20 -cnnheroes.com 20 -mbugua 20 -deant 20 -manoeuvrability 20 -mirai 20 -lunenburg 20 -v-12 20 -saltdean 20 -atk 20 -perley 20 -marja 20 -oglesby 20 -sissonville 20 -kestrels 20 -vandalise 20 -build-a-bear 20 -ernestine 20 -emami 20 -slack-jawed 20 -late-morning 20 -leyshon 20 -shyba 20 -spiffy 20 -whist 20 -nonconforming 20 -10,100 20 -wizened 20 -t-1000 20 -kearsarge 20 -knobil 20 -lagman 20 -solvers 20 -wide-range 20 -kaiping 20 -artiste 20 -saltmarsh 20 -reevaluated 20 -taipan 20 -kacy 20 -crediton 20 -jersey-born 20 -well-hidden 20 -22:07 20 -22:04 20 -afshin 20 -chimamanda 20 -coonan 20 -28-member 20 -burdall 20 -rahmani 20 -hipmunk 20 -anjou 20 -rika 20 -cowdrey 20 -rowlatt 20 -light-blue 20 -51.8 20 -51.1 20 -839 20 -semakau 20 -mahalia 20 -shamanic 20 -eagar 20 -lodha 20 -tono 20 -spanswick 20 --90 20 -lampela 20 -nagasu 20 -al-amoudi 20 -histamines 20 -currant 20 -dennie 20 -bayles 20 -vecchia 20 -stern-faced 20 -hannington 20 -moland 20 -60lb 20 -breakr 20 -ghalioun 20 -lisandro 20 -sundowner 20 -lightning-sparked 20 -somyot 20 -natalegawa 20 -media-driven 20 -sabino 20 -simonetti 20 -lyrically 20 -gleb 20 -johari 20 -electrocutions 20 -hawaii-bound 20 -xamax 20 -40.1 20 -tulku 20 -anglo-dutch 20 -saarinen 20 -stormfront 20 -mehrotra 20 -universitario 20 -mcmann 20 -mollycoddled 20 -ueno 20 -liekens 20 -frightfully 20 -mightiest 20 -five-over 20 -cuthell 20 -fitsat-1 20 -harryman 20 -faus 20 -selectmen 20 -rocklin 20 -toprak 20 -gbce 20 -arguido 20 -icpooch 20 -saoudi 20 -norepinephrine 20 -moviemakers 20 -girkin 20 -sensatori 20 -shron 20 -harpreet 20 -recapitalization 20 -purrington 20 -colarado 20 -gangbusters 20 -saltiest 20 -vilela 20 -bonaire 20 -ozarowski 20 -non-eurozone 20 -womad 20 -yaffe 20 -carlotto 20 -attilio 20 -1,395 20 -dehradun 20 -schild 20 -tree-climbing 20 -coate 20 -coati 20 -mazie 20 -wust 20 -puttnam 20 -11-10 20 -noise-cancelling 20 -86th-minute 20 -income-based 20 -maronite 20 -animal-based 20 -ostend 20 -payling 20 -pjd 20 -manko 20 -retronaut 20 -lipatov 20 -00:16 20 -maratheftis 20 -7.2-magnitude 20 -humanlike 20 -ef-1 20 -reoccur 20 -sokoloff 20 -collodi 20 -supermoons 20 -herzliya 20 -biltong 20 -anglicised 20 -pekin 20 -waimea 20 -asunder 20 -tapson 20 -pinto-duschinsky 20 -coppeard 20 -bootstraps 20 -krasic 20 -rassier 20 -heathen 20 -liuzzi 20 -cioaba 20 -packbot 20 -stanway 20 -zoosk 20 -bidaki 20 -ballentine 20 -tamper-proof 20 -watercourses 20 -hausch 20 -war-related 20 -charlo 20 -500lbs 20 -boxell 20 -colorite 20 -khirbet 20 -lebrigand 20 -bolding 20 -lightens 20 -visco 20 -oguchi 20 -reek 20 -gracas 20 -insignias 20 -ndileka 20 -delmas 20 -grabbers 20 -pyron 20 -briles 20 -officiates 20 -bakayoko 20 -thousandth 20 -panay 20 -gruener 20 -de-escalating 20 -al-amiri 20 -eight-division 20 -koulibaly 20 -chaar 20 -nierob 20 -p85d 20 -melosh 20 -bamforth 20 -diatoms 20 -colonna 20 -brewin 20 -ascendency 20 -nanotube 20 -baranauskas 20 -singman 20 -emden 20 -king-tv 20 -middle-order 20 
-floreen 20 -couchsurfing 20 -113million 20 -rapid-reaction 20 -bundler 20 -priming 20 -romanticize 20 -alexandrou 20 -evangelization 20 -domine 20 -2008-2012 20 -minutely 20 -pindar 20 -matting 20 -dissolute 20 -foles 20 -1,000-foot 20 -60km/h 20 -unfurls 20 -mmo 20 -torti 20 -torte 20 -confidence-boosting 20 -eller 20 -heffner 20 -pigeonhole 20 -glasheen 20 -cutoffs 20 -kooyong 20 -jansson 20 -uncompleted 20 -besir 20 -over-worked 20 -piao 20 -elmendorf-richardson 20 -frydman 20 -dagless 20 -harpootlian 20 -offae 20 -morua 20 -adolphus 20 -561 20 -sacramental 20 -manik 20 -lip-sync 20 -fiddes 20 -arnaldo 20 -warburg 20 -komba 20 -flashier 20 -yoni 20 -digitise 20 -acaster 20 -oliveros 20 -filbert 20 -matuz 20 -lorinda 20 -quillin 20 -bringer 20 -swollocks 20 -foretell 20 -wojtecki 20 -17-18 20 -shatov 20 -nx 20 -pigeon-holed 20 -bushier 20 -downers 20 -differentiator 20 -athletico 20 -shahadah 20 -lansana 20 -winterthur 20 -bulut 20 -character-building 20 -'19 20 -roadrunner 20 -banbridge 20 -carrico 20 -kavita 20 -blessington 20 -tauaifaga 20 -11.59 20 -fleecy 20 -caze 20 -mtb 20 -domenyk 20 -coeducational 20 -scratchings 20 -vento 20 -pre-revolutionary 20 -backed-up 20 -wernbloom 20 -rubicam 20 -nadja 20 -blakesley 20 -890,000 20 -midwesterners 20 -dupuy 20 -lana-mai 20 -faber-castell 20 -moxon 20 -anticoagulant 20 -moharam 20 -swaleside 20 -bizzell 20 -sja 20 -mid-hudson 20 -foundling 20 -horovitz 20 -virbitsky 20 -7/3 20 -zingaro 20 -ostentatiously 20 -weatherproof 20 -lenton 20 -hunte 20 -strickler 20 -ullrich 20 -irina-camelia 20 -sreap 20 -pre-positioned 20 -makovecz 20 -.338 20 -borchardt 20 -7.36 20 -shrink-wrapped 20 -kolko 20 -inuits 20 -fight-or-flight 20 -disorientating 20 -delpani 20 -etive 20 -thiam 20 -single-car 20 -bridgeforth 20 -botica 20 -speyer 20 -alstead 20 -young-looking 20 -corke 20 -holdren 20 -saladin 20 -childlessness 20 -keay 20 -4.17 20 -hassam 20 -3000m 20 -p.d. 
20 -flowerpot 20 -hingle 20 -gayatri 20 -irlam 20 -abbington 20 -mega-city 20 -boilerplate 20 -inlcuding 20 -winsome 20 -undiano 20 -rainshader 20 -kozlovska 20 -eel-like 20 -21:34 20 -sportscasters 20 -17/18 20 -marvellously 20 -domesticity 20 -emr 20 -bucchere 20 -bourret 20 -zdravko 20 -hegemonic 20 -merckx 20 -half-a-second 20 -torrijos 20 -6-year 20 -xanthohumol 20 -on-coming 20 -campbeltown 20 -mahaney 20 -gourds 20 -band-mates 20 -23:34 20 -700-page 20 -endoscopes 20 -fernley 20 -akgul 20 -1,432 20 -inborn 20 -cobar 20 -chaman 20 -rayan 20 -invisibra 20 -gisella 20 -al-jedda 20 -strathfield 20 -murton 20 -traipsed 20 -etwall 20 -condou 20 -dunhuang 20 -dewdney 20 -jurre 20 -4-12 20 -wacht 20 -30-meter 20 -killarney 20 -inculcated 20 -clumpy 20 -bitching 20 -peals 20 -kisspeptin 20 -spiegelman 20 -bundesen 20 -endos 20 -isthmian 20 -kingda 20 -wobbe 20 -three-floor 20 -cheapen 20 -interlocutors 20 -foxworth 20 -delaunay 20 -gob 20 -21-hour 20 -lab-made 20 -jonti 20 -schieber 20 -holub 20 -dog-lovers 20 -creswell 20 -sumar 20 -vagabond 20 -enlow 20 -camoranesi 20 -osteopathic 20 -daddy-daughter 20 -verbitsky 20 -hunkering 20 -luber 20 -haz-mat 20 -licia 20 -chemawa 20 -laikipia 20 -caracal 20 -chilwell 20 -taymor 20 -gores 20 -blink-182 20 -430-page 20 -merrifield 20 -thylmann 20 -coltman 20 -shinier 20 -gatward 20 -kombarov 20 -scorelines 20 -anti-homophobia 20 -orthotics 20 -ticker-tape 20 -pearlescent 20 -jencsik 20 -interjects 20 -macker 20 -ebola-ravaged 20 -cardle 20 -krakowski 20 -purton 20 -recapitalise 20 -chauffeuring 20 -boogaloo 20 -dalio 20 -tolworth 20 -jamrud 20 -seabright 20 -majorette 20 -tiriac 20 -trashcan 20 -y.e. 20 -gameday 20 -out-muscled 20 -roff 20 -barakoti 20 -garen 20 -langport 20 -johanns 20 -cybermen 20 -500-acre 20 -safra 20 -kayalar 20 -resound 20 -rust-colored 20 -elettra 20 -qualitatively 20 -lahoz 20 -23:53 20 -23:56 20 -rapiscan 20 -lynnwood 20 -12.07 20 -hydroptere 20 -bangash 20 -nine-under-par 20 -uttlesford 20 -32oz 20 -sub-station 20 -lasch 20 -code-breakers 20 -sosua 20 -parrying 20 -far-out 20 -habersham 20 -el-adly 20 -zalmai 20 -stroke-like 20 -blowup 20 -plessinger 20 -63billion 20 -lee-hai 20 -majak 20 -lani 20 -leonardi 20 -garrisons 20 -iressa 20 -childersburg 20 -marabou 20 -kapp 20 -pledger 20 -baccellini 20 -strum 20 -sobiech 20 -thorium 20 -muting 20 -chatto 20 -hamming 20 -remediate 20 -runup 20 -cherlin 20 -tincknell 20 -gaetjens 20 -opines 20 -halesworth 20 -unmentioned 20 -mcmicken 20 -dumbed-down 20 -1746 20 -1740 20 -78th-minute 20 -broberg 20 -cheapened 20 -marginalisation 20 -newbuy 20 -trenary 20 -offcuts 20 -jumping-off 20 -greenberger 20 -0.38 20 -d-colorado 20 -gov.uk 20 -dishonoured 20 -ryko 20 -surmaj 20 -kurpiel 20 -entendre 20 -sonam 20 -watchmakers 20 -skyrockets 20 -diet-related 20 -kine 20 -birches 20 -funereal 20 -toolbar 20 -vettriano 20 -krotz 20 -mc2 20 -prison-issue 20 -decanters 20 -graczyk 20 -35kg 20 -busey 20 -nbome 20 -enroute 20 -ilia 20 -millea 20 -uk-registered 20 -surer 20 -quraishi 20 -balthazard 20 -fach 20 -z-1 20 -constabularies 20 -hot-spots 20 -43mph 20 -mirabeau 20 -airside 20 -otherness 20 -calcioli 20 -walfish 20 -bci 20 -givhan 20 -juster 20 -17-years 20 -frodsham 20 -coriolanus 20 -mod-cons 20 -shiitake 20 -vioxx 20 -suboxone 20 -lycra-clad 20 -decareaux 20 -musavir 20 -all-wheel-drive 20 -shetlands 20 -mupuya 20 -freshening 20 -amed 20 -valdai 20 -barwuah 20 -fast-casual 20 -pre-telecast 20 -jevtana 20 -chifley 20 -nordberg 20 -mckesson 20 -goalref 20 -gratz 20 -piepmeier 20 -townhome 20 
-lochgilphead 20 -genzyme 20 -sq.ft 20 -manhandle 20 -kana 20 -neuropathologist 20 -street-legal 20 -unicycling 20 -khairullah 20 -naoshima 20 -bronzefield 20 -19-day 20 -liège 20 -lavazza 20 -close-quarters 20 -gotland 20 -bather 20 -lundbeck 20 -sibutramine 20 -moharrak 20 -moroccanoil 20 -iapetus 20 -newbould 20 -d'oeuvres 20 -beachwood 20 -then-ceo 20 -birdseye 20 -1495 20 -actuator 20 -hameline 20 -callback 20 -shia-dominated 20 -dunya 20 -endoscopies 20 -bousada 20 -brevoort 20 -jurek 20 -jeskey 20 -amitriptyline 20 -300mg 20 -resort-style 20 -hayao 20 -forewarning 20 -nesmith 20 -snoddy 20 -pony-tailed 20 -plasterboard 20 -blood-pressure 20 -colbeck 20 -unready 20 -futurists 20 -kaleo 20 -45-54 20 -12/13/14 20 -oversleeping 20 -yoshi 20 -mcingvale 20 -palvin 20 -cordelli 20 -eddowes 20 -wigston 20 -hawaii-based 20 -mauser 20 -ergonomically 20 -dayo 20 -58.2 20 -zigzags 20 -hardt 20 -uhre 20 -qualls 20 -starships 20 -87mph 20 -mcquain 20 -pure-bred 20 -carbott 20 -sweet-smelling 20 -fyne 20 -40-a-day 20 -thorning 20 -senn 20 -shaggy-haired 20 -dysplasias 20 -stebbins 20 -nekrassov 20 -@rioferdy5 20 -colonizing 20 -cumnock 20 -pasic 20 -six-foot-tall 20 -changeling 20 -boy-band 20 -bem 20 -schleimer 20 -creedmoor 20 -under-representation 20 -corrin 20 -noergaard 20 -arica 20 -sb1062 20 -cold-called 20 -stridently 20 -multitaskers 20 -nurnberg 20 -khroma 20 -parasiuk 20 -modulate 20 -milania 20 -dscovr 20 -rivelino 20 -slott 20 -wadowice 20 -capon 20 -licata 20 -kedikoglou 20 -lbgt 20 -exarchopoulos 20 -keble 20 -lobotomy 20 -overpowers 20 -camera-ready 20 -tork 20 -bisected 20 -dailyburn 20 -bustles 20 -el-arish 20 -striven 20 -pessimist 20 -city-area 20 -olugbile 20 -reb 20 -vidar 20 -falcus 20 -legolas 20 -tahini 20 -135th 20 -palomino 20 -fallows 20 -rask 20 -covic 20 -toivonen 20 -manitowoc 20 -alharbi 20 -chasma 20 -ntale 20 -hetrick 20 -o'rear 20 -sing-alongs 20 -jevans 20 -sixth-ranked 20 -maunsel 20 -drogheda 20 -laveau 20 -printworks 20 -renda 20 -leiua 20 -six-goal 20 -pagasa 20 -dallasnews.com 20 -indeya 20 -squeezy 20 -baduel 20 -tzus 20 -craigavon 20 -killah 20 -krolikowski 20 -cbs13 20 -yadda 20 -cocteau 20 -sensationalising 20 -drane 20 -favazzo 20 -resubmitted 20 -conflate 20 -bekoji 20 -fenby 20 -wondergoal 20 -lassen 20 -robierb 20 -parathyroid 20 -bratic 20 -arman 20 -permaculture 20 -pgatour.com 20 -weehler-smith 20 -haygood 20 -marginalise 20 -munter 20 -benway 20 -wenling 20 -diacre 20 -couturiers 20 -allwright 20 -corelli 20 -friburgo 20 -oversold 20 -disembowelled 20 -illsley 20 -langran 20 -chymorvah 20 -ngetich 20 -preemies 20 -jahn 20 -ngog 20 -breslau 20 -non-attendance 20 -amarna 20 -nikhil 20 -bollig 20 -macnamara 20 -bulling 20 -anti-romney 20 -taschen 20 -warren-lean 20 -cathro 20 -conniff 20 -understrength 20 -drainpipes 20 -skyrunner 20 -ido 20 -arnon 20 -coimbra 20 -bridgens 20 -lineberry 20 -jamie-lee 20 -32in 20 -schauer 20 -merlyn 20 -mutha 20 -budzinski 20 -lidsky 20 -boughs 20 -prolapsed 20 -seceding 20 -rosebery 20 -singer-songwriters 20 -underling 20 -houry 20 -raimondo 20 -stefansson 20 -snuffing 20 -rediske 20 -mendham 20 -3.11 20 -kneading 20 -rieger 20 -smithtown 20 -mannatech 20 -waveland 20 -sanjiv 20 -adhesion 20 -cooly 20 -midi-length 20 -aiba 20 -hirsh 20 -leeper 20 -dallek 20 -counter-demonstrators 20 -side-impact 20 -zyana 20 -nedovyesov 20 -entomological 20 -sanches 20 -xxxxxx 20 -chopstick 20 -midrange 20 -pilbeam 20 -optogenetics 20 -watchet 20 -togs 20 -anti-human 20 -norfolk-based 20 -recalibrating 20 -grammy-winner 20 -lehner 
20 -ebc-46 20 -sawalha 20 -stavropol 20 -nakedly 20 -tuckwell 20 -geoscientists 20 -lune 20 -2.82 20 -champs-Élysées 20 -multi-generational 20 -anti-gravity 20 -aynak 20 -graney 20 -villawood 20 -271,000 20 -estimada 20 -goheen 20 -pro-ukraine 20 -hollers 20 -rubbishes 20 -grandfather-of-five 20 -mujahadeen 20 -al-asad 20 -gorulenko 20 -alita 20 -mekdad 20 -makoun 20 -adp 20 -adh 20 -shortman 20 -garlanded 20 -cropton 20 -ruah 20 -unambitious 20 -tickell 20 -megaupload.com 20 -attentiveness 20 -berthold 20 -272,000 20 -lapine 20 -beddoes 20 -pejkovic 20 -playland 20 -rietze 20 -razvan 20 -lemke 20 -mullinger 20 -scolds 20 -eddies 20 -nta 20 -rationalized 20 -cci 20 -ccf 20 -raviv 20 -dogtooth 20 -talbott 20 -quirkiness 20 -defeo 20 -lebowitz 20 -salterton 20 -exploratorium 20 -overdiagnosed 20 -tebay 20 -hyoid 20 -rya 20 -rukin 20 -thuringia 20 -tahlia 20 -sint 20 -menkaure 20 -lomachenko 20 -malarial 20 -amcu 20 -18lb 20 -dillwyn 20 -london-centric 20 -french-american 20 -marxist-leninist 20 -disengaging 20 -jere 20 -2,722 20 -lop-sided 20 -wholeness 20 -office-based 20 -yunaska 20 -tm31 20 -haemolytic 20 -goulet 20 -salima 20 -richess 20 -rebhorn 20 -tna 20 -22:51 20 -22:52 20 -22:54 20 -hartigan 20 -3:22 20 -pedrinhas 20 -antioquia 20 -callister 20 -cawthon 20 -infact 20 -rawl 20 -preservationist 20 -dratch 20 -ditchling 20 -iniquitous 20 -adame 20 -darsh 20 -suwyn 20 -adewunmi 20 -visualizations 20 -lennar 20 -bethell 20 -lepley 20 -inflation-adjusted 20 -1,002 20 -potluck 20 -56.1 20 -benavides 20 -ingests 20 -paperfold 20 -brothers-in-arms 20 -treyarch 20 -roselli 20 -tothe 20 -outwith 20 -wonjah 20 -nesbø 20 -dismaying 20 -askap 20 -xojane.com 20 -sunfire 20 -flocke 20 -1,999 20 -herenton 20 -stonemasons 20 -21:27 20 -binney 20 -aras 20 -webo 20 -washcloth 20 -prikhodko 20 -chateau-style 20 -gallard 20 -queen-sized 20 -ninety-one 20 -761 20 -chandni 20 -bioengineered 20 -boulanger 20 -jodhpurs 20 -pontardawe 20 -rustiness 20 -rafalca 20 -entrancing 20 -illegitimacy 20 -hamawi 20 -angiogram 20 -2:26 20 -2:28 20 -kruezi 20 -holsten 20 -verrazano-narrows 20 -thomases 20 -dearman 20 -louis-area 20 -6.14 20 -okorie 20 -petrel 20 -coal-burning 20 -deuces 20 -pashley 20 -pilecki 20 -expressways 20 -goussis 20 -harnaam 20 -modesta 20 -m.c. 
20 -solicitor-general 20 -:d 20 -charcuterie 20 -kernow 20 -phung 20 -axons 20 -alejandrina 20 -synesthesia 20 -ruri 20 -bougainvillea 20 -oulton 20 -self-explanatory 20 -iorio 20 -heatley 20 -gender-identity 20 -rivne 20 -replanting 20 -zumar 20 -muenster 20 -oxman 20 -ensour 20 -weeny 20 -asmussen 20 -hydroelectricity 20 -hydrographic 20 -lescowitch 20 -692 20 -grade-point 20 -heumann 20 -ctbuh 20 -cliff-edge 20 -tricolor 20 -gnus 20 -atmospherics 20 -jaleesa 20 -36-13 20 -superdelegate 20 -barnave 20 -valluzzo 20 -chelwood 20 -carinthia 20 -samet 20 -yearlings 20 -tastemakers 20 -raut 20 -frigaard 20 -meze 20 -asmaa 20 -kizzy 20 -andry 20 -p4 20 -waist-length 20 -zero-g 20 -01:18 20 -handpick 20 -rishell 20 -scintilla 20 -sbaraglia 20 -galvanic 20 -edythe 20 -karampour 20 -makhlouf 20 -gunda 20 -softball-sized 20 -rajeev 20 -janek 20 -26lbs 20 -globular 20 -currants 20 -eye-rolling 20 -qayyum 20 -lacs 20 -backes 20 -low-emission 20 -eeas 20 -full-screen 20 -welfare-to-work 20 -business-related 20 -olusanya 20 -single-cell 20 -sumerian 20 -earth-moon 20 -fellman 20 -ossificans 20 -mvula 20 -westgarth 20 -huot 20 -o'gara 20 -winglet 20 -dandridge 20 -aken 20 -nof 20 -union-led 20 -0400 20 -gallate 20 -damson 20 -ambrosia 20 -louisianans 20 -moralising 20 -fjc 20 -lsg 20 -frankum 20 -kmt 20 -malaak 20 -gasim 20 -darr 20 -geoscientist 20 -mikki 20 -paramours 20 -berney 20 -schoolroom 20 -szad 20 -imessages 20 -nwankwo 20 -motorhead 20 -23-19 20 -sympathizing 20 -riboflavin 20 -rassoul 20 -gambill 20 -letten 20 -polypropylene 20 -caligiuri 20 -lettered 20 -temuco 20 -speed-the-plow 20 -magalie 20 -wahhabism 20 -banas 20 -reality-show 20 -blinging 20 -confiscations 20 -gooseberries 20 -fuel-cell 20 -dissed 20 -levison 20 -erroll 20 -whitelegg 20 -googlers 20 -ripened 20 -coddling 20 -sadoway 20 -adherent 20 -awa-guaja 20 -iran-backed 20 -hc 20 -ebolavirus 20 -rossano 20 -hughenden 20 -victimising 20 -anisimov 20 -switch-off 20 -nivose 20 -pernet 20 -2042 20 -huila 20 -leahey 20 -iglesia 20 -shatila 20 -mother-to-child 20 -salwen 20 -pmo 20 -fobt 20 -ex-colleagues 20 -ersatz 20 -ef2 20 -cholesterol-busting 20 -68.3 20 -lewman 20 -stuck-up 20 -ventilating 20 -fast-approaching 20 -stoneware 20 -sumitomo 20 -crc 20 -welty 20 -22:12 20 -toucans 20 -schriock 20 -abovitz 20 -zarei 20 -flagstone 20 -american-educated 20 -ridda 20 -xy 20 -molaro 20 -minustah 20 -hypoglycemia 20 -lippmann 20 -takeo 20 -bloodlust 20 -halimah 20 -kehinde 20 -tecate 20 -disorientate 20 -redbull 20 -1,049 20 -rotavirus 20 -sakio 20 -pyramidal 20 -doff 20 -six-shooter 20 -kibbe 20 -neistat 20 -sensationalizing 20 -7g 20 -125m 20 -al-asaad 20 -enduringly 20 -equus 20 -stamper 20 -koomen 20 -sab 20 -somethings 20 -marmie 20 -navajos 20 -nobbs 20 -betide 20 -second-tallest 20 -ringgold 20 -neeley 20 -metallurgical 20 -chaddesden 20 -self-admitted 20 -fss 20 -carped 20 -distinctiveness 20 -cooties 20 -fdj 20 -knope 20 -merit-based 20 -prize-money 20 -bolly 20 -udd 20 -torreon 20 -mencer 20 -reorient 20 -hansell 20 -knock-offs 20 -sextantio 20 -silverlands 20 -hoodie-wearing 20 -992 20 -wilmette 20 -monoclonal 20 -handedly 20 -corralling 20 -berna 20 -sharking 20 -magunda 20 -valasek 20 -melida 20 -gillison 20 -saini 20 -japes 20 -hassard 20 -m.o. 
20 -risperdal 20 -anticipatory 20 -ziuzina 20 -gencic 20 -tongeren 20 -distelmans 20 -kenichi 20 -sub-optimal 20 -valrico 20 -162-game 20 -aflutter 20 -claro 20 -harajuku 20 -pontiffs 20 -mcwethy 20 -title-chasing 20 -out-of-the-box 20 -adichie 20 -adjaye 20 -dc-based 20 -braeburn 20 -home-state 20 -7700 20 -water-damaged 20 -steamrolled 20 -carstairs 20 -hanlan 20 -wessexes 20 -mainstreaming 20 -highworth 20 -burqa-clad 20 -sub-let 20 -csgt 20 -dickov 20 -konan 20 -lutts 20 -schibbye 20 -unhook 20 -goutiere 20 -1,245 20 -overhyped 20 -delamere 20 -catchiest 20 -broadgreen 20 -piney 20 -pined 20 -roll-call 20 -visalia 20 -casalesi 20 -postwoman 20 -stop-smoking 20 -thijeel 20 -re-sit 20 -kelchner 20 -mamic 20 -bikila 20 -soutar 20 -pinecrest 20 -gartenberg 20 -swaddle 20 -malaviya 20 -tarbotton 20 -banishes 20 -radfords 20 -socal 20 -sado-masochistic 20 -kandil 20 -janikiewicz 20 -bessam 20 -state-mandated 20 -boyling 20 -winebrenner 20 -black-tailed 20 -ipurua 20 -olave 20 -klas-tv 20 -schlenker 20 -thin-film 20 -elantra 20 -pinkins 20 -201,000 20 -olinger 20 -nway 20 -florescent 20 -co-investigator 20 -haskamp 20 -stuffers 20 -de-registered 20 -matmo 20 -sidon 20 -lucena 20 -metalworking 20 -249.99 20 -giedrojc 20 -greenkeeper 20 -hungrily 20 -re-sentenced 20 -bookends 20 -domeij 20 -charpentier 20 -giesen 20 -linate 20 -misano 20 -dougal 20 -lavey 20 -southern-hemisphere 20 -buckaroo 20 -chann 20 -61.7 20 -rosamond 20 -scratch-resistant 20 -ryse 20 -tukker 20 -vermeille 20 -marmion 20 -dehayes 20 -reactivation 20 -chohan 20 -ufw 20 -2:46 20 -feiglin 20 -hankey 20 -110billion 20 -g/km 20 -racquetball 20 -pettine 20 -hobnobs 20 -poyck 20 -compositional 20 -crawcour 20 -6d 20 -satis 20 -stokke 20 -27.99 20 -ex-student 20 -star-filled 20 -aclj 20 -kmsp-tv 20 -zech 20 -gwynnie 20 -cardiff-born 20 -badruddin 20 -necrolysis 20 -breslow 20 -double-whammy 20 -terrero 20 -birding 20 -bradleys 20 -orris 20 -telex 20 -kunene 20 -three-nation 20 -ackers 20 -commando-style 20 -hectoring 20 -zokora 20 -messageboard 20 -cross-hairs 20 -pillai 20 -bardet 20 -loesch 20 -downend 20 -nasrin 20 -sexpo 20 -nctl 20 -cersosimo 20 -6.54 20 -greenhouse-gas 20 -nose-first 20 -ifergan 20 -garnham 20 -sinnott 20 -occ 20 -krlich 20 -flame-thrower 20 -ayyub 20 -msgr 20 -10mg 20 -telectroscope 20 -zeballos 20 -00:07 20 -klebahn 20 -methylhexaneamine 20 -risso 20 -d-rhode 20 -serially 20 -volkan 20 -smeele 20 -third-story 20 -celal 20 -recycler 20 -coq10 20 -eldergill 20 -56-year 20 -avec 20 -23:02 20 -expander 20 -rheinberg 20 -sulpovar 20 -ragdoll 20 -fecafoot 20 -ex-foreign 20 -nalgae 20 -strada 20 -back-heeled 20 -blinkers 20 -utøya 20 -satirize 20 -aila 20 -bilodeau 20 -cheesesteak 20 -hermine 20 -cravats 20 -worming 20 -mini-golf 20 -♥ 20 -hvizdo 20 -pava 20 -samos 20 -bottle-feeding 20 -andrex 20 -mineola 20 -yarwood 20 -deven 20 -beaumaris 20 -lived-in 20 -hodeida 20 -wasn 20 -liles 20 -ipt 20 -pre-diabetes 20 -anti-clinton 20 -crittenden 20 -humping 20 -77million 20 -nacogdoches 20 -sahota 20 -kxtv 20 -lorains 20 -psoriatic 20 -horrifies 20 -gilgo 20 -throughball 20 -f8 20 -disaster-hit 20 -nein 20 -stenger 20 -aneta 20 -bloodstreams 20 -kgi 20 -tsarina 20 -grable 20 -fulwood 20 -tokuda 20 -sidiropoulos 20 -soltan 20 -jerrard 20 -n'djamena 20 -monkee 20 -long-lens 20 -re-invented 20 -mancino 20 -baquet 20 -eyeliners 20 -atherstone 20 -knelly 20 -fungicide 20 -near-drowning 20 -55.7 20 -nelis 20 -geo-tagged 20 -blood-clotting 20 -volcanologists 20 -back-street 20 -metzgar 20 -minkow 20 -new-fangled 20 
-intensive-care 20 -2.34 20 -2.31 20 -hobs 20 -cochin 20 -gudkov 20 -aryana 20 -ulrik 20 -groupthink 20 -cie 20 -ex-director 20 -kobach 20 -bangoura 20 -dhc 20 -wagasky 20 -ramachandran 20 -pre-cooked 20 -fok 20 -u.s.-canada 20 -2000-2001 20 -middle-of-the-night 20 -hambledon 20 -soozie 20 -zieminski 20 -hypermarket 20 -heatherwood 20 -teena 20 -rajic 20 -ashard 20 -odd-job 20 -sabin 20 -lutman 20 -dwaine 20 -haggled 20 -caihou 20 -bardy 20 -00:25 20 -00:27 20 -penitentiaries 20 -phillipp 20 -44.7 20 -44.3 20 -15,600 20 -koner 20 -verster 20 -blue-coloured 20 -cowra 20 -provident 20 -scattershot 20 -ilich 20 -fillinger 20 -heatly 20 -mzaik 20 -tornadic 20 -triggerman 20 -popplewell 20 -tts 20 -testosterone-fuelled 20 -mahiga 20 -harehills 20 -afterschool 20 -polemic 20 -ghesquière 20 -murata 20 -carbondale 20 -zin 20 -17-20 20 -anti-histamines 20 -epochs 20 -tereshkova 20 -d-iowa 20 -bergmann 20 -1,420 20 -scrooges 20 -prowting 20 -jackhammers 20 -18-35 20 -ex-aide 20 -walsingham 20 -unserious 20 -b/c 20 -loganville 20 -5,990 20 -kunkun 20 -herzl 20 -gherkins 20 -ajaib 20 -recommitted 20 -taylor-wood 20 -rohid 20 -by-standers 20 -stojka 20 -emeril 20 -caralyn 20 -alijah 20 -reestablishing 20 -ejects 20 -gnassingbe 20 -scopolamine 20 -jewish-american 20 -phoenix-area 20 -hoven 20 -3.82 20 -moderna 20 -littlehey 20 -eighty-four 20 -mustafina 20 -saratov 20 -centre-stage 20 -collodion 20 -tiggeman 20 -2,995 20 -biome 20 -hand-woven 20 -ill-founded 20 -septimus 20 -impressionists 20 -mcneice 20 -damman 20 -grass-covered 20 -under-funded 20 -kreighbaum 20 -russell-silver 20 -atr-72 20 -vectis 20 -kel 20 -spotlighting 20 -trevyn 20 -1778 20 -diwan 20 -norooz 20 -wegrow 20 -arcuri 20 -ayumi 20 -34a 20 -frattaroli 20 -umea 20 -reflectance 20 -motion-activated 20 -anti-zionist 20 -64.3 20 -piezoelectric 20 -whincup 20 -2.18 20 -gloopy 20 -160lb 20 -berggruen 20 -phoneline 20 -rieth 20 -samsonite 20 -mccain-feingold 20 -galkayo 20 -moyers 20 -escritt 20 -blee 20 -vikernes 20 -27-minute 20 -compartmentalize 20 -marwat 20 -schippers 20 -vinicius 20 -wasikowska 20 -ogara 20 -derreck 20 -menage 20 -mangle 20 -wide-screen 20 -misleads 20 -roel 20 -sotos 20 -abc1 20 -wawa 20 -buglers 20 -rehabbing 20 -83rd-minute 20 -1617 20 -1611 20 -00:45 20 -take-no-prisoners 20 -nbc5 20 -nudism 20 -golland 20 -coales 20 -2,850 20 -prita 20 -smocked 20 -23:44 20 -23:40 20 -finmeccanica 20 -aichi 20 -acaba 20 -isaias 20 -racq 20 -yuspahruddin 20 -zendesk 20 -winching 20 -rathore 20 -18-19 20 -fortress-like 20 -norml 20 -hedberg 20 -skuba 20 -chowed 20 -information-gathering 20 -arsenal.com 20 -luthor 20 -chaskel 20 -@burgerking 20 -rncm 20 -leaching 20 -touré 20 -cross-bred 20 -szepielow 20 -three-state 20 -gletow 20 -iqraa 20 -all-metal 20 -pseudoscience 20 -featherville 20 -msm 20 -bielecki 20 -remedying 20 -kyotango 20 -kostya 20 -badness 20 -angilau 20 -27-hour 20 -sseruma 20 -sub-glacial 20 -jayah 20 -altaeros 20 -twardzik 20 -pembury 20 -osteria 20 -conservative-liberal 20 -austral 20 -mucosal 20 -thumbnails 20 -redbubble 20 -awd 20 -wigg 20 -off-pitch 20 -8255 20 -philo 20 -1759 20 -downy 20 -bewley 20 -niketown 20 -slide.melbourne 20 -holland-kaye 20 -justinian 20 -sinopec 20 -spedan 20 -giulini 20 -reversion 20 -enjoined 20 -redemptive 20 -tallow 20 -shrout 20 -rothe 20 -16,000-square-foot 20 -lecun 20 -yasui 20 -bizarre-looking 20 -mccarten 20 -1,695 20 -al-haramain 20 -hie 20 -besmirched 20 -4.22 20 -4.26 20 -chada 20 -raqib 20 -lastest 20 -makary 20 -redbank 20 -learnings 20 -abdelbeset 20 -288,000 20 
-marwood 20 -riverwalk 20 -glendenning 20 -draughtsman 20 -pubwatch 20 -efficacious 20 -irwig 20 -smartwitness 20 -trecarichi 20 -bolwell 20 -brick-built 20 -#gbbo 20 -ramer 20 -canstruction 20 -946 20 -fabi 20 -free-of-charge 20 -tsukuba 20 -agnel 20 -ennio 20 -dutra 20 -6-9 20 -xk120 20 -remarriage 20 -reys 20 -30,500 20 -emirs 20 -fastcompany.com 20 -guglielmino 20 -glamis 20 -finger-wagging 20 -huntress 20 -integris 20 -misuses 20 -afarensis 20 -5-week-old 20 -castells 20 -hydrofit 20 -quba 20 -castelli 20 -quacking 20 -braude 20 -inri 20 -loganair 20 -dx 20 -prop. 20 -obsolescence 20 -laudatory 20 -seoane 20 -sobyanin 20 -noël 20 -lawfulness 20 -borucki 20 -seethe 20 -filigree 20 -o'kelly 20 -ceritha 20 -sheheen 20 -79.8 20 -schlinger 20 -mun2 20 -corgan 20 --26 20 -prudes 20 -shaich 20 -allaying 20 -personel 20 -650m 20 -harrods.com 20 -six-weeks-old 20 -pineoblastoma 20 -battle-ready 20 -ammanford 20 -il-18 20 -filicia 20 -betina 20 -adventureland 20 -towle 20 -yountville 20 -bidston 20 -devries 20 -cottrill 20 -defence-splitting 20 -garavani 20 -mohapatra 20 -halprin 20 -bove 20 -pepco 20 -ardnamurchan 20 -trevorrow 20 -tunas 20 -53.6 20 -saransk 20 -mckendrick 20 -inkblot 20 -saloufest 20 -0.26 20 -zervakos 20 -garam 20 -ransomware 20 -al-assal 20 -korbely 20 -swift-water 20 -morano 20 -400mph 20 -10-months-old 20 -olarenshaw 20 -monopolise 20 -holtz-eakin 20 -4.47 20 -somare 20 -mdp 20 -cornellier 20 -savill 20 -rudland 20 -sandbagged 20 -soffel 20 -suntrust 20 -gurneys 20 -isaksson 20 -vovkovinskiy 20 -amazigh 20 -mcelderry 20 -ninevah 20 -sloppily 20 -al-nashef 20 -solaris 20 -crumpsall 20 -tiergarten 20 -braunstone 20 -fansite 20 -gamst 20 -bourjois 20 -co-operatives 20 -quirkier 20 -two-to-one 20 -hesham 20 -leff 20 -newton-le-willows 20 -longson 20 -ignasi 20 -landcruiser 20 -pebbly 20 -pah 20 -96million 20 -oki 20 -soehardi 20 -795,000 20 -mcniff 20 -lepere 20 -bostjan 20 -chief-executive 20 -bobbled 20 -industry-funded 20 -peepholes 20 -over-indulged 20 -palese 20 -apperson 20 -vorayuth 20 -wythe 20 -berland 20 -insouciance 20 -pampanga 20 -pepper-spray 20 -anastassia 20 -bbj 20 -cangrande 20 -heathland 20 -wana 20 -100-150 20 -alumbaugh 20 -hawken 20 -jardines 20 -milken 20 -tabanou 20 -rafique 20 -c.b. 
20 -apprenticed 20 -1,480 20 -richardsons 20 -ashill 20 -honeydew 20 -amini 20 -earth-bound 20 -evatt 20 -jujitsu 20 -mcgroarty 20 -ring-leader 20 -miseries 20 -rovaniemi 20 -ventham 20 -enterocolitis 20 -cranley 20 -millenials 20 -robosimian 20 -yes-or-no 20 -woodger 20 -yatabare 20 -coxless 20 -debt-to-gdp 20 -iberdrola 20 -bartee 20 -nongovernment 20 -329,000 20 -napley 20 -air-strikes 20 -kens5 20 -grade-school 20 -sex-ed 20 -okri 20 -off-plan 20 -happn 20 -harnett 20 -dissents 20 -puja 20 -obscura 20 -burkinshaw 20 -superstate 20 -ravelo 20 -soas 20 -immunosuppressant 20 -dnata 20 -affronted 20 -7up 20 -blobfish 20 -jahfari 20 -mccafe 20 -akkari 20 -well-nourished 20 -amstrad 20 -bienvenue 20 -110lb 20 -ledward 20 -one-77 20 -140-page 20 -middel 20 -thallium 20 -internalised 20 -touchless 20 -benares 20 -105-year-old 20 -friedmann 20 -secaucus 20 -petrolheads 20 -75.6 20 -icg 20 -leapband 20 -seph 20 -glenrowan 20 -17-and-a-half 20 -coalface 20 -somersby 20 -bagarozzo 20 -dockworkers 20 -mfi 20 -timothee 20 -s8 20 -codswallop 20 -lengthwise 20 -clothesline 20 -3,000-foot 20 -far-sighted 20 -fibrodysplasia 20 -saeid 20 -animal-lovers 20 -layfield 20 -violas 20 -urinates 20 -vermicelli 20 -stonehill 20 -tongue-tie 20 -art-deco 20 -desean 20 -myfoxdfw.com 20 -asisi 20 -beaminster 20 -teaneck 20 -dawar 20 -baptismal 20 -2f 20 -schemers 20 -over-spending 20 -arrick 20 -christofi 20 -dichromate 20 -bds 20 -morland 20 -wala 20 -eighty-one 20 -dinning 20 -thaci 20 -giminez 20 -5:07 20 -klosterman 20 -late-life 20 -hiscox 20 -global-warming 20 -gertrud 20 -christof 20 -beaky 20 -bergdahls 20 -sydneysider 20 -abdominals 20 -microdermabrasion 20 -82.4 20 -4100 20 -westonzoyland 20 -pomerantz 20 -creasing 20 -ex-slave 20 -leaney 20 -navardauskas 20 -lockroy 20 -skyhawk 20 -coelux 20 -joei 20 -law-making 20 -imbedded 20 -bakrie 20 -honywood 20 -street-to-street 20 -nescafe 20 -christeson 20 -freemyer 20 -dinsey 20 -birchenough 20 -paternoster 20 -sumit 20 -lahaina 20 -paultons 20 -kelowna 20 -lyam 20 -alemseged 20 -zahovic 20 -argentinos 20 -lch 20 -boru 20 -247,000 20 -shakuntala 20 -kamaljit 20 -leite 20 -santilli 20 -true-to-life 20 -hermaphrodites 20 -skorik 20 -penury 20 -tshabalala 20 -nechells 20 -service-related 20 -after-parties 20 -mullenix 20 -bornean 20 -banchory 20 -zverotic 20 -harbingers 20 -letchford 20 -unhooked 20 -kimbler 20 -yueyue 20 -counterterrorist 20 -2.98 20 -ciutat 20 -maulvi 20 -darke 20 -ziyi 20 -15.95 20 -exel 20 -11-8 20 -day-out 20 -once-great 20 -49.3 20 -photo/the 20 -736 20 -anna-louise 20 -75kg 20 -summerhayes 20 -trailfinders 20 -knowhow 20 -garver 20 -jay-jay 20 -morrish 20 -irresistibly 20 -rosey 20 -lankston 20 -grevious 20 -can-can 20 -figueirense 20 -flumes 20 -critically-endangered 20 -goujons 20 -zaani 20 -post-date 20 -life-style 20 -typists 20 -foro 20 -agip 20 -koca 20 -terme 20 -equipments 20 -diesel-powered 20 -croyle 20 -elfie 20 -loughran 20 -short-circuiting 20 -landrover 20 -turpan 20 -klaveren 20 -lahiya 20 -eight-shot 20 -imahara 20 -edgecombe 20 -agboola 20 -kervin 20 -yuichiro 20 -neverseconds 20 -1564 20 -dossevi 20 -somerhalder 20 -inoperative 20 -grasmere 20 -get-well 20 -expropriation 20 -jayden-lee 20 -horseless 20 -blekko 20 -60,000-a-year 20 -all-america 20 -soring 20 -anti-pollution 20 -bedrest 20 -higher-priced 20 -otieno 20 -styrene 20 -pie-in-the-sky 20 -gurneyi 20 -mahassen 20 -mevlut 20 -super-volcano 20 -babakhani 20 -chemnitz 20 -loovens 20 -bootlegging 20 -ming-chi 20 -araminta 20 -chanse 20 -shoe-in 20 -heidt 20 -degeorge 20 
-thirtysomething 20 -pre-holiday 20 -gda 20 -msud 20 -75th-minute 20 -lostock 20 -malyn 20 -fta 20 -sayid 20 -reiffel 20 -deayton 20 -wjec 20 -back-stabbing 20 -vanderheyden 20 -headship 20 -jieun 20 -shaik 20 -fourth-in-line 20 -paperbacks 20 -make-overs 20 -non-resident 20 -assen 20 -asser 20 -obligingly 20 -taplin 20 -hingston 20 -watchword 20 -semi-nomadic 20 -hss 20 -hajrovic 19 -nastya 19 -recapitalize 19 -highley 19 -ayllah-beau 19 -leics 19 -orchestrates 19 -aey 19 -three-litre 19 -thinkgeek 19 -extols 19 -trant 19 -armouries 19 -izzie 19 -macapagal 19 -geidt 19 -lit-up 19 -cadigan 19 -viaducts 19 -bertuccio 19 -eklund 19 -21:33 19 -storm-hit 19 -stoplights 19 -oma 19 -koma 19 -baan 19 -nipped-in 19 -driskell 19 -keer 19 -charlottetown 19 -amare 19 -trejos 19 -mirzakhani 19 -cornices 19 -lysaght 19 -150k 19 -mcgrory 19 -ragamuffin 19 -top-drawer 19 -omand 19 -d-georgia 19 -redstate 19 -berbers 19 -2008/2009 19 -ex-saints 19 -horoscopes 19 -libelled 19 -sideswipe 19 -kindled 19 -ece 19 -rouillon 19 -dm1 19 -gover 19 -feneck 19 -aquarid 19 -wrenches 19 -stanwood 19 -quadrupling 19 -porcini 19 -finalizes 19 -piedra 19 -reto 19 -portgual 19 -taniguchi 19 -edmunds.com 19 -1000th 19 -22:43 19 -atmar 19 -overbroad 19 -instapaper 19 -spiritualists 19 -criers 19 -d-mississippi 19 -tirekidis 19 -44ft 19 -surveilled 19 -13in 19 -picobrew 19 -3.70 19 -cool-down 19 -tenures 19 -sibbald 19 -sukkar 19 -sandwiching 19 -super-luxury 19 -strangford 19 -rustlers 19 -military-run 19 -slurpees 19 -quammen 19 -damagingly 19 -franceschini 19 -1404 19 -1,011 19 -vandalia 19 -chicas 19 -electromechanical 19 -bredesen 19 -computer-animated 19 -livvix 19 -ouzou 19 -babymoon 19 -achi 19 -brasstown 19 -amalienborg 19 -tabbed 19 -averill 19 -pacifying 19 -out-performed 19 -0c 19 -mousr 19 -pohang 19 -ondoa 19 -hand-holding 19 -absolutist 19 -so14 19 -suffused 19 -andrà 19 -sba 19 -monyela 19 -iit 19 -director-in-charge 19 -floodlight 19 -kekula 19 -hussing 19 -dueker 19 -henrys 19 -noboa 19 -mintec 19 -vanadium 19 -kurylenko 19 -pilchard-gosnell 19 -outnumbers 19 -77m 19 -juliann 19 -sebbage 19 -sherrard 19 -narey 19 -skane 19 -fkn 19 -argueta 19 -linnaeus 19 -re-appointed 19 -46.1 19 -lifecycle 19 -rakus 19 -mindie 19 -piledriver 19 -sundogs 19 -dower 19 -24hrs 19 -tinkle 19 -builth 19 -fuping 19 -safermedia 19 -attrill 19 -wimpey 19 -weininger 19 -ovono 19 -timidly 19 -doran-webb 19 -k-8 19 -nevado 19 -quami 19 -giri 19 -scobbie 19 -lemole 19 -zamost 19 -capitalises 19 -hassaun 19 -118million 19 -cnd 19 -gusti 19 -jawbones 19 -talentless 19 -anti-cull 19 -atrix 19 -1529 19 -auto-throttle 19 -vickerson 19 -vizzini 19 -nichelle 19 -irregulars 19 -4:24 19 -low-rent 19 -1439 19 -proofing 19 -pees 19 -mainers 19 -taillight 19 -mirandola 19 -dodman 19 -salutations 19 -farago 19 -konczyk 19 -arifjan 19 -isis-affiliated 19 -charlevoix 19 -chrisco 19 -whirlpools 19 -maximized 19 -red-bellied 19 -principia 19 -glossies 19 -wernick 19 -bullsh 19 -1:07 19 -much-lauded 19 -jeorgia 19 -saffold 19 -shafiee 19 -dores 19 -3:18 19 -oxybenzone 19 -shallenberger 19 -hot-water 19 -b.s. 
19 -hsiung 19 -rieb 19 -01:26 19 -01:24 19 -redoing 19 -charlies 19 -el-janabi 19 -candia 19 -25-strong 19 -meiler 19 -tristar 19 -pelansi 19 -stogner 19 -lef 19 -reappraisal 19 -therefor 19 -faulcon 19 -occasioned 19 -292,000 19 -nirdosh 19 -massimino 19 -havas 19 -polity 19 -yego 19 -delaghetto 19 -seyyed 19 -haesler 19 -21:54 19 -standardise 19 -020 7629 9161 19 -kaza 19 -dollops 19 -newsmen 19 -21:18 19 -pathy 19 -ika 19 -excision 19 -age-progressed 19 -yahaya 19 -provolone 19 -antoine-curier 19 -flecha 19 -air-powered 19 -anchorwoman 19 -lomaia 19 -idiom 19 -bruckner 19 -dextrous 19 -hyper-vigilant 19 -lammily 19 -hesjedal 19 -feo 19 -brandan 19 -oxidizing 19 -sidefooted 19 -sasi 19 -asbell 19 -nasty-looking 19 -bagans 19 -1-800-273-talk 19 -1,013 19 -shiomura 19 -quaffed 19 -pull-back 19 -bloodstain 19 -waldemar 19 -book-signing 19 -maiorino 19 -brahma 19 -brahmi 19 -incredibeard 19 -tinkling 19 -handymen 19 -kilojoules 19 -nimby 19 -burgarello 19 -hildegard 19 -8 1/2 19 -olukolade 19 -chamique 19 -overbooked 19 -myong 19 -penis-shaped 19 -espanola 19 -nella 19 -milestotal 19 -gerety 19 -redwell 19 -fertilizing 19 -antacids 19 -jerald 19 -twigged 19 -ultra-luxury 19 -knuckleduster 19 -baluch 19 -berthing 19 -insubordinate 19 -wardrop 19 -khare 19 -khari 19 -beheshti 19 -cigar-smoking 19 -chocked 19 -woulda 19 -dinu 19 -klink 19 -brownhill 19 -melbourne-born 19 -fandler 19 -plutocrat 19 -androgen 19 -polo-playing 19 -zann 19 -misheloff 19 -lueken 19 -bernadean 19 -belliveau 19 -lotito 19 -12-feet 19 -9.55 19 -50-game 19 -ogasawara 19 -c2c 19 -prospero 19 -windlestone 19 -2011-13 19 -16-man 19 -lehrman 19 -norweigan 19 -firmest 19 -cheeseman 19 -fiances 19 -voice-overs 19 -34billion 19 -post-workout 19 -yonas 19 -27km 19 -rasool 19 -orchestration 19 -sangavaram 19 -za'atri 19 -1:21 19 -cso 19 -eyeglass 19 -trichomonas 19 -22:09 19 -gustin 19 -fawad 19 -re-training 19 -donta 19 -37.50 19 -soccket 19 -birkenstocks 19 -racoons 19 -vevers 19 -brum 19 -32.99 19 -deferment 19 -cameraphone 19 -kuek 19 -belzec 19 -houseman 19 -kuchins 19 -allseas 19 -837 19 -1,058 19 -oddsmakers 19 -aspley 19 -gizelle 19 -marginals 19 -budati 19 -heydon 19 -afscme 19 -cy-fair 19 -doge 19 -bobridge 19 -friend-of-the-court 19 -revesby 19 -najeeb 19 -soultrait 19 -rowden 19 -aphasia 19 -armour-plated 19 -cardio-respiratory 19 -0.28 19 -marzieh 19 -tensing 19 -skunkworks 19 -pervin 19 -haida 19 -salperton 19 -clausen 19 -boldface 19 -collates 19 -vallone 19 -mancillas 19 -el-baneh 19 -2,499 19 -well-cared 19 -hauck 19 -breaky 19 -waitomo 19 -wfaa.com 19 -panitan 19 -fidget 19 -75cm 19 -hirvonen 19 -bookmarking 19 -40.8 19 -40.6 19 -post-2015 19 -neigbours 19 -bygraves 19 -buckalew 19 -re-imagine 19 -lymphocytic 19 -post-civil 19 -800-acre 19 -proclivity 19 -all-english 19 -embalm 19 -sforza 19 -phlamachha 19 -mcilwain 19 -huntoon 19 -marcantel 19 -fredericksen 19 -jo-ann 19 -ibogaine 19 -23-24 19 -insincerity 19 -juniata 19 -shabak 19 -cuneiform 19 -simien 19 -eight-acre 19 -koki 19 -stapley 19 -160km 19 -weider 19 -kyrgiakos 19 -obstructionists 19 -obasuyi 19 -theall 19 -nabucco 19 -frugally 19 -rootlets 19 -unsentimental 19 -attention-seeker 19 -104-year-old 19 -krombach 19 -7,250 19 -anurag 19 -hildner 19 -anaika 19 -ratsiraka 19 -gorayeb 19 -periel 19 -family-planning 19 -trincomalee 19 -rah-rah 19 -miceli 19 -evangelizing 19 -colyton 19 -lovetta 19 -umpteen 19 -homocysteine 19 -baral 19 -al-salam 19 -forgeard 19 -90,000-a-week 19 -mallow 19 -bromo 19 -strife-hit 19 -embezzle 19 -short-form 19 -male-female 19 
-1200s 19 -villetard 19 -supposing 19 -carabobo 19 -fowell 19 -natzke 19 -myall 19 -lukin 19 -22:22 19 -abracadabra 19 -lloydspharmacy 19 -cingolani 19 -ahmadiyah 19 -d'andrea 19 -skillset 19 -khatau 19 -scrumhalf 19 -red-state 19 -140/90 19 -allgemeine 19 -parrikar 19 -600-pound 19 -guercio 19 -nti 19 -godlike 19 -bahati 19 -gunmetal 19 -jozsef 19 -hadean 19 -30.1 19 -humanism 19 -dhekelia 19 -chevaliers 19 -flip-flopped 19 -comparethemarket.com 19 -zillmer 19 -quadrotors 19 -yesenia 19 -subsidizes 19 -whitesell 19 -21:53 19 -quarter-finalists 19 -c-x75 19 -10.09 19 -sundlof 19 -marlen 19 -unbelieveable 19 -hillsdale 19 -delacroix 19 -20percent 19 -meador 19 -wingtips 19 -al-furqan 19 -non-prosecution 19 -renne 19 -quadrocopters 19 -weyman 19 -curson 19 -mashael 19 -filmgoers 19 -lvg 19 -boop 19 -shubham 19 -1722 19 -moncur 19 -strap-on 19 -cockell 19 -electromagnets 19 -leggio 19 -horticulturists 19 -al-ahmed 19 -entropa 19 -trios 19 -headshot 19 -64-year-olds 19 -deremer 19 -gereshk 19 -pedicab 19 -khayelitsha 19 -quarterfinalists 19 -jeffersonville 19 -ex-defence 19 -flyertalk 19 -mireles 19 -800g 19 -enchant 19 -turanor 19 -currumbin 19 -macmuiris 19 -pinckney 19 -burcaw 19 -under-40s 19 -ex-prosecutor 19 -cuocolo 19 -may-december 19 -microwaveable 19 -worldperks 19 -goldsack 19 -vitae 19 -junctures 19 -palgrave 19 -offtime 19 -photoreceptors 19 -kimbra 19 -homeschool 19 -ventoux 19 -ainley 19 -wageningen 19 -cairncross 19 -siev 19 -crutcher 19 -better-paid 19 -gravitates 19 -andreev 19 -best-of-three 19 -lagoda 19 -tickner 19 -kaltoft 19 -gonadotropin 19 -ph2 19 -ph1 19 -goward 19 -waseda 19 -malgorzata 19 -mcconnel 19 -m'bolhi 19 -87million 19 -linfield 19 -forehands 19 -latifa 19 -00:31 19 -00:37 19 -sub-arctic 19 -mallin 19 -66m 19 -pish 19 -outnet 19 -grigory 19 -chambery 19 -cornishman 19 -badalamenti 19 -ooho 19 -australi 19 -gaszczak 19 -khurram 19 -après-ski 19 -phoenicia 19 -padalka 19 -chinawhite 19 -schnittman 19 -agloe 19 -100p 19 -corsetti 19 -harilela 19 -trial-and-error 19 -judean 19 -pepperell 19 -fotis 19 -klos 19 -marieke 19 -materasso 19 -karif 19 -khorshid 19 -700lbs 19 -puroll 19 -rozen 19 -rozel 19 -counteracting 19 -finaldi 19 -db6 19 -coomes 19 -schachner 19 -656,000 19 -short-cut 19 -uninstall 19 -epipens 19 -yamamay 19 -deprince 19 -sockeye 19 -three-and-half 19 -editorship 19 -once-prosperous 19 -borrowings 19 -b-list 19 -mabrey 19 -non-operational 19 -marmaray 19 -durose 19 -lozman 19 -wind-driven 19 -skvortsov 19 -skynet 19 -yemane-berhane 19 -kugel 19 -boulos 19 -bouabdillah 19 -gazillion 19 -hammerl 19 -cockiness 19 -obviate 19 -billion-year-old 19 -tatterson 19 -apres 19 -intoxicants 19 -song-taek 19 -ranjana 19 -cayan 19 -170m 19 -concertina 19 -mallalieu 19 -collins-rector 19 -bolat 19 -reverse-engineer 19 -kazenga 19 -sub-postmasters 19 -kealy 19 -4,000-square-foot 19 -o'neills 19 -puckered 19 -simonon 19 -inviolability 19 -faxon 19 -grudzinskas 19 -acetylcholine 19 -waylett 19 -zoroastrian 19 -kesselly 19 -brocco 19 -61mph 19 -kansal 19 -therian 19 -drive-thrus 19 -16-24-year-olds 19 -unswayed 19 -todos 19 -examiner.com 19 -heli-skiing 19 -torro 19 -preempted 19 -rigel 19 -arkush 19 -miyah 19 -anti-cuts 19 -fillery 19 -mersini-houghton 19 -city-centre 19 -egidio 19 -babyface 19 -falklanders 19 -poundbury 19 -maccarone 19 -muslim-owned 19 -1490 19 -asterix 19 -cerebellar 19 -glentree 19 -outwitting 19 -teck 19 -ormesher 19 -whosay 19 -ousby 19 -jic 19 -futcher 19 -ambiguities 19 -twila 19 -papped 19 -41.8 19 -abbe 19 -non-intervention 19 -1601 19 
-vujevic 19 -manville 19 -ex-world 19 -encyclical 19 -goldhay 19 -60g 19 -reuptake 19 -mysupermarket 19 -offscreen 19 -staters 19 -dare-devil 19 -badiola 19 -3.8-litre 19 -31,700 19 -heeley 19 -good2go 19 -23:31 19 -23:33 19 -alt-j 19 -48f 19 -+49 19 -cross-sections 19 -leg-side 19 -cookes 19 -child-sex 19 -enka 19 -letourneau 19 -tekakwitha 19 -premium-rate 19 -microwatts 19 -gigli 19 -boocock 19 -klyzek 19 -gpo 19 -bouche 19 -johannsson 19 -15-18 19 -8800 19 -re-float 19 -al-shibh 19 -lychee 19 -stryde 19 -gravano 19 -wendel 19 -catatonia 19 -amadi 19 -sveinsson 19 -muro 19 -antonoff 19 -250cc 19 -friehling 19 -portuguese-born 19 -cyanide-laced 19 -an-26 19 -28mph 19 -plughole 19 -satwa 19 -grandfather-of-one 19 -bush-cheney 19 -gaslight 19 -golda 19 -buckler 19 -endor 19 -ism 19 -timmer 19 -nampa 19 -lozito 19 -a165 19 -news-gazette 19 -chacha 19 -tetney 19 -tpim 19 -westby 19 -over-plucked 19 -clocktower 19 -brandie 19 -woodhorn 19 -godlee 19 -ranald 19 -million-selling 19 -breakable 19 -23:19 19 -edginess 19 -co-writers 19 -93.2 19 -reveres 19 -shorthair 19 -chim 19 -jeneba 19 -2,500-mile 19 -lifenews 19 -1761 19 -crosstown 19 -skerry 19 -boyington 19 -clipboards 19 -lairs 19 -7.17 19 -smillie 19 -hallsworth 19 -moultrie 19 -autin 19 -kyrsten 19 -thursfield 19 -pantaleon 19 -kloster 19 -isme 19 -garlin 19 -tarpey 19 -culum 19 -hjk 19 -perna 19 -skow 19 -messiness 19 -deanery 19 -mediawatch-uk 19 -abulkhair 19 -qods 19 -nailsworth 19 -spiderweb 19 -o'shaugnessy 19 -prinz 19 -imbula 19 -13kg 19 -salopettes 19 -redshirt 19 -36c 19 -boltholes 19 -mueen-uddin 19 -marrabenta 19 -mcdean 19 -rigeur 19 -35,800 19 -107million 19 -kilduff 19 -marbe 19 -left-of-center 19 -rti 19 -re-grow 19 -7.1-magnitude 19 -magnitude-5 19 -british-american 19 -druggies 19 -norlevo 19 -atol 19 -jarraud 19 -dickman 19 -aiesha 19 -akana 19 -eoc 19 -soltanieh 19 -ozwald 19 -robertson-brown 19 -pettet 19 -lisani 19 -kaitaia 19 -keough 19 -samaranch 19 -hurstville 19 -orrostieta 19 -1599 19 -cordaro 19 -carano 19 -navel-gazing 19 -kingsbarns 19 -geim 19 -sifford 19 -readwriteweb 19 -lulin 19 -protégés 19 -homebrew 19 -jardine-paterson 19 -guideway 19 -faves 19 -d'oh 19 -balogh 19 -jarrae 19 -ambre 19 -muti 19 -emmi 19 -blanking 19 -berle 19 -toliver 19 -luella 19 -d'este 19 -anadarko 19 -humarr 19 -azmy 19 -nontoxic 19 -super-toned 19 -baggett 19 -foulston 19 -mcgavin 19 -bantered 19 -nunberg 19 -sprigg 19 -statesville 19 -14s 19 -bourdouleix 19 -racoon 19 -daye 19 -jewitt 19 -molyneaux 19 -rubel 19 -airless 19 -liddick 19 -smuggles 19 -butkus 19 -ubiera-cruz 19 -1,086 19 -arngrim 19 -loraine-smith 19 -recapitalisation 19 -foner 19 -dorf 19 -vanities 19 -butzier 19 -askin 19 -jurga 19 -4-1-3-2 19 -coarser 19 -shiseido 19 -hippen 19 -manchurian 19 -ghazarian 19 -wauthier 19 -shanklin 19 -severne 19 -burchett 19 -donatello 19 -chavistas 19 -hawksmoor 19 -water-powered 19 -basf 19 -ska2 19 -aric 19 -uninfected 19 -fédérale 19 -jà 19 -revenue-raising 19 -newly-acquired 19 -82million 19 -over-optimistic 19 -foxhunting 19 -6:18 19 -hutin 19 -rubano 19 -corwen 19 -600-acre 19 -cathar 19 -macys 19 -l6 19 -pqchat 19 -geographies 19 -then-illinois 19 -multiplicity 19 -ballydoyle 19 -mi-24 19 -horton-jones 19 -ultra-competitive 19 -bidmead 19 -halcrow 19 -superliga 19 -sanguino 19 -wickersham 19 -unbuckle 19 -shaoyang 19 -nuxe 19 -brain-computer 19 -buffalos 19 -vsu 19 -vsd 19 -insulates 19 -schumi 19 -malonyay 19 -vanvooren 19 -degen 19 -bar-room 19 -hosepipes 19 -bangkok-based 19 -muzzles 19 -kegui 19 -osment 19 -upper-crust 
19 -hartsell 19 -rubber-stamping 19 -earth-orbiting 19 -casula 19 -breno 19 -21.99 19 -sigifredo 19 -krajcik 19 -salahadyn 19 -kisumu 19 -vassey 19 -crooners 19 -self-generated 19 -matronly 19 -bridgehead 19 -damascus-based 19 -timchenko 19 -steamboats 19 -bellister 19 -indented 19 -gratin 19 -bookscan 19 -shoebury 19 -provan 19 -bonhoeffer 19 -sada 19 -dinan 19 -resisters 19 -eco-resort 19 -cryptographers 19 -july/august 19 -milieu 19 -dolgatov 19 -stigmatisation 19 -hemangioma 19 -titmarsh 19 -16-under 19 -safesearch 19 -chloroplasts 19 -adfa 19 -half-filled 19 -primping 19 -tayla 19 -after-tax 19 -ebebiyin 19 -ambam 19 -reverie 19 -stournaras 19 -fauja 19 -dans 19 -mpla 19 -pag 19 -ripley_77 19 -sundaravej 19 -delanoe 19 -phang 19 -aminyar 19 -quarrelled 19 -islamophobes 19 -gertz 19 -gerth 19 -jawaid 19 -sybille 19 -sharen 19 -optn 19 -mockumentary 19 -multi-beam 19 -ziona 19 -ngi 19 -hispanic-american 19 -mec 19 -enrages 19 -careerbuilder 19 -uncritical 19 -balinsky 19 -borrell 19 -peacefulness 19 -gibbens 19 -coverups 19 -hog-tied 19 -vnuk 19 -parlave 19 -blancs 19 -buhl 19 -nanking 19 -401k 19 -sugar-coated 19 -maillet 19 -54m 19 -sharpens 19 -mojtaba 19 -stiffest 19 -darvill 19 -klotho 19 -mom-to-be 19 -whole-heartedly 19 -kettley 19 -naoko 19 -waspish 19 -knbc-tv 19 -krusevac 19 -solalinde 19 -vekselberg 19 -bridles 19 -papaconstantinou 19 -tap-dancing 19 -materialises 19 -mazzetti 19 -kurzban 19 -hashmi 19 -warnick 19 -sampaio 19 -awale 19 -pre-menstrual 19 -barberry 19 -argentineans 19 -4:55 19 -myrrh 19 -wishlists 19 -lorie 19 -karak 19 -mcreynolds 19 -knowledge-based 19 -sibrel 19 -oadby 19 -bluth 19 -steppenwolf 19 -al-mahmoudi 19 -smillie-scavelli 19 -carapace 19 -merin 19 -albacar 19 -afriqiyah 19 -human-computer 19 -tutterrow 19 -19-20 19 -50,000-plus 19 -fourth-choice 19 -dhu 19 -jaipaul 19 -robinson-white 19 -reagent 19 -graddy 19 -crawlspace 19 -guarantors 19 -edelin 19 -worshiper 19 -airlie 19 -mohawks 19 -reduced-fat 19 -paperclips 19 -matase 19 -tightly-controlled 19 -bagby 19 -willman 19 -5.09 19 -meikhtila 19 -long-dormant 19 -centring 19 -fazes 19 -kameez 19 -anti-allergy 19 -quilling 19 -alakija 19 -re-mortgage 19 -first-timer 19 -orfevre 19 -casseroles 19 -sharpsburg 19 -#broadchurch 19 -yohana 19 -babitu 19 -socom 19 -katami 19 -1xtra 19 -kaseasbeh 19 -zenato 19 -valasquez 19 -brianie 19 -el-abidine 19 -michiel 19 -crabby 19 -once-in-a-generation 19 -non-gmo 19 -hta 19 -bawl 19 -quagliata 19 -bureaux 19 -kightley 19 -winhoffer 19 -ayzlee 19 -modems 19 -sina.com 19 -utis 19 -panamanians 19 -fee-payers 19 -leg-up 19 -girdner 19 -ochsner 19 -rickinger 19 -shuanggui 19 -furler 19 -gaar 19 -outmatched 19 -breathalysers 19 -counteroffensive 19 -fredette 19 -e.o. 19 -kimberly-clark 19 -possibles 19 -apalachicola 19 -syllabuses 19 -wingrove 19 -policymaker 19 -00:32 19 -besharova 19 -30-days 19 -tattis 19 -jizan 19 -free-tailed 19 -faulding 19 -third-best 19 -onr 19 -kalleen 19 -915 19 -yarmulke 19 -house-proud 19 -actuarial 19 -mgayiya 19 -bws 19 -49-year 19 -@tomdaley1994 19 -bangu 19 -a-year 19 -third-division 19 -then-white 19 -christoulas 19 -smacker 19 -aberavon 19 -hitt 19 -nicho 19 -ubah 19 -1571 19 -adeje 19 -titbits 19 -shakhtarsk 19 -topolski 19 -naas 19 -atlast 19 -festivalgoers 19 -agnostics 19 -160billion 19 -fiorentino 19 -palio 19 -ridgeview 19 -bakkies 19 -jaser 19 -foam-like 19 -tormenter 19 -black-outs 19 -w.t. 
19 -oui 19 -crista 19 --70 19 -last-place 19 -timekeeper 19 -no-good 19 -tough-love 19 -waverider 19 -aouate 19 -1.01 19 -cheiker 19 -saman 19 -golf-ball 19 -savannahs 19 -12g 19 -eruptive 19 -arabesque 19 -jet-skiing 19 -gilly 19 -nasional 19 -time-travelling 19 -93.5 19 -nazeri 19 -diers 19 -healy-rae 19 -13-15 19 -13-10 19 -low-pitched 19 -bedroomed 19 -allston 19 -lecerf 19 -usn 19 -nonstate 19 -asner 19 -paulo-based 19 -al-anzi 19 -grousing 19 -owino 19 -benzoyl 19 -shi'a 19 -preclinical 19 -freerunning 19 -hongshan 19 -papachristou 19 -doggerland 19 -darkseoul 19 -rivest 19 -aynaw 19 -self-report 19 -ibee 19 -lofting 19 -frayssinous 19 -sawfish 19 -halfback 19 -lateysha 19 -raemer 19 -quarter-hour 19 -savvier 19 -demoting 19 -over-inflated 19 -13cm 19 -moraine 19 -foodservice 19 -non-voting 19 -okanogan 19 -velu 19 -tianyi 19 -582 19 -d'qwell 19 -rightist 19 -hanners 19 -jalapenos 19 -ankle/foot 19 -2007-8 19 -manea 19 -75-foot 19 -wrcb 19 -rolexes 19 -vieri 19 -al-mihdhar 19 -hoth 19 -76-year 19 -multipack 19 -bolzano 19 -26,400 19 -khalib 19 -mud-walled 19 -mcgahn 19 -greek-style 19 -harleys 19 -larcher 19 -pay-it-forward 19 -shish 19 -poite 19 -forthrightly 19 -h&h 19 -five-speed 19 -polygraphs 19 -rocknest 19 -slithers 19 -iliffe 19 -menashe 19 -chasez 19 -balz 19 -ex-ministers 19 -hunching 19 -shechita 19 -harvard-trained 19 -2083 19 -quinceañeras 19 -ellie-jean 19 -ebden 19 -yuanchao 19 -0300 123 8018 19 -tight-head 19 -over-70s 19 -chinua 19 -disparagement 19 -concurrence 19 -maniatis 19 -viana 19 -joska 19 -mid-match 19 -testaments 19 -1:52 19 -foreclosing 19 -kswb 19 -84.6 19 -10-storey 19 -ruggedly 19 -torrente 19 -3:23 19 -troubadour 19 -multiagency 19 -talford 19 -setanta 19 -thornber 19 -ansley 19 -clymer 19 -unrestored 19 -voir 19 -below-the-knee 19 -saguaro 19 -ofsted-style 19 -kuba 19 -kitzbuehel 19 -bagwell 19 -'88 19 -kimmins 19 -impassible 19 -immorally 19 -u-576 19 -brannen 19 -sloes 19 -1,006 19 -separatist-held 19 -leis 19 -frame-by-frame 19 -alima 19 -sacredness 19 -u.c. 
19 -résistance 19 -3800 19 -sumon 19 -bullwinkle 19 -lovelite 19 -non-hostile 19 -car-sharing 19 -u.n.-sponsored 19 -injury-enforced 19 -iwork 19 -de-iced 19 -hong-won 19 -sommeliers 19 -three-pointers 19 -yulin 19 -kirkton 19 -radiography 19 -isse 19 -suler 19 -wimberly 19 -streamlines 19 -130lbs 19 -custom-fit 19 -megaphones 19 -terim 19 -barnegat 19 -ciabatta 19 -769 19 -deol 19 -3-minute 19 -tawse 19 -reiko 19 -up-tempo 19 -two-three 19 -laursen 19 -rollason 19 -buri 19 -wehrlein 19 -cassella 19 -@todayshow 19 -club-like 19 -underinsured 19 -brisa 19 -j'ssiah 19 -hamelle 19 -hadrosaur 19 -78million 19 -medlicott 19 -one-kilometre 19 -pleasants 19 -schaberg 19 -tangherlini 19 -kurmanbek 19 -paradiso 19 -wissenden 19 -carbonara 19 -2007/8 19 -burkard 19 -republishing 19 -mandurah 19 -basler 19 -murderabilia 19 -damping 19 -hec 19 -martelli 19 -longest-lived 19 -hatorah 19 -harrises 19 -71.6 19 -widowhood 19 -recchia 19 -cathi 19 -sebert 19 -1,875 19 -ym 19 -nine-day-old 19 -soft-drink 19 -md-83 19 -1533 19 -plaids 19 -gochenour 19 -greenbird 19 -timor-leste 19 -samalas 19 -saadiya 19 -steenhoek 19 -geotechnical 19 -immortalising 19 -photo-realistic 19 -five-round 19 -race-day 19 -indiegogo.com 19 -psyches 19 -ex-rangers 19 -gessell 19 -undiluted 19 -balaj 19 -kewley 19 -wdc 19 -borgia 19 -vanhise 19 -ez-zor 19 -rosi 19 -bhai 19 -tanera 19 -shaniesha 19 -rigoberto 19 -youell 19 -green-and-white 19 -kokesh 19 -mcmuffins 19 -calisthenics 19 -yale-new 19 -rdx 19 -co-working 19 -libert 19 -ebonics 19 -guardrails 19 -bta 19 -al-qaeda-inspired 19 -viljoen 19 -tomball 19 -invigilators 19 -beckingham 19 -28-9 19 -weisberg 19 -kilis 19 -16-12 19 -intrudes 19 -flanary 19 -kewark 19 -kaeng 19 -urwand 19 -meers 19 -al-naimi 19 -verdean 19 -alignments 19 -shorting 19 -jo@samaritans.org 19 -marv 19 -barankov 19 -864 19 -glenrothes 19 -warm-down 19 -700-mile 19 -riversimple 19 -bulstrode 19 -dictaphone 19 -shubin 19 -learco 19 -25-second 19 -teeth-whitening 19 -jetway 19 -tg24 19 -houck 19 -invicta 19 -bunions 19 -government-provided 19 -in-service 19 -cassy 19 -gosselins 19 -scs 19 -trellis 19 -focusses 19 -11-and-a-half 19 -milbank 19 -danil 19 -fluminese 19 -radium 19 -trolleywise 19 -khou-tv 19 -splitters 19 -melita 19 -huis 19 -kabou 19 -croxall 19 -wiedersehen 19 -fjp 19 -placating 19 -dunalley 19 -enchiladas 19 -soueif 19 -famara 19 -once-mighty 19 -westhoughton 19 -boby 19 -abu-mulal 19 -chika 19 -jundallah 19 -900kg 19 -cekic 19 -future-proof 19 -slobbering 19 -nobodies 19 -meltham 19 -uba 19 -schelotto 19 -colangelo 19 -serevi 19 -ppk 19 -beacher 19 -liberata 19 -aan 19 -stuka 19 -cricket-related 19 -8-years-old 19 -23-17 19 -hyped-up 19 -bield 19 -saltiness 19 -disorganization 19 -myvett 19 -mcglinn 19 -truculent 19 -23-hour 19 -electroencephalogram 19 -koriath 19 -yuliya 19 -seraj 19 -5,000-acre 19 -furst 19 -blasi 19 -malevolence 19 -phifer 19 -hovey 19 -aeon 19 -richner 19 -malamutes 19 -lejuez 19 -bernsen 19 -repercussion 19 -yacouba 19 -dramatizes 19 -arced 19 -weekend-long 19 -marky 19 -crewlink 19 -zair 19 -ependymoma 19 -raucously 19 -poti 19 -egea 19 -gauhati 19 -macguire 19 -pensionable 19 -workbench 19 -bazinet 19 -pro-hamas 19 -grinham 19 -ashwood 19 -half-decent 19 -glamorise 19 -baguio 19 -135mph 19 -hawksworth 19 -buncombe 19 -kuchma 19 -alpari 19 -rateable 19 -x6 19 -gallan 19 -abstractions 19 -hednesford 19 -alex-oxlade 19 -3,500-year-old 19 -1:19 19 -furno 19 -f-35s 19 -ativan 19 -bva 19 -hydrolysis 19 -22:18 19 -22:19 19 -22:10 19 -re-engaging 19 -emasculated 19 -deeping 19 
-sompob 19 -kenshil 19 -acorah 19 -beah 19 -99mph 19 -redcoat 19 -kitbag 19 -berghardt 19 -ex-editor 19 -aviaries 19 -av-8b 19 -chuo 19 -yassir 19 -messiest 19 -karnak 19 -sivac 19 -siemoniak 19 -mccarthyism 19 -cyclosa 19 -54.6 19 -bou-simon 19 -blackhole 19 -tanenbaum 19 -neutrogena 19 -lyveden 19 -single-track 19 -cheslea 19 -robinette 19 -benetti 19 -turpitude 19 -uptalk 19 -phra 19 -momen 19 -loveclough 19 -paralympicsgb 19 -rekindles 19 -rangsit 19 -80.7 19 -neisseria 19 -16-metre 19 -24-26 19 -24-23 19 -campiglio 19 -ikezi 19 -nit 19 -unadventurous 19 -gener 19 -29-point 19 -#goldenglobes 19 -85.3 19 -blepharoplasty 19 -redstate.com 19 -gratify 19 -apeldoorn 19 -shmyrova 19 -leolah 19 -55kg 19 -father-of-nine 19 -ventimiglia 19 -nkosazana 19 -effete 19 -cad$ 19 -encumbered 19 -jaafar 19 -82nd-minute 19 -federalists 19 -not-so-distant 19 -ex-convicts 19 -creepily 19 -kyran 19 -surfrider 19 -millimeter-wave 19 -bazi 19 -standing-room 19 -73.8 19 -hauchard 19 -québec 19 -cmp 19 -ballena 19 -prawfsblawg 19 -mdrs 19 -spacial 19 -didgeridoos 19 -milibands 19 -british-educated 19 -actavis 19 -yerrakalva 19 -thoughtlessly 19 -swantek 19 -fahour 19 -aun 19 -dilawar 19 -huttick 19 -marit 19 -maric 19 -elmsall 19 -betabrand 19 -ruy 19 -shantelle 19 -pissing 19 -city-bound 19 -mclinn 19 -ruination 19 -turn-offs 19 -1310 19 -pluribus 19 -suppiah 19 -do-not-use 19 -rutted 19 -grossi 19 -j.e. 19 -9.63 19 -mize 19 -rottentomatoes.com 19 -weightlifters 19 -fekete 19 -konecki 19 -mastoloni 19 -acharya 19 -non-approved 19 -telco 19 -unlucky-in-love 19 -carrollwood 19 -shahida 19 -resenting 19 -riversleigh 19 -floral-print 19 -starkville 19 -vardalos 19 -damage-control 19 -22:39 19 -szatkowski 19 -50-inch 19 -televicentro 19 -frontiersman 19 -10.1-inch 19 -afrobeats 19 -zea 19 -penwortham 19 -namasivayam 19 -grauer 19 -venkateswaran 19 -mamil 19 -1050 19 -70.7 19 -co-curator 19 -custom-fitted 19 -manuell 19 -honiara 19 -unfollowed 19 -anti-fungal 19 -cordrey 19 -sorceress 19 -pashmina 19 -payson 19 -fashola 19 -bourner 19 -periodontal 19 -titley 19 -acclimatising 19 -love-life 19 -olins 19 -five-car 19 -reinserted 19 -youson 19 -stigmatise 19 -madalina 19 -pharr 19 -awareness-raising 19 -12-and-a-half 19 -octogenarians 19 -carefirst24 19 -orsborn 19 -rajsombath 19 -blackboards 19 -14th-floor 19 -333,000 19 -askance 19 -kidswear 19 -mrt 19 -first-set 19 -crowd-pleasers 19 -389,000 19 -100-fold 19 -2,487 19 -frenchy 19 -minna 19 -viticulture 19 -belak 19 -kajouji 19 -adif 19 -70billion 19 -wk 19 -wp 19 -dj-ing 19 -bookend 19 -monbiot 19 -vasalgel 19 -5-foot-5 19 -five-o 19 -shoemake 19 -badgley 19 -arapaima 19 -negroni 19 -briand 19 -onishchenko 19 -galica 19 -paged 19 -daud 19 -theme-park 19 -dustup 19 -seidemann 19 -7.43 19 -co-lead 19 -snowmobilers 19 -triny 19 -malaren 19 -shaminda 19 -mustering 19 -brambilla 19 -iitate 19 -sat-navs 19 -under-25 19 -gagnier 19 -grachauskas 19 -rostered 19 -hartung 19 -army-run 19 -44.8 19 -sakura 19 -zagala 19 -hashemite 19 -mcmichael 19 -beckster 19 -ebersol 19 -bimbos 19 -brominated 19 -yapping 19 -high-caffeine 19 -marketwatch 19 -rudite 19 -sensationalize 19 -samardali 19 -peller 19 -osby 19 -nydia 19 -etobicoke 19 -c17 19 -commited 19 -breckon 19 -72-day 19 -eichinger 19 -thermal-imaging 19 -rungna 19 -hochstein 19 -1780s 19 -marshallsea 19 -well-researched 19 -10,000-year-old 19 -fraternising 19 -disenfranchising 19 -vice-captaincy 19 -mytton 19 -communally 19 -sirnak 19 -cowpens 19 -sekou 19 -jealousies 19 -realists 19 -tajbakhsh 19 -anti-sexual 19 -faux-fur 
19 -etty 19 -350lbs 19 -cauliflowers 19 -zooplankton 19 -tamiko 19 -mikado 19 -lijing 19 -gallina 19 -halas 19 -bauhaus 19 -airbaltic 19 -langbehn 19 -23:49 19 -social-network 19 -albentosa 19 -bourn 19 -squier 19 -appleinsider 19 -greenburgh 19 -gunsmith 19 -self-promotional 19 -boniek 19 -thylacine 19 -bisque 19 -nitkowski 19 -lakebed 19 -slumlord 19 -ankireddy 19 -12.2-inch 19 -242million 19 -gerba 19 -wynette 19 -rajendran 19 -carreno 19 -eppolito 19 -biletnikoff 19 -portus 19 -mcmanis 19 -proffer 19 -220mph 19 -neelie 19 -pa-46 19 -betchley 19 -madde 19 -maddi 19 -rouass 19 -geforce 19 -novoselic 19 -abridged 19 -fd 19 -google.cn 19 -kwajalein 19 -flagstones 19 -bohr 19 -barmen 19 -cumbrians 19 -diciples 19 -11-game 19 -guest-edited 19 -paiz 19 -hermel 19 -yews 19 -centr 19 -walwyn 19 -forbrig 19 -sephton 19 -freehand 19 -mutesi 19 -explosives-packed 19 -okcupid.com 19 -aqualyx 19 -0.44 19 -0.41 19 -neureuther 19 -rocard 19 -sillier 19 -spookily 19 -juttla 19 -once-over 19 -natsu 19 -baulkham 19 -undisc 19 -kingson 19 -tizi 19 -mccubbin 19 -chick-lit 19 -lawan 19 -13,800 19 -tebar 19 -glassdoor.com 19 -newsmax 19 -2,000,000 19 -creno-king 19 -25-64 19 -raelyn 19 -pussy-bow 19 -busselton 19 -jfa 19 -squalls 19 -j.a. 19 -bayat 19 -lijiang 19 -fox4 19 -jaco 19 -sxswi 19 -nakheel 19 -fluoro 19 -markeaton 19 -pre-birth 19 -vt. 19 -gravoin 19 -tuva 19 -nyclass 19 -sluggishness 19 -emptage 19 -british-style 19 -criggion 19 -sedivec 19 -myo 19 -alawite-dominated 19 -streener 19 -kotick 19 -vid 19 -23:21 19 -novarette 19 -totemic 19 -wedel 19 -eclat 19 -half-tonne 19 -nazarov 19 -aaas 19 -poulos 19 -champers 19 -epinal 19 -gobs 19 -booysen 19 -dummied 19 -inadvisable 19 -gelb 19 -walkabouts 19 -udacity 19 -bagus 19 -18-and-a-half 19 -jesseka 19 -anti-oxidant 19 -30-page 19 -psaila 19 -apia 19 -daf 19 -libertarianism 19 -tucuman 19 -laki 19 -yele 19 -dencia 19 -inexcusably 19 -onamade 19 -post-abc 19 -atomics 19 -tagliavini 19 -kaua'i 19 -kase 19 -kasa 19 -heacham 19 -body-sculpting 19 -unsafely 19 -14th-minute 19 -ogborn 19 -irk 19 -steelhead 19 -yu55 19 -gillenwater 19 -kliebert 19 -19p 19 -unsaturated 19 -omnipotent 19 -rabble-rousing 19 -marcoses 19 -agyei 19 -pande 19 -p.r. 19 -bening 19 -watkiss 19 -varnished 19 -atterbury 19 -kepler-22b 19 -burglarize 19 -gunmakers 19 -seiji 19 -windsurfer 19 -onesti 19 -kraftwerk 19 -cimb 19 -hiv-free 19 -mannish 19 -nouble 19 -infowars.com 19 -extravagances 19 -moyenne 19 -tsoi 19 -workbook 19 -coppin 19 -15-metre 19 -one-in-three 19 -thwaytes 19 -21-0 19 -avianca 19 -prideaux 19 -mass-casualty 19 -liquefaction 19 -goofed 19 -ppv 19 -nairab 19 -bekhechi 19 -lalli 19 -ostentation 19 -shot-making 19 -cologne-based 19 -2.12 19 -skivvies 19 -all-german 19 -disconnects 19 -serreze 19 -25-yards 19 -himala 19 -divulges 19 -cinque 19 -acupressure 19 -bellerbys 19 -steans 19 -mulayam 19 -p.g. 
19 -windshuttle 19 -ridgeline 19 -hypothesize 19 -cagefighter 19 -vincenzi 19 -cochineal 19 -15,400 19 -cfcs 19 -paramore 19 -minyard 19 -as2 19 -wncn 19 -meissner 19 -leber 19 -selbie 19 -gopers 19 -krispie 19 -bloomin 19 -nightjar 19 -729,000 19 -humanistic 19 -erc 19 -go-betweens 19 -pterodactyl 19 -stupefied 19 -knitter 19 -deshon 19 -semen-filled 19 -czechoslovakian 19 -syrian-based 19 -bloodsuckers 19 -hotham 19 -castries 19 -eno 19 -mertelj 19 -00:44 19 -self-congratulation 19 -abstinence-only 19 -@mercedesamgf1 19 -pesce 19 -thebault 19 -mccomish 19 -desomorphine 19 -rare-earth 19 -auroch 19 -over-rate 19 -liveability 19 -characterising 19 -demonisation 19 -23:47 19 -purgin 19 -sarbandi 19 -matsuda 19 -y-shaped 19 -opine 19 -supersymmetry 19 -kreitlein 19 -dusek 19 -gleision 19 -peadophile 19 -stockley 19 -ovidiu 19 -lesula 19 -dachiya 19 -12-months 19 -paintballing 19 -weigl 19 -craigie 19 -piggybacking 19 -ort 19 -chancing 19 -donn 19 -370million 19 -pdrc 19 -metronomic 19 -45cm 19 -meleanie 19 -pooler 19 -zakir 19 -citta 19 -icesave 19 -mikhailovich 19 -welsby 19 -higher-than-expected 19 -caborn 19 -finnstrom 19 -quickens 19 -dirks 19 -34-31 19 -pro-bono 19 -slob 19 -wwltv 19 -deportes 19 -garstang 19 -sumlin 19 -grigny 19 -kitchen/breakfast 19 -hyphenated 19 -goeuro 19 -karimah 19 -#tay4hottest100 19 -cleal 19 -taurine 19 -4300 19 -izabela 19 -t.v. 19 -cast-off 19 -paek 19 -jaron 19 -buzz.com 19 -pu'u 19 -pucks 19 -cassity 19 -0808 800 5000 19 -kvvu 19 -ranasinghe 19 -mitrovica 19 -spahr 19 -overhit 19 -rauner 19 -0.07 19 -diorskin 19 -bredbury 19 -non-black 19 -chevonne 19 -cage-free 19 -dabbashi 19 -everyones 19 -montréal 19 -cyclic 19 -barm 19 -tindle 19 -r44 19 -colón 19 -p-plates 19 -anti-abuse 19 -greendale 19 -no-bid 19 -damningly 19 -baclofen 19 -naish 19 -miami-bound 19 -betesh 19 -outshines 19 -akhurst 19 -ame 19 -maram 19 -eccleshare 19 -perestroika 19 -retesting 19 -98million 19 -monocoque 19 -12.75 19 -snap-happy 19 -trade-ins 19 -lunecase 19 -86.4 19 -dheepthi 19 -shufai 19 -belgorod 19 -extra-special 19 -sheresky 19 -vaishya 19 -kimaiyo 19 -kyliyah 19 -photodynamic 19 -chanteuse 19 -hipaa 19 -sacko 19 -hines-randle 19 -lidstone 19 -day-time 19 -8:35 19 -boover 19 -allbutt 19 -andrade-gaytan 19 -menary 19 -kakehi 19 -8,000-square-foot 19 -spicejet 19 -wsoctv 19 -still-born 19 -hoddesdon 19 -necrotic 19 -+30 19 -scott-garrett 19 -oseltamivir 19 -nevins 19 -silke 19 -fira 19 -poway 19 -krumholz 19 -garrod 19 -67p/c-g 19 -bday 19 -ibsley 19 -aulas 19 -ganesharajah 19 -hofstetter 19 -vindictiveness 19 -seascapes 19 -tactus 19 -tinkerer 19 -tiaffay 19 -elyria 19 -greenlit 19 -150,000-a-year 19 -beltsville 19 -kootenay 19 -morfitt 19 -mangling 19 -morgan-glover 19 -first-tier 19 -pizzi 19 -revisionism 19 -paris-style 19 -asmare 19 -cackett 19 -woolfork 19 -sveti 19 -showalter 19 --23 19 -logitech 19 -66billion 19 -listers 19 -londis 19 -baranovich 19 -sw3 19 -erlend 19 -minnies 19 -blasingame 19 -gastroenterologists 19 -in-kind 19 -adar 19 -tz 19 -megan-leigh 19 -hazaribagh 19 -trengove 19 -gocompare 19 -huelkenberg 19 -renat 19 -martinez-gonzalez 19 -schlozman 19 -cantering 19 -gainfully 19 -illusionists 19 -goanna 19 -raley 19 -narva 19 -ramp-up 19 -welland 19 -kelis 19 -nechemya 19 -liedtke 19 -schefter 19 -neuroses 19 -alpes 19 -two-sided 19 -3b 19 -misfires 19 -pramanik 19 -dimola 19 -0.20 19 -@nico_rosberg 19 -heathlands 19 -connivance 19 -faulconer 19 -biringa 19 -islamification 19 -benzocaine 19 -quvenzhane 19 -longridge 19 -buscombe 19 -calvia 19 
-child-protection 19 -jettisoning 19 -toros 19 -mapham 19 -quibbling 19 -sunburns 19 -mala 19 -adagio 19 -devall 19 -haub 19 -preoccupations 19 -53m 19 -rideau 19 -hoghton 19 -concretely 19 -couvertier 19 -brych 19 -puritanism 19 -then-congressman 19 -acpra 19 -woodfox 19 -sabol 19 -gallantree 19 -rearmament 19 -964 19 -gambier 19 -uab 19 -1652 19 -1656 19 -lasagnes 19 -compstat 19 -critz 19 -tracheal 19 -friending 19 -charmless 19 -muffler 19 -brisben 19 -ihsanullah 19 -esam 19 -hornblower 19 -oxidized 19 -floris 19 -perforations 19 -e45 19 -lamason 19 -etcetera 19 -coagulated 19 -chambray 19 -rumford 19 -alejos 19 -obama-clinton 19 -robocoin 19 -ecstacy 19 -failla 19 -mykaela 19 -gobbi 19 -schear 19 -30-45 19 -bowdich 19 -re-engineering 19 -portier 19 -redflex 19 -dosanjh 19 -babywearing 19 -squirms 19 -anger-management 19 -wattel 19 -self-loading 19 -xenna 19 -prete 19 -gotobed 19 -europhiles 19 -brookgate 19 -panas 19 -shafii 19 -lufti 19 -haagen-dazs 19 -impale 19 -passcodes 19 -mows 19 -hessenthaler 19 -lozowski 19 -800km 19 -murphey 19 -rollie 19 -counter-revolutionary 19 -bryde 19 -glanister 19 -hc-130 19 -traffic-free 19 -baumgarten 19 -schori 19 -patey 19 -3.24 19 -karsiah 19 -junfeng 19 -acquits 19 -allain 19 -kering 19 -gratis 19 -carpio 19 -pagaent 19 -ahdel 19 -hi-c 19 -schellhas 19 -witonis 19 -hamdallah 19 -roulette-style 19 -satterwhite 19 -martineta 19 -midwesterner 19 -dpm 19 -evocation 19 -27-member 19 -d'amour 19 -tringale 19 -17per 19 -leisha 19 -homan 19 -shaquan 19 -tabar 19 -henner 19 -dimond 19 -picchiotti 19 -9-4 19 -9-9 19 -ici 19 -sepe 19 -centralise 19 -miko 19 -draughty 19 -skulduggery 19 -sintering 19 -demme 19 -bonsall 19 -riaa 19 -day-in 19 -9a 19 -lusczynski 19 -lahun 19 -cokey 19 -israeli-controlled 19 -utilisation 19 -aml 19 -half-chances 19 -northstowe 19 -scruton 19 -scheimer 19 -sculptress 19 -backburner 19 -classico 19 -proulx 19 -robenalt 19 -christiansburg 19 -tamping 19 -bohbot 19 -accumbens 19 -golani 19 -damocles 19 -karelia 19 -over-diagnosis 19 -500km 19 -fama 19 -conchas 19 -deltas 19 -collegian 19 -rohani 19 -transfiguration 19 -bhardwaj 19 -holthouse 19 -101million 19 -proschwitz 19 -waxahachie 19 -ottos 19 -snowcapped 19 -specialism 19 -pascarella 19 -1,136 19 -55lbs 19 -quadrantids 19 -mcqueenie 19 -bargain-basement 19 -housings 19 -helpfulness 19 -web-like 19 -battin 19 -gaslamp 19 -croak 19 -eco-lodge 19 -36mph 19 -307-year-old 19 -betteridge 19 -82.1 19 -buy-one-get-one-free 19 -pilfer 19 -crabble 19 -oto 19 -devises 19 -balapovi 19 -monfort 19 -lennard 19 -abudu 19 -instore 19 -apsalyamov 19 -pigtail 19 -aparecido 19 -cornealious 19 -ex-real 19 -best-of 19 -flower-filled 19 -macedonians 19 -metrico 19 -midsized 19 -modig 19 -mafiosi 19 -deformations 19 -demian 19 -five-setter 19 -badman 19 -portmarnock 19 -ananda 19 -paule 19 -caudate 19 -stange 19 -r-ky 19 -recce 19 -shogun 19 -savannah-chatham 19 -bourgault 19 -industrially 19 -hurriya 19 -thrashers 19 -balentine 19 -wohl 19 -supercluster 19 -ihor 19 -kamakura 19 -kraybill 19 -cargile 19 -spano 19 -glasshole 19 -78.9 19 -78.7 19 -ismayilova 19 -witheford 19 -byars 19 -disease-ridden 19 -charnock 19 -hever 19 -litan 19 -nyingi 19 -guly 19 -landover 19 -mantor 19 -depriest 19 -switchers 19 -bhutanese 19 -wedge-shaped 19 -kovacik 19 -49.4 19 -kulukundis 19 -missouri-based 19 -panelbase 19 -twerks 19 -mclarens 19 -mdds 19 -espirito 19 -brengle 19 -mahi 19 -frogg 19 -sex-marriage 19 -whoi 19 -throttles 19 -jordine 19 -18per 19 -blood-letting 19 -boel 19 -chide 19 -sexualities 19 -#sandy 
19 -57m 19 -dilaudid 19 -faouzi 19 -priscila 19 -moated 19 -schoenmann 19 -general-secretary 19 -folkard 19 -ever-rising 19 -tree-house 19 -organic-rich 19 -denyse 19 -oosterbroek 19 -sizzled 19 -auto-complete 19 -chicest 19 -timberlea 19 -checkpost 19 -927 19 -ev71 19 -defrances 19 -arthrogryposis 19 -five-decade 19 -garrigus 19 -persephone 19 -arnside 19 -msl 19 -msa 19 -lilienfeld 19 -end-of-the-year 19 -well-thought-out 19 -langoustine 19 -bondar 19 -c.f. 19 -udrea 19 -giddily 19 -magie 19 -manier 19 -berkland 19 -vichai 19 -patella 19 -pro-u.s. 19 -gawked 19 -trenberth 19 -patthanathabut 19 -roura 19 -anne-sophie 19 -suryani 19 -gingras 19 -no-fault 19 -jeunesse 19 -party-aligned 19 -nikkita 19 -3,000-square-foot 19 -weeki 19 -23-minute 19 -rossides 19 -accrediting 19 -kotv 19 -levi-blu 19 -sacral 19 -oratorical 19 -chogm 19 -krachan 19 -67.9 19 -viddy 19 -laclair 19 -nicolaidis 19 -burciaga 19 -ribblesdale 19 -bartal 19 -orlando-area 19 -butties 19 -slavitt 19 -bobilya 19 -bartolo 19 -autopen 19 -mchaffie 19 -nonvoters 19 -yle 19 -synchronising 19 -kultala 19 -doughton 19 -upbraided 19 -meuron 19 -diebold 19 -blanchflower 19 -most-read 19 -ballboy 19 -raggett 19 -tempora 19 -gourmand 19 -parnes 19 -jemaa 19 -dehaven 19 -enthusing 19 -shair 19 -535,000 19 -jean-gilles 19 -voser 19 -carnevale 19 -392,000 19 -mookie 19 -rogers-ratcliffe 19 -racton 19 -greeters 19 -strimmer 19 -18-under 19 -thomaston 19 -satisfyingly 19 -scandic 19 -granda 19 -premenopausal 19 -technomic 19 -shinbach 19 -noggin 19 -maycock 19 -ring-shaped 19 -terebins 19 -spidery 18 -13bn 18 -fakers 18 -kamerman 18 -nougat 18 -moriarity 18 -hassakeh 18 -knu 18 -ductal 18 -milhouse 18 -entryways 18 -fillion 18 -witching 18 -bast 18 -harakat 18 -wine-producing 18 -beachcomber 18 -tyro 18 -goldfield 18 -21:39 18 -agog 18 -elliott-joahill 18 -isolationists 18 -prison-like 18 -liliger 18 -wensley 18 -sherrilyn 18 -geochemical 18 -kurdi 18 -neo-fascist 18 -pankration 18 -byelection 18 -vanillin 18 -tshwane 18 -tiukhtyaev 18 -hultz 18 -weezer 18 -mark-ups 18 -fera 18 -hitmakers 18 -cardi 18 -hartle 18 -esmin 18 -1508 18 -1,176 18 -allergen-free 18 -boreanaz 18 -fishler 18 -tisei 18 -mamohato 18 -grieves-smith 18 -botch 18 -newspaperman 18 -frakt 18 -hieronymus 18 -voiceovers 18 -kellenberger 18 -langfield 18 -9.11 18 -b.o.b. 
18 -inglenook 18 -azimkar 18 -arbour 18 -grevin 18 -flybys 18 -albright-byrd 18 -80-page 18 -ottoman-era 18 -white-only 18 -kayyem 18 -maundrell 18 -olopade 18 -kassovitz 18 -ellerbe 18 -tums 18 -monarchists 18 -thula 18 -ginge 18 -re-mission 18 -quintus 18 -paschi 18 -carnesville 18 -korine 18 -davtyan 18 -habanero 18 -obnoxiously 18 -22:40 18 -lumpectomies 18 -candelabras 18 -lenhoff 18 -movie-going 18 -four-step 18 -eastwick 18 -atropine 18 -imperils 18 -below-normal 18 -jayhawk 18 -chuene 18 -carwash 18 -71million 18 -issey 18 -huskers 18 -scuds 18 -yosses 18 -yandex 18 -wenran 18 -wahaca 18 -alizza 18 -truk 18 -nightlinger 18 -shabangu 18 -schemel 18 -voort 18 -roig-debellis 18 -brienna 18 -chudi 18 -fotheringhay 18 -linington 18 -gorbals 18 -viñoly 18 -basey 18 -sumeray 18 -tantra 18 -khanfar 18 -80-acre 18 -al-dura 18 -kunekune 18 -sunnie 18 -less-than-flattering 18 -winship 18 -rotkovich 18 -kerbside 18 -arrowe 18 -phuc 18 -jarramplas 18 -mainstone 18 -cakey 18 -exif 18 -noos 18 -morticians 18 -limiter 18 -112-mile 18 -feedbacks 18 -activehours 18 -soukup 18 -a&w 18 -glutton 18 -hongfang 18 -mellons 18 -gwilym 18 -two-face 18 -hareford 18 -madoc 18 -juliani 18 -mouette 18 -lance-corporal 18 -canvassers 18 -gdynia 18 -48-inch 18 -dirndl 18 -disdained 18 -rembrandts 18 -czack 18 -littleport 18 -el-e 18 -pikus-pace 18 -minadaki 18 -1,109 18 -rula 18 -185mph 18 -tshering 18 -hilsenteger 18 -swonger 18 -toongabbie 18 -stavro 18 -371,000 18 -tissington 18 -nadelmann 18 -flowerdew 18 -ceefax 18 -koyama 18 -mancha 18 -shirokov 18 -abeche 18 -ensar 18 -involvements 18 -rebroadcast 18 -clearcast 18 -travoltas 18 -duhuk 18 -hinn 18 -yedioth 18 -gloop 18 -m.l. 18 -haboob 18 -endeavored 18 -a4wp 18 -jokily 18 -falkenberg 18 -dili 18 -kaylin 18 -non-professional 18 -misters 18 -coubertin 18 -mansueto 18 -litherland 18 -age-defying 18 -jma 18 -spennymoor 18 -kalanit 18 -insein 18 -suining 18 -perrotta 18 -run-through 18 -dawoud 18 -sunna 18 -90kg 18 -sadar 18 -pointlessly 18 -croon 18 -blumsom 18 -jad 18 -gardiners 18 -honesdale 18 -nakuru 18 -ryedale 18 -409,000 18 -hemraj 18 -bonera 18 -tampico 18 -superbikes 18 -palaniappan 18 -norphel 18 -ohrdruf 18 -mont. 
18 -28-page 18 -cutis 18 -masaki 18 -theologically 18 -eef 18 -senkakus 18 -scharber 18 -boros 18 -repositories 18 -mcgaughey 18 -paudert 18 -13,100 18 -65.2 18 -jigsaws 18 -tsegaye 18 -tricycles 18 -1:06 18 -porphyria 18 -cus 18 -abdiaziz 18 -longwell 18 -tsg 18 -barataria 18 -siluanov 18 -per-mathias 18 -450g 18 -mirgind 18 -moderate-intensity 18 -endorphin 18 -u-visa 18 -goduti 18 -ported 18 -alighting 18 -quashes 18 -anglaise 18 -gessling 18 -derian 18 -religious-based 18 -cheif 18 -cobie 18 -wimax 18 -setola 18 -diction 18 -jumpin 18 -killerton 18 -dbr1 18 -10-strong 18 -953 18 -857 18 -holahan 18 -lysacek 18 -rouch 18 -nisanyan 18 -dip-dyed 18 -cakarel 18 -117-111 18 -newly-revealed 18 -chipman 18 -pensively 18 -mileva 18 -weezy 18 -po-faced 18 -tuffley 18 -bath-time 18 -sportive 18 -traycoff 18 -midlands-based 18 -groin/pelvis 18 -levittown 18 -witchhunt 18 -tolliver 18 -21:19 18 -disbursements 18 -orrick 18 -crawshay 18 -gimson 18 -coppertone 18 -iwaki 18 -klaxons 18 -150-acre 18 -shinpads 18 -756 18 -754 18 -owen-jones 18 -fiascos 18 -px 18 -goodly 18 -swinhoe 18 -carona 18 -isabell 18 -yame 18 -much-derided 18 -munera 18 -owens-thurston 18 -sheens 18 -waterkloof 18 -swordsman 18 -bogotá 18 -allem 18 -schweppes 18 -durran 18 -9.85 18 -tech-industry 18 -shinichi 18 -muckraking 18 -mackinac 18 -moisturises 18 -pyeongtaek 18 -stopera 18 -embryoscope 18 -vassilis 18 -whitetail 18 -pettiford 18 -dhows 18 -ontario-based 18 -school-run 18 -@david_cameron 18 -gadot 18 -981 18 -528,000 18 -matula 18 -gawky 18 -subpoenaing 18 -absorbers 18 -shuji 18 -daugaard 18 -maddex 18 -dabbles 18 -gaza-egypt 18 -categorizes 18 -imsi 18 -uncuffed 18 -ramped-up 18 -art-lovers 18 -two-over-par 18 -pollex 18 -in-crowd 18 -job-creation 18 -4,450 18 -french-german 18 -self-balancing 18 -bonnin 18 -watsa 18 -dampney 18 -hydroplane 18 -plate-glass 18 -tarah 18 -re-registered 18 -defrancesco 18 -bogor 18 -33lb 18 -hrant 18 -mudge 18 -casimir 18 -leawood 18 -uysal 18 -divesting 18 -non-interference 18 -lafever 18 -stich 18 -esipisu 18 -balearics 18 -j-15 18 -transited 18 -get-rich-quick 18 -floodway 18 -bolotnaya 18 -skerrett 18 -apparatchiks 18 -vanhorn 18 -forck 18 -bookout 18 -peverell 18 -c-130s 18 -gosun 18 -aster 18 -categorizing 18 -1.71 18 -sun-herald 18 -mordred 18 -tyringham 18 -shoshone 18 -newgate 18 -ulibarri 18 -olanoff 18 -jon-paul 18 -degorski 18 -palling 18 -karjalainen 18 -riz 18 -abayas 18 -interoperability 18 -genium 18 -51.4 18 -dunoon 18 -neville-jones 18 -start-rite 18 -sumi 18 -prejudging 18 -chérif 18 -al-thawadi 18 -schalkwyk 18 -doga 18 -inward-looking 18 -?!! 
18 -hoverbike 18 -rodden 18 -somdev 18 -11bn 18 -pedialyte 18 -irek 18 -narinder 18 -saprissa 18 -naturalisation 18 -heyburn 18 -couloute 18 -espn2 18 -near-certain 18 -ius 18 -repeals 18 -playsuits 18 -wootan 18 -stix 18 -sluggishly 18 -thomond 18 -bounkham 18 -evren 18 -ofthe 18 -respers 18 -wigstrom 18 -spanish-owned 18 -frisby 18 -colourways 18 -tregre 18 -dlamini-zuma 18 -turkish-armenian 18 -froom 18 -filial 18 -shoe-bomber 18 -tero 18 -on-lookers 18 -munns 18 -markovich 18 -swizz 18 -lefts 18 -ringwald 18 -carnaval 18 -contoret 18 -ice-age 18 -marsek 18 -stoodley 18 -russian-based 18 -brackenridge 18 -i-81 18 -tax-deductible 18 -benefer 18 -briercliffe 18 -uehara 18 -armistead 18 -torner 18 -risha 18 -macala 18 -mehrabad 18 -leyne 18 -poyser 18 -23-26 18 -artery-clogging 18 -lookebill 18 -lightsquared 18 -2.69 18 -ruffner 18 -roeske 18 -kinloss 18 -galeries 18 -jellicoe 18 -320km 18 -2,000-acre 18 -5pointz 18 -bobbling 18 -a55 18 -hassel 18 -uninvestigated 18 -ghesquiere 18 -cripplingly 18 -liraglutide 18 -aquatica 18 -mk1 18 -frierson 18 -bleeping 18 -627,000 18 -charbonneau 18 -gobena 18 -auteuil 18 -sanaullah 18 -35st 18 -abera 18 -yale-loehr 18 -raouna 18 -shallot 18 -isted 18 -hard-right 18 -silcock 18 -galant 18 -inessa 18 -riggleman 18 -oleds 18 -paisey 18 -chaundy 18 -jef 18 -unconstructive 18 -sub-divide 18 -masilela 18 -swivels 18 -drug-addict 18 -tetsuya 18 -cannibalised 18 -two-word 18 -60bn 18 -abbotabad 18 -00:13 18 -beautifulpeople.com 18 -besmirch 18 -galil 18 -desmier 18 -lexie-mae 18 -ef-2 18 -swivelled 18 -hollywoodlife.com 18 -1,255 18 -stuhlbarg 18 -22:27 18 -blinis 18 -syp 18 -ru486 18 -djordjevic 18 -coronations 18 -panoxyl 18 -throught 18 -centipedes 18 -gruyere 18 -teletubby 18 -bourton-on-the-water 18 -aimi 18 -zebedee 18 -haueter 18 -pneumonic 18 -wtm 18 -mizanur 18 -manaway 18 -rayney 18 -1,075 18 -gerasimov 18 -enlistees 18 -west-facing 18 -geophysicists 18 -krasnogorsk 18 -foreshadows 18 -mig-29 18 -dionysus 18 -reinvesting 18 -grosicki 18 -deyapp 18 -comity 18 -oaten 18 -typhoo 18 -thorton 18 -gebre 18 -cackles 18 -forsane 18 -club-goers 18 -casagrande 18 -lugs 18 -90billion 18 -duenas 18 -chedjou 18 -black-white 18 -l'alma 18 -defacement 18 -quangocrats 18 -63mph 18 -perspiring 18 -mini-mart 18 -self-avowed 18 -afrikaners 18 -fairyland 18 -undateables 18 -seminaries 18 -ramogi 18 -schulenburg 18 -fau 18 -south-coast 18 -hang-up 18 -emerick 18 -skelmersdale 18 -anti-cellulite 18 -davo 18 -sutton-wasmund 18 -car-like 18 -tamm 18 -throughput 18 -ruano 18 -pre-human 18 -hirrell 18 -sertraline 18 -375million 18 -pyestock 18 -entropy 18 -al-sanussi 18 -badelj 18 -benaroon 18 -instabilities 18 -thankfulness 18 -psn 18 -sub-inspector 18 -mikhailov 18 -quee 18 -löfven 18 -quivered 18 -rafizadeh 18 -woodiwiss 18 -58.7 18 -fanni 18 -wallpapers 18 -one-lap 18 -seventy-four 18 -lean-to 18 -ayerst 18 -nordin 18 -bathsheba 18 -re-painted 18 -dressing-down 18 -glues 18 -arpanet 18 -massereene 18 -franzini 18 -trivium 18 -jaymin 18 -spiridigliozzi 18 -bidot 18 -10-5 18 -dissolvable 18 -aircraftman 18 -taren 18 -finbow 18 -teat 18 -recyclers 18 -jamelle 18 -houseplant 18 -glassy-eyed 18 -9.96 18 -66.3 18 -13-under 18 -annick 18 -grigelyte 18 -woolrich 18 -frayssinet 18 -00:39 18 -saffir-simpson 18 -pot-infused 18 -prevost 18 -lgc 18 -dosimeters 18 -21-23 18 -21-24 18 -supervalu 18 -brightwell 18 -299.99 18 -stupider 18 -dhahran 18 -botero 18 -mellish 18 -silkscreen 18 -lampley 18 -oohs 18 -presov 18 -radiologic 18 -23:18 18 -23:17 18 -23:14 18 -wiffen 18 -turford 18 
-17-14 18 -francophone 18 -even-tempered 18 -ovid 18 -matsuoka 18 -adeoye 18 -islah 18 -brownville 18 -underdown 18 -aguadilla 18 -tribulation 18 -much-delayed 18 -balsa 18 -datamart 18 -burien 18 -antacid 18 -tats 18 -lovitz 18 -seven-person 18 -intertwining 18 -db4 18 -hartley-wass 18 -savors 18 -mckinzie 18 -rosetti 18 -aliona 18 -asadi 18 -barrass 18 -reddington 18 -non-serious 18 -chloé 18 -millerbergs 18 -winge 18 -dula 18 -tbm 18 -tbc 18 -chappatte 18 -lavenham 18 -telekinesis 18 -30in 18 -oxenberg 18 -treliske 18 -hafsat 18 -sphero 18 -nemours 18 -helmy 18 -5.83 18 -megaton 18 -pagesix 18 -boonville 18 -soundproofing 18 -mercyhurst 18 -hosam 18 -ergenekon 18 -horniest 18 -pfoa 18 -cini 18 -133rd 18 -mateja 18 -pannirello 18 -staden 18 -orellana-clark 18 -cash-for-questions 18 -grewal 18 -clammed 18 -moumouris 18 -50.9 18 -ghostface 18 -allying 18 -8:55 18 -orenstein 18 -whole-wheat 18 -150lb 18 -shucks 18 -candylipz 18 -samadi 18 -ss-led 18 -branum 18 -nose-down 18 -whole-hearted 18 -forden 18 -wassom 18 -charminster 18 -al-buti 18 -betters 18 -wenk 18 -komur 18 -malnati 18 -chambal 18 -30-years 18 -umpired 18 -navy-blue 18 -fehr 18 -turbofan 18 -awaran 18 -car-seat 18 -rutan 18 -48-page 18 -approximating 18 -hogewey 18 -akila 18 -double-wide 18 -thoresby 18 -delude 18 -threlfall 18 -forces-iraq 18 -bbw 18 -gwendolen 18 -suroor 18 -arb 18 -arnfield 18 -ammie 18 -lundstram 18 -tuitupou 18 -montmelo 18 -cockrum 18 -bucatinsky 18 -13.30 18 -zammit 18 -1991-92 18 -jib 18 -biafra 18 -chawton 18 -bioinformatics 18 -loan-to-value 18 -hussam 18 -freyre 18 -malé 18 -karran 18 -derrière 18 -gigg 18 -13s 18 -stila 18 -spooned 18 -leatherette 18 -air-tight 18 -taillights 18 -enver 18 -sleep-driving 18 -seastrand 18 -khem 18 -map-reading 18 -irx3 18 -daylan 18 -shevon 18 -palaver 18 -employment-based 18 -lollie 18 -scottish-based 18 -koranic 18 -g35 18 -dorrance 18 -pohlman 18 -blackballed 18 -23:37 18 -volgyesi 18 -sauers 18 -sumbawa 18 -pikeville 18 -rundgren 18 -resurfaces 18 -white-tipped 18 -kuldip 18 -prattle 18 -417,000 18 -zurich-based 18 -coggeshall 18 -man-portable 18 -amiel 18 -thykier 18 -self-taken 18 -ungar 18 -iowa-based 18 -97mph 18 -brightsource 18 -hevilift 18 -hilts 18 -bungie 18 -sants 18 -wgn-tv 18 -lowcock 18 -yeon 18 -plungers 18 -glasnost 18 -sà 18 -borrero 18 -otel 18 -laumoli 18 -ffestiniog 18 -prerequisites 18 -slideshows 18 -scimitar 18 -throop 18 -panky 18 -ayombekov 18 -seigel 18 -amaker 18 -misogynists 18 -backdoors 18 -walli 18 -internet-savvy 18 -dmca 18 -chappa 18 -carolann 18 -bohdan 18 -7-7 18 -single-mother 18 -crystallising 18 -10-round 18 -brauw 18 -ebrard 18 -rapace 18 -undiplomatic 18 -gravenell 18 -cila 18 -wgrz 18 -1768 18 -princetown 18 -blair-brown 18 -ex-politician 18 -7.12 18 -96.5 18 -high-living 18 -duffner 18 -harbinder 18 -35s 18 -updegrove 18 -kiwanja 18 -22-page 18 -ten-second 18 -darvell 18 -consumable 18 -wassim 18 -dishcloth 18 -dettelbach 18 -atwill 18 -4.32 18 -a36 18 -kameny 18 -jpeg 18 -trill 18 -ex-couple 18 -corrente 18 -gojali 18 -sufian 18 -farnon 18 -nakia 18 -blockchain 18 -13km 18 -front-of-house 18 -trendier 18 -sakthivel 18 -colum 18 -231,000 18 -epitomise 18 -coglaiti 18 -emancipate 18 -blumenauer 18 -perica 18 -sobol 18 -spokewoman 18 -raisers 18 -wouldn 18 -portentous 18 -635,000 18 -delvecchio 18 -thisday 18 -spellar 18 -devesey 18 -jacikas 18 -pock-marked 18 -brodrick 18 -bayrou 18 -piggins 18 -eborders 18 -kinnucan 18 -malley 18 -choson 18 -lexani 18 -wadden 18 -30-pound 18 -ssri 18 -knill 18 -fossil-fuel 18 -viirs 18 
-sunni-majority 18 -kerdasa 18 -14-years 18 -self-identity 18 -93mph 18 -richards-ross 18 -nadina 18 -killjoys 18 -homare 18 -despicably 18 -landsbanki 18 -4x400 18 -last-wicket 18 -12.05 18 -118-mile 18 -crawler 18 -shervon 18 -slags 18 -ghat 18 -stratheden 18 -tenteki 18 -ka'leah 18 -lillee 18 -hi-de-hi 18 -krigger 18 -assaultive 18 -cousans 18 -sanaria 18 -jarrah 18 -pre-event 18 --14 18 -sin-binning 18 -self-deception 18 -moji 18 -zhongshan 18 -elmahdy 18 -donmar 18 -right-to-life 18 -portimao 18 -implacably 18 -hallaton 18 -6,000-strong 18 -1,500-mile 18 -lavonne 18 -6-foot-7 18 -singha 18 -14g 18 -salafism 18 -mazile 18 -colegio 18 -muasher 18 -selvaratnam 18 -grumeti 18 -braidon 18 -ruediger 18 -exynos 18 -santino 18 -ozar 18 -mersea 18 -tubectomies 18 -anti-ebola 18 -knuckleball 18 -grandmother-of-five 18 -pigford 18 -rangy 18 -natchitoches 18 -finessed 18 -dorp 18 -dsp 18 -bolen 18 -fassnidge 18 -patuxent 18 -acceptances 18 -homebuilder 18 -subcompact 18 -haidarasl 18 -moorlands 18 -frontwoman 18 -canarsie 18 -dementia-like 18 -bitney 18 -thai-born 18 -guiltycount 18 -coatman 18 -acetic 18 -repackaging 18 -i-35w 18 -foreign-policy 18 -rudderman 18 -wonzey 18 -jahvaris 18 -peeples 18 -pre-screened 18 -seaburn 18 -6:13 18 -daqduq 18 -cared-for 18 -scherzo 18 -feldt 18 -mettler 18 -cogdell 18 -lr 18 -lucasville 18 -four-level 18 -garett 18 -mirarchi 18 -netty 18 -onwarat 18 -takara 18 -repute 18 -shamiya 18 -fernback 18 -re-make 18 -957 18 -paulley 18 -itvbe 18 -jeonbuk 18 -readman 18 -naughtiness 18 -mbemba 18 -kerk 18 -ivy-clad 18 -fraunhofer 18 -khal 18 -iovera 18 -cockfight 18 -up-skirt 18 -whitefriars 18 -worldliness 18 -weiners 18 -emilia-romagna 18 -sixth-generation 18 -cyl 18 -duritskaya 18 -delfin 18 -bca 18 -bcr 18 -class-b 18 -harten 18 -kristofer 18 -intimidatory 18 -taxiways 18 -hagee 18 -80-90 18 -unrewarded 18 -sub-aqua 18 -magdy 18 -bethann 18 -sejad 18 -theobromine 18 -steverson 18 -fergal 18 -nkrumah 18 -godrevy 18 -gravesites 18 -waitemata 18 -weakley 18 -lovins 18 -pilatus 18 -dulaney 18 -north-westerly 18 -koco-tv 18 -musante 18 -donegan 18 -single-person 18 --33 18 -funassyi 18 -douchez 18 -riebling 18 -balanchine 18 -dinas 18 -bodden 18 -comrie 18 -calland 18 -reme 18 -1100s 18 -tetlow 18 -quader 18 -loubser 18 -anti-ballistic 18 -diesel-electric 18 -cannulas 18 -post-olympic 18 -organophosphorus 18 -hebrews 18 -arrojas 18 -lewison 18 -fragranced 18 -shirreff 18 -sharlet 18 -mantecore 18 -anti-gm 18 -rabillard 18 -kwik 18 -wfie 18 -yanfei 18 -murphie 18 -obeidi 18 -iconoclast 18 -bangsamoro 18 -fancourt 18 -subtraction 18 -kinesio 18 -kononenko 18 -deshchytsia 18 -hurlburt 18 -nuñez 18 -deborra-lee 18 -80-strong 18 -simpleton 18 -richthofen 18 -money-printing 18 -under-equipped 18 -mccartin 18 -wons 18 -graan 18 -much-mocked 18 -m27 18 -mandisa 18 -sedaghatzadeh 18 -triple-glazed 18 -recently-published 18 -taccone 18 -1,970 18 -three-ring 18 -afroduck 18 -ibi 18 -pyatov 18 -carefirst 18 -forestville 18 -coloradoan 18 -kalinic 18 -abati 18 -tewksbury 18 -caymen 18 -moote 18 -white-water 18 -shweeb 18 -wicketkeeper-batsman 18 -chesmore 18 -pennsylvanians 18 -ayahna 18 -intially 18 -niazy 18 -kante 18 -persecutors 18 -gordas 18 -single-serve 18 -palome 18 -glamourising 18 -e.m. 
18 -aple 18 -hilde 18 -elgersma 18 -kseniya 18 -karta 18 -haruki 18 -timmothy 18 -gunnersbury 18 -delpy 18 -eydelman 18 -schimanski 18 -kungur 18 -humaira 18 -hanspaul 18 -nagra 18 -tanzer 18 -peyronie 18 -tilsa 18 -nist 18 -fayoum 18 -anacostia 18 -bloodsworth 18 -carmike 18 -dersingham 18 -raven-symone 18 -andalus 18 -aucker 18 -petrakis 18 -gooner 18 -tolleson 18 -1558 18 -suppressants 18 -codpiece 18 -kochanski 18 -17lb 18 -tonjes 18 -prues 18 -balcon 18 -whittamore 18 -kinesiology 18 -strops 18 -sackey 18 -oddo 18 -naco 18 -creaked 18 -malonga 18 -terzi 18 -gtech 18 -karley 18 -nishantha 18 -brumit 18 -icebar 18 -francaise 18 -delinquencies 18 -bedsides 18 -aerolineas 18 -gleen 18 -pyong 18 -micronutrients 18 -encyclopedias 18 -ambrosiadou 18 -kalb 18 -phet 18 -rosemond 18 -masterclasses 18 -sadjadpour 18 -whichello 18 -coauthors 18 -kims 18 -kime 18 -tax-payers 18 -450kg 18 -retrials 18 -4-years-old 18 -short-distance 18 -salzer 18 -sten 18 -one-eighth 18 -canaanites 18 -yai 18 -problem-free 18 -no-strings-attached 18 -5.05 18 -trichologist 18 -ex-raf 18 -tidiness 18 -stepbrothers 18 -23,400 18 -creston 18 -ellerbeck 18 -11-strong 18 -back-alley 18 -suva 18 -romanenko 18 -mashaba 18 -freis 18 -whisenhunt 18 -semelia 18 -gammacore 18 -volkert 18 -flails 18 -1784 18 -hiley 18 -aktobe 18 -basaran 18 -tunnellers 18 -astutely 18 -sykes-picot 18 -md-82 18 -3mph 18 -atareb 18 -eskandarian 18 -co-directors 18 -heyns 18 -21-inch 18 -garifuna 18 -sharko 18 -stansgate 18 -plantar 18 -telcos 18 -anamarie 18 -springvale 18 -lucchese 18 -badmouthing 18 -artibonite 18 -fielden 18 -photorealistic 18 -littlerock 18 -land-dwelling 18 -centralizers 18 -envira 18 -krajewski 18 -walport 18 -macloughlin 18 -berati 18 -attention-getting 18 -fls 18 -faisalabad 18 -maib 18 -6,750 18 -johannson 18 -spent-fuel 18 -leali'ifano 18 -elisany 18 -evette 18 -chieu 18 -564 18 -jadin 18 -constancy 18 -oundle 18 -migdal 18 -leered 18 -dredgers 18 -wench 18 -inowashi 18 -cherrie 18 -marber 18 -handsprings 18 -overflight 18 -groome 18 -3,150 18 -capozziello 18 -kaylene 18 -brotman 18 -barbar 18 -kremmling 18 -ineligibility 18 -mdx 18 -lagares 18 -pragmatically 18 -jinhua 18 -nerad 18 -japanese-born 18 -heros 18 -ascl 18 -velvets 18 -rationalizing 18 -doren 18 -yodeling 18 -cranbourne 18 -kaing 18 -arieh 18 -decathlete 18 -beardon 18 -ajc.com 18 -coppler 18 -mesaba 18 -28p 18 -pierre-pierre 18 -ielpi 18 -parol 18 -mellard 18 -royall 18 -breakwell 18 -nuova 18 -froggy 18 -re-edited 18 -theophilus 18 -fiesty 18 -offenbach 18 -alusaimi 18 -necklacing 18 -sainbury 18 -kumbuka 18 -ligeia 18 -footscray 18 -wcsg 18 -wyvell 18 -hartcher 18 -tamaki 18 -rudaw 18 -golinski 18 -rpas 18 -foulks 18 -keepy-uppies 18 -expropriated 18 -maybelle 18 -dirr 18 -ammunitions 18 -stayin 18 -hovertravel 18 -thynne 18 -chronometer 18 -multibillion-pound 18 -farhat 18 -sarofim 18 -snow-white 18 -vorontsova 18 -wais 18 -khorog 18 -josà 18 -aubree 18 -domonic 18 -californian-based 18 -dappled 18 -barberton 18 -most-famous 18 -mubaraks 18 -icar 18 -valeska 18 -ejaculating 18 -55.8 18 -clanton 18 -mugu 18 -primack 18 -asashoryu 18 -sammartino 18 -appellation 18 -cranmore 18 -musion 18 -cste 18 -fohounhedo 18 -spoon-fed 18 -wienermobile 18 -azealia 18 -belfies 18 -loffredo 18 -bankulla 18 -qasimi 18 -britains 18 -generosa 18 -hra 18 -bugbears 18 -lassoed 18 -allegorical 18 -nizam 18 -conceptualized 18 -kishtwar 18 -nco 18 -zx 18 -z$ 18 -safehouse 18 -al-liby 18 -ashlie 18 -southern-most 18 -front-wheel 18 -sorger 18 -neurofibromas 18 -full-color 18 
-okagbare 18 -egotism 18 -skinless 18 -chepkurgor 18 -zwirner 18 -evermore 18 -rodriguez-gerada 18 -burkino 18 -civis 18 -cárdenas 18 -hookworm 18 -40-36 18 -muska 18 -private-equity 18 -equestrians 18 -osmayev 18 -back-story 18 -939 18 -racquel 18 -ambassadress 18 -sustrans 18 -topcliffe 18 -koby 18 -21-19 18 -mcfatter 18 -95.5 18 -keto 18 -spota 18 -alcopop 18 -khor 18 -mcgaha 18 -#daretobare 18 -fosbrooke 18 -chicagoan 18 -celts 18 -0000 18 -grevemberg 18 -commendably 18 -lap-dancing 18 -geopark 18 -wakering 18 -h&r 18 -louwana 18 -legarrette 18 -meon 18 -bargain-hunting 18 -4:16 18 -prayerbook 18 -angriest 18 -363,000 18 -seanie 18 -2,180 18 -telefoot 18 -greek-cypriot 18 -body-weight 18 -pianigiani 18 -tarkowski 18 -earnestness 18 -flotillas 18 -aristy 18 -60ml 18 -marbaugh 18 -tukurua 18 -alligood 18 -wars-themed 18 -benching 18 -morn 18 -nanotech 18 -1,220 18 -regressing 18 -nygren 18 -ivy-covered 18 -suljic 18 -22:56 18 -edmar 18 -sasan 18 -ordos 18 -shyamalan 18 -eight-metre 18 -shelter-in-place 18 -consigliere 18 -affirmations 18 -verrill 18 -albatros 18 -maylam 18 -1,149 18 -hydrazine 18 -ginamarie 18 -khaliya 18 -lemaire 18 -cioca 18 -karney 18 -#fail 18 -shattos 18 -yano 18 -prefontaine 18 -kozerski 18 -1,007 18 -courchee 18 -quimby 18 -osilka 18 -benit 18 -5 18 -giggsy 18 -beukes 18 -12,000-pound 18 -longyearbyen 18 -scene-stealing 18 -masuglia 18 -arians 18 -grehan 18 -#givingtuesday 18 -brimmed 18 -vermiculite 18 -aqeel 18 -roggasch 18 -stobo 18 -nepalis 18 -danon 18 -hpi 18 -weak-willed 18 -kemo 18 -american-statesman 18 -neelam 18 -groner 18 -hexavalent 18 -apres-ski 18 -inlays 18 -inhalable 18 -1781 18 -buro 18 -gristly 18 -upsilon 18 -analogues 18 -islamaphobia 18 -newson6 18 -installers 18 -ictr 18 -randstad 18 -rovny 18 -infectiously 18 -2:29 18 -scarecrows 18 -pinkerton 18 -coquettish 18 -low-sugar 18 -junky 18 -tyumen 18 -flatlands 18 -sportback 18 -donayre 18 -winchcombe 18 -2,000-plus 18 -bookmarks 18 -babbled 18 -15-count 18 -outmaneuver 18 -tanneries 18 -castledine 18 -pueblos 18 -alfre 18 -hohlbaum 18 -krupinski 18 -piatti 18 -memorandums 18 -batshon 18 -kates 18 -71.5 18 -modeste 18 -abilify 18 -stiefel 18 -seismicity 18 -box-set 18 -blackening 18 -8477 18 -herald-leader 18 -schweiss 18 -1535 18 -weligton 18 -allibon 18 -small-government 18 -pollo 18 -muhamad 18 -al-jabouri 18 -moynahan 18 -jotting 18 -jobmatch 18 -water-soluble 18 -supermum 18 -kealey 18 -pantomimes 18 -eimer 18 -hydras 18 -myitkyina 18 -tumuhirwe 18 -harbormaster 18 -bielski 18 -six-course 18 -█ 18 -d'en 18 -8g 18 -linette 18 -hackerazzi 18 -crundwell 18 -kempston 18 -mopp 18 -caved-in 18 -castrodad 18 -vidalia 18 -mangalyaan 18 -openskies 18 -self-evidently 18 -punch-ups 18 -watsky 18 -zhiyong 18 -nicodemus 18 -viviani 18 -us-backed 18 -60.5 18 -w-l-h 18 -fidgeted 18 -dextre 18 -fourth-minute 18 -derlis 18 -kraushaar 18 -22-match 18 -kwak 18 -smugness 18 -wookiee 18 -ferrata 18 -rids 18 -salt-water 18 -blomfield 18 -chenery 18 -lynnette 18 -99.3 18 -1914-1918 18 -muwaqqar 18 -chena 18 -sabiha 18 -ezzat 18 -macdiarmid 18 -marl 18 -wsu 18 -kankava 18 -calf-length 18 -teacher-student 18 -pankhania 18 -reconstructs 18 -bullfrog 18 -blokeish 18 -duato 18 -kurtulus 18 -seung-yul 18 -515,000 18 -shut-in 18 -post-colonial 18 -gilbart 18 -boozers 18 -cisgender 18 -wholewheat 18 -39c 18 -esfandiar 18 -late-1990s 18 -pressy 18 -anti-counterfeiting 18 -ayahana 18 -ploughshares 18 -ponorata 18 -trophic 18 -kefalonia 18 -knock-outs 18 -nazir-ali 18 -340g 18 -glamorises 18 -safa'a 18 -bookers 18 -746 18 
-shugart 18 -gouda 18 -talat 18 -mottola 18 -pontus 18 -pasang 18 -34d 18 -tejay 18 -maharajah 18 -shellshock 18 -zilch 18 -astound 18 -cowans 18 -gallucci 18 -dolans 18 -bogdanovich 18 -no-nos 18 -lanna 18 -petrina 18 -masciarella 18 -mzoli 18 -modalities 18 -protostar 18 -al-hassi 18 -folksmen 18 -angelita 18 -geddy 18 -albalwi 18 -water-tight 18 -thirty-something 18 -mather-lees 18 -olinga 18 -kpbs 18 -belched 18 -shoalhaven 18 -rivkie 18 -road-going 18 -outranked 18 -strasbourg-based 18 -pawtucket 18 -multiday 18 -kestler 18 -mantoloking 18 -subianto 18 -deja-vu 18 -bonmarche 18 -jiangshan 18 -noninvasive 18 -luciani 18 -unpackaged 18 -precept 18 -getchell 18 -balducci 18 -entonox 18 -crypto 18 -tatsuya 18 -bleecker 18 -zahedan 18 -l'avion 18 -heye 18 -heys 18 -shosetsu 18 -manfredini 18 -unceasing 18 -lieven 18 -helfrich 18 -lovie 18 -124th 18 -mbaye 18 -galli 18 -2011-2014 18 -collarbones 18 -ocean-front 18 -gentiles 18 -enchilada 18 -'` 18 -fastener 18 -mcpake 18 -lenoir 18 -ovcharov 18 -kaufmans 18 -turin-based 18 -colonnade 18 -libations 18 -damsels 18 -cloud-free 18 -honved 18 -dutchwoman 18 -zaloumis 18 -wallethub 18 -brasier 18 -self-funding 18 -bootylicious 18 -windiest 18 -boote 18 -wedgetail 18 -perrement 18 -harte-mcareavey 18 -free-flying 18 -suwannee 18 -bi-sexual 18 -spagna 18 -ukpn 18 -keep-ball 18 -hannaway 18 -chamberlains 18 -race-baiting 18 -lufeng 18 -aji 18 -najwa 18 -croucher 18 -biggles 18 -whiddon 18 -finra 18 -yaping 18 -mclagan 18 -anstead 18 -sharp-shooting 18 -gatpandan 18 -groake 18 -subcommittees 18 -brownson 18 -refitting 18 -hecking 18 -qaiser 18 -searson 18 -konigsberg 18 -catalyzed 18 -zemlja 18 -cauldrons 18 -annah 18 -laojiao 18 -labour-intensive 18 -keynesian 18 -beasties 18 -amodeo 18 -re-formed 18 -jalan 18 -guez 18 -24-25 18 -chul 18 -sheherazad 18 -eyeballing 18 -ahmadiyya 18 -24,206 18 -milliliters 18 -athena-marie 18 -a350-1000 18 -ebner 18 -thar 18 -dulaimi 18 -usoyan 18 -jack-in-the-box 18 -psycho-social 18 -high-explosive 18 -stonington 18 -pooh-poohed 18 -dass 18 -golodryga 18 -ohsu 18 -shylocks 18 -maclaughlin 18 -stenton 18 -moralistic 18 -rotstein 18 -grooved 18 -groover 18 -raggedy 18 -lapa 18 -sisounong 18 -cybervandalism 18 -free-diving 18 -nine-term 18 -well-presented 18 -ronayne 18 -ex-downing 18 -edens 18 -mortgaging 18 -timme 18 -junot 18 -primarolo 18 -mccaskey 18 -arley 18 -nowshera 18 -2.79 18 -joele 18 -drama-free 18 -cross-cum-shot 18 -slobber 18 -1,520 18 -36-foot 18 -devil-may-care 18 -full-force 18 -ultra-prime 18 -cmn 18 -well-manicured 18 -outstayed 18 -hydro-electric 18 -incompetency 18 -elderts 18 -paid-up 18 -gracetown 18 -pacelli 18 -month-and-a-half 18 -2900 18 -swantee 18 -rosies 18 -sisters-in-law 18 -harborside 18 -out-going 18 -observer-dispatch 18 -birstall 18 -flatline 18 -ornelas 18 -rc-135 18 -irrigating 18 -luxford-noyes 18 -gurls 18 -strozzi 18 -labrang 18 -10-years 18 -headstands 18 -lelo 18 -wind-chill 18 -wiling 18 -nitrite 18 -sinacori 18 -abet 18 -suckley 18 -maynes 18 -sexualize 18 -clean-energy 18 -weisenberger 18 -zeidman 18 -frontenac 18 -gosse 18 -amylase 18 -tallaght 18 -pyranha 18 -ritmiller 18 -screwball 18 -115-113 18 -1,244 18 -loafer 18 -california-davis 18 -sebastion 18 -wellenreuther 18 -bartendaz 18 -groundlings 18 -safarov 18 -azana 18 -octavius 18 -69.95 18 -masslive.com 18 -al-maqdisi 18 -siddiqa 18 -70.2 18 -danika 18 -l'etoile 18 -nining 18 -sanson 18 -dewees 18 -latia 18 -nurick 18 -jiwon 18 -hundal 18 -wwj 18 -hegre 18 -pouria 18 -then-speaker 18 -artyem 18 -38.1 18 -kyte 18 
-al-khateeb 18 -esm 18 -blackhurst 18 -lower-paid 18 -potenza 18 -radner 18 -geisenheyner 18 -szalai 18 -undershirts 18 -claydon 18 -yellowfin 18 -fockers 18 -culex 18 -mizuho 18 -arktos 18 -78-year 18 -sciaraffo 18 -beira 18 -sadushi 18 -superfit 18 -marmots 18 -yoshizawa 18 -arms-length 18 -over-prescribing 18 -ex-smokers 18 -non-financial 18 -17,600 18 -edmundo 18 -palowski 18 -5-foot-2 18 -asphyxiate 18 -pachulia 18 -abraxane 18 -megawati 18 -repton 18 -antonescu 18 -purisima 18 -derouen 18 -avena 18 -isonzo 18 -kio 18 -sinaiticus 18 -full-circle 18 -2,550 18 -best-off 18 -stepford 18 -72,500 18 -aidala 18 -taichung 18 -nutri 18 -scale-covered 18 -ssangyong 18 -razzle 18 -yogic 18 -starmus 18 -00:47 18 -hillen 18 -detectorist 18 -mahlangu 18 -heat-sensitive 18 -ptv 18 -6.08 18 -niederhoffer 18 -seances 18 -fennessy 18 -tulafono 18 -biehl 18 -beekman 18 -zadran 18 -retuning 18 -tixylix 18 -murderess 18 -stoosh 18 -co-opting 18 -bergsma 18 -1111 18 -reykjavík 18 -post-holiday 18 -asya 18 -384,000 18 -ordem 18 -inconsolably 18 -dusk-to-dawn 18 -lorick 18 -kouachis 18 -hemings 18 -aldosterone 18 -awu 18 -liverpool-based 18 -re-posting 18 -kormoran 18 -fulani 18 -albedo 18 -stepanov 18 -goshawk 18 -hurlston 18 -rawes 18 -brunches 18 -inoue 18 -janaury 18 -henares 18 -matcha 18 -gobbledegook 18 -daguerreotype 18 -sabre-toothed 18 -1,090 18 -rinku 18 -russes 18 -musicianship 18 -sasko 18 -revivals 18 -krohn 18 -bonneau 18 -analyser 18 -thorin 18 -drayson 18 -out-of-school 18 -nordine 18 -athas 18 -knowshon 18 -final-year 18 -verger 18 -verged 18 -suppes 18 -72nd-minute 18 -ouellet 18 -close-in 18 -martellus 18 -clip-in 18 -ultra-sound 18 -multitalented 18 -immordino 18 -sartiau 18 -avey 18 -23:09 18 -three-bathroom 18 -under-performed 18 -badaun 18 -putri 18 -megayacht 18 -non-party 18 -rocos 18 -unbending 18 -souder 18 -heckard 18 -chedi 18 -infest 18 -data-gathering 18 -athenee 18 -111skin 18 -8,750 18 -riess 18 -al-abed 18 -sinovac 18 -voinovich 18 -trabi 18 -seventy-one 18 -marshaling 18 -hair-like 18 -11,750 18 -ridesharing 18 -spero 18 -ganaway 18 -dobb 18 -biomarin 18 -tomy 18 -wavers 18 -cardillo 18 -ntsc 18 -westhuizen 18 -climaxes 18 -then-record 18 -third-graders 18 -armstrong-bland 18 -catheterization 18 -56-page 18 -under-valued 18 -little-used 18 -aplin 18 -perma-tanned 18 -drabs 18 -quso 18 -dressmaking 18 -co-habiting 18 -10.13 18 -julie-ann 18 -arneson 18 -timbavati 18 -jebaliya 18 -120,000-a-year 18 -mockett 18 -iphone5 18 -xviii 18 -baughman 18 -jinelle 18 -ex-congressman 18 -polypterus 18 -aznar 18 -pangkalan 18 -hour-mark 18 -7.24 18 -larkhall 18 -90-foot 18 -programme-maker 18 -comair 18 -djerejian 18 -dimuth 18 -zambrotta 18 -leonberger 18 -55.4 18 -55.3 18 -ruziga 18 -a614 18 -0.40 18 -open-sided 18 -comercio 18 -nazih 18 -hypnobirthing 18 -magaw 18 -self-immolators 18 -account-holders 18 -brasileiro 18 -hyperventilation 18 -bowland 18 -tonyrefail 18 -ampthill 18 -viscose 18 -second-richest 18 -ibru 18 -asphyxiating 18 -loftier 18 -tamblyn 18 -chancers 18 -tianducheng 18 -zarabozo 18 -caslow 18 -ibook 18 -deltawing 18 -distressingly 18 -gastonia 18 -pourciau 18 -superfight 18 -delingpole 18 -kayode 18 -ornamentation 18 -minhas 18 -tweetie 18 -reusens 18 -kahane 18 -elyounoussi 18 -mareb 18 --180 18 -huntington-whitely 18 -lomong 18 -hambycast 18 -zurutuza 18 -scribner 18 -13-6 18 -meaker 18 -morte 18 -s.a. 
18 -bayaa 18 -stawski 18 -fox6 18 -oaf 18 -keesler 18 -exemplifying 18 -popsci 18 -elkington 18 -dziwisz 18 -cordasco 18 -21-13 18 -perez-rivera 18 -sloten 18 -linchpins 18 -melis 18 -night-shift 18 -lozenges 18 -matta 18 -raynard 18 -interstitial 18 -madhur 18 -al-islamiya 18 -babyland 18 -vasyl 18 -tarlow 18 -bereza 18 -voce 18 -enbridge 18 -wolds 18 -72.3 18 -72.9 18 -maziar 18 -imbue 18 -tabacchi 18 -18-wheel 18 -coffee-table 18 -lipkis 18 -morad 18 -titanosaur 18 -d'annunzio 18 -tatjana 18 -out-qualified 18 -655,000 18 -benge 18 -tape-recorded 18 -syson 18 -ellia 18 -92.91 18 -nkwelle 18 -sweetening 18 -kleist 18 -satirizing 18 -metamaterial 18 -wilbraham 18 -eclair 18 -saye 18 -vamos 18 -musica 18 -lewandowska 18 -marijuana-laced 18 -kuske 18 -vovinam 18 -re-captured 18 -cndp 18 -dreamlifter 18 -577,000 18 -8-week-old 18 -genoveva 18 -nazma 18 -grafman 18 -bra-less 18 -three-tonne 18 -ghiraldini 18 -tsingtao 18 -brunello 18 -mcanna 18 -african-based 18 -benckiser 18 -phototherapy 18 -newtok 18 -912 18 -muzzling 18 -socio-cultural 18 -piggy-back 18 -randolph-macon 18 -excedrin 18 -rantisi 18 -paranorman 18 -17.25 18 -re-inventing 18 -100-a-week 18 -schiro 18 -gomaa 18 -mcwhinnie 18 -roselyn 18 -varty 18 -shishmaref 18 -web-only 18 -q-tip 18 -smooches 18 -liquidating 18 -160-year-old 18 -off-the-rack 18 -ppb 18 -pph 18 -broxtowe 18 -2.14 18 -oxford-based 18 -synchronizing 18 -guti 18 -ziah 18 -schlumpf 18 -fazackerley 18 -stalactite 18 -mulkey 18 -first-aiders 18 -ludgrove 18 -equips 18 -kacaniklic 18 -giorgios 18 -glug 18 -planetsolar 18 -undergarment 18 -listserv 18 -tvline 18 -bestial 18 -agers 18 -fleurs 18 -telecommute 18 -contadora 18 -bluffdale 18 -abdurahman 18 -ifly 18 -balks 18 -ayelabola 18 -sprason 18 -fatenah 18 -heartaches 18 -draw-down 18 -pictued 18 -abcs 18 -safety-related 18 -blowin 18 -acworth 18 -cornhuskers 18 -anti-science 18 -visayan 18 -dothard 18 -remanding 18 -alami 18 -pietermaritzburg 18 -00:40 18 -speed-dating 18 -dirham 18 -routon 18 -cableway 18 -snaza 18 -live-stream 18 -knoller 18 -gogel 18 -avie 18 -partook 18 -fifth-highest 18 -pdvsa 18 -ivar 18 -13,000-a-year 18 -broadfield 18 -patterning 18 -headford 18 -darnelle 18 -one-hundredth 18 -falmer 18 -points-based 18 -wrinkle-busting 18 -cumhuriyet 18 -turbo-prop 18 -sa80 18 -imitator 18 -poky 18 -'48 18 -epoque 18 -colerne 18 -flood-damaged 18 -krzanich 18 -bobbleheads 18 -35-year-olds 18 -construe 18 -appropriating 18 -roundstone 18 -all-sky 18 -knute 18 -inductions 18 -boudhanath 18 -❤ 18 -kannan 18 -1,776-foot 18 -westerfield 18 -babette 18 -townes 18 -mbio 18 -million-square-foot 18 -out-of-service 18 -bandt 18 -cyckowski 18 -wantagh 18 -freightliner 18 -snorkeller 18 -midtable 18 -shreve 18 -k.g. 
18 -non-hispanics 18 -yucaipa 18 -raimondi 18 -weisinger 18 -earwax 18 -give-and-take 18 -tutankhamen 18 -soei 18 -alhusni 18 -mongers 18 -tomorrows 18 -1756 18 -sullying 18 -glo 18 -kourou 18 -wagah 18 -coburg 18 -gerrymandered 18 -over-reaching 18 -alamosaurus 18 -10,000,000 18 -boned 18 -53.8 18 -thorsen 18 -near-certainty 18 -proofed 18 -unripe 18 -kaplon 18 -rozanne 18 -ign 18 -whos 18 -quinns 18 -gure 18 -university-purdue 18 -hottle 18 -blackhall 18 -fraggle 18 -sohan 18 -r-idaho 18 -6:28 18 -assa 18 -tortuga 18 -sambrook 18 -caverly 18 -onedrive 18 -16in 18 -brucie 18 -buehler 18 -75,000-a-year 18 -sorvino 18 -cohesiveness 18 -big-league 18 -rock-and-roll 18 -ghumman 18 -bolsa 18 -kaguri 18 -caravansary 18 -atx-101 18 -abc/washington 18 -co-designer 18 -alemanno 18 -microbloggers 18 -30mins 18 -wildeman 18 -10-place 18 -miskell 18 -pearcey 18 -11 1/2 18 -vianney 18 -asakusa 18 -1635 18 -right-minded 18 -khairat 18 -superskinny 18 -87.7 18 -ivanhoe 18 -christopherson 18 -barzeh 18 -calne 18 -razzies 18 -pro-immigrant 18 -asaram 18 -2,000-strong 18 -velveeta 18 -headguards 18 -toensing 18 -trouble-maker 18 -devon-based 18 -bonjour 18 -rawnsley 18 -idolizing 18 -paez 18 -whitely 18 -homebirth 18 -alisyn 18 -wachee 18 -neice 18 -animatronics 18 -oung 18 -d7 18 -hohhot 18 -petts 18 -apshawa 18 -guppies 18 -hisses 18 -d.k. 18 -faringdon 18 -nicci 18 -10mins 18 -ratifies 18 -nayar 18 -wor 18 -10-term 18 -caresses 18 -matika 18 -characterful 18 -higher-paying 18 -routt 18 -23:54 18 -brickbats 18 -maryon 18 -inescapably 18 -corsican 18 -knockabout 18 -eggrel 18 -park-like 18 -c-list 18 -icecream 18 -tibial 18 -three-wood 18 -organix 18 -backbeat 18 -britos 18 -protrusion 18 -221,000 18 -josephson 18 -hucksters 18 -swt 18 -begrudging 18 -monosyllabic 18 -15-6 18 -szrodecki 18 -balkwell 18 -mid-1920s 18 -abruption 18 -barometric 18 -yogini 18 -bris 18 -bria 18 -conlumino 18 -tortellini 18 -cabrini 18 -cosentino 18 -fox31 18 -lpd 18 -verdens 18 -poonia 18 -biblically 18 -speeder 18 -brandle 18 -silchenko 18 -loi 18 -3,281 18 -845,000 18 -hoffacker 18 -eastchurch 18 -zayatte 18 -futebol 18 -dhanota 18 -foxholes 18 -peggie 18 -hossack 18 -53.1 18 -hendi 18 -armless 18 -trellick 18 -bucci 18 -shearsmith 18 -maroof 18 -antle 18 -fangman 18 -camurat 18 -fix-a-flat 18 -encirclement 18 -outscoring 18 -iten 18 -umeå 18 -morang 18 -shoe-string 18 -al-rabeeah 18 -el-shater 18 -pre-operative 18 -irates 18 -4.42 18 -non-itunes 18 -footbonaut 18 -british-ruled 18 -yuliana 18 -ziemann 18 -roofed 18 -over-enthusiastic 18 -malde 18 -fmla 18 -huggers 18 -higa 18 -t.e. 
18 -lidcombe 18 -preservative-free 18 -takayuki 18 -rarified 18 -gluteus 18 -theorem 18 -jennette 18 -four-wheelers 18 -amrozi 18 -hemphill 18 -foldaway 18 -al-asal 18 -iztapalapa 18 -abortion-related 18 -a-bomb 18 -non-functional 18 -shuttlesworth 18 -glaude 18 -cft 18 -saghir 18 -dipika 18 -then-unknown 18 -surkov 18 -mix-ups 18 -blast-off 18 -2015-2016 18 -family-of-four 18 -maeda 18 -decomposes 18 -hbcus 18 -kneejerk 18 -uefaeuropaleague 18 -malialis 18 -southbourne 18 -engelkamp 18 -quarter-finalist 18 -oulu 18 -gaylardo 18 -gbohouo 18 -200,000-per-week 18 -dissections 18 -treves 18 -15-bedroom 18 -goolagong 18 -headerlinks 18 -betson 18 -trenchard 18 -selfridges.com 18 -paulison 18 -prepas 18 -kepler-62 18 -karna 18 -rebuffing 18 -oporto 18 -hendren 18 -0-40 18 -roti 18 -hyett 18 -oversimplified 18 -sky-rocket 18 -loaiza 18 -mengel 18 -ralphee 18 -dairylea 18 -nueces 18 -freelancing 18 -good-naturedly 18 -skyrunning 18 -sgarbi 18 -hongza 18 -teepees 18 -quaids 18 -binaries 18 -padraic 18 -well-fitting 18 -niggly 18 -campagna 18 -2006-2012 18 -3.28 18 -thelonious 18 -kalani 18 -3:50 18 -borbor 18 -asderakis 18 -canevari 18 -usha 18 -warrimoo 18 -momento 18 -antónio 18 -kudryavtseva 18 -muskox 18 -self-care 18 -zong 18 -unsc 18 -60-foot-long 18 -all-woman 18 -shettima 18 -show-business 18 -criscuolo 18 -feeny 18 -13-week 18 -clegger 18 -tomita 18 -1762 18 -belgian-born 18 -ubaydah 18 -briefer 18 -imedeen 18 -love-child 18 -centerpoint 18 -istria 18 -lacava 18 -tribespeople 18 -jezebel.com 18 -ferres 18 -15-story 18 -european-wide 18 -levein 18 -marlohe 18 -monomoy 18 -understaffing 18 -danter 18 -foreign-backed 18 -geele 18 -1,513 18 -ards 18 -crime-free 18 -cristofaro 18 -wpri 18 -90mins 18 -ball-boy 18 -molaison 18 -unshaken 18 -kelly-marie 18 -kralovec 18 -authentic-looking 18 -hondo 18 -sn 18 -edamame 18 -wehrey 18 -396,000 18 -chaur 18 -preschools 18 -thexton 18 -telemarketer 18 -denard 18 -gerling 18 -2004-2007 18 -anti-strike 18 -penalizes 18 -rosemead 18 -seasonality 18 -hidden-camera 18 -consonant 18 -repatriations 18 -sens 18 -tekken 18 -rehearses 18 -man-eaters 18 -cubero 18 -2,000-pound 18 -non-residents 18 -903 18 -hobgoblin 18 -1672 18 -2010s 18 -sasai 18 -pewsey 18 -gaudin 18 -anti-wall 18 -bhasin 18 -chasse 18 -synwell 18 -patera 18 -blokey 18 -parallax 18 -winograd 18 -kingship 18 -oboe 18 -scantlin 18 -ascencio 18 -difava 18 -bickoff 18 -chauffer 18 -morenci 18 -ohanneson 18 -1542 18 -hurtigruten 18 -punctuates 18 -law-makers 18 -ribbleton 18 -maggs 18 -cut-up 18 -emrah 18 -everywoman 18 -liebman 18 -harmonizing 18 -april-lee 18 -daubney 18 -al-haq 18 -avvenire 18 -nubile 18 -marionettes 18 -roupe 18 -deboer 18 -0915 18 -riri 18 -kallie 18 -akhdar 18 -dowell 18 -aksal 18 -aldhelm 18 -qiantang 18 -tolliday 18 -three-run 18 -jackrabbits 18 -ghillie 18 -homages 18 -fanshawe 18 -glovers 18 -breakages 18 -seven-months-old 18 -294,000 18 -buckskin 18 -debris-strewn 18 -kahlili 18 -boulange 18 -244,000 18 -soyuz-fg 18 -izz 18 -dorset-based 18 -shakirullah 18 -lalla 18 -whiteaker 18 -@americanair 18 -hair-loss 18 -double-barreled 18 -fenney 18 -back-facing 18 -genette 18 -abubakr 18 -silverberg 18 -psychometric 18 -joughin 18 -34-day 18 -lavie 18 -mason-cox 18 -soco 18 -12-bore 18 -morgen 18 -wuennenberg 18 -mosser 18 -adenoids 18 -volumetric 18 -sreesanth 18 -dervish 18 -heavener 18 -rijks 18 -bagheera 18 -crizotinib 18 -duursma 18 -kuehn 18 -jengo 18 -police-issue 18 -comins 18 -raunch 18 -19km 18 -kopchak 18 -samoyed 18 -gollop 18 -wdtn 18 -eggplants 18 -circumscribed 18 
-dautzenberg 18 -unchangeable 18 -raylee 18 -garrn 18 -evelin 18 -65-year-olds 18 -peeves 18 -live-tweet 18 -moore-bick 18 -newscorp 18 -first-world 18 -okan 18 -pittsburgh-area 18 -cross-strait 18 -super-agent 18 -11-3 18 -ihmc 18 -beddington 18 -narang 18 -fos 18 -sotu 18 -whelchel 18 -5ive 18 -beatboxing 18 -pricewaterhouse 18 -refreezes 18 -chronograph 18 -16oz 18 -6-pound 18 -riewoldt 18 -guzzler 18 -pacesetter 18 -578 18 -319,000 18 -ibanda 18 -tnsm 18 -attritional 18 -lapworth 18 -choreographing 18 -wajid 18 -intercountry 18 -dogmas 18 -domesticate 18 -williford 18 -chartrand 18 -3,143 18 -denuded 18 -bowens 18 -theyâ 18 -levett 18 -weisskopf 18 -928 18 -jigging 18 -tamiami 18 -sapin 18 -meridith 18 -rearm 18 -falsity 18 -scorcese 18 -69.8 18 -kantha 18 -@cnnlightyears 18 -tapir 18 -nanowires 18 -supercharge 18 -bronchopneumonia 18 -bioarts 18 -big-eyed 18 -calpe 18 -chintzy 18 -nalin 18 -bismark 18 -mustin 18 -welds 18 -1,840 18 -hafsa 18 -stroudsburg 18 -nicko 18 -apallic 18 -mid-on 18 -r5 18 -big-wave 18 -claud 18 -seckler 18 -eiu 18 -scas 18 -garrik 18 -yaqoub 18 -gairdner 18 -liliuokalani 18 -stealers 18 -hedy 18 -noren 18 -kosciusko-morizet 18 -redlich 18 -8.51 18 -goreski 18 -cymbals 18 -hangst 18 -sectioning 18 -aioli 18 -1k 18 -chinnock 18 -creasey 18 -passport-free 18 -signaller 18 -test-drive 18 -fast-spreading 18 -bluest 18 -siswick 18 -kstp 18 -yeatman 18 -reichelt 18 -tov 18 -taxi-driver 18 -glittered 18 -dog-owner 18 -cul-de-sacs 18 -work-family 18 -brownstones 18 -3.67 18 -cannold 18 -orlando-based 18 -jonsdottir 18 -hahahahaha 18 -yellowtail 18 -2080 18 -linux-based 18 -stobaugh 18 -freixenet 18 -keng 18 -haupt 18 -little-seen 18 -naudel 18 -puno 18 -retro-themed 18 -off-market 18 -40-1 18 -schepp 18 -morganella 18 -427,000 18 -prideful 18 -ibolya 18 -33c 18 -jotham 18 -masuda 18 -abbassian 18 -dead-rubber 18 -pliss 18 -saucier 18 -grifio 18 -gradiente 18 -kapalua 18 -kilner 18 -pusey 18 -valena 18 -beignet 18 -brereton 18 -tapers 18 -flutings 18 -changzhou 18 -gerra 18 -garrow 18 -galvani 18 -wicca 18 -vanni 18 -haine 18 -screensavers 18 -infernos 18 -hanway 18 -seven-seater 18 -surfleet 18 -re-boot 18 -rockfish 18 -beziers 18 -batalla 18 -11/5 18 -nbn 18 -noomi 18 -22:20 18 -cross-species 18 -cossett 18 -divot 18 -tenaha 18 -prow 18 -miscreant 18 -glistens 18 -bezzoubenko 18 -hula-hooping 17 -everlast 17 -bayona 17 -mtawarira 17 -flyersrights.org 17 -hornaday 17 -needell 17 -bionda 17 -naxos 17 -hanker 17 -blurts 17 -afolabi 17 -walton-le-dale 17 -haras 17 -abney 17 -two-block 17 -positas 17 -eluana 17 -50,000-year-old 17 -neshin 17 -ichabod 17 -pro-military 17 -elli 17 -encanto 17 -dinkle 17 -heinrichs 17 -spezia 17 -wassef 17 -vacillating 17 -mulroney 17 -brown-eyed 17 -seventh-minute 17 -v.p. 
17 -cambogia 17 -terracing 17 -rhinoceroses 17 -kees 17 -hairmyres 17 -grebes 17 -hebes 17 -.05 17 -epad 17 -backstrom 17 -heckel 17 -50,400 17 -makhdoom 17 -suncoast 17 -whelehan 17 -russian-led 17 -coachbuilders 17 -sydney-born 17 -blackdown 17 -joorabchian 17 -meno 17 -de-listed 17 -al-alwani 17 -archy 17 -115ft 17 -pascali 17 -badghis 17 -saltires 17 -gardea 17 -archicebus 17 -cordis 17 -printmaking 17 -allyn 17 -hard-sell 17 -restinga 17 -spokesmodel 17 -ohtake 17 -clementina 17 -cleathero 17 -castana 17 -gilmer 17 -nunu 17 -nagbe 17 -20,000-a-week 17 -cristales 17 -18mph 17 -edyta 17 -candi 17 -brusquely 17 -fenech 17 -anniston 17 -frailer 17 -tuma 17 -bissonette 17 -alluvial 17 -339,000 17 -teampoison 17 -half-acre 17 -berrow 17 -pollstar 17 -nicqueel 17 -bottoming 17 -mattern 17 -daikon 17 -papuan 17 -fallibility 17 -cybernetic 17 -tm5 17 -docomo 17 -bumfights 17 -42c 17 -ogar 17 -slomka 17 -3.47 17 -mbarushimana 17 -taliban-like 17 -pastika 17 -hifter 17 -alvensleben 17 -stress-induced 17 -laqonna 17 -hand-embroidered 17 -bonica 17 -denigration 17 -mekki 17 -cannabinoid 17 -geocaching 17 -lower-paying 17 -nytol 17 -fanatically 17 -snowless 17 -lévy 17 -wolgan 17 -1,019 17 -spill-related 17 -hanshaw 17 -kurowski 17 -ayana 17 -28-31 17 -yeadon 17 -baratta 17 -solveig 17 -brienne 17 -@pippatips 17 -dnschanger 17 -wuaki 17 -al-jaber 17 -leynaud 17 -directtv 17 -benedicte 17 -escott 17 -unorganized 17 -cabreja 17 -bromberg 17 -natura 17 -dotage 17 -fountainhead 17 -head-cam 17 -semi-private 17 -iin 17 -sapiecha 17 -veto-proof 17 -khadaroo 17 -sugarcoating 17 -island-hopping 17 -zirkle 17 -lokmeh 17 -abwehr 17 -botterill 17 -grunander 17 -leeroy 17 -outloud 17 -krongard 17 -co-main 17 -beatt 17 -eilman 17 -bongiovi 17 -fluorosis 17 -sackville 17 -46.2 17 -u.s.-cuban 17 -trewhella 17 -brita 17 -stokley 17 -jaimee-lee 17 -barkers 17 -asiasat 17 -quizzically 17 -cobbold 17 -d-calif. 17 -glavin 17 -cornmeal 17 -blue-sky 17 -abusively 17 -argonne 17 -hague-based 17 -zagaris 17 -mournfully 17 -lightheartedly 17 -jlens 17 -lapis 17 -bamfield 17 -florencia 17 -seven-second 17 -out-gunned 17 -laywer 17 -pre-agreed 17 -panagopoulos 17 -160gb 17 -commandeering 17 -spectres 17 -snake-handling 17 -perryville 17 -mason-sesay 17 -earthworks 17 -conille 17 -tisa 17 -orica 17 -jun. 
17 -juni 17 -mutinied 17 -jamiat 17 -kleiman 17 -dronestagram 17 -grahams 17 -tory-held 17 -grenda 17 -willets 17 -legitimised 17 -ballymoney 17 -miles-long 17 -sierras 17 -nighthawks 17 -mandal 17 -fader 17 -qayoumi 17 -solon 17 -jas 17 -shangri 17 -mouseketeer 17 -ebor 17 -c40 17 -nonpayment 17 -masako 17 -lika 17 -annulus 17 -low-fare 17 -28mm 17 -tuoi 17 -17-acre 17 -fonteyn 17 -shafak 17 -historics 17 -bullmastiffs 17 -52mph 17 -moqbel 17 -sathwik 17 -rough-hewn 17 -65.9 17 -1:08 17 -kaen 17 -1-month-old 17 -borallo 17 -hand-finished 17 -copyist 17 -lhcb 17 -douvall 17 -40k 17 -espling 17 -450m 17 -mathurin 17 -instep 17 -d'autet 17 -insistently 17 -inoculate 17 -riet 17 -szychulski 17 -kneaded 17 -seventy-seven 17 -predeceased 17 -ottosen 17 -incomings 17 -tarring 17 -bessbrook 17 -wölk 17 -kalpoe 17 -pepin 17 -57029 17 -900th 17 -loincloths 17 -reigh 17 -pavon 17 -antonino 17 -hilli 17 -microprocessors 17 -gersten 17 -hadeed 17 -hapner 17 -anti-histamine 17 -sacchetti 17 -weerawansa 17 -new-ball 17 -grobler 17 -274637 17 -sartorially 17 -crocks 17 -guevares 17 -edmonston 17 -arghandab 17 -clavin 17 -mclaw 17 -134million 17 -geping 17 -universalist 17 -garrulous 17 -themis 17 -lewine 17 -half-buried 17 -mclaughlan 17 -rowen 17 -queen-in-waiting 17 -bevvy 17 -felt-tip 17 -barisan 17 -hourigan 17 -porthcurno 17 -english-style 17 -bleat 17 -156th 17 -adkin 17 -long-abandoned 17 -abberley 17 -pv 17 -bulawka 17 -wotif.com 17 -abrahamian 17 -daraz 17 -low-back 17 -mcilhenny 17 -pogson 17 -erdal 17 -soloing 17 -dartboard 17 -mwaka 17 -kram 17 -nellum 17 -afful 17 -overshooting 17 -ranier 17 -contracture 17 -formalizing 17 -baildon 17 -13-acre 17 -sanjey 17 -sitz 17 -fall-outs 17 -kading 17 -bi-racial 17 -kerchers 17 -earthworm 17 -zilberstein 17 -platon 17 -29,035 17 -tombaugh 17 -experimenter 17 -cubetto 17 -dami 17 -grindler 17 -match-fit 17 -mukalla 17 -telegraphy 17 -urquiza 17 -seventh-largest 17 -zyuganov 17 -slateford 17 -cnn-affiliate 17 -keino 17 -half-decade 17 -sollers 17 -squaretrade 17 -cls 17 -unstudied 17 -generalization 17 -puryear 17 -18,750 17 -lillia 17 -hominem 17 -zeinab 17 -nameplates 17 -doubleday 17 -langdell 17 -chavira 17 -whoopee 17 -birders 17 -signallers 17 -m.i.a 17 -brizendine 17 -teem 17 -ryall 17 -lalesh 17 -ilsa 17 -notaries 17 -aperol 17 -leso 17 -00:05 17 -calenders 17 -dawna 17 -schmaltzy 17 -andalucian 17 -mistle 17 -mujahedin-e-khalq 17 -galilean 17 -ystrad 17 -incapacitation 17 -origone 17 -spiderlings 17 -maffeo 17 -7-day 17 -upsee 17 -ashes-winning 17 -megadrought 17 -bodices 17 -sunetra 17 -peyote 17 -105mm 17 -akaila 17 -bootlegger 17 -heiser 17 -1:23 17 -mokena 17 -beatdown 17 -dobbed 17 -swiftness 17 -anderson-lopez 17 -two-months 17 -franjic 17 -susanto 17 -chowchilla 17 -16-25 17 -pilton 17 -krenski 17 -thwack 17 -51.9 17 -traves 17 -rawhide 17 -miyako 17 -gaiety 17 -levitch 17 -7,750 17 -herbalists 17 -yasushi 17 -scraggly 17 -stagecraft 17 -letzigrund 17 -eri 17 -16-stone 17 -vehicle-to-vehicle 17 -taraji 17 -hellen 17 -angarsk 17 -hamdiya 17 -south-westerly 17 -aecio 17 -backus 17 -post-coital 17 -hadrosaurs 17 -gintz 17 -1260 17 -baden-wuerttemberg 17 -azraq 17 -kissel 17 -glonass 17 -zohreh 17 -pigott 17 -toland 17 -labour-led 17 -aboodowleh 17 -babestation 17 -despiegelaere 17 -pro-women 17 -washtub 17 -carpetright 17 -6ft-tall 17 -wttg 17 -orascom 17 -empson 17 -:00 17 -142.4 17 -job-hunting 17 -zavvi 17 -batfish 17 -long-stay 17 -vivarium 17 -churchyards 17 -madudu 17 -komlani 17 -ishan 17 -sizwe 17 -tonner 17 -fast-developing 17 
-resemblances 17 -cira 17 -alexandro 17 -defensiveness 17 -raveesh 17 -dehydrating 17 -variances 17 -acp 17 -mechelen 17 -acu 17 -#gop 17 -plaxico 17 -eyeshadows 17 -pano 17 -amatil 17 -schistosomiasis 17 -30-50 17 -changjiang 17 -pettijohn 17 -sw7 17 -500-year 17 -hamas-controlled 17 -nonoo 17 -wincanton 17 -mingdong 17 -thames-side 17 -northport 17 -stonebridge 17 -herodium 17 -soboroff 17 -corot 17 -zili 17 -lazicki 17 -avielle 17 -plainspoken 17 -retards 17 -chubbier 17 -priestesses 17 -prynt 17 -subordination 17 -schramm 17 -kettlebell 17 -cheruiyot 17 -tambopata 17 -wexner 17 -cayetana 17 -re-growth 17 -fucking 17 -shuanghui 17 -ministered 17 -avg 17 -ex-communicated 17 -eight-speed 17 -criminalises 17 -canopied 17 -drunker 17 -bomb-laden 17 -regin 17 -bullfinch 17 -sicilians 17 -flashiest 17 -playmaking 17 -prize-giving 17 -baltimore/washington 17 -roskell 17 -tarnawskyj 17 -kassandra 17 -clintonville 17 -9.77 17 -estancia 17 -waterland 17 -209p/linear 17 -hasib 17 -eib 17 -brumby 17 -paltz 17 -ziegfeld 17 -braless 17 -yohn 17 -rowson 17 -km/hr 17 -impostors 17 -78kg 17 -nedimyer 17 -schmidheiny 17 -gangwon 17 -plagiocephaly 17 -dda 17 -bamberger 17 -pinajian 17 -guillot-guyard 17 -fittipaldi 17 -wederell 17 -littlehales 17 -inheritor 17 -kayhan 17 -1024 17 -plutocrats 17 -rain-drenched 17 -curto 17 -moneywatch 17 -loots 17 -detlef 17 -hudak 17 -#tbt 17 -ccl4 17 -look-at-me 17 -ex-microsoft 17 -seashell 17 -54.99 17 -'06 17 -aosta 17 -housebuilders 17 -rocina 17 -275million 17 -nusrah 17 -haver 17 -moneymakers 17 -ett 17 -xfor 17 -unprintable 17 -soporific 17 ->> 17 -gryce 17 -smidgeon 17 -unmodified 17 -nucci 17 -fessed 17 -roaster 17 -post-polio 17 -alycia 17 -stroe 17 -summariser 17 -21:57 17 -evensong 17 -xanadu 17 -mintor 17 -wfan 17 -india-pakistan 17 -tuttoilmondo 17 -dmgt 17 -megalopolis 17 -kilicdaroglu 17 -earpieces 17 -spielplatz 17 -balchin 17 -mischaracterize 17 -summerlin 17 -gaggenau 17 -ewins 17 -mabe 17 -brander 17 -xujiayao 17 -pariser 17 -cruft 17 -tama 17 -league-educated 17 -alweiss 17 -1725 17 -haltwhistle 17 -trusses 17 -lubin 17 -belfodil 17 -marymount 17 -perpetu 17 -blosom 17 -lightbody 17 -shiffman 17 -avers 17 -gettleman 17 -d'alessandro 17 -gallup-healthways 17 -terrill 17 -mogae 17 -12-pound 17 -alben 17 -0.56 17 -steelman 17 -worn-down 17 -equestrianism 17 -nwaiwu 17 -louka 17 -2.41 17 -monstrously 17 -58.4 17 -58.3 17 -homa 17 -excitingly 17 -janina 17 -headcam 17 -elfsborg 17 -ucles 17 -tuason 17 -billiet 17 -knobbly 17 -manhattanites 17 -faithfulness 17 -trekdesk 17 -newly-developed 17 -behzad 17 -cragside 17 -66ft 17 -abc30 17 -4,493 17 -myvouchercodes.co.uk 17 -salmonellosis 17 -sebah 17 -south-easterly 17 -455,000 17 -beynon 17 -iacub 17 -disruptors 17 -mangal 17 -freiberg 17 -9.98 17 -hijazi 17 -yegazu 17 -gruenther 17 -tarsoly 17 -fogo 17 -civic-minded 17 -netropolitan 17 -2,224 17 -three-block 17 -vuk 17 -00:33 17 -00:30 17 -round-ups 17 -maariv 17 -wxia-tv 17 -sempers 17 -energizes 17 -3d-printing 17 -mxe 17 -parubiy 17 -sidime 17 -higher-grade 17 -23:16 17 -levanas 17 -non-russian 17 -zarni 17 -17-19 17 -msrp 17 -aggregators 17 -osiel 17 -movie-makers 17 -100w 17 -1001 17 -wannerton 17 -cac-40 17 -garang 17 -islan 17 -down-home 17 -even-par 17 -bowcock 17 -962 17 -state-led 17 -sugarhood 17 -schmit 17 -style-conscious 17 -fotouh 17 -cheriegate 17 -cost-sharing 17 -moreton-in-marsh 17 -slatter 17 -picture-taking 17 -olisa 17 -koshinsky 17 -kamanzi 17 -meraj 17 -foretaste 17 -arsia 17 -visek 17 -14-count 17 -siobhain 17 -12-metre 17 
-reoccurrence 17 -scooted 17 -pro-republican 17 -israel-free 17 -temba 17 -write-down 17 -southwesterly 17 -deangelis 17 -prophetically 17 -20-yards 17 -baylay 17 -disarmingly 17 -haynie 17 -erdoğan 17 -posnanski 17 -digitisation 17 -5.85 17 -lyvette 17 -marcellino 17 -dehart 17 -q13fox 17 -aquavit 17 -udoaka 17 -parols 17 -reinstein 17 -butt-head 17 -blow-drying 17 -meloy 17 -skate-off 17 -kdp 17 -belghar 17 -viggo 17 -g.k. 17 -62.4 17 -62.2 17 -2,012 17 -timisoara 17 -deferens 17 -tentacled 17 -mumm 17 -swiveling 17 -tiharihondi 17 -vise 17 -muktar 17 -buglife 17 -roocroft 17 -upfronts 17 -non-catholics 17 -check-outs 17 -off-the-charts 17 -groundhogs 17 -codeshare 17 -datia 17 -gali 17 -idp 17 -selam 17 -cartee 17 -civil-military 17 -mistranslation 17 -ersan 17 -nobre 17 -market-rate 17 -dadkhah 17 -imie 17 -family-based 17 -197,000 17 -cianna 17 -josipovic 17 -fialho 17 -ownphones 17 -jamessalmon79 17 -storyboards 17 -19th-minute 17 -warks 17 -19-mile 17 -bechdel 17 -ferre 17 -10.32 17 -czeslaw 17 -11,600 17 -backhands 17 -calley 17 -81st-minute 17 -hansberry 17 -hansons 17 -thermodynamics 17 -rctv 17 -27g 17 -camren 17 -14-11 17 -jif 17 -hot-seat 17 -retells 17 -memorialised 17 -ponty 17 -kelderman 17 -spozhmai 17 -magazine-style 17 -chelmsley 17 -cut-rate 17 -gavrilov 17 -tutera 17 -neoliberal 17 -elumelu 17 -barreda 17 -classist 17 -animists 17 -i-25 17 -healthmap 17 -neustadter 17 -extracorporeal 17 -sacrum 17 -drobny 17 -poloncarz 17 -jareds 17 -war-fighting 17 -jamon 17 -bratty 17 -maumelle 17 -pre-clinical 17 -23:39 17 -23:38 17 -tift 17 -bijl 17 -steakhouses 17 -oppenheim 17 -focaccia 17 -wakeman 17 -squashes 17 -wnyc 17 -facebooking 17 -geoghegan 17 -left-footer 17 -tenosique 17 -el-hadji 17 -porting 17 -stratofortress 17 -8.17 17 -11.31 17 -lacanivalu 17 -long-life 17 -meyerson 17 -mcclarnon 17 -ext 17 -crandon 17 -rusev 17 -icaza 17 -renvek 17 -urfa 17 -graziotti 17 -cannizzaro 17 -riesending 17 -batang 17 -quasid 17 -militaria 17 -ferreira-carrasco 17 -hildwin 17 -deliciano 17 -endow 17 -0808-272-0808 17 -3.98 17 -silver-coloured 17 -nnamdi 17 -leciester 17 -pillared 17 -chhatrapati 17 -mid-section 17 -facemask 17 -40,000-plus 17 -two-year-long 17 -minuted 17 -somatosensory 17 -ultra-religious 17 -khdair 17 -neli 17 -yahia 17 -malle 17 -export-led 17 -zegers 17 -kanga 17 -misapplied 17 -peleg 17 -prinholato 17 -lock-in 17 -342,000 17 -pascucci 17 -hardiest 17 -castlemorton 17 -paltalk 17 -nanosecond 17 -pan-fried 17 -razzak 17 -acclimatisation 17 -pammy 17 -figure-flattering 17 -gorey 17 -povich 17 -3-month 17 -reeman 17 -davian 17 -sajedinia 17 -earnt 17 -17-strong 17 -hoag 17 -blasberg 17 -jenesse 17 -modestus 17 -coldblooded 17 -jeopardises 17 -lagat 17 -4.37 17 -ben-yishai 17 -uclan 17 -282,000 17 -amory 17 -trapwire 17 -b-movies 17 -gadson 17 -tank-like 17 -ceaselessly 17 -knudstorp 17 -oregonlive.com 17 -spoilsport 17 -spidercam 17 -al-qaradawi 17 -indignados 17 -demigod 17 -spaans 17 -utor 17 -lilburn 17 -2million-a-year 17 -jambalaya 17 -ky3 17 -cowan-dickie 17 -angelini 17 -zimbardo 17 -bramham 17 -winborn 17 -cwele 17 -bradford-born 17 -siar 17 -hankies 17 -mordecai 17 -hayhoe 17 -desert-like 17 -caffeine-free 17 -drugged-up 17 -ex-teammate 17 -monetarily 17 -cassady 17 -arakanese 17 -pontcanna 17 -44-page 17 -laetitia 17 -1627 17 -32-week 17 -landré 17 -kharey 17 -lightheaded 17 -beach-bound 17 -cyberthreats 17 -137.5 17 -superspeedway 17 -maglaya 17 -bandura 17 -bruer 17 -disaster-response 17 -kashiwa 17 -katanga 17 -four-stroke 17 -saginor 17 -in-joke 17 -shel 
17 -wenzel 17 -lewisohn 17 -clague 17 -deforest 17 -tory-run 17 -yushan 17 -wegg-prosser 17 -back-country 17 -hatin 17 -13-strong 17 -oaklands 17 -crouth 17 -gaxiola 17 -aversive 17 -anti-brussels 17 -non-small 17 -rees-jones 17 -machining 17 -cecilio 17 -khansa 17 -tosha 17 -perarnau 17 -priebe 17 -nelsons 17 -brumlow 17 -hyphernkemberly 17 -laser-based 17 -sinister-looking 17 -wnt 17 -zdeno 17 -kronthaler 17 -amerasians 17 -lighter-than-air 17 -poitiers 17 -jack3d 17 -zalman 17 -heretofore 17 -categoric 17 -cardell 17 -authenticating 17 -sandboarding 17 -schnyder 17 -hettrick 17 -bayfield 17 -218million 17 -55937 17 -champa 17 -anti-world 17 -underpowered 17 -needled 17 -heena 17 -warstler 17 -weaponize 17 -flighted 17 -jeana 17 -yaroslavsky 17 -ernstein 17 -laubach 17 -stax 17 -tilburg 17 -all-knowing 17 -palouse 17 -hapifork 17 -subsumed 17 -bougainville 17 -noblest 17 -vredefort 17 -cbs5 17 -mesac 17 -metered 17 -freiwald 17 -sura 17 -nine-person 17 -gilgamesh 17 -trepanning 17 -6.0-magnitude 17 -agressive 17 -rutshuru 17 -ex-business 17 -darlaston 17 -underpaying 17 -okwanyama 17 -longings 17 -crà 17 -brynner 17 -hadad 17 -33p 17 -medlin 17 -1,000-a-week 17 -citters 17 -waynesville 17 -curtis-taylor 17 -accretion 17 -laube 17 -ravenswood 17 -tramping 17 -1,660 17 -herlihy 17 -yensi 17 -moisturizers 17 -thematically 17 -march-grier 17 -hendersons 17 -self-diagnose 17 -4.59 17 -wescott 17 -sharia-compliant 17 -skofic 17 -alarmists 17 -abarr 17 -pre-implantation 17 -carro 17 -handbooks 17 -war-scarred 17 -garmback 17 -grieves-cook 17 -ashtyn 17 -golfed 17 -lazarat 17 -felted 17 -larimore 17 -absolutism 17 -dunsborough 17 -malaki 17 -sanctities 17 -jump-starting 17 -anti-torture 17 -gulden 17 -alday 17 -hollingshead 17 -epicurean 17 -holzman 17 -sarre-union 17 -harzi 17 -flatford 17 -stalagmite 17 -tax-cutting 17 -ulladulla 17 -ice-skater 17 -smucker 17 -hipstamatic 17 -footstep 17 -ultramist 17 -lambe 17 -nail-biter 17 -toxics 17 -zarrillo 17 -meggie 17 -eligon 17 -1643 17 -erste 17 -kusa-tv 17 -sexualise 17 -bann 17 -+5 17 -junk-food 17 -janez 17 -sivasspor 17 -staubach 17 -newsmagazine 17 -lumigrids 17 -muntadhar 17 -greyson 17 -cyo 17 -lyndal 17 -discontinuation 17 -high-spending 17 -ex-home 17 -house-senate 17 -ewelina 17 -grunted 17 -neckerchief 17 -phonebox 17 -traigh 17 -workhouses 17 -maccoy 17 -22,200 17 -homebuyer 17 -white-power 17 -nut-free 17 -legionary 17 -unalienable 17 -gamed 17 -myrta 17 -stela 17 -koofi 17 -parsa 17 -7:26 17 -stephanopolous 17 -898 17 -ahcc 17 -société 17 -akash 17 -scousewives 17 -horgan-wallace 17 -anastasiya 17 -greatfire.org 17 -farias 17 --36 17 -chola 17 -twincities.com 17 -minority-owned 17 -cadell 17 -16mph 17 -fxx 17 -tabun 17 -tkts 17 -salonika 17 -unbounded 17 -submitters 17 -ste 17 -near-space 17 -panhellenic 17 -1985-86 17 -kezman 17 -filers 17 -scaremonger 17 -brooks-dutton 17 -debase 17 -flayed 17 -gsces 17 -chigvintsev 17 -ototo 17 -aglow 17 -demissie 17 -vatanka 17 -boules 17 -gcn 17 -two70 17 -crawleys 17 -unscrew 17 -cinematographers 17 -yali 17 -altarpiece 17 -mauni 17 -meara 17 -yorkies 17 -pymble 17 -untraditional 17 -porco 17 -kratt 17 -herschend 17 -signhild 17 -uselessness 17 -665,000 17 -lunchbreak 17 -sitara 17 -mcgreskin 17 -onofrio 17 -unmapped 17 -nicolás 17 -busbice 17 -unsteadiness 17 -redcross 17 -winklevosses 17 -relativistic 17 -mpi 17 -non-latinos 17 -carloads 17 -morelle 17 -penebre 17 -fourth-best 17 -22-mile 17 -natarajan 17 -tomasky 17 -fastest-ever 17 -abberton 17 -one-litre 17 -chat-show 17 -wasnâ 17 
-colour-blind 17 -kua 17 -ambreen 17 -drabble 17 -politically-correct 17 -shcherbakov 17 -sibal 17 -l.a.-based 17 -lead-lined 17 -crookes 17 -pre-surgery 17 -dustyn 17 -traub 17 -self-induced 17 -66-1 17 -c&a 17 -futura 17 -unlearn 17 -googler 17 -6.29 17 -switchboards 17 -dancehall 17 -1661 17 -u.s.-africa 17 -kalra 17 -scrunchies 17 -kepu 17 -cherise 17 -ordovician 17 -shweyga 17 -rhiya 17 -twerked 17 -m.e. 17 -kingfield 17 -wolfpack 17 -fuxing 17 -niners 17 -multi-award 17 -1553 17 -barracked 17 -dettol 17 -57.8 17 -moroz 17 -alarcon 17 -pozzi 17 -raluca 17 -whip-round 17 -fukumaru 17 -floyd-henry 17 -cosham 17 -rukhsar 17 -m15 17 -zoë 17 -interconnectedness 17 -yarmolenko 17 -deselection 17 -valterri 17 -goymer 17 -stancil 17 -kozlenko 17 -clean-ups 17 -rous 17 -love/hate 17 -ov 17 -mizzou 17 -gyrations 17 -400km 17 -007-style 17 -orenburg 17 -chloie 17 -unmoored 17 -eleby 17 -diastolic 17 -jarret 17 -bustled 17 -flagpoles 17 -kincannon 17 -19-21 17 -62.1 17 -hazle 17 -right-of-centre 17 -pessl 17 -anza 17 -tostao 17 -gyunel 17 -heidrun 17 -bines 17 -khushal 17 -3.33 17 -rasp 17 -fokkens 17 -idolaters 17 -duckduckgo 17 -off-payroll 17 -rilee 17 -belli 17 -junkers 17 -crif 17 -ketley 17 -heritable 17 -exascale 17 -tsuchiya 17 -raouf 17 -cartonnage 17 -hamalaw 17 -ozel 17 -klong 17 -pupping 17 -sandton 17 -outspending 17 -spiciness 17 -norbu 17 -mahtani 17 -dorito 17 -hebditch 17 -gracida 17 -adebiyi 17 -non-judicial 17 -photo-bombed 17 -mcclellen 17 -hebshi 17 -11km 17 -nonce 17 -4,922 17 -lolitas 17 -hövding 17 -273,000 17 -whimsically 17 -comission 17 -bottom-left 17 -heijst 17 -ryon 17 -1,625 17 -faddish 17 -shaqab 17 -23:08 17 -chatel 17 -buccino 17 -jutted 17 -gunningham 17 -antoniello 17 -averie 17 -diamond-coated 17 -tufty 17 -hongqiao 17 -naa 17 -shush 17 -lachiram 17 -schimpf 17 -malam 17 -blake-bowell 17 -etelin 17 -drought-hit 17 -redoubtable 17 -coaldale 17 -sardo 17 -sanchez-blazquez 17 -netjets 17 -casarona 17 -republish 17 -1000s 17 -5,125 17 -marjoram 17 -mintram 17 -bear-hug 17 -kayser 17 -sihombing 17 -gerland 17 -single-mindedness 17 -woodridge 17 -fortieth 17 -pajitnov 17 -144-year-old 17 -spinetti 17 -sukkari 17 -mearig 17 -broiled 17 -ice-breaker 17 -popkin 17 -tree-ring 17 -every1 17 -de-clutter 17 -lerin 17 -pendry 17 -tombola 17 -biebs 17 -mahiedine 17 -wachtstetter 17 -vartanian 17 -lurchers 17 -waster 17 -170mph 17 -itaipu 17 -renelique 17 -magmatic 17 -boscawen 17 -nuits 17 -cem 17 -two-by-four 17 -lowest-rated 17 -said. 
17 -fee-for-service 17 -centerplate 17 -congreve 17 -pouted 17 -cut-and-paste 17 -northeasterly 17 -ovell 17 -mems 17 -mud-slinging 17 -dyffryn 17 -diverticulitis 17 -orekunrin 17 -ormiston 17 -treeline 17 -1,000-a-month 17 -lower-middle 17 -rishikesh 17 -symbiosis 17 -ujiri 17 -muncey 17 -kuhns 17 -eldar 17 -vecuronium 17 -jasen 17 -prothero 17 -kulwin 17 -letty 17 -eagled 17 -larkspur 17 -mysinglefriend.com 17 -maxian 17 -ferrybridge 17 -test-fires 17 -devalon 17 -livity 17 -rapidly-growing 17 -whenary 17 -krasnoperov 17 -beersheba 17 -iridescence 17 -negligee 17 -hanne 17 -rimpac 17 -castleman 17 -idgaf 17 -traviata 17 -griffen 17 -mthembu 17 -yamhill 17 -nauseam 17 -spectating 17 -dears 17 -meteor-like 17 -bearson 17 -antidotes 17 -camaros 17 -digress 17 -wayuu 17 -pettengell 17 -tecumseh 17 -dawlat 17 -yaghi 17 -kansagra 17 -orestis 17 -lana'i 17 -rosseau 17 -13-17 17 -lb1 17 -father-figure 17 -jarl 17 -13per 17 -numpty 17 -below-zero 17 -elastane 17 -sub-contractors 17 -01:21 17 -skulking 17 -moesha 17 -subhani 17 -toni-ann 17 -off-cuts 17 -strider 17 -kandilian 17 -verdin 17 -2.84 17 -godmothers 17 -sun-loungers 17 -10-to-1 17 -arantes 17 -erects 17 -limavady 17 -cowl 17 -seventh-seeded 17 -de-mining 17 -eventbrite 17 -sowa 17 -estero 17 -serota 17 -mouthpieces 17 -fgcu 17 -12-night 17 -rubbernecking 17 -unoriginal 17 -pompoms 17 -cozier 17 -rear-mounted 17 -counterfeited 17 -pollens 17 -abdul-malik 17 -ballinasloe 17 -lympstone 17 -30,000-a-week 17 -hot-blooded 17 -snoops 17 -old-timers 17 -junes 17 -labour-snp 17 -zayne 17 -maney 17 -feuer 17 -dace 17 -ex-spurs 17 -giant-killers 17 -933 17 -rasht 17 -non-conformist 17 -movin 17 -95.7 17 -wureh 17 -lishman 17 -l'atelier 17 -habtoor 17 -wineland 17 -.14 17 -torrevieja 17 -fly-by-wire 17 -kolchin 17 -on-course 17 -tiptoed 17 -1510 17 -qaradawi 17 -e-types 17 -trivino 17 -.500 17 -erythropoietin 17 -all-dancing 17 -gook 17 -18lbs 17 -eastell 17 -phillippines 17 -biomolecules 17 -4:17 17 -boohoo.com 17 -billet 17 -gobel 17 -student-teacher 17 -pro-gm 17 -mix-a-lot 17 -twinkly 17 -chador 17 -shammy 17 -mutschke 17 -super-pac 17 -forcefulness 17 -harari 17 -59.5 17 -astounds 17 -smartflash 17 -anti-epilepsy 17 -over-claiming 17 -ainscow 17 -edmundsson 17 -kahl 17 -tear-stained 17 -periodicals 17 -battery-related 17 -outdoing 17 -bre 17 -22:53 17 -tracers 17 -damiano 17 -3.72 17 -ausnes 17 -krasny 17 -plaiting 17 -longhaul 17 -helal 17 -denville 17 -gold-leaf 17 -quadrophenia 17 -kickable 17 -speidi 17 -al-bassam 17 -under-prepared 17 -ladrera 17 -whangarei 17 -vawa 17 -yaron 17 -140kg 17 -deportment 17 -zhongxing 17 -halik 17 -skateistan 17 -adrar 17 -inimical 17 -tangmere 17 -nunthorpe 17 -1,003 17 -hopley 17 -shojaei 17 -wednesfield 17 -scrawls 17 -apcs 17 -blind-sided 17 -tufail 17 -kepler-438b 17 -edinburgh-born 17 -ditties 17 -woodcutter 17 -maull 17 -tweedie 17 -janeane 17 -heavy-set 17 -specialisation 17 -weak-minded 17 -ahearn 17 -38-foot 17 -kravis 17 -maleenee 17 -pay-and-display 17 -gorgonzola 17 -2011-now 17 -sportscars 17 -liquified 17 -ecall 17 -ehab 17 -self-destructed 17 -oefelein 17 -nakumatt 17 -shamansky 17 -microsieverts 17 -sev 17 -underlay 17 -virally 17 -u.s.a 17 -halmstad 17 -soopun 17 -bunching 17 -baptistao 17 -kinloch 17 -holacracy 17 -arap 17 -ajifa 17 -side-kick 17 -geodetic 17 -rapfogel 17 -wmsc 17 -shanon 17 -763 17 -courvoisier 17 -tinkerman 17 -daube 17 -9km 17 -gulbuddin 17 -glycemic 17 -frequent-flier 17 -sublet 17 -antiterrorism 17 -samitivej 17 -big-rig 17 -mulcahey 17 -2-years-old 17 -gaca 17 
-luckhurst 17 -foggett 17 -semih 17 -broaching 17 -broadfoot 17 -2:23 17 -blood-brain 17 -qinetiq 17 -emotiv 17 -equivocal 17 -0200 17 -vryenhoef 17 -bowery-falco 17 -pamlico 17 -seventy-nine 17 -refracting 17 -ticklish 17 -potocari 17 -gluons 17 -indo-pacific 17 -full-colour 17 -caddied 17 -jobes 17 -augurs 17 -herard 17 -oped 17 -gish 17 -r-nevada 17 -bioscience 17 -296,000 17 -toynbee 17 -magnanti 17 -noehren 17 -71.7 17 -exerciser 17 -papeete 17 -pentreath 17 -news4jax 17 -pièce 17 -pleasingly 17 -nduwawe 17 -tasnim 17 -younger-looking 17 -helford 17 -kenitzer 17 -nikitin 17 -pop-art 17 -guff 17 -1534 17 -berwickshire 17 -pilau 17 -kucka 17 -bertenshaw 17 -kipstr 17 -proof-of-life 17 -modalu 17 -humza 17 -adamjshergold 17 -bazzle 17 -over-confidence 17 -ryazanskiy 17 -msd 17 -marmo 17 -5:55 17 -firearm-related 17 -gleams 17 -rosaries 17 -andermatt 17 -davington 17 -valueless 17 -20-team 17 -helgeland 17 -swalec 17 -oregonians 17 -igas 17 -amphetamine-like 17 -call-to-arms 17 -agender 17 -moonlighted 17 -ordinariness 17 -stranathan 17 -nonprescription 17 -barsana 17 -dlc 17 -destructing 17 -zollitsch 17 -furries 17 -leviev 17 -cordray 17 -fantasises 17 -ruden 17 -16-game 17 -six-night 17 -preciousness 17 -huachuca 17 -rittenband 17 -ussery 17 -railfans 17 -ratzon 17 -3.52 17 -zao 17 -jewers 17 -clinica 17 -spleens 17 -extrapolating 17 -sleep-wake 17 -ladner 17 -gaugamela 17 -kenna 17 -b-17s 17 -andri 17 -zvezda 17 -01:17 17 -azeri 17 -amphora 17 -realestate.com.au 17 -presdient 17 -yipiii 17 -syrian-american 17 -suad 17 -gundy 17 -charie 17 -hootenanny 17 -bobbly 17 -dodley 17 -time-traveling 17 -satirising 17 -islam4uk 17 -stepanova 17 -mykhaylivskyy 17 -autoworkers 17 -taniyah 17 -moen 17 -rebuck 17 -kidane 17 -fastenings 17 -periodontitis 17 -39m 17 -duckmanton 17 -vandana 17 -devos 17 -tilly-may 17 -fondation 17 -pie-eating 17 -majali 17 -frempong 17 -shareholdings 17 -nudd 17 -590ft 17 -rhabdomyolysis 17 -u-s-a 17 -walkley 17 -phorose 17 -raich 17 -tmao 17 -addyman 17 -conveyancing 17 -kirisome 17 -titillated 17 -denesh 17 -fight-back 17 -self-starting 17 -43.4 17 -pergamon 17 -cha-ching 17 -negm 17 -a.k. 
17 -kapwepwe 17 -skink 17 -excercise 17 -collège 17 -lsl 17 -kamaishi 17 -booher 17 -futsal 17 -andreolli 17 -dejectedly 17 -child-minder 17 -zamorano 17 -secateurs 17 -gasoline-powered 17 -brandner 17 -occassion 17 -ingvild 17 -reggina 17 -cuckfield 17 -at-times 17 -wind-down 17 -casselberry 17 -hillis 17 -umma 17 -linker 17 -schacter 17 -1992-1995 17 -marrara 17 -humphry 17 -krenzler 17 -uwayezu 17 -imbibed 17 -mcdivitt 17 -manieri 17 -4.80 17 -ex-southampton 17 -22-second 17 -chaifetz 17 -orionids 17 -shyy 17 -kanagawa 17 -four-speed 17 -pookie 17 -hiro 17 -high-fiber 17 -vermonters 17 -polyunsaturated 17 -mccardell 17 -beighton 17 -kirksville 17 -once-powerful 17 -budongo 17 -bombonera 17 -white-supremacist 17 -college-level 17 -epitomize 17 -rimless 17 -kofman 17 -pachyderms 17 -low-sodium 17 -viator 17 -nematodes 17 -valk 17 -Álvarez 17 -6.12 17 -6.16 17 -paret 17 -shepherdson 17 -bandstands 17 -extravaganzas 17 -tanksley 17 -pinkel 17 -joileen 17 -navitus 17 -keesee 17 -9.48 17 -maudlin 17 -harmonise 17 -oscillated 17 -scuffing 17 -digney 17 -najiba 17 -nijel 17 -bearskins 17 -musselshell 17 -dombrovskis 17 -cubbyhole 17 -trebuchet 17 -angst-ridden 17 -mellie 17 -myness 17 -marinating 17 -greenert 17 -cervantez 17 -wieliczka 17 -favalora 17 -pinwheel 17 -d'estaing 17 -wargo 17 -post-impressionist 17 -after-dark 17 -brockler 17 -750g 17 -re-offended 17 -gatecrashing 17 -tasselled 17 -hypoglycemic 17 -book-keeper 17 -94.1 17 -sukuk 17 -mangrum 17 -mega-mansion 17 -arashiyama 17 -auto-erotic 17 -communes 17 -bootleggers 17 -chaytor 17 -roncero 17 -rødal 17 -84m 17 -race-car 17 -intones 17 -1470 17 -roke 17 -gricks 17 -cortinez 17 -heartwrenching 17 -sosnowski 17 -dries-jenkins 17 -shiite-majority 17 -steffensen 17 -crash-land 17 -win-at-all-costs 17 -macomber 17 -euxton 17 -zloty 17 -kristiana 17 -suttons 17 -extricating 17 -citrate 17 -jedlica 17 -afgooye 17 -romas 17 -arguello 17 -988 17 -skullduggery 17 -henkel 17 -slalomed 17 -ijen 17 -gravatt 17 -kusal 17 -84million 17 -kassigs 17 -late-summer 17 -itt 17 -80.5 17 -prezzo 17 -foucrault 17 -bloodsport 17 -juanmi 17 -grafite 17 -deki 17 -one-sentence 17 -slingsby 17 -85.7 17 -crabster 17 -hbs 17 -catley 17 -indecipherable 17 -lobato 17 -ridwan 17 -piutau 17 -lavern 17 -suburbanites 17 -shawqat 17 -80mm 17 -barnhill 17 -romanian-born 17 -cist 17 -anmar 17 -2,570 17 -simental 17 -yiqian 17 -cadw 17 -low-performing 17 -mvrdv 17 -impotency 17 -venture-capital 17 -rashford 17 -pomposity 17 -panchayat 17 -white-throated 17 -non-dairy 17 -wanderson 17 -demetria 17 -polyandry 17 -24lb 17 -regalado 17 -petras 17 -kopetsky 17 -mcilwraith 17 -penlee 17 -2.70 17 -kostrzewa 17 -scarification 17 -aquabumps 17 -150-pound 17 -pot-smoking 17 -mass-producing 17 -nro 17 -retroviruses 17 -buzakhar 17 -loginova 17 -lefkow 17 -predispositions 17 -machined 17 -díaz 17 -#oscars 17 -50st 17 -kahnweiler 17 -u.n.-brokered 17 -600-mile 17 -ghaffar 17 -ecovative 17 -turn-based 17 -long-scheduled 17 -michoacán 17 -uwingu 17 -summer-born 17 -marib 17 -coldwater 17 -collonges 17 -marauders 17 -rejuvenates 17 -39,999 17 -peppery 17 -machinegun 17 -rutten 17 -revokes 17 -9.69 17 -khafre 17 -capus 17 -kimron 17 -ss100 17 -pku 17 -eitan 17 -savviest 17 -erler 17 -hotan 17 -boriana 17 -exolance 17 -cafeteros 17 -buckby 17 -kayum 17 -peripatetic 17 -lenagan 17 -otwell 17 -burnouts 17 -shahidi 17 -80-plus 17 -noyer 17 -olbas 17 -plain-speaking 17 -steeping 17 -hoxha 17 -kitagawa 17 -samya 17 -blow-dries 17 -3.90 17 -rustler 17 -f-ing 17 -45s 17 -elfreth 17 -hartpury 17 
-uffindell 17 -lithosphere 17 -hermanstorfer 17 -zek 17 -cryptologists 17 -epistle 17 -belluci 17 -govortsova 17 -callington 17 -adra 17 -marcelli 17 -cbes 17 -gn 17 -26.99 17 -sadik-khan 17 -solicitous 17 -al-farhan 17 -then-18-year-old 17 -sinodinos 17 -wisbrod 17 -ynet 17 -'21 17 -whirls 17 -liguori 17 -1450 17 -1,069 17 -tempelhof 17 -janan 17 -country-style 17 -hullermann 17 -low-caste 17 -travyon 17 -bence 17 -bashfully 17 -reductive 17 -meira 17 -sonnen 17 -fasen 17 -doyle-price 17 -juric 17 -gerwig 17 -multichoice 17 -vedvik 17 -cahalan 17 -metro-goldwyn-mayer 17 -rarmoul-bouhadjar 17 -sardis 17 -double-winning 17 -pahigian 17 -touch-ups 17 -ojeikere 17 -lindsley 17 -azizi 17 -evangelina 17 -lucent 17 -21:43 17 -fineman 17 -schroer 17 -spokesman-review 17 -lizama 17 -diminution 17 -nobakht 17 -sqaure 17 -autoplay 17 -belay 17 -bleier 17 -prosocial 17 -couriered 17 -imprisonments 17 -fishguard 17 -5-foot-4 17 -ru-486 17 -police-style 17 -fft 17 -gontmakher 17 -shing 17 -neko 17 -odes 17 -bralyn 17 -barringer 17 -rear-ending 17 -venker 17 -kiniklioglu 17 -furloughing 17 -colten 17 -terabits 17 -darshana 17 -zegeye 17 -wheezy 17 -play-based 17 -hookahs 17 -westwick 17 -alin 17 -rugani 17 -wolnick 17 -timofey 17 -centuries-long 17 -jesslyn 17 -payamps 17 -mamhead 17 -kulasekara 17 -60-54 17 -cahow 17 -silesia 17 -haikou 17 -bascom 17 -rodbourne 17 -whig 17 -3-year-olds 17 -cayzer 17 -yucatán 17 -keohane 17 -zacharias 17 -converses 17 -jiali 17 -el-haddad 17 -salta 17 -clairvoy 17 -vegosen 17 -castellane 17 -janerio 17 -rampell 17 -extra-tropical 17 -nikes 17 -abdikadir 17 -square-shaped 17 -sciacca 17 -bohar 17 -adoni 17 -mwanza 17 -upmost 17 -radick 17 -o'briain 17 -encinitas 17 -brachioplasty 17 -24-28 17 -sotoudeh 17 -heathcote-drury 17 -reshuffles 17 -pierro 17 -voltages 17 -ship-shape 17 -geisbert 17 -17,700 17 -22k 17 -camron 17 -law-and-order 17 -alturas 17 -head-dress 17 -aukse 17 -mabona 17 -flatts 17 -bhoy 17 -36-inch 17 -dreamworld 17 -blotch 17 -wrvs 17 -rinchen 17 -montcuq 17 -cibolo 17 -knievel 17 -york/new 17 -non-domestic 17 -mischaracterizing 17 -latecomer 17 -scrushy 17 -anoint 17 -bluesky 17 -airfarewatchdog.com 17 -charkaoui 17 -backs-to-the-wall 17 -qurashi 17 -prescod 17 -iaconi-stewart 17 -jainaba 17 -knoche 17 -zhengfu 17 -ex-formula 17 -mecham 17 -riverina 17 -brzi 17 -m83 17 -duflot 17 -dictum 17 -u.s.-style 17 -benjie 17 -53-year 17 -sooliman 17 -al-ittihad 17 -unexceptional 17 -warilla 17 -tennessee-based 17 -80f 17 -tarangire 17 -schekman 17 -rosemann 17 -outram 17 -ruminating 17 -upwelling 17 -kihl-jae 17 -greilsamer 17 -takeimi 17 -gallogly 17 -ecdc 17 -yerkel 17 -lais 17 -bausch 17 -cap'n 17 -wtov 17 -wtoc 17 -tcs 17 -cassini-huygens 17 -elbaz 17 -ipf 17 -ipi 17 -chniti 17 -medivac 17 -krantz 17 -cà 17 -lawyering 17 -minnich 17 -fios 17 -well-beaten 17 -lawrey 17 -nides 17 -hughett 17 -spraining 17 -pomade 17 -ball-striking 17 -askarzada 17 -voorhees 17 -jurden 17 -heger 17 -nazeem 17 -334,000 17 -soroush 17 -vigna 17 -dhondt 17 -shortell 17 -douro 17 -bremmer 17 -forklifts 17 -lecour 17 -undoes 17 -parkville 17 -hollowell 17 -uhw 17 -energia 17 -thackery 17 -ransford 17 -extents 17 -neurosky 17 -21/7 17 -0.43 17 -re-investigation 17 -badreddine 17 -mccurdy 17 -vcu 17 -clear-the-air 17 -palmisano 17 -teign 17 -albumen 17 -preska 17 -coulrophobia 17 -arletha 17 -johnstons 17 -zumiez 17 -apophenia 17 -34-minute 17 -ireneusz 17 -intestate 17 -medi-clinic 17 -vt 17 -muskoka 17 -swirral 17 -gronowski 17 -gelada 17 -smash-up 17 -cruithne 17 -lupica 17 -81.7 17 
-farrakhan 17 -safety-net 17 -i-94 17 -larked 17 -tension-filled 17 -rs4 17 -rsm 17 -under-cooked 17 -spanners 17 -glendive 17 -okinawans 17 -usurpation 17 -jasim 17 -brockett 17 -thirdlove 17 -tauber 17 -hermés 17 -groote 17 -mongan 17 -bed-hopping 17 -uncaged 17 -synonyms 17 -nyetimber 17 -00:21 17 -akesson 17 -anti-imperialist 17 -pantiles 17 -deadline.com 17 -goalball 17 -sacom 17 -stefanik 17 -habat 17 -banns 17 -gabourey 17 -osper 17 -9-13 17 -sauna-like 17 -bm 17 -behrs 17 -ble 17 -gta5 17 -galleys 17 -23:24 17 -backward-looking 17 -legwarmers 17 -cohen-greene 17 -baume 17 -merc 17 -muhsen 17 -bloodsucking 17 -belly-up 17 -madlen 17 -seventy-three 17 -shapers 17 -11-storey 17 -cobbe 17 -2-foot 17 -gsu 17 -hipolito 17 -8,848 17 -lowrider 17 -dossi 17 -undrinkable 17 -solicits 17 -802.11 17 -gunmaker 17 -bacon-wrapped 17 -deep-blue 17 -immemorial 17 -icwa 17 -mcelhiney 17 -barahonas 17 -apis 17 -retrenchment 17 -anther 17 -mcgreevy 17 -tallia 17 -maxwellisation 17 -bullas 17 -sammut 17 -workin 17 -xochi 17 -kondek 17 -annadurai 17 -urey 17 -diogene 17 -super-powered 17 -matiz 17 -parroting 17 -technology-based 17 -3.80 17 -tookes 17 -values-based 17 -inquisitively 17 -amphoux 17 -dirir 17 -00:17 17 -meaney 17 -military-related 17 -fishenko 17 -moase 17 -jhung 17 -khanh 17 -kuang 17 -qssi 17 -2,999 17 -emma-grace 17 -518,000 17 -hemme 17 -louis-based 17 -60256 17 -misoprostol 17 -sundlun 17 -shipbreaking 17 -haweswater 17 -indentations 17 -gouna 17 -rolison 17 -mentos 17 -benignly 17 -reseller 17 -rickards 17 -141st 17 -nzili 17 -chongjin 17 -7.06 17 -drivetrain 17 -neurosis 17 -as-sahab 17 -ryong-hae 17 -jesson 17 -lemkus 17 -ppo 17 -0.61 17 -0.69 17 -nectarines 17 -full-speed 17 -makgatho 17 -aspie 17 -onge 17 -hurn 17 -wbal-tv 17 -4.06 17 -a24 17 -arakan 17 -nzeribe 17 -baldur 17 -sipson 17 -funster 17 -propyl 17 -zumyah 17 -blonder 17 -rossellini 17 -haught 17 -tacklers 17 -hitwise 17 -2,010 17 -marseillaise 17 -58329 17 -faroese 17 -eq 17 -hillsborough-style 17 -5/4 17 -orthopaedics 17 -tohinaka 17 -maalim 17 -reaffirmation 17 -five-months 17 -halevi 17 -uclh 17 -kibosh 17 -vasari 17 -duboc 17 -soenardi 17 -storari 17 -gusset 17 -abc6 17 -pissarro 17 -agag 17 -whines 17 -vvb 17 -00:46 17 -saldivar 17 -demetrios 17 -ics 17 -plumridge 17 -california-santa 17 -habashi 17 -croyde 17 -megaconus 17 -8:11 17 -18,400 17 -berezovskaya 17 -sawston 17 -super-injunction 17 -ruiter 17 -nine-darter 17 -thami 17 -ziyad 17 -wanna-be 17 -teensy 17 -francome 17 -bauke 17 -shepley 17 -high-schoolers 17 -hiromi 17 -wycliffe 17 -bombino 17 -mclintock 17 -tints 17 -snub-nosed 17 -heli 17 -trimesters 17 -antiaircraft 17 -white-footed 17 -mid-bedfordshire 17 -muntz 17 -scicluna 17 -barling 17 -shoot-off 17 -mahaffy 17 -perpetuation 17 -nefertiti 17 -orf 17 -jaywick 17 -coelacanth 17 -morter 17 -skyflash 17 -idler 17 -man-powered 17 -unselectable 17 -pooles 17 -125-pound 17 -gladman 17 -mid-rise 17 -peddles 17 -enache 17 -vatnajökull 17 -arness 17 -siv 17 -443,000 17 -funtasy 17 -cushingberry 17 -photonics 17 -102million 17 -freerunner 17 -ecuadoran 17 -hopkin 17 -maimonides 17 -cuvier 17 -10-carat 17 -gimbal 17 -liquidised 17 -ponied 17 -kidiaba 17 -suss 17 -fredo 17 -fredi 17 -#syria 17 -cornhill 17 -cornbleet 17 -420million 17 -12-16 17 -lebow 17 -bluestone 17 -rymer 17 -1757 17 -mid-america 17 -laugharne 17 -jeglum 17 -clowson 17 -selfie-obsessed 17 -seawalls 17 -mung 17 -acra 17 -subtypes 17 -fairbrass 17 -copestake 17 -bustin 17 -chron 17 -kennedale 17 -32e 17 -t-shaped 17 -ferrar 17 -chat-up 17 
-shafted 17 -53.4 17 -second-seeded 17 -becciu 17 -davontae 17 -brydson 17 -ultra-realistic 17 -niac 17 -causality 17 -non-active 17 -counter-ied 17 -grandes 17 -quillian 17 -maisonettes 17 -izmaylov 17 -grossinger 17 -4.23 17 -workaholism 17 -miller-young 17 -christabel 17 -hapag-lloyd 17 -bellatrix 17 -794 17 -110-year-old 17 -calzone 17 -resupplying 17 -salah-eldin 17 -bojack 17 -kuehne 17 -dequan 17 -saboteur 17 -damodaran 17 -obasi 17 -loaner 17 -occasionwear 17 -86.2 17 -simoncini 17 -overreacts 17 -now-disgraced 17 -tabare 17 -rinus 17 -vpa 17 -johannesburg-based 17 -ashjian 17 -supermaxi 17 -anxiousness 17 -2,025 17 -eappen 17 -pappy 17 -sipho 17 -schlank 17 -submerges 17 -martinsville 17 -stream-of-consciousness 17 -balatbat 17 -tjiong 17 -ottobock 17 -last-chance 17 -hand-grenade 17 -tamogami 17 -yet-to-be-released 17 -erzurum 17 -cawson 17 -tuneful 17 -1581 17 -1584 17 -babybel 17 -crinkly 17 -jarlett 17 -shambhala 17 -reserva 17 -torturer 17 -news/marist 17 -linoleum 17 -millercoors 17 -nordahl 17 -betsie 17 -tarun 17 -unsupportable 17 -roofe 17 -petetan 17 -canadarm2 17 -2mph 17 -moret 17 -colonnades 17 -fully-trained 17 -seagate 17 -trend-setter 17 -79.3 17 -79.5 17 -tradie 17 -zaibat 17 -183cm 17 -prentiss 17 -age-restricted 17 -nimitz-class 17 -sourovelis 17 -strb 17 -trunfio 17 -sloviansk 17 -kadie 17 -ded 17 -crispness 17 -measles-like 17 -izod 17 -ryaboi 17 -johannesson 17 -saed 17 -peppiatt 17 -kondal 17 -yrvind 17 -conflict-ridden 17 -mtonga 17 -airtel 17 -jingjing 17 -1987-88 17 -fabel 17 -safire 17 -fussell 17 -phebe 17 -taptalk 17 -milstead 17 -near-collision 17 -namur 17 -kelvedon 17 -backpedaled 17 -fayhan 17 -junkermeier 17 -horrorcore 17 -panamanian-flagged 17 -keepy 17 -research-based 17 -sporran 17 -garut 17 -karimova 17 -collectplus 17 -luvin 17 -yolkr 17 -yili 17 -wladyslaw 17 -decentralize 17 -seffrin 17 -stuart-cole 17 -klinefelter 17 -roedean 17 -superhydrophobic 17 -amaia 17 -levitas 17 -kah 17 -stupors 17 -qena 17 -kristyn 17 -pybus 17 -longline 17 -heworth 17 -3,000-strong 17 -11th-minute 17 -proximal 17 -incomprehension 17 -5mins 17 -miscanthus 17 -turbot 17 -d11 17 -youssou 17 -risher 17 -risheq 17 -meitner 17 -webmail 17 -bapu 17 -markosian 17 -wasit 17 -fatso 17 -gyros 17 -okee 17 -okey 17 -wagamama 17 -belty 17 -9:48 17 -windward 17 -61-year 17 -lanham 17 -weeks-old 17 -surroundweb 17 -moncada 17 -derakhshan 17 -narrow-angle 17 -arleigh 17 -alyami 17 -2-1/2 17 -hornig 17 -vásquez 17 -kulls 17 -aog 17 -levelup 17 -cannon-brookes 17 -shotts 17 -sultanahmet 17 -prinsengracht 17 -phonedog 17 -corrals 17 -subbuteo 17 -66.4 17 -survey-takers 17 -battle-tested 17 -physicals 17 -zev 17 -loxton 17 -202mph 17 -eighty-three 17 -paa 17 -96m 17 -unwound 17 -kleine 17 -beighley 17 -jalopy 17 -tax-paying 17 -take-two 17 -katsidis 17 -bobbles 17 -polperro 17 -berto 17 -pizzazz 17 -lusardo 17 -wellinghoff 17 -27-30 17 -hooton 17 -detail-oriented 17 -capece 17 -childe 17 -jubelin 17 -zon 17 -rejoices 17 -buesseler 17 -decroce 17 -67.6 17 -dolomite 17 -cliff-face 17 -soft-focus 17 -quitbit 17 -holeve 17 -martz 17 -mistrustful 17 -2k13 17 -jonesy 17 -raylene 17 -villepin 17 -kiril 17 -kun-hee 17 -sportsbet 17 -0-100 17 -cardiff-based 17 -playgolf 17 -caldron 17 -poom 17 -englund 17 -hebborn 17 -labyrinthitis 17 -quality-control 17 -sublimely 17 -cuneo 17 -lundquist 17 -797 17 -80-yard 17 -chalin 17 -preti 17 -desreen 17 -dojo 17 -driver-side 17 -firb 17 -in-keeping 17 --41 17 -unrelentingly 17 -maximises 17 -adreian 17 -bendle 17 -bully-boy 17 -appliqué 17 -snacker 
17 -daisuke 17 -energy-dense 17 -karabo 17 -ryce 17 -extended-stay 17 -kino 17 -influenza-like 17 -yeakel 17 -l.k 17 -cephalopods 17 -odebrecht 17 -cyberbullies 17 -slobbery 17 -stuttle 17 -bodomov 17 -rion 17 -200-yard 17 -strangio 17 -binge-watch 17 -winterfell 17 -constantinou 17 -caimans 17 -silberkleit 17 -video-recorded 17 -ever-greater 17 -herrara 17 -semi-subs 17 -yat-sen 17 -noordwijk 17 -realisable 17 -red-head 17 -oswell 17 -drumheller 17 -remunerated 17 -kocer-bowman 17 -vitarelli 17 -heffer 17 -bosporus 17 -anzacs 17 -alcee 17 -afer 17 -tyco 17 -lauge 17 -phonebook 17 -sicher 17 -german-style 17 -2003-2011 17 -ick 17 -samita 17 -konneh 17 -hur 17 -non-combatant 17 -northcliffe 17 -12,200 17 -guna 17 -tampere 17 -4.65 17 -well-resourced 17 -baraclough 17 -westra 17 -mfa 17 -kostetskaya 17 -toddling 17 -press-up 17 -towse 17 -penaflor 17 -flagships 17 -2002-2004 17 -reinvestigation 17 -cryogenics 17 -tangi 17 -kavuala 17 -vladeck 17 -28-hour 17 -nhra 17 -h4h 17 -pollution-free 17 -tennesseans 17 -saint-laurent 17 -kingsclere 17 -paygo 17 -misquote 17 -ridout 17 -quality-of-life 17 -abdel-fatah 17 -ipad2 17 -punk-rock 17 -mexico-based 17 -theranos 17 -parascandola 17 -goater 17 -under-employed 17 -vehicle-borne 17 -now-deleted 17 -maika 17 -melchior 17 -unsuspected 17 -90c 17 -902 17 -excepted 17 -scuttles 17 -vlt 17 -stromgodset 17 -contemptuously 17 -wehner 17 -hastens 17 -pregabalin 17 -clarisonic 17 -lofven 17 -plastic-wrapped 17 -assuaged 17 -29per 17 -pre-occupied 17 -52.7 17 -52.2 17 -piggery 17 -jackenheimer 17 -folami 17 -contos 17 -ueda 17 -crimewave 17 -klipper 17 -vannoy 17 -cheapness 17 -lyness 17 -volkswagon 17 -gedo 17 -gede 17 -new-generation 17 -7:13 17 -non-consecutive 17 -shipshape 17 -samajwadi 17 -mouser 17 -ibarbo 17 -strohmeyer 17 -roka 17 -hassling 17 -retrievable 17 -megève 17 -vanquishing 17 -villalba 17 -mid-calf 17 -4od 17 -in-hospital 17 -guest-starred 17 -careaga 17 -sappy 17 -pishides 17 -vampish 17 -tehachapi 17 -mud-brick 17 -levins 17 -leyson 17 -87.5 17 -oirere 17 -milkmen 17 -fudacz 17 -joep 17 -sappington 17 -sun-baked 17 -ballycastle 17 -u-shape 17 -privatisations 17 -postbag 17 -sst 17 -renfrew 17 -dahlin 17 -2,370 17 -chavasse 17 -stfu 17 -namiq 17 -machine-made 17 -ragunan 17 -royds 17 -arduini 17 -dangerman 17 -sohacki 17 -fennec 17 -hypoplasia 17 -bavidge 17 -zubakova 17 -vanishingly 17 -antimalarial 17 -benales 17 -namdar 17 -zala 17 -cartogram 17 -germane 17 -beheads 17 -detectorists 17 -mujahed 17 -castlefield 17 -gawlitta 17 -gearstick 17 -ex-general 17 -feely 17 -kilham 17 -despairs 17 -35,000-a-year 17 -anklet 17 -bassiouni 17 -angelfish 17 -m-pact 17 -earland 17 -wojciechowski 17 -pincombe 17 -streb 17 -ilicic 17 -vbac 17 -second-last 17 -zwilling 17 -tatsuma 17 -perrys 17 -jailbait 17 -mohicans 17 -ziya 17 -therm 17 -4,350 17 -ceilidh 17 -electroluminescent 17 -quarrelling 17 -skibound 17 -meldon 17 -five-pound 17 -re-sentencing 17 -b5 17 -bv 17 -five-bedroomed 17 -central-defender 17 -glovebox 17 -esteves 17 -pearland 17 -250,000-a-week 17 -bell-ringing 17 -kupstys 17 -caipirinha 17 -neuroimaging 17 -abc-tv 17 -moth-eaten 17 -tsvetanov 17 -step-overs 17 -dome-like 17 -clarke-salter 17 -iraqi-syrian 17 -umarova 17 -carpooling 17 -922 17 -owlets 17 -ayyam 17 -swabi 17 -1697 17 -trend-led 17 -sealyham 17 -nuu 17 -ropey 17 -khnl 17 -currentc 17 -ovadia 17 -votive 17 -thistlegorm 17 -descartes 17 -polman 17 -disbandment 17 -perusal 17 -22mph 17 -escuela 17 -academi 17 -longmuir 17 -sambany 17 -amukamara 17 -home-care 17 -1565 17 
-anti-system 17 -dedrick 17 -well-led 17 -tedmed 17 -harmonised 17 -laundrette 17 -564.1 17 -b-word 17 -mid-length 17 -mander 17 -pilli 17 -goal-shy 17 -jauch 17 -karmichael 17 -nine-mile 17 -oz. 17 -duckmarine 17 -flesh-devouring 17 -nahda 17 -irrigon 17 -islamist-rooted 17 -perkovic 17 -2,716 17 -lackeys 17 -zakat 17 -berge 17 -savelli 17 -anonma 17 -maranhao 17 -encapsulating 17 -atsuto 17 -7.44 17 -spookers 17 -zirkelbach 17 -mytholmroyd 17 -rumspringa 17 -senkwekwe 17 -grepper 17 -edel 17 -seedless 17 -beida 17 -re-investigate 17 -glasenberg 17 -knodel 17 -5.39 17 -keno 17 -re-issue 17 -trond 17 -cuevana 17 -pleiades 17 -spieldenner 17 -elongating 17 -anti-iran 17 -hippopotamuses 17 -line-outs 17 -yibo 17 -urso 17 -villahermosa 17 -reinstates 17 -five-step 17 -work-to-rule 17 -pelusa 17 -jarosite 17 -penalises 17 -powerlines 17 -merriam 17 -okuma 17 -okumu 17 -grindal 17 -al-malki 17 -stargate 17 -lancey 17 -scholey 17 -juxtaposing 17 -barnacle 17 -kimba 17 -tanouye 17 -glibly 17 -mccue-masone 17 -horfield 17 -badoer 17 -sdn 17 -rohbock 17 -spearmon 17 -damour 17 -1,615 17 -jet-setter 17 -betaworks 17 -underperformance 17 -antebi 17 -oxtail 17 -free-living 17 -assef 17 -concreted 17 -halper 17 -chondrites 17 -marinos 17 -fat-laden 17 -panio 17 -c-suite 17 -fil 16 -programmatic 16 -bricks-and-mortar 16 -palazzi 16 -mache 16 -osofsky-mcgonigle 16 -dumanli 16 -sarif 16 -low-gravity 16 -agonize 16 -lecompte 16 -suburgatory 16 -mary-gaye 16 -ae1 16 -32per 16 -a344 16 -2:32 16 -2:38 16 -varona 16 -bladet 16 -13.25 16 -mccorkell 16 -redlener 16 -jackdaws 16 -northam 16 -vindaloo 16 -seroquel 16 -grayhek 16 -sugarman 16 -kingstanding 16 -regaling 16 -cheliotis 16 -galitzine 16 -puxty 16 -mus 16 -loins 16 -depressants 16 -senzee 16 -discoverers 16 -abeilles 16 -mammut 16 -al-gaoud 16 -penman 16 -thitinan 16 -spalletti 16 -1504 16 -melamine-tainted 16 -abets 16 -exfoliator 16 -quasi-religious 16 -874,000 16 -linguistically 16 -schoolbooks 16 -warrilow 16 -yarmuth 16 -sharain 16 -shteyngart 16 -7:51 16 -meriweather 16 -dahman 16 -grovel 16 -rehydrated 16 -hosie 16 -elizarova 16 -9.16 16 -kicca 16 -maclennan 16 -suribachi 16 -136th 16 -krahn 16 -fist-pump 16 -up-dos 16 -sprengel 16 -grousbeck 16 -tallmadge 16 -oversimplify 16 -emde 16 -wiesbaden 16 -rust-coloured 16 -capnocytophaga 16 -thereau 16 -perkin 16 -ravings 16 -tazewell 16 -kinoshita 16 -suskind 16 -self-certification 16 -35,500 16 -january-march 16 -back-burner 16 -gademotta 16 -22:47 16 -halden 16 -moneysavingexpert 16 -3.41 16 -authoring 16 -ratp 16 -rato 16 -rokus 16 -armoire 16 -opemipo 16 -drug-dealers 16 -woolard 16 -deutch 16 -ondrovic 16 -teodora 16 -hambrook 16 -club-mates 16 -fortes 16 -then-south 16 -mutineer 16 -hardesty 16 -3.74 16 -tuebrook 16 -validly 16 -detlor 16 -gueckedou 16 -yida 16 -phenol 16 -hilter 16 -cowbells 16 -aguila 16 -878 16 -non-americans 16 -vardags 16 -basted 16 -tremblant 16 -retke 16 -appendectomy 16 -roussos 16 -thirlwell 16 -get-tough 16 -merrion 16 -morphy 16 -mascarpone 16 -late-in-life 16 -baylson 16 -hrcp 16 -steinar 16 -duncombe 16 -fifth-ranked 16 -38p 16 -campeanu 16 -upstaging 16 -ozgecan 16 -satyr 16 -gabanna 16 -puglisi 16 -minicabs 16 -bubbledogs 16 -over-rule 16 -goads 16 -jaish-e-mohammed 16 -toolan 16 -harasser 16 -loewy 16 -abcde 16 -wrap-up 16 -waldie 16 -margareta 16 -pictograph 16 -ardingly 16 -kakira 16 -seppe 16 -nll 16 -ressler 16 -923,000 16 -carelli 16 -savitt 16 -desegregated 16 -highest-priced 16 -costos 16 -coston 16 -norcott 16 -mexxy 16 -madory 16 -madore 16 -perrette 16 
-trevor-morgan 16 -zhenya 16 -phymean 16 -46.8 16 -council-funded 16 -boac 16 -boag 16 -mccullen 16 -psychopharmacology 16 -2,580 16 -ephesus 16 -taconic 16 -then-coach 16 -mini-trial 16 -l'agent 16 -resupplied 16 -holbeach 16 -#vpdebate 16 -3.018 16 -clemens-cooney 16 -ziegel 16 -65-mile 16 -momoa 16 -birbiglia 16 -sloops 16 -9,440 16 -vilamoura 16 -shamus 16 -manzi 16 -lowcountry 16 -longue 16 -borgstrom 16 -even-keeled 16 -blesma 16 -timewasting 16 -olvera 16 -mirador 16 -keyt 16 -1,552 16 -oraon 16 -134,565 16 -45.4 16 -koupparis 16 -leapfrogs 16 -bo-dene 16 -super-majority 16 -slaughterman 16 -jaiswal 16 -shargel 16 -sociopolitical 16 -october-december 16 -coverley 16 -sarto 16 -far-western 16 -matzzie 16 -grandiosity 16 -preemie 16 -alancier 16 -lieber 16 -cedeno 16 -goldston 16 -deadlifts 16 -hamadto 16 -ramunas 16 -marijana 16 -caramadre 16 -whaite 16 -rimando 16 -karamargin 16 -mandan 16 -ncov 16 -chawner 16 -6.24 16 -merckle 16 -bernauer 16 -pellebon 16 -wec 16 -wer 16 -audry 16 -either-or 16 -mao-aweys 16 -verifiably 16 -re-marry 16 -pre-requisite 16 -sclerotic 16 -iloilo 16 -sukhumvit 16 -babbage 16 -all-year 16 -lizarazu 16 -northenden 16 -walsby 16 -daher 16 -wsbt 16 -armoring 16 -bercy 16 -mcllroy 16 -realtytrac 16 -deficit-cutting 16 -self-regulate 16 -oddballs 16 -25-hour 16 -lilting 16 -publicity-shy 16 -28-22 16 -kania 16 -check-point 16 -jo-anne 16 -coladas 16 -shamelessness 16 -nappa 16 -tofino 16 -cheltenham-based 16 -sensei 16 -hurray 16 -flan 16 -pre-screening 16 -mum-of-four 16 -scarnici 16 -speculator 16 -olewine 16 -dalí 16 -aoyama 16 -wood-framed 16 -babergh 16 -phenix 16 -wps 16 -endogenous 16 -eisele 16 -leinkauf 16 -long-line 16 -33-month 16 -chicco 16 -prekindergarten 16 -bhut 16 -toed 16 -capp 16 -tonypandy 16 -herbaceous 16 -doberti 16 -22p 16 -belly-dancing 16 -kleshna 16 -adroit 16 -1646 16 -feliks 16 -full-stretch 16 -tick-tock 16 -luminescence 16 -pranjic 16 -infinitesimal 16 -huairou 16 -iraq-syria 16 -77kg 16 -motorpoint 16 -four-pack 16 -19per 16 -miryanov 16 -telegraphing 16 -implausibly 16 -25.99 16 -mawes 16 -muthaura 16 -40in 16 -newly-designed 16 -mcelrath 16 -landor 16 -kexin 16 -mcguirk 16 -stanford-le-hope 16 -green-jackson 16 -fadnes 16 -300-a-month 16 -osbournes 16 -jospeh 16 -red-and-black 16 -hyper-sensitive 16 -aah 16 -demartino 16 -fuss-free 16 -80-degree 16 -elmes 16 -kleindl 16 -doleful 16 -noicos 16 -non-disparagement 16 -cervo 16 -microseconds 16 -pantera 16 -nelle 16 -kas 16 -shisler 16 -henningsen 16 -commitee 16 -snpl 16 -al-yousef 16 -erfani-ghadimi 16 -abella 16 -nazli 16 -20-storey 16 -sucos 16 -straker 16 -caronia 16 -komsa 16 -dubost 16 -neonicotinoid 16 -majority-black 16 -bell-shaped 16 -euroskeptics 16 -matchdays 16 -bremerhaven 16 -barceloneta 16 -aena 16 -barucke 16 -shibley 16 -hafid 16 -nightshade 16 -pajak 16 -anti-occupy 16 -pantex 16 -p-i 16 -off-day 16 -corrector 16 -digitizing 16 -parriott 16 -homeaway 16 -untraced 16 -editorial@dailymailonline.co.uk 16 -mids. 
16 -awaida 16 -calcutt 16 -kehler 16 -klingner 16 -thorpedo 16 -tiziana 16 -grazers 16 -ballo 16 -hoser 16 -graduate-level 16 -paszek 16 -00:04 16 -cfmeu 16 -8.33 16 -fal 16 -taibi 16 -lebeau 16 -friday-to-sunday 16 -orang 16 -single-breasted 16 -purves 16 -guiyu 16 -framlingham 16 -good-humored 16 -foerster 16 -corelogic 16 -double-bogeys 16 -j-20 16 -michette 16 -meucci 16 -underestimation 16 -gravettian 16 -warmhearted 16 -pinette 16 -clawback 16 -csf 16 -jubilees 16 -chaiyasate 16 -54mph 16 -pre-written 16 -afros 16 -presidente 16 -step-granddaughter 16 -walmarts 16 -cimmino 16 -firestarter 16 -mynors 16 -rexall 16 -chelseafc.com 16 -elysées 16 -1040 16 -tissint 16 -brus 16 -brue 16 -fancher 16 -rutman 16 -51.2 16 -once-a-week 16 -combermere 16 -woolnough 16 -wearying 16 -zlatko 16 -foyers 16 -bitrus 16 -wvu 16 -keizer 16 -crowdfunded 16 -nubian 16 -sumy 16 -naskrecki 16 -goodfella 16 -forriest 16 -risquà 16 -easy-to-wear 16 -zanden 16 -pinners 16 -parrotfish 16 -ancil 16 -drugscope 16 -paris-saint 16 -crackhead 16 -bakula 16 -dinos 16 -un-arab 16 -metaphysics 16 -stefanski 16 -concealers 16 -volturi 16 -wugang 16 -refines 16 -interferences 16 -mohareb 16 -snb 16 -sns 16 -dafen 16 -aragorn 16 -pieri 16 -third-fastest 16 -firelighters 16 -assoc 16 -loliondo 16 -hannum 16 -113mph 16 -arianespace 16 -dystrophic 16 -2,495 16 -rovinj 16 -ulaanbaatar 16 -co-own 16 -bogeying 16 -shotley 16 -mynydd 16 -stevensite 16 -scafaria 16 -blocher 16 -orbcomm 16 -119.6 16 -singer/actress 16 -otolaryngologist 16 -oakhurst 16 -adamou 16 -goldgenie 16 -envy-inducing 16 -klondike 16 -d'état 16 -kamila 16 -ultra-cheap 16 -henneberg 16 -boschi 16 -miera 16 -kalkbrenner 16 -impudent 16 -2:59 16 -uncorrected 16 -rigger 16 -bertolt 16 -spielmann 16 -professional-grade 16 -non-transferable 16 -excretions 16 -colbie 16 -c-5 16 -etchingham 16 -botley 16 -ohlinger 16 -bogeymen 16 -obinna 16 -zagel 16 -eightfold 16 -hollman 16 -whistl 16 -weaponised 16 -dagens 16 -460million 16 -2.66 16 -verbose 16 -ravished 16 -tripper 16 -olinda 16 -tamang 16 -smith-brown 16 -chiropractors 16 -pomme 16 -dudding 16 -shimkus 16 -megadeth 16 -minnan-wong 16 -pre-publication 16 -lipliner 16 -cabazitaxel 16 -weisbrod 16 -purée 16 -rufford 16 -ghirelli 16 -spumante 16 -khwai 16 -brocton 16 -dazzlingly 16 -shayea 16 -bolt-action 16 -hatfill 16 -gay-marriage 16 -wet-weather 16 -healthsouth 16 -turgut 16 -clampett 16 -rigmarole 16 -sarten 16 -pelion 16 -deftness 16 -post-release 16 -enchaine 16 -#peterpanlive 16 -ostrowski 16 -500/1 16 -ps2 16 -wcax 16 -wcau 16 -digitizer 16 -2010/2011 16 -odoni 16 -sheregesh 16 -casualwear 16 -tweenies 16 -crookham 16 -levithan 16 -massagers 16 -kyna 16 -reacquaint 16 -toyo 16 -00:10 16 -merve 16 -zmijewski 16 -rockslide 16 -guarana 16 -catterton 16 -joumaa 16 -superglued 16 -worthlessness 16 -six-packs 16 -@cnn 16 -caler 16 -arthroscopic 16 -morgellons 16 -skiiers 16 -merchiston 16 -turkistan 16 -life-giving 16 -twc 16 -22:25 16 -normalizes 16 -home-from-home 16 -two-course 16 -matney 16 -raffo 16 -guntrip 16 -tibi 16 -ex-gang 16 -reponse 16 -engles 16 -quast 16 -quasi 16 -mullens 16 -norio 16 -mind-reading 16 -goan 16 -sirolimus 16 -thy1 16 -pimental 16 -tikes 16 -paxil 16 -then-foreign 16 -once-bustling 16 -touquet 16 -dana-farber 16 -khanum 16 -loro 16 -owner-occupiers 16 -touch-based 16 -glaetzer 16 -Île 16 -frankenfish 16 -musson 16 -stranraer 16 -bobbin 16 -1976-83 16 -radda 16 -lifetab 16 -mini-budget 16 -latisha 16 -disproportionally 16 -craete 16 -masciopinto 16 -non-permanent 16 -stiehm 16 
-pokies 16 -lannisters 16 -rothschilds 16 -olumegbon 16 -corvalan 16 -winer 16 -dwina 16 -klitzman 16 -maidwell 16 -ridgeland 16 -domiz 16 -felecia 16 -metters 16 -vigors 16 -needlepoint 16 -vibrato 16 -medishare 16 -octavian 16 -toadstool 16 -morumbi 16 -pacquot 16 -bhangra 16 -mckeating 16 -fiorente 16 -deva 16 -sauvage 16 -restylane 16 -osmel 16 -refinanced 16 -brandes 16 -defacto 16 -rapid-response 16 -r-calif. 16 -boronia 16 -untended 16 -amsprop 16 -sharrif 16 -keers 16 -dorrie 16 -imprimatur 16 -quaaludes 16 -mooneyham 16 -materializes 16 -teahupoo 16 -barkoff 16 -emmart 16 -krefeld 16 -ugl 16 -pu'er 16 -superintendant 16 -fil-a 16 -eur 16 -ntep 16 -dobie 16 -wouters 16 -10-part 16 -fravel 16 -tomasica 16 -drug-dealer 16 -hna 16 -306,000 16 -sea-levels 16 -delahunty 16 -maddah 16 -wmaz 16 -hornais 16 -ch4 16 -dragne 16 -gerardi 16 -zeeuw 16 -huebner 16 -wing-like 16 -kaput 16 -ganic 16 -littig 16 -scott-heron 16 -bourdon 16 -albertson 16 -electro-pop 16 -hofburg 16 -bait-and-switch 16 -aerotoxic 16 -6.47 16 -bellas 16 -parkrun 16 -enfamil 16 -freese 16 -dorm-room 16 -86-year 16 -hyperlink 16 -loudell 16 -40-60 16 -valuers 16 -sydnor 16 -nzou 16 -leatherslade 16 -15bn 16 -rovell 16 -ockendon 16 -vivaldi 16 -j&d 16 -bukowski 16 -big-spenders 16 -danyal 16 -astiz 16 -matus 16 -bartolomucci 16 -bmt 16 -squawked 16 -bond-themed 16 -kitai 16 -clubby 16 -flom 16 -altamirano 16 -sherie 16 -shoreham-by-sea 16 -convention-goers 16 -rmp 16 -eckersall 16 -modee 16 -amvets 16 -fests 16 -caltrans 16 -piccarreta 16 -fouts 16 -32st 16 -philipsburg 16 -sportsgirl 16 -grg 16 -tett 16 -latour 16 -karloff 16 -chantler 16 -teguise 16 -podd 16 -10,900 16 -lulea 16 -plumas 16 -kuwaiti-born 16 -staykov 16 -khabarovsk 16 -rockbridge 16 -25th-minute 16 -orange-coloured 16 -pissarides 16 -arkel 16 -silted 16 -montesarchio 16 -apperance 16 -quenby 16 -grandfatherly 16 -rabble-rouser 16 -ahuja 16 -123,200 16 -hostelling 16 -water-ice 16 -workhorses 16 -sugru 16 -adjerid 16 -vis-à-vis 16 -cockx 16 -fireguard 16 -seelig 16 -bech 16 -interethnic 16 -intermingling 16 -wart 16 -malalas 16 -goldbergs 16 -ramaphosa 16 -1,200-acre 16 -dugongs 16 -germond 16 -teletype 16 -inch-and-a-half 16 -kozlov 16 -ananias 16 -low-powered 16 -disrobing 16 -supercommittee 16 -startupbus 16 -above-mentioned 16 -machetto 16 -damnedest 16 -sparham 16 -sarpong 16 -hysterectomies 16 -odie 16 -blethyn 16 -shweta 16 -nena 16 -sables 16 -lakdawalla 16 -wolens 16 -yale-educated 16 -dishman 16 -santé 16 -70-metric-ton 16 -meddlesome 16 -62.7 16 -sakyo 16 -equalizes 16 -chi-chi 16 -170g 16 -quita 16 -corthine 16 -bluestonehenge 16 -kigen 16 -reprinting 16 -pingyao 16 -clamper 16 -jadeite 16 -adhesions 16 -clubgoers 16 -zirconium 16 -well-grounded 16 -healthplan 16 -repaved 16 -selah 16 -under-13 16 -sasso 16 -d'ancona 16 -magnan 16 -bossier 16 -wenz 16 -a16 16 -remote-operated 16 -swinford 16 -nyx 16 -loofah 16 -nales 16 -torri 16 -6:55 16 -fascinosa 16 -macgruber 16 -bouhanni 16 -arzola 16 -truces 16 -humanizing 16 -ectopia 16 -eboni 16 -safwat 16 -post-training 16 -kfor-tv 16 -#isis 16 -stereophonics 16 -ségolène 16 -co-sanctioned 16 -sherra 16 -neston 16 -cocooning 16 -cabcharge 16 -ionia 16 -marasigan 16 -clottey 16 -numeral 16 -lyssavirus 16 -crothall 16 -credentialed 16 -al-nuri 16 -nakash 16 -recapped 16 -standard-bearers 16 -chromebooks 16 -matlok 16 -twill 16 -caltex 16 -rifai 16 -destini 16 -dehumanised 16 -walthall 16 -caricaturing 16 -hyneman 16 -2005-2009 16 -kplr 16 -southey 16 -60k 16 -ploegsteert 16 -four-weeks-old 16 -anaïs 16 
-demagogue 16 -wheezes 16 -american-israeli 16 -16-week-old 16 -high-ceilinged 16 -aybar 16 -zimbelman 16 -boh 16 -pspos 16 -23:32 16 -cienfuegos 16 -deadpans 16 -polona 16 -faceted 16 -trenin 16 -rabb 16 -boyack 16 -sophiia 16 -rochers 16 -damboa 16 -338.3 16 -schuilwerve 16 -wwii-era 16 -gpr 16 -herwig 16 -whizzkid 16 -yacone 16 -caralis 16 -giveforward.com 16 -coursera 16 -wender 16 -trash-talking 16 -kanjeng 16 -mediapart 16 -neu5gc 16 -steeves 16 -fightin 16 -farteg 16 -14-storey 16 -sepinwall 16 -ballou 16 -mrna 16 -citibike 16 -cytotec 16 -androgyny 16 -asexuality 16 -ewings 16 -security-conscious 16 -1.81 16 -rpij 16 -histology 16 -cutrufelli 16 -wear-and-tear 16 -tulear 16 -oil-free 16 -chinese-owned 16 -wapa 16 -thernstrom 16 -ojha 16 -sidra 16 -elizalde 16 -karoly 16 -mcseveney 16 -isn 16 -panadol 16 -seatmate 16 -zoleka 16 -bull-type 16 -wenchuan 16 -mantar 16 -gaza-israel 16 -300,00 16 -guoqiang 16 -garaufis 16 -mangine 16 -tribhuvan 16 -pre-filled 16 -charish 16 -madah 16 -pursey 16 -corn-based 16 -sarnie 16 -nela 16 -hage 16 -chirk 16 -taurima 16 -margera 16 -constants 16 -jiyeon 16 -arizona-mexico 16 -pliosaurs 16 -lissa 16 -lisse 16 -7.11 16 -bevis 16 -hardys 16 -red-shirted 16 -aiims 16 -debarquement 16 -inkaterra 16 -19bn 16 -treasurers 16 -meetups 16 -rochereau 16 -eits 16 -aberporth 16 -ifr 16 -ajose 16 -2.08 16 -mammadova 16 -brusaw 16 -whiton 16 -tree-lighting 16 -deprivations 16 -campen 16 -kalan 16 -embroider 16 -rozalia 16 -gilgit 16 -46.6 16 -mccollough 16 -lhotse 16 -radbourne 16 -78p 16 -6:32 16 -craniotomy 16 -thickets 16 -vladikavkaz 16 -bohemians 16 -shayona 16 -bravehearts 16 -tingles 16 -full-throttle 16 -whilby 16 -sheheryar 16 -latchmere 16 -biochar 16 -banisters 16 -finigan 16 -rehashed 16 -stockholm-based 16 -sherin 16 -stana 16 -eyup 16 -puppala 16 -zilber 16 -dyna 16 -ezeamuzie 16 -besma 16 -reaganomics 16 -losail 16 -matherne 16 -herz 16 -sebastopol 16 -ginetta 16 -inconspicuously 16 -mitnick 16 -fieschi 16 -38,500 16 -hristos 16 -bernheim 16 -dipsy 16 -carminati 16 -bronzino 16 -1790s 16 -yennaris 16 -blumenstein 16 -cisma 16 -gies 16 -nytimes.com 16 -hedbo 16 -stina 16 -tony-winning 16 -kharel 16 -62f 16 -morticia 16 -stiffed 16 -fwhr 16 -sonning 16 -kirpan 16 -shem 16 -shez 16 -free-to-use 16 -finger-like 16 -audiophiles 16 -sharmin 16 -zeron 16 -discourteous 16 -substantiating 16 -eldad 16 -ryncarz 16 -benvenuto 16 -diacetyl 16 -tattersfield 16 -54ft 16 -birgeneau 16 -#respect 16 -hanoverian 16 -plentyoffish.com 16 -chloral 16 -cnn/tea 16 -times-call 16 -listlessly 16 -cézanne 16 -feonyx 16 -avast 16 -mireia 16 -venable 16 -lionized 16 -vollero 16 -heavy-metal 16 -kandhamal 16 -cafod 16 -488,000 16 -dimelow 16 -crabbie 16 -juárez 16 -serious-minded 16 -pash 16 -padoin 16 -palen 16 -533,000 16 -bolderson 16 -deuterium 16 -beitashour 16 -gombert 16 -karempelis 16 -highliner 16 -ttyl 16 -regurgitation 16 -dors-lake 16 -onoade 16 -takin 16 -toyland 16 -mcmurrey 16 -miroslava 16 -onslows 16 -black-owned 16 -greenough 16 -bonington 16 -wisnu 16 -eisfeld 16 -rubisch 16 -holmgren 16 -rubem 16 -play-fight 16 -flavorings 16 -tripartite 16 -carribbean 16 -1,082 16 -cursey 16 -two-earner 16 -taca 16 -maximiliano 16 -sakubai 16 -countervailing 16 -1,775 16 -dsn 16 -yungas 16 -katiba 16 -samaszko 16 -hickinbottom 16 -lebovich 16 -fager 16 -petsafe 16 -jalaeipour 16 -shuxia 16 -0.30 16 -heyford 16 -dmondaine 16 -lizza 16 -sotkin 16 -cupids 16 -opre 16 -over-corrected 16 -guillot 16 -iovane 16 -mousses 16 -wasserman-schultz 16 -forseeable 16 -prusak 16 -ciantar 
16 -sunni-backed 16 -bukokhe 16 -tomfoolery 16 -maehlee 16 -high-roller 16 -kljestan 16 -ultra-right 16 -juckes 16 -transvaginal 16 -maley 16 -familiarisation 16 -projecteo 16 -paralympic-style 16 -alianza 16 -spaceliner 16 -barnier 16 -wahoo 16 -45,500 16 -quote-unquote 16 -sunni-ruled 16 -jowl 16 -broiling 16 -vacillated 16 -kinabalu 16 -neumar 16 -remacle 16 -harpooned 16 -fumigation 16 -flappers 16 -12-story 16 -virmani 16 -layperson 16 -ngbapo 16 -stroganoff 16 -kassidiaris 16 -off-the-peg 16 -27-month 16 -lowder 16 -redoine 16 -63.3 16 -63.8 16 -kotek 16 -corkin 16 -fdm 16 -cullis 16 -a90 16 -guoliang 16 -800-mile 16 -femicide 16 -budhathoki 16 -gyatso 16 -generically 16 -6.5-magnitude 16 -muqrin 16 -200-a-night 16 -pegram 16 -roller-skating 16 -ilife 16 -ta-da 16 -opensecrets.org 16 -middleborough 16 -doralie 16 -bonkbuster 16 -al-harmoush 16 -alleman 16 -manaa 16 -miniaturization 16 -jae-in 16 -trifling 16 -berrimah 16 -947 16 -vainer 16 -sirhowy 16 -rsv 16 -gusoff 16 -pogrom 16 -hirschmann 16 -abian 16 -rosenkrantz 16 -dauntingly 16 -dampha 16 -mutti 16 -wardman 16 -seashores 16 -troubleshooter 16 -earliest-known 16 -wallachia 16 -caister 16 -fitz-james 16 -reionisation 16 -comella 16 -meminger 16 -pangbourne 16 -13-bedroom 16 -dhahri 16 -hardisty 16 -clayworth 16 -raylie 16 -batton 16 -gds 16 -gouker 16 -falorni 16 -primary-care 16 -tancharoen 16 -ayuso 16 -114million 16 -frost-covered 16 -edisto 16 -colebourn 16 -eyemouth 16 -16-megapixel 16 -wi-fi-only 16 -stl 16 -elstow 16 -bolton-le-sands 16 -bengston 16 -homecomings 16 -earthquake-prone 16 -british-themed 16 -bloks 16 -retinopathy 16 -bortolussi 16 -firmament 16 -photographically 16 -hazan 16 -eldemire 16 -26.50 16 -kosuke 16 -self-limiting 16 -resistor 16 -jet-skis 16 -swigged 16 -loehnis 16 -tritto 16 -1499 16 -kruk 16 -lower-court 16 -smap 16 -lawndale 16 -salander 16 -postiglione 16 -sanel 16 -saner 16 -pre-meditation 16 -leggat 16 -hygienically 16 -oddly-shaped 16 -riggers 16 -harpsichord 16 -succarieh 16 -sylvestre 16 -zirconia 16 -thermite 16 -300ml 16 -koskoff 16 -bichard 16 -ex-forces 16 -hartgrove 16 -xc90 16 -mp5 16 -wiedemann 16 -16-mile 16 -vanarama 16 -2210 16 -nonmedical 16 -twerker 16 -inexpensively 16 -plops 16 -guiseley 16 -flexdirect 16 -powerboats 16 -wasnt 16 -lagan 16 -pillory 16 -4.74 16 -iniala 16 -wishtv 16 -a/b 16 -kersbrook 16 -besch 16 -dunsmore 16 -double-glazed 16 -midgets 16 -cardington 16 -zhirkov 16 -snoozes 16 -itches 16 -wortzel 16 -fischbeck 16 -impalas 16 -alternator 16 -humaneness 16 -precancerous 16 -eat-in 16 -3100 16 -vanbuskirk 16 -sonicable 16 -canadiens 16 -sharp-edged 16 -kamat 16 -hiawatha 16 -quickstep 16 -00:20 16 -21.95 16 -leyen 16 -waitstaff 16 -judgeships 16 -973 16 -qurban 16 -senz 16 -methley 16 -typify 16 -timson 16 -titling 16 -mishawaka 16 -elmont 16 -szostok 16 -bradford-based 16 -hockett 16 -ex-service 16 -shilov 16 -shap 16 -cga 16 -playacting 16 -beraki 16 -bes 16 -34e 16 -protess 16 -rhosneigr 16 -appraisers 16 -1557 16 -mcgillivray 16 -perinova 16 -57.6 16 -24-48 16 -pozzo 16 -vrignaud 16 -castmember 16 -palmeiro 16 -merchan 16 -grandmother-of-one 16 -barcomb 16 -mihalik 16 -smentek 16 -58631 16 -microdot 16 -abrogation 16 -rer 16 -pitbull-type 16 -escudero 16 -airglow 16 -arevalos 16 -luiza 16 -383,000 16 -atrophying 16 -made-for-television 16 -freestyler 16 -karaj 16 -audun 16 -seismograph 16 -ahad 16 -curtseyed 16 -unexcused 16 -unflustered 16 -fundraised 16 -poore 16 -dallied 16 -kostner 16 -21:47 16 -katrantzou 16 -darwinism 16 -anika 16 -mitsui 16 -joubouri 
16 -gradovich 16 -counce 16 -irib 16 -irin 16 -pavelich 16 -deposing 16 -bierfeldt 16 -hyperventilate 16 -stoumbos 16 -742 16 -kunze 16 -4:53 16 -sro 16 -2,360 16 -stec 16 -dipg 16 -re-published 16 -supriyadi 16 -intraparty 16 -futaba 16 -fedarcyk 16 -yaz 16 -yat 16 -pilieva 16 -non-residential 16 -heifers 16 -fitters 16 -fibroid 16 -bamigboye 16 -brazillian 16 -75.5 16 -adeola 16 -anren 16 -talgarth 16 -izabelle 16 -gav 16 -sundresses 16 -altemus 16 -sevare 16 -glik 16 -knoop 16 -ryals 16 -fordlandia 16 -schiavocampo 16 -23:23 16 -2006-7 16 -metrocard 16 -garforth 16 -non-stun 16 -1786 16 -disalvo 16 -six-digit 16 -radojkovic 16 -ultra-strong 16 -r-colorado 16 -blockhead 16 -saiful 16 -air-dropped 16 -number-crunching 16 -haimi 16 -allergenic 16 -eunan 16 -ritchey 16 -16-0 16 -morcombes 16 -hollinger 16 -scallions 16 -allsaints 16 -134th 16 -morton-hoffman 16 -cafcass 16 -naj 16 -dauksas 16 -cwiakalo 16 -ngala 16 -co-existing 16 -300bhp 16 -70-plus 16 -ex-imf 16 -jinga 16 -teemed 16 -illumiroom 16 -423,000 16 -all-state 16 -wsoc-tv 16 -scads 16 -motari 16 -kamehameha 16 -trask 16 -newish 16 -dumitrita 16 -subhan 16 -thabane 16 -0.76 16 -leas 16 -baarle 16 -twite 16 -roseau 16 -vietnamese-american 16 -megahertz 16 -lachner 16 -cookouts 16 -bonazzola 16 -tullow 16 -lenka 16 -noordin 16 -voc 16 -babs 16 -autogyro 16 -cont 16 -tinder-dry 16 -sixto 16 -deigned 16 -activewear 16 -nikka 16 -bgf 16 -322,000 16 -under-staffed 16 -fumigate 16 -oligarchy 16 -adventurism 16 -kcal9 16 -juul 16 -freemasonry 16 -c.a. 16 -blairsville 16 -868 16 -tealights 16 -slama 16 -milmo 16 -radhakrishnan 16 -caunt 16 -teleprompters 16 -vessyl 16 -briony 16 -okereke 16 -b-double 16 -dadis 16 -nayel 16 -musto 16 -six-decade 16 -cespedes 16 -atlee 16 -o'prey 16 -nagata 16 -28-minute 16 -al-gaddafi 16 -georgen 16 -nagai 16 -c919 16 -ketchell 16 -kreuger 16 -dennington 16 -wcsc 16 -superleague 16 -countrywoman 16 -re-located 16 -antonelli 16 -d'erlanger 16 -al-majali 16 -crocheting 16 -80th-minute 16 -nieve 16 -dwyer-skeats 16 -curatorial 16 -worron 16 -4.11 16 -maugans 16 -metalworker 16 -liew 16 -rabanne 16 -3:44 16 -a1c 16 -schouten 16 -3.14 16 -3.16 16 -calianna 16 -thirsting 16 -burkhead 16 -lletget 16 -trekkies 16 -leiber 16 -jested 16 -pizzolatto 16 -aibo 16 -fly-by-night 16 -kumin 16 -ummi 16 -frippery 16 -macinnis 16 -pup-cake 16 -one-hundred 16 -ahrc 16 -swill 16 -artemivsk 16 -sedge 16 -snodin 16 -black-on-black 16 -duick 16 -i-4 16 -minibars 16 -33st 16 -cleavage-enhancing 16 -bygott 16 -tubal 16 -arlberg 16 -maries 16 -russellandbromley.co.uk 16 -burnaby 16 -antonovich 16 -antiq 16 -terdiman 16 -friendzy 16 -tesfay 16 -malignancy 16 -menelaou 16 -catchup 16 -cocksure 16 -0.97 16 -azfamily.com 16 -broadland 16 -finger-tip 16 -chequebooks 16 -#love 16 -pinturault 16 -andile 16 -selloff 16 -30ff 16 -farahani 16 -nmrn 16 -whatmore 16 -givanni 16 -romcom 16 -4/7 16 -fatuous 16 -a+e 16 -miyazato 16 -9:13 16 -9:19 16 -bindmans 16 -pushovers 16 -kempf 16 -rejig 16 -tjosaas 16 -ippon 16 -sujata 16 -maigret 16 -parminder 16 -satisfries 16 -parapagus 16 -unsecure 16 -wildt 16 -shanked 16 -mollusks 16 -bull-running 16 -clacy 16 -adc 16 -front-foot 16 -tulowitzki 16 -camelford 16 -haustein 16 -etayem 16 -creepier 16 -youthful-looking 16 -pinkeye 16 -deferrari 16 -2004-2009 16 -dellwood 16 -brasov 16 -pearse 16 -boxx 16 -sunjic 16 -95-year 16 -brightside 16 -34in 16 -sotolongo 16 -smidt 16 -beavering 16 -piloxing 16 -angeli 16 -26-17 16 -british-iranian 16 -warfighting 16 -hatchell 16 -gizzell 16 -overpay 16 -gop-leaning 
16 -zyla 16 -aduna 16 -eisenstadt 16 -world-beaters 16 -longhand 16 -choirboys 16 -gurdeep 16 -canowindra 16 -maron 16 -morones 16 -tunneled 16 -eggert 16 -datastickies 16 -selene 16 -azzan 16 -lodo 16 -non-renewable 16 -brinkerhoff 16 -washbag 16 -9.03 16 -slops 16 -bracanov 16 -languidly 16 -20-a-day 16 -haynesworth 16 -holeman 16 -arista 16 -fani 16 -ebs 16 -rivonia 16 -spanky 16 -dogtown 16 -canen 16 -hospitalier 16 -houseoffraser.co.uk 16 -bonelli 16 -propellants 16 -ex-mother-in-law 16 -mythili 16 -wunsiedel 16 -discomforts 16 -500-600 16 -duodenoscopes 16 -karagandy 16 -hettema 16 -brz 16 -mugunga 16 -psychically 16 -yanqi 16 -poobalan 16 -pastrikos 16 -libertyville 16 -gruenenthal 16 -ihsanoglu 16 -600cc 16 -tramline 16 -street-food 16 -rickner 16 -pyrgos 16 -salvages 16 -calliope 16 -stubbington 16 -mosteller 16 -beatz 16 -7inch 16 -paladins 16 -140km 16 -prominences 16 -seven-a-side 16 -lower-profile 16 -glocks 16 -touchwiz 16 -recasting 16 -haik 16 -prehypertension 16 -cibs 16 -thistles 16 -polesitter 16 -dog-owners 16 -poret 16 -810million 16 -nanobots 16 -vivus 16 -chikin 16 -dressing-up 16 -rapidity 16 -kvist 16 -title-holder 16 -timbre 16 -fall-winter 16 -gyetvai 16 -biagioni 16 -change-up 16 -paillex 16 -cyriac 16 -guruswamy 16 -skubic 16 -stabled 16 -pisciuneri 16 -leftward 16 -webchat 16 -tabin 16 -21:23 16 -magaye 16 -shuga 16 -letter-writer 16 -rhymney 16 -431,000 16 -mavros 16 -sukhdeo 16 -mid-point 16 -tatishvili 16 -lamont-doherty 16 -mathijsen 16 -staszak 16 -budget-busting 16 -74,500 16 -zelt 16 -lacsa 16 -isacson 16 -likers 16 -vukasin 16 -neaz 16 -unrated 16 -two-times 16 -35cm 16 -gristle 16 -challapalca 16 -kamiar 16 -pg-rated 16 -aquila 16 -noroviruses 16 -yamanaka 16 -locanda 16 -wettstein 16 -fage 16 -raver 16 -freefalling 16 -katakinas 16 -balling 16 -a320s 16 -quadrocopter 16 -brandman 16 -beyayo 16 -gentles 16 -horejsi 16 -sisely 16 -best-run 16 -dementia-suffering 16 -coarsely 16 -illum 16 -eifion 16 -realpolitik 16 -biopics 16 -crema 16 -nakai 16 -arakaki 16 -carron 16 -relents 16 -2,300-year-old 16 -rohm 16 -a419 16 -bushmills 16 -elma 16 -pottawatomie 16 -rugby-tackled 16 -vaccine-preventable 16 -colaprete 16 -welshmen 16 -homilies 16 -230m 16 -lip-synched 16 -steely-eyed 16 -wiggum 16 -akhito 16 -nvq 16 -71.3 16 -cascais 16 -cardinale 16 -cae 16 -claypole 16 -bradby 16 -gleamed 16 -180th 16 -rawah 16 -conker 16 -x-acto 16 -apropos 16 -sessler 16 -whybrow 16 -rishworth 16 -wish-tv 16 -footrest 16 -vietcong 16 -beausoleil 16 -hormonally 16 -stubbe 16 -livability 16 -dicephalic 16 -2008-9 16 -4:38 16 -6,000-page 16 -lewis-jones 16 -ratliffe 16 -gsma 16 -rostain 16 -moisten 16 -sofyan 16 -biostatistics 16 -imjin 16 -scriven 16 -9.26 16 -britainsdna 16 -okasha 16 -mckibben 16 -bhat 16 -#debates 16 -downpayment 16 -dulcet 16 -metabolized 16 -hi-def 16 -cross-referencing 16 -surdyka 16 -anti-child 16 -64-year 16 -bernini 16 -zr-1 16 -stendal 16 -politicisation 16 -beji 16 -fourth-wicket 16 -deregnaucourt 16 -atsuko 16 -lanson 16 -ctc 16 -btc 16 -tls 16 -munford 16 -viviano 16 -60.8 16 -60.9 16 -shareef 16 -bankes 16 -3.54 16 -teausant 16 -wallinger 16 -konidaris 16 -stockwood 16 -puckeridge 16 -con-man 16 -stayer 16 -jayna 16 -80-100 16 -net-worth 16 -levelheaded 16 -ormandy 16 -contini 16 -sub-group 16 -farthings 16 -al-kadhim 16 -chilled-out 16 -daintily 16 -dziedzic 16 -mccarroll 16 -skinniest 16 -861 16 -weissmuller 16 -tjuta 16 -poofters 16 -anti-marijuana 16 -bamboozling 16 -shukarno 16 -gamson 16 -glowacki 16 -auburndale 16 -self-directed 16 -chugs 
16 -dyn 16 -saugus 16 -newly-arrived 16 -rubberised 16 -hwanghae 16 -oareford 16 -mcintrye 16 -mccutchen 16 -39p 16 -ryton 16 -h7n7 16 -scc 16 -espin 16 -kallmyer 16 -ceyhan 16 -iwade 16 -zaporowski 16 -mycroft 16 -plesel 16 -munich-based 16 -half-indian 16 -danin 16 -165ft 16 -russian-flagged 16 -monosodium 16 -tsou 16 -fodmaps 16 -amaranth 16 -salamone 16 -currying 16 -anderko 16 -polyakov 16 -flesch 16 -43.9 16 -nikolsky 16 -011-33/2 16 -nadaud 16 -6am-9am 16 -mtukudzi 16 -nega 16 -karpuc 16 -asuna 16 -talerico 16 -60-inch 16 -vargova 16 -35.99 16 -milanova 16 -trainwreck 16 -mcnealy 16 -junked 16 -primatologists 16 -sibyl 16 -belenenses 16 -highest-resolution 16 -velayati 16 -hosseiniamraei 16 -ossetians 16 -jarad 16 -wi-fi-enabled 16 -pocketknives 16 -tenors 16 -sachsenring 16 -syler 16 -pirouetting 16 -sun-powered 16 -axial 16 -commodes 16 -berghof 16 -home-field 16 -conniford 16 -22-months 16 -gipson 16 -badi 16 -presage 16 -maierhofer 16 -75,000-a-week 16 -saffo 16 -louganis 16 -linx 16 -1,540 16 -huguenot 16 -baby-sitter 16 -wide-leg 16 -tukel 16 -mantola 16 -centralia 16 -overdressed 16 -akimbo 16 -a-game 16 -re-tweet 16 -ex-students 16 -gazzaniga 16 -meringues 16 -hay-on-wye 16 -robey 16 -zehaf 16 -mischer 16 -fowlkes 16 -stepfamilies 16 -mincer 16 -decribed 16 -ogallala 16 -schuldies 16 -penner 16 -quiroga 16 -isobelle 16 -fumo 16 -metroplex 16 -2,695 16 -shrimp-like 16 -staine 16 -15-14 16 -66th-minute 16 -rawal 16 -impugned 16 -8/6 16 -bayon 16 -wheelbase 16 -farhatullah 16 -dott 16 -zabuli 16 -fedotowsky 16 -crusaded 16 -newser 16 -urologists 16 -lower-class 16 -camera-phone 16 -rippington 16 -haygarth 16 -ecclesia 16 -goffman 16 -aveda 16 -baby-making 16 -freemantle 16 -l.l. 16 -pontoise 16 -crt 16 -nutrioso 16 -over-indulge 16 -wencewicz 16 -perpetua 16 -90-plus 16 -unbalance 16 -sell-offs 16 -krusher 16 -schansman 16 -wortham 16 -mortonhall 16 -94.5 16 -japanese-made 16 -bichir 16 -x4 16 -wacoal 16 -removalist 16 -half-a-century 16 -milkins 16 -nant 16 -averts 16 -southard 16 -zimmern 16 -1,449 16 -1,444 16 -high-pressured 16 -arbitrage 16 -couldnt 16 -alamgir 16 -lown 16 -brashear 16 -saucer-like 16 -drosophila 16 -tammam 16 -uranium-based 16 -signing-on 16 -1,048 16 -Ángel 16 -gazi 16 -bhattarai 16 -shalikashvili 16 -revalidation 16 -speigel 16 -mcrel 16 -zarzycki 16 -sergent 16 -7a 16 -chesler 16 -moca 16 -600-foot 16 -meuse 16 -co-sign 16 -croisette 16 -nairne 16 -vescio 16 -tilsley 16 -gyang 16 -squeamishness 16 -sabritas 16 -waggling 16 -31/5/86 16 -17-goal 16 -hummers 16 -gaspard 16 -chainey 16 -elitists 16 -gemmill 16 -sailstorfer 16 -sandakan 16 -stethoscopes 16 -synthia 16 -resized 16 -poonch 16 -absolves 16 -shill 16 -preposterously 16 -mongeluzzi 16 -r.s. 16 -hukkelaas 16 -floro 16 -21:51 16 -gamlen 16 -under-aged 16 -indo-european 16 -bealey 16 -umina 16 -chimera 16 -power-unit 16 -anechoic 16 -pinboard 16 -udy 16 -madikwe 16 -mallaig 16 -despotism 16 -tualatin 16 -bearman 16 -360ft 16 -catherall 16 -ferrin 16 -satka 16 -roundhead 16 -11-acre 16 -refract 16 -three-banded 16 -solorzano 16 -mainds 16 -amores 16 -existentialist 16 -tee-off 16 -jamala 16 -quesarito 16 -aniarael 16 -grimston 16 -prances 16 -wztv 16 -v.k. 
[... remainder of deleted word-frequency vocabulary data file omitted: one "-token count" pair per deleted line, counts descending from 16 (beginning "-u.n.-led 16") through 15 (ending "-colonsay 15") ...]
15 -ahmeds 15 -filipa 15 -8:39 15 -8:31 15 -stigler 15 -shamrocks 15 -minka 15 -gianvito 15 -subcontract 15 -synthesizers 15 -montelongo 15 -westroads 15 -put-in-bay 15 -calcagno 15 -laurita 15 -150-member 15 -coffeehouses 15 -ficker 15 -tajiks 15 -vanderbilts 15 -pownall 15 -eight-core 15 -murder-suicides 15 -michalski 15 -cheapflights.co.uk 15 -holdalls 15 -d6 15 -dn 15 -saidee 15 -hazelden 15 -deflector 15 -fiengo 15 -then-texas 15 -nanny-state 15 -speedback 15 -1-ranked 15 -french-based 15 -parrs 15 -trypophobia 15 -15-44 15 -makhmour 15 -caylyn 15 -peschisolido 15 -minelli 15 -government-supported 15 -lamby 15 -ad-libbing 15 -pollett 15 -garcias 15 -bosbach 15 -interdict 15 -stiggers 15 -inconsiderable 15 -#lfc 15 -uncommunicative 15 -ghizzi 15 -crispr-cas9 15 -siddharth 15 -tilmon 15 -doney 15 -nadon 15 -oughta 15 -herradura 15 -ghilarducci 15 -anti-sleaze 15 -then-british 15 -bourgass 15 -bitmead 15 -service-based 15 -lrch 15 -hanin 15 -fat-soluble 15 -over-shadowed 15 -gaven 15 -unforgivably 15 -15-1 15 -perseveres 15 -canonizations 15 -bougherra 15 -ludhiana 15 -glass-like 15 -53-47 15 -adas 15 -jughead 15 -radhika 15 -upvc 15 -tambora 15 -uncrowded 15 -highest-quality 15 -koestler 15 -@europaleague 15 -chrzaszcz 15 -uppers 15 -1,052 15 -cozart 15 -9:17 15 -snettisham 15 -tobbal 15 -kau 15 -raghead 15 -stimulators 15 -dove-grey 15 -egglishaw 15 -a350-800 15 -nonsectarian 15 -valadez 15 -johor 15 -vermin-infested 15 -cnnhealth 15 -disciplinarians 15 -headboards 15 -roberts-smith 15 -himym 15 -mayweather-pacquiao 15 -1,247 15 -150cm 15 -polycyclic 15 -ghosting 15 -naseeb 15 -3k 15 -wirehaired 15 -wktv 15 -bigbury 15 -bizzarri 15 -linhof 15 -handsy 15 -loerke 15 -jolson 15 -3ft-long 15 -nuclear-free 15 -iaf 15 -serv 15 -baps 15 -tanegashima 15 -wasiq 15 -baragona 15 -zaghawa 15 -gisha 15 -reissuing 15 -yoyos 15 -smite 15 -schielzeth 15 -22:38 15 -chelios 15 -casaliggi 15 -kissi 15 -ontlametse 15 -zenavia 15 -tinglan 15 -sarafan 15 -doz 15 -sticklers 15 -jacamo 15 -dimsdale 15 -issara 15 -southlake 15 -kaelyn 15 -newly-established 15 -layzell 15 -hurban 15 -thermally 15 -heun 15 -inputted 15 -sentino 15 -1,599 15 -66.6 15 -ub40 15 -emetophobia 15 -a330-300 15 -kuerten 15 -marchione 15 -bowraville 15 -hartshorne 15 -d-tennessee 15 -diffusing 15 -binbin 15 -highschool 15 -proliferators 15 -misdiagnoses 15 -musters 15 -invoicing 15 -apostolos 15 -debasing 15 -cross-dressers 15 -anno 15 -cfc 15 -pancakebot 15 -backdropped 15 -postville 15 -gurkiren 15 -lalita 15 -barbecoa 15 -frasca 15 -raiderette 15 -ragu 15 -frenchie 15 -geotagging 15 -mousehole 15 -18-wheelers 15 -absolom 15 -intu 15 -cheesecakes 15 -sufis 15 -15,200 15 -crud 15 -ex-met 15 -55.50 15 -guerline 15 -sadia 15 -alsager 15 -dror 15 -mixed-up 15 -feces-covered 15 -14,000-square-foot 15 -yearnings 15 -al-baghdadia 15 -trusgnach 15 -simões 15 -viorel 15 -883 15 -6,000-square-foot 15 -unhealthier 15 -ova 15 -70.4 15 -lajvardi 15 -raffaelle 15 -wanis 15 -40-week 15 -canna 15 -42per 15 -dionisio 15 -well-understood 15 -toa 15 -six-pointer 15 -doom-laden 15 -jean-bernard 15 -storer 15 -web-connected 15 -gavage 15 -epaulettes 15 -duka 15 -dukw 15 -kamm 15 -pitre 15 -alinsky 15 -kdfw 15 -edgeworth 15 -socolow 15 -270-degree 15 -mid-2016 15 -eglise 15 -last.fm 15 -delineates 15 -sarvari 15 -pre-operation 15 -17g 15 -gissy 15 -binse 15 -proeller 15 -inhumans 15 -genii 15 -shanie 15 -qmc 15 -43per 15 -rededicated 15 -6.2-magnitude 15 -fiyaz 15 -4,033 15 -koory 15 -templestowe 15 -deshong 15 -tebbutts 15 -klaasen 15 -faizey 15 -sabata 15 
-hallow 15 -re-visit 15 -pierre-louis 15 -bobby-jo 15 -yvan 15 -chinchorro 15 -4-inches 15 -candido 15 -rousset 15 -madziwa 15 -decent-sized 15 -facciola 15 -straightjacket 15 -h.e. 15 -bavuma 15 -14mph 15 -kooluris 15 -lieverse 15 -bonis 15 -cornelio 15 -dundon 15 -schorsch 15 -kozhevnikova 15 -binch 15 -36.9 15 -boultbee 15 -75.9 15 -omega-6 15 -marsell 15 -turbin 15 -hickstead 15 -kippah 15 -alkozai 15 -mithoefer 15 -quavers 15 -inch-wide 15 -cia-run 15 -northfleet 15 -taepodong-2 15 -tithes 15 -clunking 15 -hutto 15 -yohji 15 -theft-related 15 -akiko 15 -non-drinkers 15 -bertsche 15 -safe-house 15 -australian-led 15 -mid-to-low 15 -abdulhakim 15 -news24 15 -weichel 15 -south-african 15 -crime-riddled 15 -v-6 15 -v-j 15 -mazari 15 -mladenovich 15 -graddersonline 15 -readouts 15 -rosburg 15 -dhhs 15 -market-oriented 15 -lohud.com 15 -rasheda 15 -2a 15 -phillimore 15 -al-qaida-inspired 15 -thibodaux 15 -litterst 15 -1679 15 -1676 15 -broschart 15 -lingua 15 -kaufenberg 15 -baml 15 -varroa 15 -mongkok 15 -kahumbu 15 -hitfix 15 -okefenokee 15 -manduca 15 -6mph 15 -levia 15 -bercows 15 -privacyfix 15 -musclemen 15 -populaire 15 -semitic 15 -ambassadeurs 15 -nullarbor 15 -kunkel 15 -ftp 15 -anti-woman 15 -wakatipu 15 -brandet 15 -koba 15 -52.8 15 -52.3 15 -curtsy 15 -mississippian 15 -tomsche 15 -sandycombe 15 -boucheron 15 -vetter 15 -kenmoe 15 -halal-certified 15 -mamut 15 -zaeef 15 -radiofrequency 15 -labour-held 15 -rasmus 15 -castleside 15 -teardrop-shaped 15 -ledgerwood 15 -anti-wind 15 -chromosphere 15 -gallstone 15 -baykal 15 -d'antonio 15 -davin 15 -towhey 15 -1275 15 -120mm 15 -odeo 15 -oded 15 -ferryhill 15 -reverand 15 -fox-hunting 15 -volchkin 15 -scattergun 15 -82.6 15 -beaten-up 15 -vrabel 15 -day-trip 15 -groggily 15 -murawski 15 -swizzels 15 -post-storm 15 -bruckman 15 -low-balling 15 -re-air 15 -cockerham 15 -lipari 15 -spowers 15 -knipe 15 -harrying 15 -undervaluing 15 -80888 15 -blueseed 15 -87.1 15 -warhols 15 -mindblowing 15 -al-hawa 15 -mamakos 15 -furniss 15 -quasi-military 15 -kobilinsky 15 -manscaping 15 -aderholt 15 -babbo 15 -flaked 15 -oftsed 15 -#sharknado 15 -schott 15 -a.m.-8 15 -3.03 15 -3.07 15 -cloud-computing 15 -@font 15 -bartone 15 -attridge 15 -vulfpeck 15 -extroversion 15 -e-cards 15 -5.13 15 -qahtani 15 -dismont 15 -camisoles 15 -dusts 15 -klinkel 15 -broadcom 15 -anti-glare 15 -fox25 15 -laterry 15 -newmans 15 -kamarck 15 -overtaxed 15 -woonsocket 15 -protoplanetary 15 -jealty 15 -chilliest 15 -gayla 15 -hanx 15 -brown-skinned 15 -power-ups 15 -sullinger 15 -pot-shots 15 -ishpeming 15 -arminia 15 -devotedly 15 -exploiters 15 -rohdy 15 -ultra-conservatives 15 -abdelhakim 15 -cueva 15 -scotusblog 15 -stickier 15 -rapebait 15 -surrey-born 15 -joyland 15 -20-story 15 -milltown 15 -inglethorpe 15 -78.5 15 -yayladagi 15 -sfl 15 -jianmin 15 -jaeger-lecoultre 15 -byrds 15 -whitty 15 -gulu 15 -randles 15 -ween 15 -dupage 15 -9:03 15 -lurex 15 -apigenin 15 -153rd 15 -49.6 15 -49.2 15 -hertfordshire-based 15 -revolights 15 -jeremi 15 -duplicative 15 -laresce 15 -suru 15 -mega-money 15 -al-sudani 15 -nadira 15 -a.v. 
15 -besner 15 -burland 15 -xisha 15 -liddicoat 15 -vaporizer 15 -greenwall 15 -b9 15 -livaja 15 -toyboys 15 -hardings 15 -thatched-roof 15 -liss 15 -silver-vallance 15 -mischenko 15 -superleggera 15 -type-1 15 -cyanobacteria 15 -quelccaya 15 -two-pieces 15 -keenum 15 -cambyses 15 -imprisons 15 -samedov 15 -wajih 15 -kortner 15 -64.1 15 -64.9 15 -soppitt 15 -teliga 15 -laurae 15 -bonbon 15 -oon 15 -wheelchair-accessible 15 -stainforth 15 -500mg 15 -921 15 -sandhill 15 -christe 15 -coliform 15 -1696 15 -1698 15 -seven-metre 15 -mejia-ramos 15 -bed-in 15 -abducts 15 -spitefully 15 -alaed 15 -mothman 15 -amidon 15 -160cm 15 -eight-stone 15 -lily-may 15 -martlesham 15 -clif 15 -xishuangbanna 15 -uniter 15 -lenczewski 15 -declaratory 15 -absurdist 15 -dixter 15 -socceroo 15 -lurpak 15 -magubane 15 -1749 15 -gullibility 15 -hernik 15 -sobriquet 15 -pickell 15 -1,115 15 -al-almani 15 -coram 15 -jurf 15 -sagawa 15 -bided 15 -pullar 15 -mmmm 15 -sinh 15 -holmesdale 15 -cgil 15 -hesco 15 -millu 15 -fuddy-duddy 15 -kooks 15 -kirkenes 15 -dalling 15 -38mph 15 -katherina 15 -kosair 15 -applesauce 15 -6,000-mile 15 -padme 15 -gorillaz 15 -kupchan 15 -masella 15 -deckers 15 -mitja 15 -dorwan 15 -gunton 15 -satiric 15 -larkhill 15 -ritcherson 15 -wollerau 15 -67.2 15 -3ft-wide 15 -yashika 15 -schooley 15 -agostini 15 -shebang 15 -cosies 15 -casino-hotel 15 -senneville 15 -dorin 15 -c'an 15 -middlemarch 15 -bouillabaisse 15 -meacock 15 -elora 15 -heini 15 -isaps 15 -mirvaso 15 -5.38 15 -riah 15 -ledean 15 -oh-so 15 -moscariello 15 -stintz 15 -carter-ruck 15 -yagid 15 -anti-iraq 15 -terroir 15 -blay 15 -gdl 15 -sieved 15 -darra 15 -jackanory 15 -pretexts 15 -miniaturize 15 -steepness 15 -critcised 15 -rain/snow 15 -shammi 15 -then-home 15 -then-russian 15 -blagger 15 -meppershall 15 -40-story 15 -hematomas 15 -prineville 15 -hacene-chaouch 15 -weidmann 15 -depsite 15 -orators 15 -woodsy 15 -reinvestigate 15 -schutzstaffel 15 -lauderhill 15 -hellos 15 -afterburners 15 -ahtia 15 -slicklogin 15 -0.36 15 -cerak 15 -haydr 15 -polish-american 15 -cordyceps 15 -torin 15 -khosa 15 -callihan 15 -wrong-doers 15 -covenants 15 -ioo 15 -iod 15 -ioa 15 -trat 15 -wurlitzer 15 -hqs 15 -hisashi 15 -positron 15 -27per 15 -fox2now 15 -kenn 15 -manali 15 -22:28 15 -codewords 15 -trifonovs 15 -jhonny 15 -optometry 15 -cluely 15 -meadowhall 15 -20.99 15 -mruga 15 -chandor 14 -aadam 14 -khelya 14 -kjrh 14 -springthorpe 14 -penitence 14 -boltons 14 -avails 14 -unlikeable 14 -yates-badley 14 -clabo 14 -oakford 14 -molly-mae 14 -kansu 14 -codemasters 14 -three-volume 14 -seafarer 14 -ativ 14 -handballed 14 -stratification 14 -extremophiles 14 -exacts 14 -archdioceses 14 -beskitas 14 -carruth 14 -callouts 14 -paradiski 14 -buttie 14 -ceramicist 14 -w.h.o. 
14 -ruckman 14 -then-12-year-old 14 -wampach 14 -anti-military 14 -khalidi 14 -flankers 14 -pauffley 14 -kouri 14 -bestinvest 14 -faiola 14 -dual-carriageway 14 -kwik-fit 14 -tossup 14 -doong 14 -plymel 14 -mega-bucks 14 -re-interment 14 -kinahan 14 -85-pound 14 -easytone 14 -ritot 14 -downswing 14 -mcdonell 14 -corkscrews 14 -mielnik 14 -glazyev 14 -phonetic 14 -habur 14 -cusanelli 14 -coriat 14 -case-shiller 14 -rushen 14 -waking-up 14 -1,577 14 -duncan-bailey 14 -speechwriters 14 -hatzistefanis 14 -doctorow 14 -willington 14 -indiana-based 14 -26-23 14 -balakhnichev 14 -succesfully 14 -ad-din 14 -1,860 14 -tandjung 14 -no-tolerance 14 -awkward-looking 14 -homebush 14 -hillfort 14 -colac 14 -heisserer 14 -cavaco 14 -raygoza-garcia 14 -triplane 14 -macchi 14 -hotmani 14 -qusay 14 -coal-powered 14 -lestari 14 -beckwith-wiedemann 14 -anaesthesiologist 14 -ultravox 14 -ruffs 14 -mary-anne 14 -5:41 14 -dusko 14 -4:05 14 -kazakhs 14 -zilkic 14 -funi 14 -ingot 14 -32dd 14 -lamé 14 -tramuntana 14 -chocolate-chip 14 -challoner 14 -bi-weekly 14 -beeper 14 -nettie 14 -kangwon 14 -antonio-lackland 14 -u.n.-sanctioned 14 -9.17 14 -soutter 14 -porsz 14 -purcellville 14 -lapasset 14 -jainism 14 -o'porter 14 -beardmore 14 -pyromaniac 14 -shamokin 14 -?!?! 14 -stirrings 14 -aveley 14 -cole-schwartz 14 -mccuistion 14 -recordable 14 -hacche 14 -chargeable 14 -ex-professional 14 -e-ticket 14 -bawa-garba 14 -discombobulated 14 -tmt 14 -ilulissat 14 -jentleson 14 -peplow 14 -mujeeb 14 -well-defended 14 -carisa 14 -adora 14 -hard-to-detect 14 -gandini 14 -inexpressible 14 -cseter 14 -frumin 14 -shih-tzu 14 -velden 14 -unquantifiable 14 -kenoi 14 -reller 14 -gujranwala 14 -barez-brown 14 -sancho 14 -adriaan 14 -chungyalpa 14 -overpopulating 14 -three-word 14 -myspace.com 14 -cowpox 14 -wra 14 -wri 14 -grinter 14 -fugen 14 -874 14 -mesocyclone 14 -surinder 14 -hopefulness 14 -wmata 14 -gnasher 14 -1,018 14 -waple 14 -emancipator 14 -pubescent 14 -dael 14 -owner-occupied 14 -cyro 14 -atopic 14 -jrc 14 -turere 14 -unsubsidized 14 -poisson 14 -gherity 14 -challons 14 -home-invasion 14 -purdah 14 -tobe 14 -quarts 14 -bakalej 14 -stargaze 14 -hnida 14 -infiltrates 14 -f18 14 -baset 14 -street-side 14 -767s 14 -better-funded 14 -winkelman 14 -laryngitis 14 -mousy 14 -deinosuchus 14 -first-home 14 -four-floor 14 -patricks 14 -thind 14 -homecare 14 -crachiola 14 -rap/sung 14 -wamala 14 -laois 14 -shepherdswell 14 -quints 14 -conditsis 14 -kele 14 -saleable 14 -190th 14 -a&f 14 -escare 14 -andronicus 14 -beseeching 14 -towyn 14 -gabardine 14 -41per 14 -heidecker 14 -fugere 14 -picardy 14 -shaja'ia 14 -sanitise 14 -wyll 14 -14/5 14 -commentor 14 -uluwatu 14 -eye-socket 14 -labbing 14 -maters 14 -hartinger 14 -half-jokingly 14 -newitz 14 -over-crowding 14 -co-chairwoman 14 -mother-of-11 14 -sarongs 14 -barbagallo 14 -ktla-tv 14 -mccaulley 14 -terrazas 14 -waterton 14 -azibert 14 -málaga 14 -checkmate 14 -stock-market 14 -carbon-14 14 -universalis 14 -gowen 14 -miscue 14 -@cnnbrk 14 -goron 14 -scheidler 14 -sandbrook 14 -re-development 14 -vergara-martinez 14 -ipilimumab 14 -serhant 14 -hale-bopp 14 -mercuriceratops 14 -papakalodoukas 14 -sead 14 -svend 14 -bundibugyo 14 -traffic-clogged 14 -doctoroff 14 -cuckoos 14 -ehiem 14 -fire-fight 14 -gallus 14 -gondolier 14 -pinhead 14 -massari 14 -check-cashing 14 -hansman 14 -jahmani 14 -phong 14 -elkes 14 -verta 14 -.26 14 -marisella 14 -blart 14 -procellarum 14 -gregori 14 -afash 14 -djemba-djemba 14 -combet 14 -chequer 14 -microorganism 14 -1,156 14 -hikkim 14 -earthshaking 14 
-superimposes 14 -cleeves 14 -second-storey 14 -hotcake 14 -kattan 14 -lindemann 14 -property-owning 14 -miaow 14 -khon2 14 -toyko 14 -export-driven 14 -shadoe 14 -asbill 14 -dragoncon 14 -non-family 14 -aleppo-based 14 -diepsloot 14 -jinger 14 -highly-addictive 14 -7-month 14 -coundon 14 -ineffable 14 -handpainted 14 -aberaeron 14 -peeped 14 -6.23 14 -sayulita 14 -1340 14 -jalfrezi 14 -zamir 14 -ocean-side 14 -yara 14 -warnaco 14 -ucan 14 -dribs 14 -periodico 14 -kincora 14 -tatami 14 -18km 14 -ostracods 14 -900ad 14 -itzy 14 -droukdel 14 -dual-sim 14 -five-feet 14 -hosemann 14 -blaikie 14 -barma 14 -re-sale 14 -forebear 14 -sianey 14 -paintballs 14 -maximizes 14 -5.59 14 -mooching 14 -tor-kristian 14 -pretorian 14 -after-market 14 -keepy-ups 14 -mkultra 14 -gashi 14 -abdul-rasheed 14 -tatad 14 -al-ali 14 -cokehead 14 -higher-risk 14 -target-driven 14 -god-daughter 14 -bwh 14 -ledet 14 -atilano 14 -seaplex 14 -uraemic 14 -28-29 14 -trivialisation 14 -hall-trujillo 14 -scapula 14 -magloire 14 -pizjuan 14 -lay-bys 14 -skorupski 14 -liliesleaf 14 -nonjudgmental 14 -car-chase 14 -extrapolation 14 -hijacks 14 -1064 14 -monklands 14 -galvanizes 14 -roundworms 14 -thun 14 -thum 14 -speech-language 14 -hazza 14 -shaleem 14 -aeolian 14 -tardini 14 -teater 14 -fpc 14 -papier-mâché 14 -fpi 14 -calpin 14 -sayeh 14 -hammac 14 -onionhead 14 -marling 14 -much-debated 14 -malaysia-based 14 -bodymedia 14 -rentfrow 14 -63.7 14 -pereda 14 -fact-checker 14 -moonie 14 -marble-sized 14 -werde 14 -tagine 14 -glitziest 14 -4-mile 14 -sophee 14 -59.1 14 -25-yarder 14 -yanliang 14 -malizia 14 -eddin 14 -marionville 14 -holyland 14 -href 14 -kahanamoku 14 -fact-specific 14 -.2011 14 -eglinton 14 -iolani 14 -fayyaz 14 -thereto 14 -kadari 14 -impounding 14 -asana 14 -duchamp 14 -capuano 14 -myfreecams 14 -gps-based 14 -khatalla 14 -qutb 14 -ionut 14 -simoneau-meunier 14 -whizz-kid 14 -underreporting 14 -cruzado 14 -tasseled 14 -kinninmont 14 -shieler 14 -elvidge 14 -repetitious 14 -penetrations 14 -pomford 14 -paintbrushes 14 -balleza 14 -grisogono 14 -oshin 14 -e320 14 -amanmuradova 14 -ciman 14 -gluta 14 -rhoa 14 -norberg 14 -low-protein 14 -michalowski 14 -joule 14 -pre-condition 14 -brive 14 -clang 14 -200c 14 -double-blind 14 -ilstrup 14 -haavisto 14 -wildes 14 -campione 14 -palu 14 -shamsi-basha 14 -mangione 14 -yazzie 14 -balinskaya 14 -tiffs 14 -hero-worship 14 -shamina 14 -atapuerca 14 -tipps 14 -opalev 14 -lignin 14 -moyle 14 -14bn 14 -heritages 14 -schizoaffective 14 -c10 14 -castro-montes 14 -mahter 14 -fatburger 14 -shumate 14 -sulaymaniyah 14 -barto 14 -confucianism 14 -ríos 14 -gloucestershire-based 14 -thirty-year-old 14 -lecht 14 -promissory 14 -borbely 14 -gargling 14 -mankin 14 -mdanat 14 -al-ruqai 14 -kapteyn 14 -liewald 14 -on-ice 14 -45-year-olds 14 -bjoergen 14 -gyrates 14 -kapinus 14 -mid-conversation 14 -schiele 14 -long-wave 14 -enteric 14 -anfisa 14 -jibunoh 14 -decarlo 14 -abdirizak 14 -eliseu 14 -incident-packed 14 -water-loving 14 -ueli 14 -weltman 14 -477,000 14 -colma 14 -commiserated 14 -khaleda 14 -campodimele 14 -entico 14 -chalices 14 -fastidiously 14 -fowley 14 -frogmore 14 -trazodone 14 -beason 14 -magor 14 -transunion 14 -audigier 14 -sahab 14 -yoichi 14 -chocking 14 -abdellah 14 -devonish 14 -godefroid 14 -combelic 14 -connived 14 -alltech 14 -holtaway 14 -minchinhampton 14 -frappucino 14 -oscar-tipped 14 -cageprisoners 14 -wanganeen 14 -thundersnow 14 -kamalesh 14 -nuru 14 -yasmeen 14 -majumdar 14 -vye 14 -unwerth 14 -spera 14 -body-builder 14 -fanciers 14 -stab-proof 14 
-sheepskins 14 -mahbod 14 -co-pays 14 -zanthe 14 -strydom 14 -pummels 14 -100-a-night 14 -1:28 14 -1:24 14 -abdelilah 14 -1,271 14 -1,270 14 -935,000 14 -500,000-a-year 14 -adsley 14 -goadsby 14 -gravesen 14 -winesi 14 -raha 14 -syphilitic 14 -ravensworth 14 -riku 14 -re-negotiate 14 -rif 14 -bruh 14 -okoroji 14 -lacierda 14 -aquagenic 14 -meningioma 14 -161.5 14 -macphee 14 -durdaller 14 -bán 14 -niwatthamrong 14 -mashhad 14 -mid-1600s 14 -pitofsky 14 -bedevil 14 -lillies 14 -burgdorf 14 -borowitz 14 -cyber-crimes 14 -kaminsky 14 -fingermarks 14 -cleon 14 -bozo 14 -unarguable 14 -cranstoun 14 -foundas 14 -fabianksi 14 -colourist 14 -fecteau 14 -8-pound 14 -otranto 14 -cutting-room 14 -beledweyne 14 -urmas 14 -americanus 14 -westbrooke 14 -benhaffaf 14 -alcoves 14 -anubis 14 -padoan 14 -ameri 14 -715,000 14 -tricuspid 14 -attardo 14 -then-assistant 14 -maada 14 -oxy 14 -panwar 14 -moctar 14 -puede 14 -presti 14 -stil 14 -hair-trigger 14 -lagos-based 14 -unami 14 -edry 14 -backend 14 -multi-family 14 -non-selective 14 -missourian 14 -desgroseillers 14 -dolley 14 -kyteman 14 -arquiett 14 -follette 14 -golden-brown 14 -stirrer 14 -bonmarché 14 -shihab 14 -nh1 14 -tetrick 14 -raghu 14 -tamerton 14 -bolch 14 -dramatises 14 -acrylics 14 -tub-thumping 14 -shut-off 14 -calorie-free 14 -billionths 14 -rhim 14 -fron 14 -goatskin 14 -egle 14 -estrela 14 -taha'a 14 -21kg 14 -subliminally 14 -sheglabo 14 -saroo 14 -ignitions 14 -recto 14 -40.9 14 -khq 14 -khe 14 -anholt 14 -hreik 14 -foucan 14 -getaround 14 -taku 14 -azougar 14 -cayea 14 -pank 14 -rottum 14 -sianagh 14 -hammergren 14 -sira 14 -sirs 14 -saxmundham 14 -multicopter 14 -progestin 14 -nesquik 14 -concertos 14 -35-acre 14 -franti 14 -camelopardalids 14 -amann 14 -fiber-rich 14 -ausiello 14 -woodshed 14 -patraucean 14 -orpen 14 -2.61 14 -crystallizes 14 -capgemini 14 -pini 14 -fizzling 14 -biweekly 14 -gawenda 14 -microlensing 14 -mkz 14 -kretschmer 14 -money-coutts 14 -soft-shell 14 -burkle 14 -monell 14 -medero 14 -fahri 14 -trek-style 14 -goldmans 14 -one-click 14 -bhujle 14 -566,000 14 -hizbul 14 -9:47 14 -hasn 14 -r-patz 14 -silberberger 14 -korean-born 14 -ekho 14 -shlaferman 14 -gonul 14 -kalina 14 -quanah 14 -moshers 14 -pollutes 14 -geroge 14 -keifer 14 -stassen 14 -carcases 14 -chingalings 14 -roosts 14 -oita 14 -pisanty 14 -mcatamney 14 -cumbrae 14 -gizmo5 14 -90min 14 -lovette 14 -clemmie 14 -oophorectomy 14 -zhukovska 14 -all-square 14 -baras 14 -bioreactor 14 -358,000 14 -hexapod 14 -whitted 14 -quake-prone 14 -pac-12 14 -rebelle 14 -28,000-a-year 14 -unquestioningly 14 -jackaway 14 -overworking 14 -szeged 14 -cales 14 -ayaz 14 -bollea 14 -lassoing 14 -3mins 14 -pastel-colored 14 -eavesdroppers 14 -landlord-tenant 14 -posteriors 14 -badaru 14 -u.s.-saudi 14 -2-door 14 -profanity-filled 14 -wytham 14 -car-dependent 14 -kenoyer 14 -thailand-based 14 -refered 14 -eireann 14 -pletka 14 -raymo 14 -leena 14 -impoverishment 14 -ruminations 14 -marylyn 14 -shawky 14 -sardinero 14 --32 14 -black-eye 14 -tasneem 14 -wydra 14 -io9.com 14 -hayles 14 -nimbin 14 -pilsen 14 -belman 14 -nieuws 14 -4,550 14 -liverpool-bound 14 -re-energised 14 -aphid 14 -culshaw 14 -deloris 14 -84.99 14 -d'eon 14 -padley 14 -kepler-93b 14 -ahrens 14 -600bc 14 -mcteer 14 -cuverville 14 -atria 14 -steinke 14 -baronetcy 14 -125-mile 14 -videography 14 -18th-minute 14 -step-over 14 -taitung 14 -kopelan 14 -stalemated 14 -urby 14 -unitedhealth 14 -1,000-a-day 14 -bardelli 14 -dimetrodon 14 -antonio-based 14 -benoni 14 -zolfo 14 -seijas 14 -folk-rock 14 -peterka 14 
-edgartown 14 -transmittable 14 -emmrich 14 -korowai 14 -greyer 14 -still-young 14 -giampedroni 14 -callans 14 -hospital-level 14 -midnight-blue 14 -15-under 14 -ablow 14 -ilchester 14 -microtia 14 -politifact.com 14 -panam 14 -rohrich 14 -cave-like 14 -exotically 14 -lip-smacking 14 -misch 14 -varina 14 -poppycock 14 -maira 14 -sona 14 -connectome 14 -radlett 14 -267lbs 14 -nushin 14 -22cm 14 -steep-sided 14 -jailbreaks 14 -well-reasoned 14 -1,299 14 -hartzer 14 -ice-pack 14 -homeopaths 14 -muslim-led 14 -pahs 14 -reactivating 14 -schrager 14 -jet-like 14 -i.v. 14 -varya 14 -7.52 14 -subfreezing 14 -thoughtlessness 14 -32.50 14 -kopel 14 -al-jihad 14 -brainerd 14 -over-eager 14 -filariasis 14 -publicity-seeking 14 -22,700 14 -1,396 14 -streetsboro 14 -duut 14 -jacquetta 14 -0.59 14 -austerlitz 14 -janullah 14 -lacovara 14 -astraea 14 -00s 14 -bergquist 14 -al-maidan 14 -wouldnt 14 -poker-faced 14 -manchego 14 -grogan-cannella 14 -bucketload 14 -propoggia 14 -box-cutter 14 -nido 14 -scalfaro 14 -metronome 14 -meche 14 -mittagong 14 -hathcock 14 -ruplenas 14 -riptides 14 -makins 14 -mauchlen 14 -josina 14 -villaggio 14 -pay-cut 14 -spymasters 14 -valentini 14 -stockist 14 -x-51a 14 -dry-aged 14 -epix 14 -wehbe 14 -conseil 14 -superrich 14 -corrall 14 -poolman 14 -happend 14 -ahmose 14 -chesson 14 -wharmby 14 -card-sized 14 -denbies 14 -twiselton 14 -baitha 14 -ripped-off 14 -nyman 14 -juhu 14 -no-tax 14 -unpersuaded 14 -protein-based 14 -kiyani 14 -bonnen 14 -iswai 14 -hazarika 14 -rpa 14 -gordie 14 -hokayem 14 -colombina 14 -mcinturff 14 -emasculating 14 -lingonberry 14 -nose-diving 14 -alverez 14 -iqaluit 14 -military-themed 14 -sapkota 14 -mereohra 14 -palitoy 14 -900-pound 14 -candra 14 -fritze 14 -eichengreen 14 -65cm 14 -shambo 14 -pitsford 14 -accel 14 -stay-away 14 -25.00 14 -exercise-induced 14 -26-and-a-half 14 -g-shot 14 -chutima 14 -firek 14 -tafdc 14 -rovetto 14 -dishington 14 -icelolly.com 14 -near-simultaneous 14 -perc 14 -squeaker 14 -londyn 14 -gayoso 14 -lawbreaker 14 -wreal 14 -sinhala 14 -sharbat 14 -début 14 -71.4 14 -googolplex 14 -shinoda 14 -11-months-old 14 -bedale 14 -corrigall 14 -kitley 14 -bmo 14 -scaramanga 14 -foltz 14 -wynnewood 14 -opals 14 -deviousness 14 -alacrity 14 -regenerist 14 -hiltzik 14 -belstone 14 -sejong 14 -avgeek 14 -odegbune 14 -190km 14 -whisman 14 --196 14 -drottningholm 14 -blowhard 14 -portslade 14 -merrilees 14 -mhlongo 14 -schoolmasters 14 -'18 14 -hi-way 14 -lifeboatmen 14 -77.4 14 -77.5 14 -77.7 14 -clifden 14 -chloe-jasmine 14 -gunma 14 -stephy 14 -vandervlist 14 -mayassa 14 -streif 14 -lomasney 14 -11.56 14 -crabber 14 -cameroon-born 14 -tear-jerker 14 -lollichon 14 -neisler 14 -joynson 14 -al-qassab 14 -qaqa 14 -suparman 14 -short-termist 14 -landrigan 14 -shenzhou-10 14 -mickayla 14 -tamryn 14 -ihsa 14 -1225 14 -dinks 14 -borana 14 -parroted 14 -metanomski 14 -rubenfeld 14 -mccarthy-scarsbrook 14 -hicksville 14 -jerudong 14 -sorin 14 -mavima 14 -arborist 14 -ascribing 14 -cosma 14 -kiem 14 -consomme 14 -sleman 14 -golby 14 -aalsmeer 14 -wildlife-rich 14 -saxelby 14 -napac 14 -shallcross 14 -outterside 14 -sciaf 14 -5.80 14 -smith-squire 14 -lavington 14 -sonu 14 -broadsides 14 -transferees 14 -hamwi 14 -grimsley 14 -abutment 14 -martti 14 -consero 14 -skimpiest 14 -ead 14 -one-second 14 -rodenticides 14 -harveys 14 -skinners 14 -tressler 14 -isadora 14 -kanes 14 -leaper 14 -mimes 14 -look-alikes 14 -62.8 14 -588,000 14 -thinkin 14 -crichel 14 -yasemin 14 -underplaying 14 -gayest 14 -hair-do 14 -7.39 14 -43,750 14 -brighton-born 14 
-auton 14 -centreforum 14 -alport 14 -dumbass 14 -relight 14 -orange-clad 14 -interconnect 14 -slouches 14 -writs 14 -noureddine 14 -schoelkopf 14 -karaman 14 -losordo 14 -invert 14 -tatlises 14 -police-led 14 -kinnaird 14 -ph. 14 -thyssenkrupp 14 -bobadilla 14 -anesthetist 14 -centralize 14 -polygon 14 -ribald 14 -tilt-shift 14 -boxpark 14 -hls 14 -hlf 14 -ndc 14 -madams 14 -cifuentes 14 -visualises 14 -oft-quoted 14 -dechen 14 -4.12 14 -shapland 14 -lief 14 -child-porn 14 -a18 14 -in-situ 14 -gantlet 14 -moa 14 -circumzenithal 14 -finnie 14 -scarper 14 -cf. 14 -zaks 14 -venancio 14 -brengel 14 -re-enrolled 14 -gayther 14 -el-fattah 14 -camillus 14 -baengnyeong 14 -neiderer 14 -mardale 14 -pollok 14 -3,525 14 -davoren 14 -anna-lena 14 -okfuskee 14 -littles 14 -laudani 14 -vannak 14 -slagged 14 -scallion 14 -u.s.-chinese 14 -anti-pornography 14 -mtus 14 -owen-darcy 14 -vickerman 14 -52-page 14 -micromanagement 14 -third-worst 14 -kariba 14 -eco-marathon 14 -byung 14 -great-nephews 14 -azoff 14 -crosswalks 14 -dister 14 -13.1-mile 14 -deliverables 14 -sawicki 14 -man-to-man 14 -forrer 14 -mountie 14 -ghostwritten 14 -bauxite 14 -mesons 14 -kondo 14 -ascher 14 -foria 14 -chatillon 14 -non-emergencies 14 -smales 14 -prettiness 14 -sparrowhawk 14 -balustrades 14 -giyen 14 -deady 14 -etherson 14 -devidjian 14 -sayn-wittgenstein 14 -personae 14 -1,295 14 -manigault 14 -asky 14 -dysrhythmia 14 -dighera 14 -sequoyah 14 -stamets 14 -comerica 14 -imogene 14 -telegeography 14 -justas 14 -nouvelle 14 -petrol-electric 14 -three-hour-long 14 -e-nable 14 -creationists 14 -misericordia 14 -hot-tub 14 -blackwall 14 -rovello 14 -meur 14 -lamboy 14 -lechuza 14 -bothe 14 -drummey 14 -1,437 14 -9.23 14 -sivertsen 14 -eye-line 14 -2000-02 14 -zandajan 14 -b.c 14 -rukundo 14 -cmpg 14 -cuffee 14 -150-seat 14 -tuojiang 14 -6,475 14 -vonck 14 -shrimping 14 -1-800-flowers 14 -money-hungry 14 -babied 14 -sidling 14 -lyneham 14 -0844 493 0787 14 -poh 14 -lourens 14 -springerville 14 -darfuri 14 -pettybourne 14 -makowski 14 -eswein 14 -mapaction 14 -chemical-free 14 -neola 14 -zipes 14 -wantroba 14 -akkersdijk 14 -obgyn 14 -olmec 14 -wykes 14 -barrasford 14 -duns 14 -hawver 14 -autothrottle 14 -orichalcum 14 -stanstead 14 -setubal 14 -madhav 14 -163rd 14 -backcomb 14 -fessenheim 14 -nevek 14 -disburse 14 -journal/nbc 14 -kynan 14 -3.97 14 -willson-pemberton 14 -champagne-colored 14 -assim 14 -boban 14 -hofl-riesch 14 -ricca 14 -chachi 14 -kavcic 14 -vangilder 14 -mantas 14 -captura 14 -makenna 14 -grails 14 -charisa 14 -ambrym 14 -haddam 14 -zistel 14 -coster 14 -benally 14 -ukfi 14 -tingirides 14 -cetinbag 14 -micro-apartments 14 -cbsalary.com 14 -mosca 14 -warmonger 14 -mallo 14 -dubiously 14 -93.7 14 -run-around 14 -theaker 14 -nexavar 14 -markle 14 -ryerson 14 -1,755 14 -aiyegbeni 14 -annualized 14 -perimenopause 14 -311,000 14 -baffa 14 -strathallan 14 -ntas 14 -undefended 14 -dobes 14 -maybank 14 -delac 14 -12-piece 14 -unfenced 14 -a-320 14 -serafina 14 -false-colour 14 -koch-backed 14 -missfeldt 14 -kenfig 14 -verrucas 14 -ifj 14 -celi-parr 14 -julienne 14 -chaton 14 -montgomeryshire 14 -stuchbery 14 -obsessional 14 -chinawhys 14 -ondu 14 -ziff 14 -wichmann 14 -buttell 14 -moonies 14 -1145 14 -two-weeks-old 14 -stephentown 14 -liberal-minded 14 -maa 14 -781 14 -visine 14 -ellin 14 -17-12 14 -misra 14 -murwillumbah 14 -handarat 14 -greenspace 14 -trans-canada 14 -brandts 14 -zucotti 14 -cage-like 14 -freres 14 -vakil 14 -fettuccine 14 -gaboon 14 -hobbs.co.uk 14 -bodemeister 14 -three-month-long 14 -neotrogla 14 
-gasko 14 -ceniceros 14 -n'jie 14 -e.t 14 -floor-sweeping 14 -glendinning 14 -mekota 14 -sias 14 -magnitude-8 14 -milby 14 -couldnâ 14 -teme 14 -chemaly 14 -100.4 14 -kopra 14 -bebras 14 -schachter 14 -eveline 14 -biography.com 14 -1.5-litre 14 -s.d. 14 -roget 14 -espadrilles 14 -trawally 14 -grood 14 -padania 14 -npes 14 -pigging 14 -nodule 14 -walkin 14 -exonerees 14 -sunoco 14 -textural 14 -earthling 14 -cuvelier 14 -baillieu 14 -chambre 14 -nudie 14 -couloir 14 -cozens 14 -frickin 14 -viktoriya 14 -mmmmm 14 -multi-engine 14 -thicknesses 14 -vinecki 14 -ex-city 14 -regime-controlled 14 -anke 14 -kashani 14 -lucido 14 -lee-ann 14 -sunan 14 -fasd 14 -bachata 14 -1592 14 -oscillate 14 -medians 14 -non-viable 14 -dushyant 14 -croute 14 -restyled 14 -renteria 14 -bihe 14 -noncontagious 14 -poyner 14 -58-year 14 -gr4s 14 -1998-2004 14 -cecilie 14 -accost 14 -condren 14 -insider-trading 14 -toshi 14 -cardioverter 14 -dms.article.init 14 -mazzola 14 -insensible 14 -billesley 14 -aleah 14 -digitalis 14 -over-80s 14 -11.18 14 -coccyx 14 -jessamine 14 -moradabad 14 -memeger 14 -trichomoniasis 14 -chip-and-pin 14 -vivaro 14 -bordo 14 -race2recovery 14 -lewinson 14 -671,000 14 -anaplastic 14 -guttridge 14 -jambos 14 -brevan 14 -215m 14 -sehat 14 -nebulizer 14 -vaughters 14 -mommsen 14 -tfg 14 -spoken-word 14 -bednarz 14 -recalculated 14 -monosomy 14 -sniffling 14 -citrix 14 -headington 14 -sneed 14 -6-foot-9 14 -leeanna 14 -eary 14 -jibo 14 -groundnut 14 -34-27 14 -journee 14 -karpaty 14 -racher 14 -110,000-a-year 14 -sanjot 14 -oxidised 14 -hyper-competitive 14 -fifties-style 14 -long-missing 14 -l'abidine 14 -fargam 14 -http://nbclosangeles.com 14 -shuhandler 14 -lasserre 14 -wygant 14 -burnishing 14 -lipis 14 -once-close 14 -cramond 14 -bampfylde 14 -chadbourne 14 -proposer 14 -torrentes 14 -ooten 14 -yamalo-nenets 14 -alward 14 -giovane 14 -ganjam 14 -1,088 14 -dieli 14 -prescience 14 -distresses 14 -thesiger 14 -beatify 14 -perfector 14 -hudgins 14 -schwabing 14 -you-know-what 14 -mawrey 14 -mesh-like 14 -zeeland 14 -salthouse 14 -imprinting 14 -d'vorak 14 -sarukhan 14 -red-card 14 -77,500 14 -fainga'a 14 -grossers 14 -noirs 14 -narborough 14 -rocketry 14 -euro-zone 14 -patentable 14 -bhatupa 14 -micula 14 -willcocks 14 -cancri 14 -wittig 14 -miniaturised 14 -dango 14 -propensities 14 -cosmeditour 14 -al-kabir 14 -pettyfer 14 -exacerbation 14 -gottschalk 14 -haileybury 14 -kumaris 14 -pebbled 14 -sadiquallah 14 -falak 14 -kresty 14 -14,250 14 -margit 14 -corncobs 14 -jonan 14 -15/8 14 -planche 14 -liveson 14 -win-loss 14 -shylea 14 -jones-drew 14 -l7 14 -l8 14 -asturian 14 -dammann 14 -kaylei 14 -twosie 14 -holier 14 -guardedly 14 -goal-scorers 14 -turkmens 14 -writebols 14 -coexisted 14 -jardins 14 -romona 14 -pall-bearers 14 -adjei 14 -head-to-heads 14 -quevedo 14 -flounce 14 -hussein-era 14 -bogen 14 -ncsl 14 -kesho 14 -octocopter 14 -tarantola 14 -414,000 14 -lastorino 14 -vinoodh 14 -somabar 14 -karra 14 -amantle 14 -jolo 14 -fence-jumper 14 -accor 14 -marcoule 14 -3663 14 -negromonte 14 -dellamonica 14 -statecraft 14 -re-telling 14 -bodger 14 -wollman 14 -penalty-shootout 14 -non-scientific 14 -banh 14 -swearingen 14 -silberstein 14 -sabbatini 14 -cadences 14 -scud-type 14 -marbaix 14 -close-run 14 -u.s.-trained 14 -tarp-covered 14 -dunwell 14 -elcano 14 -qasam 14 -paschal 14 -vanier 14 -jomsom 14 -reynard 14 -smouldered 14 -delattre 14 -bitti 14 -fire-proof 14 -rodarte 14 -jobrani 14 -favourability 14 -phyllisity 14 -blaer 14 -bcb 14 -reverently 14 -three-plus 14 -levassor 14 -wikibear 
14 -penello 14 -doillon 14 -wheyhey 14 -cojocaru 14 -12.28 14 -portuguese-speaking 14 -bullhorns 14 -zeita 14 -nitzan 14 -maruf 14 -healdsburg 14 -blythman 14 -gustard 14 -hanauer 14 -vonachen 14 -disney-style 14 -brissette 14 -lead-based 14 -stratham 14 -7:29 14 -7:28 14 -mid-latitude 14 -rajya 14 -trachelectomy 14 -macmurray 14 -ice-strengthened 14 -trueview 14 -karon 14 -crisscrosses 14 -pennefather 14 -pongi 14 -dharmana 14 -uzma 14 -sopping 14 -delagrange 14 -suit-wearing 14 -nissim 14 -moutiers 14 -yavlinsky 14 -dmytryszyn 14 -domingos 14 -therapeutically 14 -splutter 14 -waifish 14 -17-6 14 -17-8 14 -seizure-like 14 --38 14 --34 14 -gianelli 14 -encloses 14 -sudha 14 -yarema 14 -rinchich 14 -hughes-hallett 14 -polom 14 -rems 14 -spotlessly 14 -well-advised 14 -pbdes 14 -porquerolles 14 -408-foot 14 -persad 14 -kelham 14 -al-ghanam 14 -parkinsons 14 -comprehended 14 -salamis 14 -oyama 14 -natal-san 14 -azog 14 -broughty 14 -cully 14 -0700 14 -saenz-tamez 14 -savary 14 -bagatelle 14 -dascombe 14 -6-years-old 14 -waggonway 14 -hogged 14 -levonelle 14 -oedipus 14 -jasleen 14 -maryum 14 -wcbd 14 -ex-special 14 -cambuslang 14 -sener 14 -ahve 14 -battuello 14 -schlemmers 14 -devisingh 14 -unrecoverable 14 -santoyo 14 -karakul 14 -latrez 14 -caio 14 -mckellan 14 -1999-00 14 -geismar 14 -gerstle 14 -mcanally 14 -3-to-1 14 -tehran-based 14 -i-20 14 -longines 14 -woolmer 14 -vimy 14 -methylmercury 14 -ponferrada 14 -chatterboxes 14 -31c 14 -hubristic 14 -hinchinbrook 14 -cabers 14 -lacma 14 -bugaev 14 -flours 14 -holomisa 14 -cruzes 14 -romanista 14 -swinnerton 14 -4gee 14 -bugzee 14 -flashman 14 -iby 14 -stegall 14 -split-toe 14 -ife 14 -chillery 14 -fatto 14 -11-match 14 -'57 14 -chippies 14 -ganchi 14 -9:57 14 -temel 14 -runyon 14 -minsiter 14 -andreena 14 -biodefense 14 -grinner 14 -eyewatering 14 -2008-10 14 -hafer 14 -mulaudzi 14 -16lb 14 -consultant-led 14 -50,000,000 14 -proxima 14 -kubota 14 -hamida 14 -gratefulness 14 -54p 14 -5v 14 -ronzulli 14 -gun-for-hire 14 -jados 14 -dacic 14 -xanthe 14 -vicca 14 -yamin 14 -2004-06 14 -record-shattering 14 -ten-part 14 -sit-on 14 -fissas 14 -batkivshchyna 14 -yonathan 14 -humidor 14 -laiyla 14 -nazi-style 14 -glucosinolates 14 -sifuna 14 -c&c 14 -hoyo 14 -jolean 14 -putintseva 14 -filippis 14 -gyamfi 14 -sarveswaran 14 -hypothesizes 14 -donavan 14 -not-spots 14 -hopp 14 -iilgner 14 -klonda 14 -cordileone 14 -recurs 14 -squinch 14 -taormino 14 -coln 14 -romano-british 14 -saremi 14 -8:48 14 -government-to-government 14 -awakenings 14 -shake-and-bake 14 -roxon 14 -ashraful 14 -27-28 14 -three-setter 14 -index-linked 14 -superhumans 14 -oguzhan 14 -ekangamene 14 -darfuris 14 -arvidson 14 -1555 14 -selectable 14 -richville 14 -purgatorius 14 -pontbriand 14 -stelle 14 -melanocytes 14 -reinaud 14 -antwaun 14 -brocquy 14 -kemboi 14 -marsa 14 -rez 14 -ree 14 -redshirted 14 -mikhailovsky 14 -4:54 14 -toggling 14 -risius 14 -limata 14 -likley 14 -heavily-armoured 14 -caulk 14 -techy 14 -brokerages 14 -same-gender 14 -zimet 14 -kloehn 14 -faily 14 -al-tikriti 14 -wermke 14 -watchin 14 -erna 14 -cyberweapons 14 -ahae 14 -switchbacks 14 -rackety 14 -requena 14 -abse 14 -shure 14 -inui 14 -manvel 14 -mega-yachts 14 -roy-chowdhury 14 -lardy 14 -canin 14 -torv 14 -greenbush 14 -whirley 14 -super-villain 14 -ballan 14 -lyminge 14 -mbna 14 -enrollee 14 -ahmadi-roshan 14 -unionville 14 -phalaenopsis 14 -praus 14 -rhyne 14 -chanderpaul 14 -diwaniya 14 -phillipps 14 -counteracted 14 -moonwalking 14 -prognosticating 14 -campolongo 14 -cwmcarn 14 -waza 14 -good-luck 
14 -tranquillo 14 -brittnacher 14 -planeload 14 -3.39 14 -missing-child 14 -counter-clockwise 14 -non-practicing 14 -vaghela 14 -seiden 14 -kunshan 14 -arry 14 -re-housed 14 -derivation 14 -crip 14 -spoerry 14 -dafniya 14 -kushwaha 14 -massino 14 -ryng 14 -parave 14 -cybersex 14 -nangle 14 -krystina 14 -infringers 14 -oyewole 14 -wackenhut 14 -committeeman 14 -half-moon 14 -masayuki 14 -gisondi 14 -hunter-killer 14 -outlasting 14 -schloter 14 -denominated 14 -wilsonville 14 -7.31 14 -mccown 14 -mennim 14 -ludington 14 -acbps 14 -sladek 14 -woking-based 14 -zrioul 14 -turturro 14 -n&n 14 -travelogue 14 -methow 14 -consumer-driven 14 -drug-producing 14 -restyle 14 -denigrates 14 -frappier 14 -kostic 14 -mallenco 14 -16-8 14 -16-9 14 -rushona 14 -icelander 14 -bennett-jenkins 14 -all-england 14 -hypocrisies 14 -braindead 14 -janelia 14 -lysebettens 14 -cathryn 14 -anstruther-gough-calthorpe 14 -maylanie 14 -lichsteiner 14 -kameda 14 -30-tonne 14 -marvelously 14 -spago 14 -sensata 14 -lockups 14 -vinita 14 -huashan 14 -light-fingered 14 -daquan 14 -7-years-old 14 -h3c 14 -gancz 14 -post-concussion 14 -rozeman 14 -thomasina 14 -tatalova 14 -pernell 14 -ksc 14 -bellringers 14 -mccart 14 -whiteface 14 -xcaret 14 -wates 14 -quacks 14 -tulleken 14 -infective 14 -semiprofessional 14 -bangour 14 -locksmiths 14 -acquiescing 14 -priceline.com 14 -pro-social 14 -hardiness 14 -http://nbcdfw.com 14 -m-16s 14 -1-ton 14 -155ft 14 -wickler 14 -600-lb 14 -starpath 14 -rowaida 14 -rope-a-dope 14 -lepine 14 -hairpieces 14 -hoyda 14 -jobin 14 -courant.com 14 -tren 14 -voz 14 -barban 14 -mistral-class 14 -croplands 14 -soomro 14 -upski 14 -kammler 14 -potties 14 -vistors 14 -kevo 14 -westphalia 14 -monster-in-law 14 -yeatts 14 -ice-bucket 14 -blini 14 -lobrutto 14 -super-featherweight 14 -pre-olympics 14 -koiter 14 -minns 14 -domingues 14 -anah 14 -mobileme 14 -pozuelo 14 -enforcements 14 -back-slapping 14 -citymanchester 14 -bedevilled 14 -rovigo 14 -universitat 14 -buddh 14 -666.66 14 -1570 14 -1577 14 -1,127 14 -cunneely 14 -unequaled 14 -19,300 14 -disfavor 14 -medjani 14 -directionless 14 -bradgate 14 -omniscient 14 -rgb 14 -gomi 14 -planer 14 -broderie 14 -itani 14 -damascene 14 -blue-light 14 -269,000 14 -haylemaryam 14 -anti-smuggling 14 -wadhurst 14 -30-mph 14 -ripens 14 -612,000 14 -xxi 14 -cdebaca 14 -semi-literate 14 -moseman 14 -lovells 14 -wynd 14 -lwin 14 -rossett 14 -deonte 14 -moorehead 14 -wingwalking 14 -customer-friendly 14 -pallor 14 -mansolillo 14 -connotes 14 -spammer 14 -rightwing 14 -abergele 14 -mallets 14 -tope 14 -friday-night 14 -couriering 14 -palop 14 -douala 14 -ninth-place 14 -4children 14 -toptal 14 -backhanders 14 -izzadeen 14 -single-malt 14 -gletty 14 -leeds-bradford 14 -polynesians 14 -destructed 14 -continence 14 -weaving-shorrocks 14 -cycliste 14 -lavishly-furnished 14 -six-fight 14 -raqa 14 -nys 14 -kwek 14 -break-point 14 -unromantic 14 -armagnac 14 -gigafactory 14 -parwani 14 -wulchak 14 -glos. 
14 -troms 14 -log-ins 14 -mamales 14 -snowtown 14 -obakin 14 -schrute 14 -knopps 14 -junctional 14 -winddancer 14 -sadlier 14 -atchoum 14 -theknot.com 14 -aif 14 -cebit 14 -in-credit 14 -1,800-year-old 14 -jump-off 14 -andrada 14 -spurgeon 14 -aped 14 -harlie 14 -well-functioning 14 -swanner 14 -zaleski 14 -naimat 14 -puppie 14 -mctell 14 -bandwith 14 -godaddy.com 14 -11-man 14 -webgl 14 -syndey 14 -synder 14 -harriss 14 -slammers 14 -navara 14 -carcassonne 14 -satran 14 -samana 14 -al-gharbi 14 -icrar 14 -barris 14 -occipital 14 -dafne 14 -hakala 14 -party-led 14 -giffa 14 -tchividjian 14 -hualapai 14 -hykeham 14 -countywide 14 -post-leveson 14 -orgasmed 14 -argh 14 -dodwell 14 -moraga 14 -phalatse 14 -acknowledgments 14 -three-metres 14 -oversteer 14 -manacci 14 -70k 14 -sharleen 14 -sciame 14 -killelea 14 -ecologo 14 -lanzinger 14 -nilly 14 -white-skinned 14 -prensky 14 -ex-stripper 14 -netbrain 14 -airfoil 14 -evgeniya 14 -downunder 14 -bowsprit 14 -cragun 14 -pieres 14 -canopic 14 -polarize 14 -nusoj 14 -dziegielewska 14 -six-over-par 14 -brekke 14 -synthetics 14 -goldwyn 14 -horseguards 14 -lower-fat 14 -1,367 14 -whiffs 14 -kayak.com 14 -9g 14 -ofcourse 14 -fosh 14 -witts 14 -shereen 14 -wedding-related 14 -fluted 14 -kokomoor 14 -eunson 14 -baseballer 14 -road-testing 14 -meekulu 14 -nihang 14 -exiling 14 -dewaegeneire 14 -conserves 14 -million-worth 14 -caree 14 -midamar 14 -troadec 14 -laplante 14 -minoans 14 -budgens 14 -letarnec 14 -nebuliser 14 -20-over 14 -callejas 14 -line-item 14 -moya-smith 14 -barkan 14 -leetch 14 -kc-30a 14 -undifferentiated 14 -bexarotene 14 -5:35 14 -goom 14 -slithery 14 -inverlochy 14 -4:12 14 -honeyed 14 -rutt 14 -akureyri 14 -billen 14 -llwyd 14 -didonato 14 -blatchford 14 -afrasia 14 -30-30 14 -topsham 14 -wessington 14 -metoposaurus 14 -cartwheeling 14 -akdeniz 14 -allrounder 14 -maisch 14 -9.06 14 -8.43 14 -8.48 14 -prognoses 14 -pot-holed 14 -cobblestoned 14 -burkholder 14 -matenaer 14 -one-fingered 14 -9per 14 -plexus 14 -hot-car 14 -hypervenom 14 -fane 14 -ravenstonedale 14 -serbu 14 -bupa-run 14 -dnf 14 -kraig 14 -semb 14 -mccumber 14 -anousone 14 -benesse 14 -get-out-of-jail-free 14 -haiyang 14 -spiolek 14 -1:57 14 -1,228 14 -devora 14 -kswo 14 -3.88 14 -84.5 14 -wretchedly 14 -bedales 14 -saffire 14 -gamaliel 14 -jabaliya 14 -sheahen 14 -treviño 14 -publicly-traded 14 -3.71 14 -3.78 14 -787-10 14 -emma-jayne 14 -brucellosis 14 -nabarro 14 -baysore 14 -11-member 14 -40-room 14 -ganfield 14 -polunsky 14 -banbury-based 14 -t-band 14 -borderless 14 -gez 14 -asrawe 14 -'86 14 -orlistat 14 -overplaying 14 -collick 14 -56.2 14 -kotter 14 -airventure 14 -summernats 14 -ohhhh 14 -yumurtalik 14 -600-plus 14 -dredd 14 -wasdell 14 -second-innings 14 -girvan 14 -chespirito 14 -coot 14 -cobram 14 -knighten 14 -balote 14 -one-meter 14 -peckover 14 -farsighted 14 -pieterse 14 -krayem 14 -lasula 14 -tehrani 14 -blue-haired 14 -vavrinec 14 -lemar 14 -strassheim 14 -sniffen 14 -honey-rae 14 -cabell 14 -cavazos 14 -worry-free 14 -mayhle 14 -snow-dusted 14 -pre-campaign 14 -sek 14 -matura 14 -tyreece 14 -hatreds 14 -all-aluminium 14 -pro-europeans 14 -pennells 14 -multi-planet 14 -hpc 14 -mawby 14 -mcfarlin 14 -fermor 14 -aspirated 14 -coomaraswamy 14 -piacenza 14 -non-core 14 -un-australian 14 -hillclimb 14 -boeke 14 -laymond 14 -p-plate 14 -scalford 14 -camouflage-clad 14 -vuong 14 -speedwell 14 -tynan 14 -76m 14 -grayer 14 -all-pervasive 14 -double-parked 14 -latvian-born 14 -altamonte 14 -maen 14 -1.6-liter 14 -schonfeld 14 -kellam 14 -fonteviot 14 
-most-nominated 14 -wiko 14 -kindnesses 14 -brain-machine 14 -anti-climactic 14 -berlanga 14 -bellone 14 -doonan 14 -torn-up 14 -skiwear 14 -afe 14 -afr 14 -italico 14 -aldis 14 -work-based 14 -vajiralongkorn 14 -giantkilling 14 -goodge 14 -ex-paratrooper 14 -yawar 14 -2:27 14 -2:24 14 -wwt 14 -dropper 14 -arand 14 -lagerstedt 14 -michelob 14 -michelina 14 -1,305 14 -raggle 14 -a414 14 -medicentre 14 -arlan 14 -dolphins1925 14 -bayram 14 -cultish 14 -bakiyev 14 -romney/ryan 14 -tutik 14 -castellacci 14 -mehrdad 14 -lifebelt 14 -barbel 14 -cicig 14 -martelle 14 -250,000-a-year 14 -sanchez-casal 14 -casamigos 14 -schawbel 14 -hamre 14 -bemusing 14 -mcgriff 14 -annamaria 14 -staveley 14 -katania 14 -71.1 14 -baronnet 14 -longbow 14 -ketan 14 -glossybox 14 -kemball-cook 14 -untypical 14 -plateful 14 -wajeha 14 -finishings 14 -six-monthly 14 -weijing 14 -ruidoso 14 -whillans 14 -kirkintilloch 14 -re-wiring 14 -jankowitz 14 -adamstown 14 -suckered 14 -punch-drunk 14 -40,500 14 -hiv-aids 14 -italian-americans 14 -lestrange 14 -picchio 14 -4:37 14 -gavaskar 14 -valdis 14 -erogenous 14 -6.39 14 -lizard-like 14 -israr 14 -sell-outs 14 -lobs 14 -immunoglobulin 14 -onley 14 -riffe 14 -amoxicillin 14 -8.65 14 -circus-like 14 -cbs/new 14 -contrive 14 -quavering 14 -nitpicking 14 -bojangles 14 -breona 14 -sevenfold 14 -edy 14 -kudzu 14 -spotkick 14 -east-bound 14 -winshape 14 -16,700 14 -walser 14 -norlander 14 -v-day 14 -becuase 14 -formalwear 14 -well-controlled 14 -cf-18 14 -82,500 14 -pre-schools 14 -kippa 14 -lerry 14 -5:09 14 -glavine 14 -1,207 14 -energy-efficiency 14 -ctu 14 -demobilization 14 -bachini 14 -d'elia 14 -buckweed 14 -mamounia 14 -overtired 14 -noltin 14 -re-shuffle 14 -full-beam 14 -samey 14 -samen 14 -anti-machete 14 -onagawa 14 -laggard 14 -mitrione 14 -mischaracterization 14 -misspell 14 -broder 14 -reemergence 14 -duchatelet 14 -memrise 14 -ephesians 14 -riffed 14 -vitara 14 -softly-softly 14 -eyepiece 14 -quackers 14 -unreceptive 14 -shawne 14 -doobie 14 -telchadder 14 -darul 14 -acetyl 14 -1,021 14 -1,026 14 -humblest 14 -car-bomb 14 -ilg 14 -tondar 14 -6.85 14 -haisheng 14 -woos 14 -wook 14 -toca 14 -kamphuis 14 -micklewhite 14 -fragoso 14 -uwm 14 -coconutters 14 -flame-grilled 14 -68.8 14 -83million 14 -jean-julien 14 -carnie 14 -2,000-page 14 -all-season 14 -hulett 14 -multihull 14 -27-storey 14 -buaben 14 -cisotti 14 -kumbaya 14 -tawanda 14 -39a 14 -misreported 14 -lroc 14 -litvinov 14 -chelsa 14 -fenlon 14 -side-lines 14 -85kg 14 -think-tanks 14 -brushette 14 -ackerberg 14 -seatac 14 -actress-singer 14 -danie 14 -duleep 14 -a340s 14 -fearsomely 14 -carnforth 14 -show-stopper 14 -immune-system 14 -402,000 14 -cos. 
14 -khloé 14 -badenhorst 14 -shoot-to-kill 14 -jaw-droppingly 14 -hegazy 14 -mardin 14 -globe-winning 14 -wjtv 14 -rawmarsh 14 -stockard 14 -kathryne 14 -planeloads 14 -88.4 14 -goreaciuc 14 -hasanat 14 -400-year 14 -267-page 14 -bienstock 14 -55in 14 -nouk 14 -17-stone 14 -amethysts 14 -gartenstein-ross 14 -fantauzzo 14 -570million 14 -suthamtewakul 14 -vandam 14 -kayson 14 -sudikova 14 -tolkein 14 -esportiva 14 -bernek 14 -2:03 14 -alphadog 14 -bastrykin 14 -carparks 14 -1:17 14 -excommunicate 14 -posit 14 -hironimus 14 -egg-freezing 14 -hickton 14 -250-pound 14 -hilltout 14 -zwicky 14 -kelsea 14 -metallurgy 14 -angiography 14 -shame-faced 14 -23-10 14 -kauser 14 -leicester-based 14 -point-by-point 14 -nottm 14 -black-rimmed 14 -ar-15s 14 -gregan 14 -ocasek 14 -saxo-tinkoff 14 -belches 14 -coracle 14 -unscreened 14 -benyak 14 -sutured 14 -jeffry 14 -1,100-mile 14 -1,545 14 -six-over 14 -klingenschmitt 14 -konart 14 -stltoday.com 14 -200-a-week 14 -blatche 14 -test-takers 14 -pro-cannabis 14 -karmali 14 -sardinians 14 -kadyhrob 14 -okine 14 -hashes 14 -farrimond 14 -latynina 14 -buggers 14 -seagoing 14 -zamalka 14 -dilates 14 -far-infrared 14 -brynin 14 -airhead 14 -chippie 14 -equidistant 14 -slake 14 -ex-intelligence 14 -heedless 14 -madinat 14 -pelissier 14 -tupper 14 -espley 14 -welham 14 -xaver 14 -tabraue 14 -non-economic 14 -onwurah 14 -hardacre 14 -nmecha 14 -9.49 14 -warrenton 14 -alhenawi 14 -triassic-jurassic 14 -keyser 14 -carlisa 14 -pmp 14 -pmk 14 -gallo-chasanoff 14 -magnuson 14 -160lbs 14 -overage 14 -county-by-county 14 -crossville 14 -ef0 14 -ef1 14 -christoffer 14 -harkened 14 -lilliputian 14 -bronc 14 -68.6 14 -cobbling 14 -levigh 14 -noynoy 14 -xd 14 -scrupulosity 14 -fixating 14 -then-french 14 -kitaoka 14 -piscataway 14 -tresco 14 -elfyn 14 -hallman 14 -gamine 14 -jeremic 14 -lomaglio 14 -1:11 14 -chiarini 14 -writedown 14 -stratotanker 14 -trl 14 -95billion 14 -acclamation 14 -offerton 14 -intermediates 14 -charmin 14 -chaudry 14 -steinbauer 14 -ficarelli 14 -ivin 14 -swissair 14 -snow-clearing 14 -dead-eyed 14 -ixtapa 14 -rijn 14 -36-minute 14 -zimmers 14 -single-runway 14 -greenstein 14 -skifest 14 -chitchat 14 -omoyele 14 -uncertified 14 -paleracio 14 -livepool 14 -rachman 14 -chur 14 -molin 14 -nizeyimana 14 -1940s-style 14 -allbaugh 14 -moghe 14 -long-buried 14 -stanchart 14 -caltrops 14 -humped 14 -neater 14 -beguiled 14 -ayley 14 -alicja 14 -omidele 14 -45-mile 14 -kemnitz 14 -toal 14 -al-bukamal 14 -olivia-leigh 14 -europelta 14 -kapikanya 14 -laes 14 -billups 14 -stefanoni 14 -stand-offish 14 -crepey 14 -exp 14 -montevrain 14 -holtzbergs 14 -countermeasure 14 -seismometers 14 -seven-pound 14 -moldes 14 -mcclinton 14 -birthdates 14 -sadeghnia 14 -haniszewski 14 -makunova 14 -underinvestment 14 -slightly-built 14 -2,395 14 -esser 14 -wuzhen 14 -23mm 14 -danke 14 -pineal 14 -eu/imf 14 -adshead 14 -hethmon 14 -gaspari 14 -106906 14 -altham 14 -zhuravsky 14 -mogollon 14 -half-timbered 14 -mision 14 -salif 14 -gianotti 14 -luverne 14 -sowton 14 -veliz 14 -pannu 14 -caizergues 14 -nullification 14 -ad-rock 14 -p.t. 
[deleted word-frequency vocabulary data continues: several thousand entries, one "-token count" line each in the original diff, covering tokens with frequency counts 14 and 13, from "-ten-and-a-half 14" through "-0.47 13"]
13 -0.42 13 -agyare 13 -arvidsson 13 -detriot 13 -jerice 13 -sackboy 13 -jalinski 13 -co-piloting 13 -low-heeled 13 -rishon 13 -contee 13 -splotchy 13 -devengoechea 13 -lecco 13 -hmc 13 -over-tired 13 -trotman 13 -eared 13 -ketland 13 -cosworth 13 -dickies 13 -alsaud 13 -sleep/wake 13 -neurocysticercosis 13 -piscoglio 13 -game-show 13 -panico 13 -bernardini 13 -huguenin 13 -homeopath 13 -playability 13 -romanée-conti 13 -bisbee 13 -teacakes 13 -ghost-written 13 -cig 13 -lightweights 13 -emlyn-jones 13 -admited 13 -s-shape 13 -laurent-perrier 13 -do-not-resuscitate 13 -dieter-eckerdt 13 -shesho 13 -julianni 13 -tehreek 13 -kuck 13 -jinns 13 -lower-than-average 13 -candy-rae 13 -81.2 13 -howlin 13 -euro-atlantic 13 -vitruvian 13 -zyban 13 -fedexfield 13 -ukelele 13 -hincker 13 -druggie 13 -cavalrymen 13 -unflinchingly 13 -pakistani-based 13 -rse 13 -yntema 13 -defensor 13 -tashmoo 13 -24-page 13 -13-2 13 -13-5 13 -rashie 13 -neilum 13 -oxalate 13 -popularising 13 -psst 13 -paddle-boarding 13 -caissons 13 -46cm 13 -colombian-born 13 -400-metre 13 -randoseru 13 -granqvist 13 -jouanno 13 -stop-offs 13 -gallow 13 -shaffi 13 -registe 13 -cabbagetown 13 -pharoahs 13 -2005-2010 13 -ebersman 13 -garmisch 13 -hirschfield 13 -61p 13 -21-18 13 -eligo 13 -theremin 13 -eastpointe 13 -test-driving 13 -raiden 13 -eighth-ranked 13 -anti-saleh 13 -daiwa 13 -sandersi 13 -reconfiguring 13 -u-verse 13 -al-shammari 13 -mccaughan 13 -grzywacz 13 -43-page 13 -twin-propeller 13 -holzbach 13 -wjrt 13 -mac-10 13 -brooklynites 13 -heyward 13 -naker 13 -blr 13 -grosgrain 13 -backwell 13 -pcsk9 13 -pashminas 13 -monsoonal 13 -fourth-set 13 -paddon 13 -ledwick 13 -bloody-minded 13 -dlamini-manaway 13 -stoppelkamp 13 -masud 13 -kareemah 13 -zil 13 -30,000-strong 13 -five-over-par 13 -merk 13 -astronautical 13 -rasta 13 -armato 13 -death-with-dignity 13 -98mph 13 -gymkhana 13 -glenshee 13 -17kg 13 -bas-reliefs 13 -salling 13 -knud 13 -rebic 13 -gela 13 -penetrators 13 -saifullah 13 -phasuk 13 -paintin 13 -fullbrook 13 -64billion 13 -binning 13 -reappointment 13 -waldrip 13 -morar 13 -glints 13 -argentinian-born 13 -sincura 13 -6:33 13 -iran-based 13 -refutation 13 -terranea 13 -behooves 13 -fusari 13 -airblade 13 -wikipedians 13 -embossing 13 -third-richest 13 -dav 13 -tividale 13 -euskadi 13 -targetman 13 -langhorne 13 -coverall 13 -huddy 13 -musc 13 -gochanour 13 -concetta 13 -mentis 13 -human-shaped 13 -specially-constructed 13 -bjerke 13 -reverberation 13 -darrius 13 -teacher-led 13 -debanks 13 -off-the-plan 13 -noten 13 -byo 13 -eliassen 13 -micro-home 13 -raimund 13 -ah-1w 13 -golec 13 -appropriators 13 -guardian/icm 13 -blunter 13 -villacanas 13 -paedophilic 13 -klempner 13 -stone-knapping 13 -movehub 13 -tamarindo 13 -load-bearing 13 -terrorista 13 -staglin 13 -48in 13 -degreasing 13 -mega-drought 13 -hyperthyroidism 13 -outsources 13 -werbowy 13 -gerin-ricard 13 -dubosarsky 13 -3per 13 -lavau 13 -power-broker 13 -fbr 13 -buzzcocks 13 -gianstefani 13 -malina 13 -plagiarised 13 -catherin 13 -zacynthius 13 -six-seat 13 -ansun 13 -youssuf 13 -dammaj 13 -goose-stepped 13 -boldrick 13 -bodmer 13 -mujahidin 13 -vermont-based 13 -she-said 13 -keh 13 -harbach 13 -booboo 13 -pawlak 13 -khazir 13 -egg-throwing 13 -mauderlys 13 -bordello 13 -brienza 13 -ligatures 13 -amarvilas 13 -soldeu 13 -3,856 13 -villiger 13 -floppy-haired 13 -louann 13 -hurds 13 -pitch-dark 13 -7.02 13 -neuroscientific 13 -batstone 13 -aggers 13 -manouevre 13 -snow-topped 13 -12-3 13 -post-viewing 13 -fotokol 13 -cuzick 13 -alberdi 13 -dzsudzsak 13 
-inasmuch 13 -meizhen 13 -hydroponically 13 -subban 13 -affilliate 13 -banque 13 -mtalimanja 13 -shalgham 13 -umer 13 -coitus 13 -anti-pyongyang 13 -patchell 13 -roskam 13 -salalah 13 -noordeinde 13 -rectitude 13 -perms 13 -zyklon 13 -snappycam 13 -aircraftsman 13 -laclere 13 -half-measures 13 -automates 13 -arna 13 -hypermarkets 13 -rafaella 13 -4.04 13 -decant 13 -katsu 13 -frizell 13 -ivanschitz 13 -embeddable 13 -aranovsky 13 -nemeses 13 -squalene 13 -gcsb 13 -riverkeeper 13 -antwan 13 -tharp 13 -5,280 13 -tannen 13 -20-day-old 13 -unpicked 13 -enneagram 13 -barceló 13 -reassign 13 -murandu 13 -33-page 13 -smeaton 13 -disassociation 13 -tube-shaped 13 -umashankar 13 -coronas 13 -boccia 13 -fasters 13 -cv-22 13 -pre-departure 13 -haidrasl 13 -kellyville 13 -backdate 13 -imperfectly 13 -trussville 13 -al-hujaili 13 -chavez-nelson 13 -minuum 13 -moscato 13 -balkh 13 -ianson 13 -deadfall 13 -lanzo 13 -u.s.-soviet 13 -money-grabber 13 -nejloveanu 13 -uk-linked 13 -sovaldi 13 -scotchford 13 -laurin 13 -ramalho 13 -oliveri 13 -olivers 13 -molenbeek 13 -2,270 13 -casher 13 -godambe 13 -nahin 13 -gastrostomy 13 -bako 13 -flat-bottomed 13 -mellat 13 -layabouts 13 -throckmorton 13 -wwmt 13 -kathrine 13 -carpentaria 13 -soul-crushing 13 -bushy-tailed 13 -athey 13 -46mph 13 -admonishes 13 -pint-size 13 -rakoci 13 -emulsified 13 -stoney-faced 13 -gtcw 13 -gtcs 13 -tideway 13 -whiles 13 -windows-based 13 -i-phone 13 -venditti 13 -cresta 13 -misanthrope 13 -35-44 13 -muqdadiya 13 -fonepad 13 -saleha 13 -shefford 13 -lakmas 13 -steveston 13 -badinter 13 -rnb 13 -a-ok 13 -dusen 13 -59ft 13 -cursi 13 -near-vertical 13 -pro-islamic 13 -orthotic 13 -mid-race 13 -greggsnut 13 -octobers 13 -okello 13 -weisure 13 -low-alcohol 13 -120cm 13 -afoa 13 -12,300 13 -sarraff 13 -fly-over 13 -christer 13 -swechha 13 -consumables 13 -freundlich 13 -ashin 13 -schwander 13 -hazout 13 -tahuri 13 -balaclava-wearing 13 -fenella 13 -radan 13 -styria 13 -eisinger 13 -bio-fuel 13 -pupcakes 13 -well-served 13 -gillmor 13 -4r 13 -30.50 13 -peine 13 -chrisopher 13 -jeanty 13 -matzke 13 -salida 13 -tarbosaurus 13 -2140 13 -barrel-bomb 13 -messis 13 -jadran 13 -oyez 13 -37in 13 -tip-toes 13 -378,000 13 -futenma 13 -lohmar 13 -armatix 13 -pullicino 13 -e-3d 13 -runty 13 -alwoodley 13 -porritt 13 -molino 13 -kolosova 13 -heartbreaks 13 -grassing 13 -al-sadah 13 -10.56 13 -chalkwell 13 -umut 13 -rensen 13 -film-related 13 -shirine 13 -puzzlephone 13 -nigerien 13 -conatser 13 -cbn 13 -untucked 13 -streicher 13 -mottistone 13 -mcgladrey 13 -glu 13 -benladghem 13 -gaouette 13 -24-ounce 13 -mouthwashes 13 -creepiness 13 -non-jury 13 -ex-mps 13 -coulda 13 -escapology 13 -sanpher 13 -pegula 13 -seston 13 -landsburg 13 -hokusai 13 -unevenness 13 -maillard 13 -ganso 13 -parables 13 -bfd 13 -1tb 13 -17.45 13 -12-17 13 -leino 13 -casbolt 13 -barwis 13 -175m 13 -1,740 13 -tyrolean 13 -kimbell 13 -misca 13 -lokon 13 -morrisoni 13 -glaciation 13 -co-publisher 13 -earth-shaking 13 -free-ranging 13 -goalkicking 13 -bungs 13 -humored 13 -kasperzak 13 -impracticable 13 -durley 13 -dellaventura 13 -rudrum 13 -muderis 13 -cluff 13 -halberstam 13 -shivashankar 13 -fortnight-long 13 -daler 13 -pro-rebel 13 -53.7 13 -mind-controlled 13 -consol 13 -gell 13 -jaxs 13 -70-68 13 -eyeborg 13 -graham-bailey 13 -weerasena 13 -elster 13 -podiatry 13 -katlego 13 -florias 13 -hir 13 -ultra-fit 13 -new-car 13 -130-mile 13 -gurr 13 -3,660 13 -balmond 13 -retina-tracking 13 -punning 13 -linnington 13 -1,118 13 -malavath 13 -rylie 13 -pre-employment 13 -wackiness 13 
-klingler 13 -biofluorescence 13 -ruzanna 13 -harrisonburg 13 -trafford-james 13 -leeching 13 -deuteronomy 13 -measles-mumps-rubella 13 -tremulous 13 -hodgsons 13 -co-plaintiffs 13 -co-discoverer 13 -launchpads 13 -blando 13 -cinder-block 13 -lurette 13 -chakrabarty 13 -farbstein 13 -+64 13 -sarae 13 -tubane 13 -primavera 13 -313,000 13 -alferi 13 -galsworthy 13 -aliso 13 -salvini 13 -d'oro 13 -oxenhope 13 -rwd 13 -seung-woo 13 -bidets 13 -kavalier 13 -trpm8 13 -gesu 13 -coogler 13 -#mh17 13 -bardwil 13 -teksta 13 -4-star 13 -pernod 13 -liberte 13 -adlam 13 -must-reads 13 -finkelhor 13 -proofread 13 -pertiwi 13 -slepski 13 -antlered 13 -huepetuhe 13 -castilian 13 -trimbitas 13 -rocketnews24 13 -sunhat 13 -over-water 13 -hinging 13 -keolis 13 -nitv 13 -rassi 13 -corpuz 13 -histiocytosis 13 -muscleman 13 -scad 13 -chicken-and-egg 13 -13th-placed 13 -market-driven 13 -desegregate 13 -169.99 13 -jova 13 -hanyu 13 -apert 13 -plonking 13 -lampshades 13 -substantiation 13 -giampiero 13 -jeromes 13 -1586 13 -pangea 13 -vicissitudes 13 -feehan 13 -detractor 13 -air-sea 13 -pennlive.com 13 -b-girls 13 -carluke 13 -pre-auction 13 -marandi 13 -factoid 13 -wkyc-tv 13 -jemal 13 -kalaidzhi 13 -sandifer 13 -dy 13 -flather 13 -bahram 13 -rukajarvi 13 -co-sleep 13 -trilling 13 -fils-aime 13 -bullet-holes 13 -lomo 13 -trypophobic 13 -ceelo 13 -heterogeneous 13 -mytheresa 13 -listowel 13 -pazar 13 -kick-starts 13 -eiseman 13 -wieseltier 13 -cameronians 13 -ashmead 13 -akard 13 -firehouses 13 -samokutyaev 13 -gandys 13 -agung 13 -quadrantid 13 -tiaamii 13 -mismatches 13 -nufc 13 -vogler 13 -video-gaming 13 -mallesh 13 -dex 13 -rajwinder 13 -5,000,000 13 -goldenhar 13 -minora 13 --22 13 -wxyz-tv 13 -montjiro 13 -mcilwee 13 -calarts 13 -rivera-pitre 13 -asaad 13 -inter-governmental 13 -chetnik 13 -year.the 13 -rehabs 13 -hydrological 13 -petapixel 13 -belittles 13 -keidar 13 -#skybluepink 13 -velmahos 13 -middlemore 13 -hania 13 -scheibel 13 -momoi 13 -institutionalization 13 -gavea 13 -hoyte 13 -vajazzle 13 -32,200 13 -15-8 13 -15-3 13 -shikha 13 -rodda 13 -ancell 13 -kennamer 13 -warkwickshire 13 -170billion 13 -chania 13 -5.53 13 -12-tonne 13 -fentress 13 -shanki 13 -morrocco 13 -hydroplaned 13 -tsuchida 13 -heitor 13 -nbc6 13 -gilan 13 -jalabert 13 -jaelise 13 -curled-up 13 -a.g. 
13 -fixed-price 13 -olimpicks 13 -balenziaga 13 -khajuria 13 -clostridia 13 -well-chosen 13 -bulgur 13 -mobula 13 -six-way 13 -granulosa 13 -squito 13 -zandio 13 -sturdevant 13 -hazelton 13 -raëlians 13 -uns 13 -pommie 13 -parasitical 13 -terrys 13 -bierdneau 13 -felman 13 -adur 13 -redgate 13 -hamburg-based 13 -unicorning 13 -bsee 13 -3s 13 -3l 13 -60-meter 13 -qwest 13 -good-time 13 -anti-bloomberg 13 -mareesha 13 -grellner 13 -nikolova-trask 13 -oberdorfer 13 -19-0 13 -worksite 13 -iab 13 -sers 13 -morant 13 -hapsburg 13 -constricts 13 -biomedicine 13 -kwiecien 13 -task-force 13 -5,750 13 -malyshev 13 -covance 13 -kooteninchela 13 -slowed-down 13 -topalov 13 -4.48 13 -eac 13 -lobbe 13 -9:43 13 -9:42 13 -pygott 13 -appomattox 13 -wending 13 -bakkour 13 -esty 13 -ayerza 13 -cost-savings 13 -calligraphers 13 -podgorica 13 -zalze 13 -thomspon 13 -servicemember 13 -10-10 13 -knauf 13 -taulant 13 -bachar 13 -rose-marie 13 -stieler 13 -talk-radio 13 -ward-buck 13 -hiwula 13 -brack 13 -yusufeli 13 -lib-dems 13 -0815 13 -murmuration 13 -symeou 13 -yeshe 13 -iosif 13 -etxeita 13 -2:55 13 -goldfinch 13 -insulin-like 13 -artless 13 -tainan 13 -haddioui 13 -friese-greene 13 -43ft 13 -whirred 13 -mallaby 13 -nebraska-based 13 -jlo 13 -imane 13 -unusually-shaped 13 -al-akhbar 13 -pre-loved 13 -tiberias 13 -maroni 13 -al-eryani 13 -2020health 13 -66.9 13 -austswim 13 -errs 13 -pellett 13 -vacates 13 -pokharel 13 -ivybridge 13 -pa3 13 -pekish 13 -dharmendra 13 -caminada 13 -81.3 13 -freakout 13 -wwf-uk 13 -pro-iranian 13 -1655 13 -mccarren 13 -ghayth 13 -macotakara 13 -gentian 13 -brooklynn 13 -vallely 13 -santer 13 -chettle 13 -finely-tuned 13 -strelka 13 -rosco 13 -huckelberry 13 -splashback 13 -feebly 13 -boyton 13 -verboten 13 -nozedar 13 -33lbs 13 -el-gezawi 13 -form-filling 13 -bluemotion 13 -democratise 13 -ivabradine 13 -one-pound 13 -bottos 13 -petman 13 -cruickshanks 13 -yellan 13 -varrichione 13 -c-fu 13 -biao 13 -kring 13 -hoger 13 -hooijdonk 13 -madisonville 13 -rb8 13 -microlipo 13 -ransomed 13 -speith 13 -petry 13 -flashers 13 -cyp2d6 13 -hubbard-wilson 13 -handsjuk 13 -mandia 13 -cullatori 13 -romines 13 -u.s.-canadian 13 -climie 13 -66,396 13 -perfectly-preserved 13 -tabuteau 13 -17,400 13 -resize 13 -akaichi 13 -belives 13 -7:42 13 -wibowo 13 -dziewit 13 -rimington 13 -preto 13 -407,000 13 -loafing 13 -sanso 13 -sparboe 13 -cdph 13 -witchy 13 -caldey 13 -rnoh 13 -schwank 13 -gamla 13 -internationalism 13 -johnson-freese 13 -british-flagged 13 -hamley 13 -quartermain 13 -kamooneh 13 -unrepeatable 13 -enlightens 13 -remnev 13 -ncds 13 -salesforce.com 13 -180c 13 -pro-gbagbo 13 -heazell 13 -naxalites 13 -torralba 13 -bodyboard 13 -connersville 13 -3.21 13 -piermont 13 -rudloff 13 -pccw 13 -delineated 13 -untrusted 13 -cyberbullied 13 -3:57 13 -jayes 13 -multi-mission 13 -mckeague 13 -nondefense 13 -bartnowski 13 -left-behind 13 -fox10 13 -5,000-a-year 13 -schepper 13 -flamethrowers 13 -ambroise 13 -bellchambers 13 -calf/shin 13 -c7 13 -end-run 13 -kitestring 13 -one-shouldered 13 -aeriel 13 -pruszynski 13 -#qantas 13 -sky-rocketing 13 -gascón 13 -bope 13 -nonpareil 13 -sim-only 13 -ranby 13 -low-temperature 13 -anons 13 -heydey 13 -grogginess 13 -darter 13 -loggia 13 -vaute 13 -zamani 13 -haulier 13 -self-assembling 13 -1,780 13 -sandt 13 -valte 13 -yevhenia 13 -jerram 13 -upf 13 -upa 13 -yoxall 13 -cottontail 13 -above-normal 13 -ted2010 13 -kusher 13 -fattier 13 -lindstedt 13 -kaylynn 13 -cornelis 13 -1453 13 -chango-alvarez 13 -biodegrade 13 -kimbrell 13 -82per 13 -livejournal 13 
-humidifier 13 -gentner 13 -kingsmead 13 -silversun 13 -lighter-weight 13 -boffin 13 -220m 13 -icf 13 -brisby 13 -shaktar 13 -hatten 13 -polonetsky 13 -feltgen 13 -beaune 13 -takayama 13 -gangmaster 13 -rectums 13 -pre-dynastic 13 -trunkfield 13 -grazie 13 -357,000 13 -gung 13 -doorframe 13 -11-story 13 -schlumberger 13 -fujiwara 13 -legesse 13 -carrieanne 13 -puddu 13 -no-entry 13 -rickett 13 -mappleton 13 -myung-bo 13 -badam 13 -100ft-long 13 -al-tahtawi 13 -saint-louis 13 -popup 13 -314,000 13 -phobos-ground 13 -egfr 13 -evenden 13 -sadden 13 -breightmet 13 -mauderly 13 -bowdery 13 -5-12 13 -alasa 13 -b-side 13 -brotherhood-backed 13 -knuth 13 -faultlines 13 -tff 13 -hauntings 13 -six-figures 13 -retrying 13 -gosden-trained 13 -life-ending 13 -kingi 13 -naki'o 13 -anisiobi 13 -pett 13 -7-23 13 -akaka 13 -3,000-5 13 -oid 13 -grube 13 -meas 13 -semo 13 -burgeoned 13 -avishai 13 -schelkunova 13 -osteogenesis 13 -stablemates 13 -ballgames 13 -montez 13 -bejarano 13 -commercializing 13 -rushin 13 -jory 13 -tippets 13 -369,000 13 -shiatsu 13 -c-cup 13 -bankson 13 -diamant 13 -oxana 13 -bouvay 13 -double-dipping 13 -rakhimova 13 -benbetka 13 -1,825 13 -vucetich 13 -milwaukie 13 -cleavage-sparing 13 -half-second 13 -wsvn.com 13 -phytoliths 13 -gottesman 13 -mercato 13 -oil-fired 13 -1543 13 -zebra-striped 13 -1,134 13 -delvonte 13 -sailosi 13 -business-savvy 13 -predestined 13 -20-ft 13 -lynsay 13 -4inch 13 -perise 13 -meebo 13 -thibault-lecuivre 13 -omara 13 -zhanaozen 13 -blowtorches 13 -corded 13 -ayyoub 13 -haleem 13 -mirada 13 -miroslaw 13 -pincott 13 -partakes 13 -onade 13 -mcbusted 13 -cornstarch 13 -svdr 13 -suntech 13 -harmonize 13 -gamecocks 13 -82.8 13 -triallist 13 -foldscope 13 -deguerin 13 -mirkan 13 -rinda 13 -serah 13 -mautum 13 -tallat 13 -gionta 13 -5.44 13 -surgically-enhanced 13 -vishwanath 13 -swierczynski 13 --60 13 -blois 13 -kubina 13 -bonnell 13 -hiawayi 13 -over-burdened 13 -horological 13 -600-a-night 13 -beria 13 -36,700 13 -bridet 13 -top-15 13 -foxgloves 13 -lomon 13 -chattaway 13 -nuerburgring 13 -health-food 13 -carmindy 13 -monceau 13 -ssd 13 -media-obsessed 13 -hink 13 -23rd-minute 13 -bottle-feed 13 -solari 13 -a.m.-4 13 -ichat 13 -markyate 13 -vaendre 13 -specialness 13 -sub-adult 13 -nicoise 13 -highfields 13 -herbstritt 13 -1300 659 467 13 -cockerton 13 -narweena 13 -5.19 13 -cernescu 13 -post-work 13 -vivier 13 -adey 13 -bocchi 13 -badauskas 13 -freerunners 13 -laeken 13 -home-court 13 -vellacott 13 -giorno 13 -three-breasted 13 -pontes 13 -al-jamri 13 -crausewell 13 -snow-packed 13 -oddysses 13 -coulby 13 -volterra 13 -githongo 13 -keeper-batsman 13 -quanell 13 -hant 13 -leaseholders 13 -deregister 13 -woonona 13 -al-nuaimi 13 -maurie 13 -evco 13 -gorings 13 -beledi 13 -kidizoom 13 -wiyanto 13 -korolczuk 13 -baodong 13 -ekdal 13 -kosciusko 13 -terrel 13 -mabry 13 -gun-slinging 13 -godín 13 -shahriar 13 -dorst 13 -quercus 13 -corrientes 13 -sjekloca 13 -sherburn 13 -dagworth 13 -plugins 13 -lichty 13 -wissous 13 -kahney 13 -state-of-the-nation 13 -4200 13 -clinkle 13 -leaf-tailed 13 -yanie 13 -31.50 13 -subletting 13 -acaye 13 -ravina 13 -coshocton 13 -patala 13 -hererra 13 -vat-free 13 -1,631 13 -amber-red 13 -pyres 13 -half-man 13 -22-time 13 -2.94 13 -2.93 13 -intrusiveness 13 -pineheath 13 -38-page 13 -oluwatobi 13 -2014-2020 13 -2007/2008 13 -kafranbel 13 -hard-shelled 13 -pre-emption 13 -foamed 13 -rakhimov 13 -margret 13 -spindles 13 -notonthehighstreet.com 13 -unbeliever 13 -hasselt 13 -feal 13 -hipparcos 13 -73m 13 -j1407b 13 -uniworld 13 -aisons 
13 -30metres 13 -piemonte 13 -chapti 13 -freemen 13 -infrasonic 13 -colourants 13 -gelana 13 -naishuller 13 -sensa 13 -spytma 13 -seventh-century 13 -anglo-german 13 -mohlis 13 -signalfan 13 -return-to-play 13 -arberg 13 -aerated 13 -haroldson 13 -saffiya 13 -pyrite 13 -all-cash 13 -arpan 13 -800lb 13 -mintz-plasse 13 -amphorae 13 -back-heeling 13 -20-person 13 -vohman 13 -wilbourn 13 -maccormac 13 -tayton 13 -krucker 13 -wengenn 13 -brinkworth 13 -ash-sham 13 -flame-retardant 13 -disempowerment 13 -amirli 13 -donaghue 13 -post-hurricane 13 -kazman 13 -@catrionadavies 13 -oof 13 -rifle-wielding 13 -923 13 -contactable 13 -wristify 13 -honsch 13 -15th-floor 13 -jawfish 13 -1692 13 -seca 13 -comps 13 -farragut 13 -clerkship 13 -dirtiness 13 -69.3 13 -69.7 13 -readitlater 13 -refinery29 13 -ripponden 13 -lekon 13 -arizonan 13 -nuo 13 -deiter 13 -sadegh 13 -tu-160 13 -decreeing 13 -whitsun 13 -arconada 13 -klutch 13 -90.2 13 -shaadi.com 13 -50555 13 -kellye 13 -sheridans 13 -1567 13 -hypoglycaemia 13 -birthplaces 13 -bradian 13 -post-punk 13 -abjectly 13 -menchaca 13 -chuma 13 -paper-based 13 -sdf 13 -croaked 13 -hudkins 13 -gym-goer 13 -tiffany-rose 13 -keila 13 -rff 13 -is-held 13 -kaibab 13 -plackowska 13 -raudhatul 13 -265million 13 -stratovolcano 13 -beiteinu 13 -rubinowitz 13 -vorayud 13 -darcys 13 -counter-suit 13 -balfe 13 -wcit 13 -baidoo 13 -oosten 13 -double-handed 13 -igls 13 -8.56 13 -mexico-u.s. 13 -untiring 13 -vedomosti 13 -then-democratic 13 -letup 13 -386,000 13 -abyssal 13 -sturgeons 13 -cuba-to-florida 13 -eaa 13 -milquet 13 -squirrelled 13 -70,000-a-week 13 -bumpiness 13 -kote 13 -birgit 13 -broek 13 -fbi-led 13 -borch 13 -weidner 13 -killock 13 -part-timer 13 -castrellon 13 -misunderstands 13 -whittaker-axon 13 -koffler 13 -19-17 13 -hajrizi 13 -revo 13 -413,000 13 -darshan-leitner 13 -1:49 13 -1:48 13 -anti-family 13 -kangbashi 13 -tou 13 -toh 13 -vetigel 13 -bsg 13 -monozygotic 13 -18-room 13 -22/1 13 -graphisoft 13 -ickenham 13 -rocasolano 13 -rebelato 13 -stossel 13 -309,000 13 -meritocratic 13 -doggedness 13 -chansa 13 -incantations 13 -walch 13 -earwig 13 -non-celebrities 13 -stefanel 13 -rothbauer 13 -cupar 13 -tings 13 -afmadow 13 -tourist-friendly 13 -blau 13 -blaj 13 -750ft 13 -2,977 13 -2,976 13 -b20 13 -resonator 13 -kalmring 13 -senkaku/diaoyu 13 -lindau 13 -carisbrooke 13 -despatching 13 -ef-3 13 -skyes 13 -throwdown 13 -gwtw 13 -exchanger 13 -cost-prohibitive 13 -free-runner 13 -carwood 13 -tudur 13 -firebrands 13 -borgnine 13 -varadero 13 -isspresso 13 -chickaway 13 -liszt 13 -11-stone 13 -utv 13 -culebra 13 -flower-shaped 13 -guerra-doce 13 -circulations 13 -cummin 13 -tokaji 13 -glycolic 13 -20mg 13 -muircroft 13 -wernham 13 -al-nouman 13 -hennie 13 -xinwen 13 -evnin 13 -identifed 13 -navarrete 13 -hoystead 13 -straight-leg 13 -1732 13 -brute-force 13 -224m 13 -setz 13 -sfpd 13 -mawar 13 -torres-puello 13 -two-family 13 -tappy 13 -ahmazing 13 -sheerwind 13 -kitamura 13 -longini 13 -tabachnick 13 -nbp 13 -autumnwatch 13 -tapioca 13 -cross-london 13 -dendy 13 -notifiable 13 -time-to-time 13 -achmat 13 -wisconsinites 13 -ranthambore 13 -deloreans 12 -fim 12 -superdad 12 -garamendi 12 -sleuk 12 -dash-mounted 12 -al-kharboush 12 -proselytising 12 -post-accident 12 -kulesza 12 -cbsla 12 -mesnick 12 -leard 12 -redfin 12 -devouassoux 12 -snowboarded 12 -pixilated 12 -kashe 12 -4-foot-9 12 -aliy 12 -cornrow 12 -amoako-ackah 12 -atiq 12 -urzua 12 -biondi 12 -co19 12 -hi-viz 12 -vodaphone 12 -nemani 12 -acknowledgements 12 -jarba 12 -fleishman 12 -german-trained 12 
-locally-made 12 -descriptors 12 -blacklists 12 -ski-jumper 12 -sandoz 12 -whatapp 12 -heraldo 12 -spain-based 12 -feeble-minded 12 -bergthold 12 -infowars 12 -htc-highroad 12 -gotovina 12 -evynn 12 -undergaro 12 -stenmark 12 -377,000 12 -sparsely-populated 12 -aimette 12 -36billion 12 -rhodes-butler 12 -monastiriotis 12 -vanconett 12 -rationalist 12 -mande 12 -mid-2020s 12 -17-under 12 -spawton 12 -digenova 12 -stir-fries 12 -spiderwebs 12 -44-tonne 12 -hoganson 12 -mckagan 12 -samjiyon 12 -embroideries 12 -long-track 12 -microsoft-owned 12 -hff 12 -missileer 12 -deliverer 12 -sharopetrosian 12 -galeano 12 -locher 12 -1/1 12 -740million 12 -buhera 12 -tigard 12 -russian-owned 12 -572,000 12 -exultant 12 -whitesmith 12 -ovitz 12 -bernasconi 12 -unwraps 12 -synergistic 12 -falaise 12 -volkow 12 -caller-times 12 -26-27 12 -.01 12 -u.n.-mandated 12 -braniff 12 -weakland 12 -satyajit 12 -boals 12 -yezidis 12 -backardjiev 12 -tepee 12 -1,864 12 -1,863 12 -self-adhesive 12 -helfer 12 -mouland 12 -tamazight 12 -dombrowski 12 -matchwinning 12 -424,000 12 -1,175 12 -1,177 12 -fergusons 12 -stiffs 12 -yasgur 12 -graasten 12 -riverbend 12 -parsippany 12 -non-runner 12 -axs 12 -vette 12 -simkin 12 -5:44 12 -5:47 12 -recalculate 12 -glowy 12 -zemack 12 -4:09 12 -qps 12 -utiashvili 12 -gardez 12 -1491 12 -wuyi 12 -lime-green 12 -harts 12 -reinvestigated 12 -trickey 12 -bootsy 12 -onslaughts 12 -plover 12 -basses 12 -poucher 12 -togas 12 -easy-to-read 12 -kantrowitz 12 -baby-gro 12 -weyers 12 -westdale 12 -tsranaev 12 -ellingham 12 -wilken 12 -dushanbe 12 -wanoa 12 -unsubtle 12 -hitori 12 -walkup 12 -carscadden 12 -siggi 12 -nowozeniuk 12 -markwalder 12 -m1911 12 -crabapple 12 -target.com 12 -tumi 12 -sabiiti 12 -wreathes 12 -third-straight 12 -shibani 12 -sudal 12 -rauball 12 -bertani 12 -bryant-heron 12 -compellingly 12 -1,235 12 -nhl.com 12 -brinsworth 12 -tunnah 12 -memedovic 12 -411,000 12 -valsamma 12 -22:45 12 -bolt-on 12 -longshaw 12 -luntz 12 -dorgu 12 -3:38 12 -42p 12 -lomeli 12 -polyphonic 12 -22-6 12 -druery 12 -broadgate 12 -mkii 12 -lafonse 12 -rata 12 -flavelle 12 -penpal 12 -fiad 12 -turtlenecks 12 -14.00 12 -spacemen 12 -campbell-harris 12 -tawkon 12 -francecca 12 -bracadale 12 -al-dawla 12 -come-hither 12 -swindells 12 -keary 12 -qkd 12 -hooey 12 -canoga 12 -cutsems 12 -emulation 12 -cleveland-area 12 -dontadrian 12 -cockcroft 12 -looksery 12 -uriarte 12 -disaster-management 12 -fugee 12 -kegler 12 -byung-eun 12 -capitols 12 -al-bisan 12 -inkhel 12 -manacles 12 -unfeasibly 12 -32-second 12 -offhanded 12 -fekter 12 -schlitzkus 12 -benno 12 -imperfecta 12 -scacchi 12 -teversal 12 -omaima 12 -wolk 12 -bahrain-based 12 -328i 12 -hoepfner 12 -frogmouth 12 -jetovator 12 -dingus 12 -theblaze.com 12 -f1s 12 -toy-related 12 -well-fitted 12 -paramjit 12 -shoyu 12 -mont-blanc 12 -mcginness 12 -staves 12 -jelacic 12 -imire 12 -1,980 12 -9,000-square-foot 12 -borsje 12 -gediminas 12 -mynach 12 -chinaaid 12 -rimbaud 12 -epicure 12 -zoomlion 12 -harasses 12 -keiler 12 -grandfather-of-seven 12 -beiges 12 -horse-loving 12 -sechrist 12 -sandborn 12 -buttrose 12 -weaponization 12 -volcanically 12 -silberberg 12 -gütsch 12 -red-necked 12 -weal 12 -highly-organised 12 -huns 12 -daccord 12 -wolf-like 12 -vautour 12 -winnenden 12 -nlp 12 -sawicz 12 -27-room 12 -chaotically 12 -over-budget 12 -vulcans 12 -sisnett 12 -allgaier 12 -blondin 12 -citarelli 12 -kieth 12 -nyirumbe 12 -maycomb 12 -speargun 12 -larowe 12 -al-hashid 12 -okpako 12 -reimagines 12 -neema 12 -oustretched 12 -seven-months-pregnant 12 -r.v. 
12 -kashim 12 -killion 12 -feroz 12 -levengood 12 -adjuvant 12 -92mph 12 -kennelly 12 -gabb 12 -,13 12 -antonacci 12 -nanson 12 -couplings 12 -fratangelo 12 -entingh 12 -30,000-square-foot 12 -nsidc 12 -hatzes 12 -rovos 12 -beeler 12 -motti 12 -8,399 12 -menarche 12 -chastagner 12 -spagnolo 12 -mother-of-13 12 -anyang 12 -deselect 12 -titfer 12 -tangaroa 12 -prier 12 -twede 12 -1,336 12 -1,335 12 -bimla 12 -heydar 12 -whaymand 12 -cubbage 12 -herter 12 -hooson 12 -cashin 12 -pisit 12 -gev 12 -125-year 12 -girt 12 -gird 12 -hoke 12 -schuetz 12 -olayemi 12 -assault-type 12 -near-unanimous 12 -b52 12 -zamosc 12 -four-feet 12 -bruhn 12 -lauchlan 12 -4.90 12 -iwueke 12 -7/11 12 -1,556 12 -ropas 12 -superfruit 12 -allegany 12 -x-prize 12 -highchair 12 -buthorn 12 -823,000 12 -non-plussed 12 -anti-law 12 -pimenova 12 -yo-yos 12 -libertas 12 -light-headedness 12 -re-editing 12 -community-wide 12 -abeyance 12 -ascari 12 -rossiyskaya 12 -valdez-simeon 12 -end-of 12 -dimaio 12 -ukuleles 12 -fundi 12 -deferments 12 -zazzara 12 -tandja 12 -nonthreatening 12 -fully-charged 12 -microstamping 12 -cerminara 12 -agonists 12 -kehnast 12 -arkadiy 12 -4:27 12 -re-done 12 -begleiter 12 -leah-beth 12 -privette 12 -ex-russian 12 -wood-harber 12 -6.22 12 -42,000-a-year 12 -1349 12 -liepman 12 -perfidious 12 -wtsp-tv 12 -wem 12 -zucchetto 12 -ivakova 12 -hastie 12 -news-gathering 12 -tusayan 12 -abdul-hakim 12 -schuerman 12 -vlachonis 12 -argus-is 12 -ragwort 12 -charge-coupled 12 -pnp 12 -re-modelled 12 -foad 12 -sotto 12 -ampollini 12 -monts 12 -c-119 12 -sedro-woolley 12 -arieff 12 -ambati 12 -lilford 12 -tilak 12 -kopy 12 -1181 12 -graham-trott 12 -guy-blache 12 -trivago.co.uk 12 -leopard-skin 12 -insularity 12 -tzortzis 12 -cretin 12 -1998/99 12 -orbitofrontal 12 -steffey 12 -disinvited 12 -catcall 12 -watauga 12 -sieving 12 -wide-man 12 -hignell 12 -dermo 12 -65.3 12 -1,218 12 -pisses 12 -canada-to-texas 12 -kaem 12 -ar12192 12 -workflow 12 -ontiveros 12 -louro 12 -edjabe 12 -kimmie 12 -boda 12 -bargain-priced 12 -gryffindor 12 -graystock 12 -malyan 12 -psaltis 12 -2019-20 12 -ellisville 12 -improvisations 12 -ink-stained 12 -sense8 12 -doodads 12 -verret 12 -grand-niece 12 -postcard-perfect 12 -l'homme 12 -dulling 12 -dirtbag 12 -golliwogs 12 -voll 12 -off-stump 12 -gaber 12 -potheads 12 -ieropoulos 12 -largeman-roth 12 -assail 12 -assis 12 -goodblanket 12 -600-page 12 -8888 12 -flavonoid 12 -sumptuously 12 -scelsi 12 -ul-naseer 12 -856 12 -20.0 12 -ultrathin 12 -terifay 12 -mcoca 12 -bamboo-like 12 -bitstrips 12 -slobs 12 -mudflows 12 -gunge 12 -re-impose 12 -1977-78 12 -festively 12 -alresford 12 -encouragements 12 -savlon 12 -50,000-a-plate 12 -gourmands 12 -hille 12 -lamarr 12 -bleiler 12 -ktvi-tv 12 -conneticut 12 -seecrypt 12 -post-gaddafi 12 -stovetop 12 -lourenco 12 -montlake 12 -10-count 12 -chetan 12 -odzhan 12 -974.8 12 -circuito 12 -salmon-coloured 12 -four-year-long 12 -gorgonio 12 -derksen 12 -gotthard 12 -tatta 12 -concertgoer 12 -emoting 12 -yancy 12 -young-at-heart 12 -iki 12 -metzitzah 12 -nitty 12 -coynes 12 -horbury 12 -kahlenberg 12 -monetising 12 -willowbrook 12 -tiddy 12 -1501 12 -hochheiser 12 -percheron 12 -rice-based 12 -diprosopus 12 -wymeswold 12 -siegels 12 -tlaxcala 12 -@cnntech 12 -plevneliev 12 -pectin 12 -high-readiness 12 -dimitriou 12 -pn 12 -sharp-force 12 -mafi 12 -northpark 12 -fire-related 12 -sweat-stained 12 -4,000-acre 12 -114.7 12 -alrifai 12 -haithem 12 -song-writing 12 -sevenraj 12 -wiht 12 -keeva 12 -oritz 12 -digiovanni 12 -allez 12 -freemans 12 -airbed 12 
-brokenness 12 -aai 12 -aaj 12 -pali 12 -australian-themed 12 -adeniran 12 -acclimating 12 -libtards 12 -kayobotsi 12 -betti 12 -zizzi 12 -metering 12 -jovie 12 -uca 12 -elmet 12 -freyberg 12 -moser-proll 12 -katehi 12 -astro-mapping 12 -dc-10s 12 -#elizabethlauten 12 -58,800 12 -600mph 12 -haemolacria 12 -double-storey 12 -hurler 12 -wawrzynski 12 -single-gender 12 -wilser 12 -schisms 12 -cardinalle 12 -boling 12 -football-playing 12 -mongodb 12 -fuyang 12 -r-ky. 12 -outer-space 12 -laceby 12 -infosys 12 -155-mile 12 -finnick 12 -bogies 12 -showmen 12 -townhomes 12 -lutton 12 -asst. 12 -lanting 12 -ntini 12 -ramanauskas 12 -wets 12 -darkaiser 12 -shifnal 12 -helmore 12 -lekic 12 -destructiveness 12 -lioy 12 -nonylphenol 12 -jdidi 12 -demella 12 -skorpion 12 -as-somali 12 -kepler-16b 12 -omarska 12 -gassama 12 -szymany 12 -bietch 12 -siezed 12 -reanimation 12 -medelci 12 -burra 12 -technorama 12 -generis 12 -58kg 12 -baptizing 12 -unmentionables 12 -tompkinsville 12 -league-n 12 -panter 12 -sornoza 12 -caitriona 12 -fa'afafine 12 -microcomputer 12 -storedot 12 -sagely 12 -gaytm 12 -warmington 12 -szubin 12 -seyam 12 -frago 12 -lofficier 12 -multipronged 12 -marranos 12 -pro-ana 12 -bacchanalian 12 -frisks 12 -lambrecht 12 -schwein 12 -6.02 12 -blagden 12 -karabanov 12 -yusif 12 -1323 12 -1325 12 -132m 12 -stone-lined 12 -eatin 12 -pixelstick 12 -novokuznetsk 12 -photogrammetry 12 -crosswind 12 -delahunt 12 -tuheitia 12 -cranke 12 -alberton 12 -localisation 12 -ul-fitr 12 -heidsieck 12 -8.31 12 -edlington 12 -liberum 12 -two-wheeler 12 -salkantay 12 -schoolbus 12 -gonads 12 -kaydon 12 -prospers 12 -teres 12 -anasagasti 12 -kaspa 12 -epicurious 12 -kataria 12 -navotas 12 -airbnb.com 12 -100-watt 12 -sag-aftra 12 -demodex 12 -boric 12 -voice-control 12 -bredasdorp 12 -lauriston 12 -makaburi 12 -misener 12 -sallal 12 -second-screen 12 -iddings 12 -porkers 12 -ahimbisibwe 12 -dafabet 12 -angry-looking 12 -brinklow 12 -sex-themed 12 -3.5-carat 12 -grade-i 12 -grade-a 12 -retrench 12 -plumpness 12 -whitish 12 -lovecchio 12 -brazil-based 12 -eaaf 12 -raht 12 -perrott 12 -sacre-coeur 12 -d'avignon 12 -thakoon 12 -kalypso 12 -wenping 12 -displeasing 12 -fire-retardant 12 -scruffs 12 -boog 12 -druridge 12 -traver 12 -1-800-423-tips 12 -curington 12 -sabinas 12 -sararogha 12 -laliberte 12 -farrior 12 -stretch-wool 12 -terawatts 12 -kalidou 12 -1,051 12 -1,054 12 -u.s.-aided 12 -capsular 12 -brisco 12 -vasseur 12 -dropcard 12 -sanibel 12 -colonials 12 -flixton 12 -dueled 12 -plain-packaging 12 -starkess 12 -butera 12 -erlinda 12 -stationhouse 12 -off-chance 12 -unwinds 12 -poofy 12 -topi 12 -300-a-night 12 -smoldered 12 -corré 12 -shihri 12 -stoffel 12 -gada 12 -helles 12 -dnieper 12 -micoach 12 -satao 12 -gilmar 12 -extrication 12 -scmp 12 -champenois 12 -lightbown 12 -bafil 12 -rufo 12 -yuschenko 12 -epilepticus 12 -money-back 12 -mullins-trained 12 -rishawi 12 -oast 12 -roover 12 -winco 12 -cherabin 12 -disconsolately 12 -hennon 12 -bulkheads 12 -nothe 12 -out-of-the-blue 12 -kassidy 12 -davino 12 -grimthorpe 12 -folkston 12 -jaeden 12 -larbert 12 -aulton 12 -satiate 12 -ogboye 12 -myers-santana 12 -baroz 12 -moceanu 12 -datsyuk 12 -algerian-born 12 -drumpants 12 -w.s. 
12 -oaxacan 12 -aleida 12 -vigano 12 -10.26 12 -balder 12 -hoose 12 -chastleton 12 -nonjury 12 -baseley 12 -irianto 12 -abaad 12 -buckfast 12 -towey 12 -tatkowski 12 -sholeh 12 -lumbers 12 -balaji 12 -zuch 12 -rewound 12 -rocchelli 12 -ishag 12 -rheasilvia 12 -it-girl 12 -pravin 12 -wiredu 12 -#ebola 12 -musicologist 12 -ganzi 12 -wissman 12 -mccullar 12 -friern 12 -kroc 12 -axolotl 12 -jumbaz 12 -vegh 12 -siegrist 12 -awosogba 12 -hard-living 12 -militant-held 12 -piguet 12 -78mph 12 -clas 12 -akhmed 12 -k-ballet 12 -choeung 12 -bone-crunching 12 -communiqué 12 -govier 12 -denesevich 12 -vlogs 12 -i.t. 12 -i-85 12 -placekicker 12 -montford 12 -ahearne-grant 12 -jesy 12 -paytas 12 -game-like 12 -666,000 12 -true-blue 12 -ultra-slim 12 -encodes 12 -puc 12 -tameem 12 -lie-ins 12 -collegiality 12 -shruti 12 -self-aggrandising 12 -thibes 12 -anirban 12 -auty 12 -vuhlehirsk 12 -downlink 12 -wuerl 12 -capriglione 12 -rothrauff 12 -smartgrids 12 -tantallon 12 -manyara 12 -hairnet 12 -villere 12 -kinsale 12 -hallberg 12 -iztuzu 12 -anodes 12 -convenience-store 12 -backchat 12 -akihiro 12 -guajira 12 -immel 12 -160kg 12 -strozier 12 -a50 12 -a57 12 -piacitelli 12 -kathe 12 -aðalsteinsson 12 -jenison 12 -british-record 12 -floren 12 -rockit 12 -whotv.com 12 -smartening 12 -double-faced 12 -poldhu 12 -al-nuaymi 12 -burps 12 -wiart 12 -djalal 12 -obrenovac 12 -gotay 12 -co-developed 12 -aloush 12 -mahle 12 -1,588 12 -caprock 12 -ndb 12 -fetter 12 -shuhina 12 -most-discussed 12 -liebig 12 -courroye 12 -kisner 12 -patrizio 12 -garizabalo 12 -doffed 12 -elysian 12 -6.60 12 -luana 12 -miliote 12 -health-and-safety 12 -jel 12 -3000ft 12 -rifle-toting 12 -dangjin 12 -kominas 12 -unpremeditated 12 -lucic 12 -team-based 12 -irishwoman 12 -hershfield 12 -zapalski 12 -squillaci 12 -miel 12 -mien 12 -jumma 12 -pitte 12 -nombre 12 -77per 12 -fattori 12 -6:06 12 -dawla 12 -iwasaki 12 -yemini 12 -barbus 12 -bluffed 12 -repeaters 12 -mysandyhookfamily.org 12 -ameer 12 -hymers 12 -talksportdrive 12 -dubberley 12 -sadlers 12 -lexie-mai 12 -penciling 12 -maciver 12 -thereon 12 -7-inches 12 -sotakoun 12 -maxfield 12 -socialisation 12 -1,259 12 -1,257 12 -lisa-ann 12 -shati 12 -gietzen 12 -twe 12 -12-seater 12 -much-feared 12 -kashk 12 -triangulum 12 -lokesh 12 -stenciling 12 -sahoury 12 -79million 12 -gilland 12 -palomas 12 -rann 12 -rokos 12 -polemics 12 -becalmed 12 -idolizes 12 -food-processing 12 -nabari 12 -sidorov 12 -overmedicated 12 -stormer 12 -subarachnoid 12 -1026 12 -kakenya 12 -rko 12 -usd$ 12 -zaldua 12 -thys 12 -typaldos 12 -comradery 12 -dalbandin 12 -othniel 12 -longest-married 12 -bradshaw-bean 12 -miena 12 -cppcc 12 -geopolitically 12 -hafith 12 -81p 12 -satlok 12 -maoism 12 -rearick 12 -buckharee 12 -kiattipong 12 -fruitfulness 12 -russell-wade 12 -loreen 12 -ardwick 12 -avozilla 12 -tare 12 -81-year 12 -dhunjibhoy 12 -mcconlogue 12 -minor-latin 12 -tadao 12 -nomerz 12 -overstayer 12 -sensenberger 12 -piquing 12 -onyenaychi 12 -leanin.org 12 -dc-cam 12 -bachmaier 12 -quarm 12 -annotation 12 -guerrilla-style 12 -upcycled 12 -gilfach 12 -nsdap 12 -1205 12 -viney 12 -w50 12 -kianna 12 -riese 12 -draymond 12 -cockfosters 12 -rosendo 12 -rosende 12 -shaab 12 -143802 12 -al-hazmi 12 -srsich 12 -pmdd 12 -hematology 12 -home-improvement 12 -stykes 12 -pieterson 12 -appalatch 12 -hander 12 -star-lord 12 -slurped 12 -planet-forming 12 -fino 12 -60651 12 -shamsul 12 -uintah 12 -against-the-odds 12 -sebaceous 12 -sharifa 12 -gender-bending 12 -showscan 12 -55per 12 -carmella 12 -voltarol 12 -esight 12 
-hit-or-miss 12 -haziq 12 -kosimov 12 -pig-shaped 12 -20.12 12 -overcapacity 12 -nerang 12 -wiggans 12 -f-22a 12 -zivot 12 -oocysts 12 -superdrug.com 12 -tornado-like 12 -estuarine 12 -tiddly 12 -101.5 12 -rutty 12 -red-orange 12 -s-1 12 -broeck 12 -skjellerup 12 -dengate 12 -eco-fashion 12 -kaddish 12 -yade 12 -thomasville 12 -diboll 12 -latraverse 12 -al-barassi 12 -lashaun 12 -principalist 12 -tams 12 -cyclades 12 -mythos 12 -69th-minute 12 -self-parking 12 -divino 12 -bruffin 12 -gredos 12 -re-inspection 12 -malacanang 12 -nanuqsaurus 12 -miles-wildin 12 -hannaford 12 -re-shaped 12 -morewood 12 -dark-blue 12 -tomboyish 12 -arntzen 12 -politico.com 12 -chukwueke 12 -marshall-plewes 12 -weatherboard 12 -gelson 12 -za'dariyah 12 -mogan 12 -lookyanov 12 -pliant 12 -lamalou-les-bains 12 -pottengal 12 -samoylicz 12 -rudnev 12 -@mailonline 12 -multi-directional 12 -25i-nbome 12 -4,620 12 -trust-building 12 -plowshares 12 -baraawe 12 -2008-2011 12 -ervan 12 -800k 12 -omanis 12 -post-watershed 12 -2,740 12 -pogel 12 -provisioned 12 -braymiller 12 -6-point 12 -1104 12 -profilers 12 -hydrologic 12 -drew-honey 12 -trolly 12 -800-page 12 -bankert 12 -muharram 12 -bairn 12 -gold-medalist 12 -summercourt 12 -berwind 12 -location-specific 12 -shesol 12 -2,040 12 -wbtw 12 -columned 12 -utsi 12 -sendall 12 -misinterpretations 12 -bohlayer 12 -mexican-themed 12 -16 1/2 12 -ranshi 12 -bertin 12 -horvat 12 -mwanawasa 12 -jean-françois 12 -owosso 12 -misbranded 12 -afterburner 12 -slavs 12 -bryansk 12 -buttonhole 12 -nderitu 12 -gsce 12 -gregorek 12 -fluting 12 -one-cap 12 -6.48 12 -skydome 12 -glamazon 12 -vermaat 12 -wherwell 12 -biobots 12 -wlex 12 -iceton 12 -al-moualem 12 -9.91 12 -jackiey 12 -birla 12 -siddhanta 12 -telesforo 12 -tentpole 12 -carnesi 12 -adjuster 12 -bravas 12 -read-through 12 -manis 12 -all-ages 12 -morgana 12 -vinader 12 -gehle 12 -cerebrolysin 12 -red-winged 12 -eka 12 -lakoda 12 -mixed-gender 12 -66p 12 -salines 12 -galvanises 12 -vaccari 12 -chest-beating 12 -turnkey 12 -yona 12 -meaux 12 -eighth-round 12 -saint-michel 12 -trebilco 12 -12-meter 12 -ex-inter 12 -socarides 12 -wyton 12 -shir 12 -blacc 12 -nyoman 12 -feyernoord 12 -hesford 12 -cotes 12 -nhs.uk 12 -greenedge 12 -mazzuca 12 -obama-backed 12 -chemello 12 -szalacinski 12 -nonfat 12 -duct-taping 12 -397,000 12 -nq 12 -overrules 12 -anna-maria 12 -tecpan 12 -1,411 12 -ployment 12 -5-day 12 -wildenberg 12 -hueneme 12 -wiseau 12 -grp 12 -hiba 12 -dowrick 12 -xrs 12 -dufton 12 -garyan 12 -amiga 12 -boudlal 12 -photo-bombing 12 -top-trending 12 -americus 12 -suat 12 -blomme 12 -grotz 12 -ureta 12 -rumination 12 -mullingar 12 -swivelling 12 -tuthill 12 -eidson 12 -playfighting 12 -woutersz 12 -olegario 12 -freeguard 12 -four-vehicle 12 -yuko 12 -whitnell 12 -thomass 12 -chapulines 12 -emollients 12 -sivanandan 12 -bajner 12 -red-tailed 12 -empathised 12 -patiño 12 -tindley 12 -anice 12 -bishopp 12 -faryal 12 -dadullah 12 -eight-weeks-old 12 -sugra 12 -bexi 12 -whigham 12 -handless 12 -vidya 12 -kesgrave 12 -vandy 12 -keefer 12 -128th 12 -bertolacci 12 -charlestain 12 -murchie 12 -bellan 12 -after-care 12 -cybertheft 12 -souderton 12 -il-62 12 -581g 12 -anni-frid 12 -guoan 12 -desbiez 12 -orthopedist 12 -marathi 12 -privatefly 12 -arzu 12 -13.0 12 -house-bound 12 -18f 12 -idzikowski 12 -helmn 12 -11,900 12 -treatement 12 -kazakhstani 12 -luxuria 12 -caninos 12 -dual-track 12 -adeshokan 12 -goodbrand 12 -7/8 12 -macgraw 12 -swanbourne 12 -edmundsbury 12 -freest 12 -paleyfest 12 -filali 12 -one-two-three 12 -saugstad 12 
-bodacious 12 -obeys 12 -comped 12 -wway 12 -collinsworth 12 -cabarrus 12 -14k 12 -aristegui 12 -bayode 12 -horseflies 12 -dymchurch 12 -80-pound 12 -8200 12 -note-perfect 12 -memarian 12 -trencheny 12 -mouthguard 12 -patchnride 12 -quake-damaged 12 -supressed 12 -juvederm 12 -ganieva 12 -foyles 12 -nyakane 12 -goudeau 12 -roll-on 12 -7.37 12 -7.38 12 -vesterbacka 12 -hellebore 12 -yellowism 12 -120-foot 12 -minehunter 12 -mamoun 12 -nordtveit 12 -photosharing 12 -davidsons 12 -jheri 12 -caglayan 12 -montreat 12 -0.73 12 -0.78 12 -filipowska 12 -larchmont 12 -redial 12 -wishmakers 12 -lilya 12 -cristóbal 12 -manuszewski 12 -pittenweem 12 -dragonair 12 -fereydoun 12 -biophysics 12 -nibs 12 -claros 12 -publicizes 12 -zborowski 12 -outpointing 12 -thine 12 -wenn 12 -wend 12 -four-leaf 12 -greenestone 12 -imir 12 -shigella 12 -rummler 12 -chatterji 12 -,200 12 -once-a-year 12 -abdulrahim 12 -sourav 12 -krzysztonek 12 -stocksbridge 12 -mikandi 12 -natta 12 -girgenti 12 -2,060 12 -buni 12 -ossel 12 -kaylan 12 -vinyasa 12 -panjwayi 12 -spiros 12 -illume 12 -orlean 12 -goldenvoice 12 -sirul 12 -sheryll 12 -oshman 12 -bavents 12 -shorter-term 12 -wysoczanska 12 -childen 12 -vouchercloud 12 -title-deciding 12 -hepa 12 -rescinds 12 -albaghdady 12 -grimstead 12 -pancuronium 12 -surfsand 12 -bakelite 12 -kremlin-friendly 12 -guardino 12 -moldovans 12 -bensalaman 12 -saunier 12 -33mph 12 -maslov 12 -vodacom 12 -foya 12 -rubric 12 -zinkevicius 12 -ofc 12 -functionalities 12 -waterstudio 12 -power-to-weight 12 -hootsuite 12 -weleetka 12 -nellist 12 -rattlers 12 -160g 12 -afropolitan 12 -rebollero 12 -stilo 12 -de-stressed 12 -nicoletta 12 -baumruk 12 -spiriting 12 -antigravity 12 -misalignment 12 -reprocess 12 -longhorne 12 -caenorhabditis 12 -boustead 12 -orderella 12 -cressman 12 -shepway 12 -dress-making 12 -strabismus 12 -fairfax-ipsos 12 -re-visited 12 -alamdar 12 -nonplayer 12 -manzke 12 -four-years 12 -meramec 12 -verapamil 12 -insley 12 -weitzberg 12 -care-home 12 -preferentially 12 -roguish 12 -pirozek 12 -mercey 12 -monerville 12 -micro-units 12 -out-fought 12 -roithmayr 12 -300-member 12 -davies-hughes 12 -meccas 12 -zwerg 12 -mancunians 12 -flavanols 12 -mspy 12 -manalapan 12 -bothy 12 -j-pop 12 -nenets 12 -vehicle-related 12 -besty 12 -brainbox 12 -yigit 12 -insets 12 -20,700 12 -sealander 12 -rebuffs 12 -devlaming 12 -64th-minute 12 -bareato 12 -westfeldt 12 -hoste 12 -monastir 12 -play-it-safe 12 -fontenot 12 -sanctified 12 -pansold 12 -capen 12 -malpensa 12 -warbelow 12 -gouvea 12 -moderniser 12 -higher-res 12 -on-road 12 -k-league 12 -shutt 12 -lipsy.co.uk 12 -underclothes 12 -sonangol 12 -tambunting 12 -fischhuber 12 -ligation 12 -normal-looking 12 -doudou 12 -modipa 12 -lali 12 -yeom 12 -murt 12 -307,000 12 -best-trained 12 -urquidez 12 -raksin 12 -pheobe 12 -nonsurgical 12 -jona 12 -franco-prussian 12 -anmarie 12 -wedmore 12 -zhiznevsky 12 -mcfayden 12 -sharpish 12 -ha'il 12 -brookhouse 12 -standbys 12 -haselhuhn 12 -gingery 12 -tionne 12 -jamarat 12 -cnes 12 -communist-ruled 12 -3.94 12 -earthquake-hit 12 -jingling 12 -scissons 12 -life-shattering 12 -photosensitive 12 -toy-like 12 -off-exhibit 12 -two-plus 12 -lustyik 12 -britland 12 -prevaricate 12 -peatlands 12 -transpiration 12 -coupledom 12 -rotatable 12 -hq124 12 -scotus 12 -ghash 12 -madan 12 -maday 12 -pursel 12 -gatilov 12 -cagaptay 12 -ladykillers 12 -fischler 12 -nayely 12 -bisaso 12 -nemelka 12 -brevick 12 -huart 12 -lurlene 12 -trouble-making 12 -then-13-year-old 12 -lopez-ruiz 12 -filion 12 -helland 12 -commandaria 12 
-sixties-style 12 -ansty 12 -co-operates 12 -metre-deep 12 -outsells 12 -93.3 12 -mistreats 12 -racial/ethnic 12 -cile 12 -iannetta 12 -horder 12 -iwaszkiewicz 12 -sea-view 12 -northwesterly 12 -biven 12 -1,754 12 -doto 12 -strømnes 12 -rahmon 12 -ulema-e-islam-fazal 12 -fluidly 12 -uks 12 -steriliser 12 -7.18 12 -dietzel 12 -hertzak 12 -krohn-dehli 12 -laser-focused 12 -nip/tuck 12 -genaro 12 -buran 12 -milan-san 12 -ruffer 12 -reiteration 12 -35w 12 -crofting 12 -chinhoyi 12 -marent 12 -ploughman 12 -tornoe 12 -,6 12 -deactivates 12 -jiracek 12 -ewww 12 -zawadi 12 -iff 12 -ifk 12 -bennelle 12 -kurlantzick 12 -miaowing 12 -whns 12 -suben 12 -nuthampstead 12 -rudel 12 -tsia 12 -sinensis 12 -thakeham 12 -aloes 12 -safir 12 -secher 12 -brimah 12 -shaffner 12 -4.38 12 -4.31 12 -4.34 12 -frankton 12 -for-sale 12 -re-watching 12 -blather 12 -athelney 12 -foliot 12 -782 12 -126mph 12 -6:34 12 -34-page 12 -greuel 12 -dudeism 12 -finessing 12 -barrhead 12 -odongo 12 -150mg 12 -biddolph 12 -dinner-party 12 -brite 12 -tingley 12 -meta-data 12 -moneygall 12 -guizers 12 -copper-alloy 12 -mbulu 12 -hafan 12 -bula 12 -tailfin 12 -taylormade 12 -johannsen 12 -matenopoulos 12 -talmud 12 -terrariums 12 -sollar 12 -hours-a-day 12 -kefferty 12 -lightroom 12 -postcard-sized 12 -zahavi 12 -el-mami 12 -1,384 12 -oystercatchers 12 -pulido 12 -glogau 12 -mirvac 12 -gulick 12 -veco 12 -macadamias 12 -ganglani 12 -maryjane 12 -masullo 12 -curren 12 -uther 12 -ex-prisoners 12 -lyoto 12 -foxall 12 -keays 12 -marunchak 12 -two-cylinder 12 -o'regan 12 -kavadi 12 -microsecond 12 -aleister 12 -ouderkirk 12 -quibell-smith 12 -hausswolff 12 -blechman 12 -nürburgring 12 -quelch 12 -re-floated 12 -giddiness 12 -trehin 12 -mustard-coloured 12 -odb 12 -gellhorn 12 -jwaili 12 -moadamiyeh 12 -asare 12 -+212 12 -boelter 12 -cadereyta 12 -natter 12 -murkiness 12 -frump 12 -skillman 12 -buckie 12 -jamielee 12 -lisowski 12 -wingador 12 -bruel 12 -magennis 12 -trease 12 -8:03 12 -beberg 12 -five-alarm 12 -surmises 12 -ulyanovsk 12 -balague 12 -tym 12 -antonucci 12 -there.caller 12 -unicyclist 12 -glamourised 12 -pichu 12 -1598 12 -unsee 12 -1,184 12 -babaji 12 -lindsay-hague 12 -kazako 12 -camerawork 12 -podcaster 12 -pakravan 12 -summerbee 12 -3420 12 -depauw 12 -dehumidifier 12 -cannell 12 -microsleep 12 -davinia 12 -lefevers 12 -iraq-style 12 -lost-and-found 12 -degenerates 12 -s.o.s. 12 -ghar 12 -sweeties 12 -tarty 12 -clarke-dilly 12 -adairsville 12 -nassim 12 -mazzoli 12 -pastureland 12 -100-years-old 12 -life-raft 12 -hema 12 -power-washed 12 -obodzinski 12 -culpin 12 -vuepod 12 -litten 12 -burgett 12 -solovetsky 12 -ex-cbs 12 -lulia 12 -angsty 12 -goldenrod 12 -technoshape 12 -oscar-worthy 12 -cuillin 12 -heavensbee 12 -ready-to-use 12 -sandbars 12 -taxicabs 12 -abargil 12 -ammonite 12 -ostick 12 -sainted 12 -oshlag 12 -kourage 12 -hawkins-gaar 12 -d-nev. 
12 -red-capped 12 -khashoggi 12 -hackings 12 -williams-mercedes 12 -lanh 12 -tkachuk 12 -croissant-doughnut 12 -kamboni 12 -shiftless 12 -684,000 12 -sangstha 12 -skoll 12 -hitschmann 12 -grava 12 -tartly 12 -napoles 12 -askins 12 -nkosiyapha 12 -yeaman 12 -neapolitans 12 -streiter 12 -willerslev 12 -lampel 12 -handelsblatt 12 -laschober 12 -quick-release 12 -rossville 12 -rowse 12 -astral 12 -in-air 12 -anticancer 12 -giovannitti 12 -lofaro 12 -yem 12 -insolent 12 -non-elite 12 -snidey 12 -bed-sharing 12 -antecedent 12 -bi-monthly 12 -tianmen 12 -idlibi 12 -0.007 12 -waubant 12 -australian-style 12 -mesas 12 -thrihnukagigur 12 -captcha 12 -rossella 12 -al-waha 12 -aionoaei 12 -real-term 12 -appleford 12 -antonio-fort 12 -lhs 12 -cheezburger 12 -bous 12 -girish 12 -novelas 12 -vreni 12 -otellini 12 -hamisi 12 -10.54 12 -80-metre 12 -christmas-time 12 -karawaiez 12 -three-party 12 -cacophonous 12 -schlosberg 12 -banier 12 -1741 12 -1,779 12 -beccles 12 -jonjoe 12 -gartnavel 12 -foulness 12 -43120 12 -carrabelle 12 -electromagnet 12 -wali-ur-rehman 12 -blackcurrants 12 -pastelok 12 -sibold 12 -dimock 12 -arsuaga 12 -#freeajstaff 12 -s-type 12 -javine 12 -riffraff 12 -hygienists 12 -ex-inmate 12 -skycity 12 -kolesnikova 12 -sowetan 12 -rashtrapati 12 -97.6 12 -edugyan 12 -geauxjudge 12 -32-month 12 -bench-clearing 12 -wardour 12 -4.54 12 -imei 12 -a-c 12 -talise 12 -adelia 12 -plesiosaurs 12 -brockworth 12 -torne 12 -6:16 12 -6:14 12 -pury 12 -hutia 12 -perrault 12 -okura 12 -multi-screen 12 -vukovich 12 -phys.org 12 -60-something 12 -euthanization 12 -fischer-beards 12 -brynmawr 12 -lh 12 -northshore 12 -abertillery 12 -neurotoxins 12 -anti-foreigner 12 -piantedosi 12 -penwith 12 -apple.com 12 -push-button 12 -hisbah 12 -7.7-magnitude 12 -aldine 12 -lotharios 12 -cat-fight 12 -andrejcak 12 -pyong-so 12 -bounce-back 12 -netley 12 -fender-bender 12 -smatterings 12 -pumilus 12 -majoli 12 -mcbain 12 -85-year 12 -arriba 12 -radies 12 -rmit 12 -pompeys 12 -zerbest 12 -tshiring 12 -futurologist 12 -waimanalo 12 -sagiev 12 -demetris 12 -leef 12 -mimo 12 -theirry 12 -shirky 12 -non-slip 12 -karrinyup 12 -asmatullah 12 -walker-peters 12 -god-awful 12 -rainsford 12 -959 12 -wife-beating 12 -95p 12 -63.9 12 -knotting 12 -realigning 12 -njoy 12 -aldana 12 -tvguide.com 12 -alabi 12 -conservative-only 12 -hord 12 -2-minute 12 -supercop 12 -zwakman 12 -decodes 12 -hedd-bowen 12 -seven-shot 12 -tusi 12 -diegel 12 -islamic-rooted 12 -ukanwoke 12 -fyles 12 -fetzer 12 -seatback 12 -overdid 12 -jonathas 12 -30-room 12 -bruisyard 12 -re-took 12 -goldwein 12 -ludlum 12 -kerpen 12 -roundheads 12 -beshenivsky 12 -parmigiano 12 -schönberger 12 -boudin 12 -snail-paced 12 -adrionna 12 -bohrer 12 -kampen 12 -whalan 12 -scorchingly 12 -overthink 12 -rebalanced 12 -lechter 12 -twerton 12 -riefenstahl 12 -kitano 12 -lysa 12 -shalhoub 12 -decklen 12 -rangemaster 12 -finelli 12 -livio 12 -raisina 12 -bakti 12 -windjana 12 -farnden 12 -apr. 12 -walvius 12 -boatmen 12 -nagey 12 -expectedly 12 -safari-style 12 -harangue 12 -latrice 12 -flavell 12 -ddb 12 -career-driven 12 -jhon 12 -firman 12 -cesna 12 -white-spunner 12 -prepper 12 -ex-ceo 12 -nerdist 12 -2,749 12 -rackemann 12 -to-ing 12 -longueville 12 -sado 12 -joji 12 -dumoulin 12 -hoegh-guldberg 12 -japan-u.s. 
12 -mendiola-soto 12 -1inch 12 -aventine 12 -eareckson 12 -shays 12 -bossman 12 -billion-euro 12 -omaree 12 -subsitute 12 -certificated 12 -sto 12 -matsuko 12 -200bn 12 -joiner-orman 12 -halicephalobus 12 -aquilina 12 -noche 12 -brascia 12 -162million 12 -willam 12 -laryngeal 12 -scourfield 12 -60/40 12 -kingsport 12 -garambois 12 -sednaoui 12 -ryvita 12 -tilford 12 -khatri 12 -waterholes 12 -galisteu 12 -setlist 12 -miski 12 -512,000 12 -warney 12 -biopolymer 12 -man-child 12 -glob 12 -22kg 12 -meat-heavy 12 -1497 12 -war-wracked 12 -zomg 12 -douchebag 12 -kander 12 -scampia 12 -auto-enrolled 12 -re-stock 12 -spectaculars 12 -1,792 12 -1,795 12 -karslake 12 -eccentrically 12 -vo5 12 -mind-bogglingly 12 -brawne 12 -correns 12 -fitzjohn 12 -skellow 12 -acor 12 -11,000-square-foot 12 -swept-back 12 -mintues 12 -live-saving 12 -photog 12 -lambley 12 -hanoman 12 -dervla 12 -three-city 12 -60-feet 12 -people-smugglers 12 -ponamarev 12 -borromeo 12 -budgen 12 -knaggs 12 -himebaugh 12 -amada 12 -unrehearsed 12 -heyhoe 12 -moroder 12 -blue-striped 12 -burulea 12 -banifatemi 12 -ibd 12 -azzata 12 -twiglets 12 -le'veon 12 -baqa 12 -siddeley 12 -non-work 12 -nizewitz 12 -drupsteen 12 -bilsborough 12 -schönwerth 12 -gimple 12 -kalee 12 -gallifuoco 12 -sixth-graders 12 -chakales 12 -toxicologists 12 -alkhouri 12 -4.76 12 -widely-known 12 -kiyanga 12 -9:55 12 -9:51 12 -mctiernan 12 -mazar-e-sharif 12 -chinery-hesse 12 -posawatz 12 -haselow 12 -lundvall 12 -41billion 12 -maoa 12 -2008-11 12 -itchen 12 -torero 12 -peer-review 12 -castlemartyr 12 -submarine-launched 12 -sutterfield 12 -a.s. 12 -chara 12 -wickenden 12 -grandfather-to-be 12 -alkira 12 -skift 12 -prattville 12 -faunce 12 -tomintoul 12 -merengues 12 -magola 12 -yesil 12 -trans-alaska 12 -15,300 12 -coquimbo 12 -kuk 12 -edge-to-edge 12 -bannino 12 -abdulbaki 12 -white-coloured 12 -elo 12 -news12 12 -karrayyu 12 -2-liter 12 -hants. 
12 -h-bomb 12 -immigrant-rights 12 -chicherit 12 -2004-07 12 -v2v 12 -m&a 12 -home-brewing 12 -threshing 12 -19,400 12 -nantz 12 -hevo 12 -bellin 12 -punchestown 12 -shpagina 12 -child-size 12 -losekoot 12 -whatmough 12 -then-26-year-old 12 -velleman 12 -c&s 12 -kuzj 12 -five-litre 12 -sakhnin 12 -snowmass 12 -carbonero 12 -reinsurance 12 -mezut 12 -imovie 12 -umhlanga 12 -arkley 12 -wymersch 12 -magness 12 -hydroxyl 12 -croquettes 12 -connaway 12 -deillon 12 -goeas 12 -highest-energy 12 -1552 12 -zephyhills 12 -freak-out 12 -livr 12 -charitywatch 12 -8:42 12 -centthe 12 -ashaka 12 -bridled 12 -cookstoves 12 -berahimi 12 -cg4 12 -cgm 12 -bek 12 -jalapeños 12 -serhan 12 -lyrebird 12 -do-overs 12 -gibraltarians 12 -tautolo 12 -defar 12 -huajiao 12 -barkes 12 -estephan 12 -dime-sized 12 -lorry-load 12 -martinet 12 -not-so-veiled 12 -pharmacologist 12 -cuddlers 12 -donator 12 -2,440 12 -oasis-stores 12 -bickleigh 12 -joint-most 12 -garmo 12 -sproston 12 -anyika 12 -mnemonic 12 -32km 12 -analgesia 12 -10,00 12 -spira 12 -7:04 12 -mapper 12 -mimmenger 12 -empirically 12 -haussman 12 -8.80 12 -grindhouse 12 -franciso 12 -bekir 12 -kennell 12 -l'abbaye 12 -undisputable 12 -melgar 12 -garageband 12 -camisa 12 -alabama-florida 12 -paulish 12 -exonerates 12 -deeply-held 12 -moche 12 -lovelier 12 -lovelies 12 -tahmoor 12 -72ft 12 -claredale 12 -silverburn 12 -killcare 12 -montanari 12 -amerijet 12 -thewlis 12 -p!nk 12 -uffington 12 -left-of-centre 12 -pulsates 12 -marsi 12 -ismet 12 -damen 12 -anzu 12 -garnishing 12 -armor-plated 12 -brimhall 12 -schroders 12 -heart-in-mouth 12 -exempt/commissioner 12 -sellstrom 12 -tugce 12 -brocklebank 12 -casitas 12 -26-hour 12 -pellè 12 -4/6 12 -1708 12 -woffinden 12 -ºc 12 -overlong 12 -sangatte-style 12 -anti-ahmadinejad 12 -filets 12 -bednarek 12 -tarmacked 12 -okalany 12 -kausar 12 -turn-over 12 -desk-bound 12 -rib-eye 12 -henrie 12 -elsner 12 -5.07 12 -5.06 12 -terre'blanche 12 -altenburg 12 -lorenzini 12 -anti-ukip 12 -chock-full 12 -run-outs 12 -border-crossers 12 -mangaratiba 12 -oramorph 12 -hunt-vasquez 12 -khawahir 12 -mutuality 12 -bankrolls 12 -balanta 12 -non-hazardous 12 -154million 12 -21-page 12 -sayne 12 -juelz 12 -golabek 12 -aronne 12 -48-run 12 -paratico 12 -congenitally 12 -migicovsky 12 -snowbird 12 -artiphon 12 -birtel 12 -proskins 12 -free-climb 12 -villarroel 12 -campanas 12 -yusupov 12 -kivell 12 -amaryllis 12 -lappin 12 -paarl 12 -mitzvahs 12 -betterly 12 -dellon 12 -bachner 12 -sangh 12 -press-gfk 12 -vitamin-rich 12 -monsey 12 -november-december 12 -j-cap 12 -boganyi 12 -pachencho 12 -build-out 12 -etro 12 -correll 12 -unsustainably 12 -page-turner 12 -chungs 12 -lend-lease 12 -clodagh 12 -istvantelek 12 -nonalcoholic 12 -clusaz 12 -kepari 12 -eurovegas 12 -wolff-parkinson-white 12 -nayna 12 -arla 12 -boortz 12 -cerda 12 -boqer-ore 12 -manchand 12 -mulya 12 -wcpo-tv 12 -hurd-wood 12 -jaroslawicz 12 -macdonald-walker 12 -ofir 12 -abdul-latif 12 -sharky 12 -100miles 12 -black-footed 12 -friuli 12 -wyken 12 -outernet 12 -avanti 12 -box-like 12 -hotel-room 12 -arez 12 -eleana 12 -halal-only 12 -floethe 12 -vinland 12 -gauna 12 -uncritically 12 -nar 12 -anti-flood 12 -fleser 12 -dnepropetrovsk 12 -fishin 12 -martie 12 -kaisers 12 -perdition 12 -hufton 12 -klumb 12 -el-kikhia 12 -gilst 12 -cedano 12 -malad 12 -beijing-backed 12 -daxing 12 -goodhead 12 -walled-in 12 -magruder 12 -14th-placed 12 -slickly-produced 12 -fromong 12 -lilacs 12 -alarious 12 -glugging 12 -teddie 12 -sochor 12 -crowders 12 -lifestyle-related 12 -ski-resort 12 
-mercuri 12 -ammaz 12 -outflanked 12 -tarom 12 -weatherzone 12 -geodesic 12 -sandri 12 -f7 12 -0.71 12 -1,214 12 -toothman 12 -purell 12 -mursal 12 -bazell 12 -asset-stripping 12 -opodo 12 -penumbral 12 -corvids 12 -1,347 12 -roseae 12 -talulla 12 -vietti 12 -end-permian 12 -masturbates 12 -handshaking 12 -chennouf 12 -shirt-pulling 12 -haikui 12 -alberge 12 -1684 12 -quickly-taken 12 -overindulgent 12 -dalgleish 12 -ladens 12 -battening 12 -knegt 12 -entangle 12 -resection 12 -marchella 12 -top-of-the 12 -baksht 12 -delehanty 12 -ovechkin 12 -initiators 12 -owling 12 -ulvestad 12 -fontein 12 -then-police 12 -cedes 12 -lycoming 12 -toddington 12 -74-acre 12 -cex 12 -warty 12 -feiwel 12 -yurick 12 -revoe 12 -s$ 12 -aharonovitch 12 -mini-state 12 -sakuda 12 -hennglise 12 -falciani 12 -cariad 12 -mischka 12 -faired 12 -adventurist 12 -nalyvaichenko 12 -solariums 12 -bhaktipada 12 -#nyc 12 -corporeal 12 -wambugu 12 -norrin 12 -baskey 12 -boonara 12 -al-ramouni 12 -kolon 12 -holbrooks 12 -awana 12 -black-box 12 -tasi'u 12 -hervàs 12 -wral.com 12 -futurologists 12 -5:13 12 -gomo 12 -crpf 12 -shammari 12 -nypl 12 -ozaukee 12 -supranational 12 -75ml 12 -clayton-le-moors 12 -kohlmann 12 -guiteau 12 -hlh 12 -przewalski 12 -robofish 12 -beaven-desjardins 12 -supercapacitor 12 -euroleague 12 -resetar 12 -harle 12 -aragoncillo 12 -autodrive 12 -post-super 12 -truvolo 12 -musth 12 -polidano 12 -hubcap 12 -uytvanck 12 -amaa 12 -chaplow 12 -eliason 12 -narathiwat 12 -foll 12 -kutztown 12 -2.04 12 -cyberdefense 12 -guiry 12 -70-years-old 12 -phonesat 12 -stop-loss 12 -mcfadzean 12 -dulcie 12 -continental-style 12 -roofline 12 -kagome 12 -korvin 12 -austyn 12 -p'trique 12 -anti-censorship 12 -co-equal 12 -narsingh 12 -saulire 12 -dunnavant 12 -fullam 12 -1986-87 12 -apter 12 -fenugreek 12 -yeosu 12 -congress-led 12 -pappalardo 12 -w.e.b. 
12 -hanno 12 -l'aouffir 12 -trefor 12 -engin 12 -ashulia 12 -edifying 12 -kopelman 12 -emollient 12 -norena 12 -kankakee 12 -underrepresentation 12 -daybrook 12 -curtseying 12 -lobért 12 -724,000 12 -sexed-up 12 -nati 12 -trademarking 12 -hand-beaded 12 -cartilaginous 12 -kiled 12 -all-premier 12 -t-ball 12 -news@dailymail.co.uk 12 -bodysculpt 12 -witter 12 -hazelbaker 12 -fux 12 -watroba 12 -#fake 12 -dmitrijeva 12 -updrafts 12 -baken 12 -downour 12 -randol 12 -lahoud 12 -aizoon 12 -marcescens 12 -mothersbaugh 12 -mjemer 12 -keppler 12 -oversensitive 12 -varelas 12 -necrophiliac 12 -double-dealing 12 -sanaz 12 -blood-filled 12 -hadley-piggin 12 -baroque-style 12 -mccoid 12 -bosanko 12 -clarivu 12 -eggeman 12 -us1 12 -mccarter 12 -brzozowski 12 -stolar 12 -shipyourenemiesglitter.com 12 -perturbations 12 -splotch 12 -arruda 12 -catherine-de-barnes 12 -secessionists 12 -softies 12 -hayer 12 -restates 12 -saurabh 12 -drawcard 12 -tonking 12 -broadwells 12 -olayinka 12 -bungles 12 -vanderhorst 12 -pigalle 12 -38-second 12 -sgr 12 -throbs 12 -ahliyah 12 -playbill 12 -body-slamming 12 -mayadeen 12 -kolodjay 12 -veruschka 12 -ine 12 -inf 12 -loud-mouthed 12 -handschu 12 -zelaznog 12 -hrp 12 -bar-ray 12 -1:39 12 -4/9 12 -mistrials 12 -saraqib 12 -9:11 12 -9:14 12 -ncb 12 -coonrod 12 -goodman-hill 12 -barbieris 12 -torrejon 12 -reindl 12 -high-poverty 12 -mlle 12 -community-led 12 -grear 12 -oceangoing 12 -lofton 12 -70c 12 -lemanis 12 -19-7 12 -worsnop 12 -usaceva 12 -dicle 12 -panhandles 12 -mcatear 12 -qaeda-related 12 -heligoland 12 -salvagers 12 -rescreening 12 -2,080 12 -arbon 12 -soini 12 -burmeister 12 -wolfenden 12 -drenches 12 -carinhall 12 -severson 12 -zuzanna 12 -oras 12 -egeland 12 -haru 12 -herlinda 12 -kruglov 12 -velo 12 -58p 12 -court-martialled 12 -dryden-chouen 12 -pià 12 -#olympics 12 -digitising 12 -taglines 12 -wosskow 12 -hand-deliver 12 -pilibhit 12 -pulpits 12 -quantro 12 -su-wei 12 -#mynypd 12 -buglass 12 -milazzo 12 -kotchey 12 -steamships 12 -pre-record 12 -bumstead 12 -vanover 12 -non-aggressive 12 -ajman 12 -chondrite 12 -55-64 12 -pearlstein 12 -1,365 12 -muzaffarnagar 12 -kambala 12 -umit 12 -olu 12 -gainline 12 -934 12 -93m 12 -korshunova 12 -emburey 12 -kztv 12 -140billion 12 -four-and-a-half-years 12 -lolland 12 -kurtic 12 -cuckold 12 -jackson-liday 12 -self-named 12 -snakebites 12 -100-a-month 12 -khalik 12 -khalif 12 -accessorises 12 -kromberg 12 -d-south 12 -ohana 12 -blushwood 12 -ashikalis 12 -christ-centered 12 -tebbe 12 -ldpr 12 -mti 12 -.19 12 -magimix 12 -double-yolk 12 -kiriakis 12 -noctilucent 12 -cc1 12 -llap 12 -kiddee 12 -yushin 12 -knowlden 12 -four-litre 12 -arp 12 -ex-sunderland 12 -semi-open 12 -1514 12 -chrisann 12 -15-round 12 -air-launched 12 -narrabeen 12 -60-odd 12 -al-sufi 12 -hashir 12 -desiccated 12 -murunga 12 -workweeks 12 -reichart 12 -manila-based 12 -marom 12 -garib 12 -stobbe 12 -panucci 12 -jocelyne 12 -roecker 12 -micro-pig 12 -re-form 12 -qna 12 -pleurisy 12 -duk-ha 12 -sacranie 12 -guller 12 -frejus 12 -niigaki 12 -7:41 12 -30-34 12 -badillo 12 -alia-grace 12 -20ft-long 12 -906 12 -aristarchus 12 -s.k. 12 -leva 12 -payo 12 -holyday 12 -birds-eye-view 12 -haemangiomas 12 -yongsan 12 -arhab 12 -magnanimity 12 -american-inspired 12 -markowski 12 -bloggs 12 -cantori 12 -u.s.c. 
12 -disgracing 12 -fudgie 12 -bronchiectasis 12 -rocque 12 -kowt 12 -ahmer 12 -real-deal 12 -majic 12 -three-hole 12 -sema 12 -jacquneaux 12 -abdulhamid 12 -sang-hak 12 -fossen 12 -yinan 12 -ex-sen 12 -emei 12 -harrisons 12 -self-seeking 12 -grannie 12 -sany 12 -unrewarding 12 -9.09 12 -sectu 12 -spurlin 12 -55-day 12 -luzhny 12 -phaeton 12 -miqdad 12 -1:58 12 -1:54 12 -1,225 12 -oprandi 12 -nerc 12 -staperfene 12 -short-circuits 12 -fawzy 12 -away-goals 12 -hertswood 12 -3:26 12 -kasai 12 -baity 12 -suppositions 12 -leftwing 12 -silver-colored 12 -modern-looking 12 -outrank 12 -hallisay 12 -phraseology 12 -fazan 12 -belgaum 12 -rimicaris 12 -anrig 12 -jayavarman 12 -f35 12 -krums 12 -felyk 12 -ges 12 -binbags 12 -montejo 12 -timmendequas 12 -bacau 12 -halie 12 -ex-marines 12 -hoehn 12 -swansborough 12 -synchronizes 12 -97million 12 -sierks 12 -20-7 12 -role-based 12 -coscia 12 -knuckledusters 12 -meshbesher 12 -epi-marks 12 -a537 12 -in-network 12 -inactivate 12 -gava 12 -madonsela 12 -kombis 12 -neringa 12 -susann 12 -burckhalter 12 -carpet-ready 12 -serg 12 -laming 12 -kloppers 12 -beirendonck 12 -co-ownership 12 -mid-flow 12 -muyi 12 -scozzafava 12 -gouin 12 -ladles 12 -cornwall-based 12 -lindahl 12 -myplate 12 -1299 12 -1297 12 -micucci 12 -highest-risk 12 -bankrate.com 12 -mini-figures 12 -masanori 12 -protaras 12 -siberian-born 12 -re-buried 12 -ariani 12 -opossums 12 -syeda 12 -kechiche 12 -luddington 12 -lte-advanced 12 -consorted 12 -prefered 12 -ski-lift 12 -ninety-seven 12 -shahr 12 -anner 12 -deporter-in-chief 12 -sakari 12 -schenke 12 -makey 12 -arning 12 -centerria 12 -stronger-than-expected 12 -nechirvan 12 -skyjacker 12 -holubova 12 -skanks 12 -rfff 12 -gwatney 12 -busybody 12 -meysey 12 -inayatullah 12 -busin 12 -gasovski 12 -self-testing 12 -libow 12 -great-tasting 12 -mini-skirted 12 -kakslauttanen 12 -twenty-two-year-old 12 -behardien 12 -oompah 12 -jowsey 12 -lazarin 12 -drug-affected 12 -ciega 12 -buttresses 12 -beeton 12 -anti-monopoly 12 -superfruits 12 -cleaned-up 12 -islamaphobic 12 -annihilating 12 -mimosas 12 -kamali 12 -kauhajoki 12 -27-second 12 -issaka 12 -multi-stage 12 -dunietz 12 -queensway 12 -kalugin 12 -volkskrant 12 -al-hashemi 12 -gidleigh 12 -vala 12 -bad-mouthing 12 -20km/h 12 -hardhorn 12 -arts-and-crafts 12 -gas-giant 12 -nonlinear 12 -79mins 12 -gurgles 12 -rioufol 12 -chitolie 12 -kyriakidis 12 -rebuttals 12 -incipient 12 -consumerlab.com 12 -committe 12 -rohe 12 -44billion 12 -airram 12 -yoshino 12 -yeang 12 -twice-widowed 12 -skippack 12 -livingsun 12 -volkswagens 12 -roane 12 -de-stressing 12 -dropoff 12 -oefner 12 -giss 12 -95-run 12 -unbefitting 12 -al-dana 12 -termes 12 -ellingworth 12 -italicized 12 -darker-skinned 12 -waitlist 12 -tooth-whitening 12 -a82 12 -96-80 12 -cedaw 12 -dinoire 12 -peregian 12 -makhmalbaf 12 -khil 12 -frechette 12 -dziekanski 12 -mvc 12 -61mins 12 -nally 12 -.35 12 -anes 12 -cobridge 12 -shamwari 12 -mummify 12 -obsioma 12 -barretto 12 -conked 12 -placentia 12 -cressoni 12 -cross-sectional 12 -entomophagy 12 -al-magariaf 12 -kwanliso 12 -1650s 12 -kaiju 12 -wattenberg 12 -cascio 12 -callixte 12 -lamonica 12 -ekes 12 -margeson 12 -langland 12 -kusturica 12 -house-trained 12 -allaire 12 -abdollahzadeh 12 -soutra 12 -akinkugbe 12 -giant-sized 12 -2,477 12 -5:59 12 -4:36 12 -dfree 12 -ruru 12 -shedid 12 -nykkole 12 -reroutes 12 -kuznecov 12 -fareedun 12 -holmsley 12 -milis 12 -milin 12 -lungren 12 -elayna 12 -paper-like 12 -kamra 12 -6.33 12 -6.36 12 -croly 12 -neutzling 12 -toose 12 -desalinated 12 -lusts 12 
-trashes 12 -leavis 12 -ali-ahmed 12 -shibboleth 12 -pok 12 -42km 12 -bima 12 -bopper 12 -unthinkably 12 -monuc 12 -part-exchange 12 -2,296 12 -boardinghouse 12 -krahenbuhl 12 -neagle 12 -apronectomy 12 -fx35 12 -two-nil 12 -herrmans 12 -bunged 12 -8a 12 -excipio 12 -dld 12 -british-pakistani 12 -todenhöfer 12 -nunhead 12 -streight 12 -recovery.gov 12 -brister 12 -siauliai 12 -panyangara 12 -generalisations 12 -halligen 12 -sieswerda 12 -bergantz 12 -achekzai 12 -comrades-in-arms 12 -l.j. 12 -mcleroy 12 -lindstrand 12 -ctg 12 -clapperboard 12 -zahlavova-strycova 12 -kanger 12 -patrouille 12 -ajibola 12 -bts 12 -armacost 12 -sinewy 12 -short-change 12 -psyllium 12 -half-breed 12 -nail-studded 12 -cosgriff 12 -zam 12 -chest-thumping 12 -prefects 12 -isnit 12 -ambrosetti 12 -journos 12 -sheil 12 -epstein-barr 12 -4-point 12 -asmar 12 -tourbillon 12 -swindles 12 -hellewell 12 -nfc-enabled 12 -parotid 12 -todays 12 -self-refraction 12 -twinset 12 -matlean 12 -idrissou 12 -self-written 12 -domperidone 12 -ekmeleddin 12 -en-masse 12 -chimerex 12 -loretto 12 -schwarzschild 12 -fourmile 12 -pulmo 12 -distillate 12 -istiklal 12 -flextime 12 -badjao 12 -gizzi 12 -anees 12 -cage-fighting 12 -oldest-ever 12 -bakan 12 -leconte 12 -benguet 12 -kosi 12 -2003-2005 12 -lft 12 -segreto 12 -hresha 12 -agoda.com 12 -ultra-fine 12 -banika 12 -shaharin 12 -1,300-year-old 12 -radoi 12 -cubano 12 -apperances 12 -fraraccio 12 -thrice-divorced 12 -bizarro 12 -re-house 12 -geriatrics 12 -76.8 12 -lach 12 -tsukii 12 -cusnir 12 -undulations 12 -wenfang 12 -metastases 12 -kinzua 12 -multisensory 12 -hadow 12 -shulton 12 -thiele 12 -sambora 12 -quotidiano 12 -penélope 12 -poma 12 -ringhardt 12 -invalidates 12 -docwra 12 -shireen 12 -watamu 12 -kateryna 12 -badiali 12 -siaya 12 -pescod 12 -milband 12 -kaifu 12 -gallastegui 12 -down-on-its-luck 12 -quagliana 12 -nitroglycerin 12 -reames 12 -coffer 12 -slee 12 -worst-off 12 -moldings 12 -gum-chewing 12 -salka 12 -chariklo 12 -laffon 12 -shanyna 12 -diabolically 12 -brugos 12 -heidar 12 -martinkeown5 12 -sredoje 12 -us-cert 12 -lenexa 12 -mccomiskey 12 -temporao 12 -mistiming 12 -carlinhos 12 -dallas-bound 12 -esteve 12 -chaebol 12 -ouyang 12 -wastebook 12 -n-hexane 12 -lihue 12 -dimasi 12 -aid-in-dying 12 -bobb 12 -10-goal 12 -116.9 12 -tantaros 12 -kuusamo 12 -stjarnan 12 -magliari 12 -marcoux 12 -non-linear 12 -sze-tsung 12 -brewmaster 12 -jaray 12 -ingles 12 -kurgan 12 -philadelphia-born 12 -ex-rugby 12 -sotoul 12 -brecht 12 -torricelli 12 -skipcar 12 -staffords 12 -colloquialism 12 -14mm 12 -katsouranis 12 -1,324 12 -cainen 12 -ummm 12 -meddings 12 -poinsettia 12 -harlin 12 -500bn 12 -houraney 12 -leduff 12 -naquin 12 -ves 12 -veh 12 -magali 12 -spreadbury 12 -nicollin 12 -duflo 12 -58mph 12 -puffball 12 -picu 12 -milutinovic 12 -d'emic 12 -propoganda 12 -rojer 12 -jeffro 12 -linq 12 -impd 12 -reconditioning 12 -usaaf 12 -hudaly 12 -wistv 12 -ng/ml 12 -ortigoza 12 -mhc 12 -y-40 12 -ninian 12 -angi 12 -iyengar 12 -fungicides 12 -carib 12 -top-sellers 12 -al-naas 12 -chabal 12 -hsinchu 12 -arabians 12 -flein 12 -71425 12 -ayanda 12 -lievremont 12 -durso 12 -1,141 12 -1,148 12 -kronenberger 12 -putonghua 12 -roversi 12 -tita 12 -ghanbari 12 -gov.-elect 12 -lalala 12 -home-ownership 12 -p.s 12 -nonviolently 12 -130-pound 12 -mini-dresses 12 -high-adrenaline 12 -scrat 12 -2008/9 12 -hippa 12 -camouflages 12 -cybersquatters 12 -rigamonti 12 -dong-gook 12 -supergiant 12 -odd-numbered 12 -boxset 12 -gullah 12 -double-crossed 12 -heyn 12 -30/30 12 -3-and-a-half 12 -bershadker 12 
-longest-ever 12 -espina 12 -tianhe-2 12 -kuranda 12 -aircrewman 12 -2047 12 -stadiem 12 -@ellenpage 12 -free-up 12 -wide-awake 12 -bellando 12 -abdellatif 12 -pom-pom 12 -29st 12 -1988-89 12 -el-beltagy 12 -gehad 12 -49600 12 -dimino 12 -traumatise 12 -letcombe 12 -integer 12 -fukui 12 -47-page 12 -hiser 12 -kosa 12 -masika 12 -28,200 12 -100kph 12 -sasol 12 -freshly-baked 12 -klimenko 12 -koncz 12 -sobotka 12 -hamam 12 -blankley 12 -bossons 12 -masso 12 -varvatos 12 -ascribes 12 -contentiously 12 -surfline 12 -1,267 12 -evetts 12 -tail-wagging 12 -50,000-strong 12 -schmittmann 12 -foleshill 12 -hopkinton 12 -bvi 12 -trh 12 -earth-water 12 -vastra 12 -alysson 12 -washingon 12 -fabella 12 -galtieri 12 -knezovich 12 -talismans 12 -kaysing 12 -syrah 12 -super-heavy 12 -cordone 12 -bixente 12 -pirate-themed 12 -yiu 12 -uppies 12 -stutterer 12 -25,000-square-foot 12 -laatste 12 -daresbury 12 -aeropostale 12 -quedgeley 12 -head-scratcher 12 -cadicamo 12 -saniya 12 -qfa 12 -langton-gilks 12 -stargardt 12 -mileski 12 -hogben 12 -symptomless 12 -gt40 12 -reassertion 12 -sigint 12 -chub 12 -50-cent 12 -poco 12 -fotouhi 12 -gawthorpe 12 -ex-ira 12 -wissahickon 12 -kupa 12 -84f 12 -adamantium 12 -mokwena 12 -sulman 12 -15-foot-long 12 -schamel 12 -a449 12 -storybooks 12 -prognosticator 12 -nole 12 -ampa 12 -freney 12 -sivero 12 -one-year-olds 12 -feher 12 -rosleigh 12 -california-mexico 12 -icke 12 -grete 12 -storch 12 -slackness 12 -homebuilders 12 -uys 12 -sellable 12 -generations-old 12 -runton 12 -tortious 12 -durian 12 -shorebirds 12 -proner 12 -pro-marriage 12 -inkheart 12 -post-taliban 12 -bringhurst 12 -glashütte 12 -lemmy 12 -nightclubber 12 -pre-sold 12 -aloofness 12 -2-10 12 -boshe 12 -angelil 12 -lead-out 12 -deeded 12 -parilla 12 -annat 12 -gibbering 12 -lampre 12 -ss7 12 -pro-hillary 12 -espoo 12 -elemen 12 -mahsud 12 -carb-heavy 12 -anti-female 12 -80.4 12 -carrero 12 -rameses 12 -russell-andrews 12 -lepper 12 -w.h. 
12 -boatright 12 -mechals 12 -kanhaiya 12 -132mph 12 -5-month 12 -end-game 12 -gabbie 12 -chancy 12 -aderin-pocock 12 -guantanamo-style 12 -fleful 12 -har-noy 12 -nii 12 -nin 12 -nid 12 -postnuptial 12 -goat-like 12 -hongxia 12 -irritatingly 12 -modern-style 12 -genel 12 -waddy 12 -kiunsi 12 -chranowski 12 -1million-a-year 12 -neurologically 12 -iorworth 12 -coruña 12 -barracking 12 -modeen 12 -bumbo 12 -85.9 12 -malodorous 12 -hodor 12 -heroin-addicted 12 -pities 12 -scrapings 12 -maceo 12 -laurent-auger 12 -colourant 12 -dibben 12 -chima 12 -taqwacore 12 -darger 12 -high-tide 12 -advil 12 -audiology 12 -125lbs 12 -midis 12 -forgey 12 -sternbeck 12 -filmstar 12 -haqbeen 12 -kalie 12 -pro-death 12 -nicolaou 12 -bienvenido 12 -lampshade 12 -szor 12 -degroff 12 -curricular 12 -byam-cook 12 -jye 12 -gadhia 12 -mukerjee 12 -n38 12 -boice 12 -gerstein 12 -oyler 12 -18.75 12 -markevitch 12 -bonhomme 12 -@colbertreport 12 -atherstone-on-stour 12 -fromeside 12 -jape 12 -elim 12 -siann 12 -income-generating 12 -ambala 12 -ampk 12 -sandshrew 12 -densely-packed 12 -hona 12 -hah 12 -hax 12 -black-hooded 12 -tanasugarn 12 -perthnow 12 -a45 12 -1,522 12 -1,524 12 -18-month-long 12 -khun 12 -105-94 12 -haider-maurer 12 -post-college 12 -73.3 12 -3-2-1 12 -defreece 12 -faster-growing 12 -174mph 12 -mamnoon 12 -team-up 12 -buen 12 -desalinate 12 -ergon 12 -fusions 12 -magallanes 12 -af447 12 -anticholinergic 12 -bourneville 12 -dmytruk 12 -ashiq 12 -liquiglide 12 -ehredt 12 -yanggakdo 12 -apurimac 12 -siphons 12 -satiation 12 -dehel 12 -shirenewton 12 -hatice 12 -lantra 12 -nyfw 12 -laiaddee 12 -czywczynski 12 -wpbsa 12 -15-match 12 -10.0 12 -javadekar 12 -defago 12 -mcglashan 12 -w.i.p 12 -laverton 12 -elad 12 -hillerman 12 -shukri 12 -extra-virgin 12 -fabara 12 -kwaku 12 -mindfulness-based 12 -r-nebraska 12 -jack-of-all-trades 12 -tongchang-ri 12 -khanal 12 -lemonidis 12 -1313 12 -femskin 12 -alesi 12 -freeny 12 -lavant 12 -s.m. 12 -tediously 12 -princetonian 12 -.300 12 -8.24 12 -8.27 12 -amil 12 -zuckman 12 -papert 12 -shipwrights 12 -chucks 12 -generalists 12 -prissy 12 -nusi 12 -horsed 12 -klanja 12 -stagni 12 -moffy 12 -much-travelled 12 -sherawi 12 -mcburney 12 -ncc 12 -drobik 12 -kerron 12 -voi 12 -bensimhon 12 -bonaddio 12 -well-fortified 12 -boddie 12 -braman 12 -zuberi 12 -69.9 12 -pesic 12 -1:31 12 -anto 12 -lutful 12 -1,243 12 -police-community 12 -ncpa 12 -fratricidal 12 -perlotto 12 -hunter-choat 12 -babyliss 12 -tpg 12 -mawe 12 -wilking 12 -59722 12 -philippot 12 -111million 12 -kisa 12 -gelineau 12 -azfamily 12 -everall 12 -c'jai 12 -sihame 12 -decuffa 12 -anthemic 12 -rebeckah 12 -tahar 12 -highly-flammable 12 -asterion 12 -ammouche 12 -lhouraii 12 -heintzelman 12 -poesy 12 -trinkley 12 -hosono 12 -giacchino 12 -el-kurd 12 -ynez 12 -lomita 12 -infallibility 12 -sulo 12 -taofifenua 12 -garritano 12 -hypertext 12 -kerar 12 -trofeo 12 -razzle-dazzle 12 -ingrain 12 -dishonouring 12 -sturman 12 -crash-for-cash 12 -ceaton 12 -supermini 12 -opining 12 -hoodwinking 12 -nerdiness 12 -taxonomists 12 -michale 12 -phare 12 -30-7 12 -quake-hit 12 -eukaryotes 12 -bort 12 -igrow 12 -142nd 12 -koeverden 12 -choudry 12 -bourguignon 12 -napolis 12 -wippa 12 -afters 12 -speranza 12 -bretton-gordon 12 -alpro 12 -18-acre 12 -romagna 12 -capehart 12 -riverwood 12 -romeos 12 -sturr 12 -airframes 12 -super-short 12 -linseed 12 -kinova 12 -armourer 12 -ransoming 12 -hbot 12 -bachelot 12 -feagin 12 -stepanenko 12 -w.p. 
12 -number-plate 12 -zelikow 12 -10.33 12 -chaiken 12 -cozza 12 -over-stated 12 -macchiato 12 -thought-controlled 12 -wv 12 -braf 12 -ever-larger 12 -trover 12 -60,000-per-week 12 -military.com 12 -blancaflor 12 -overmatched 12 -i360 12 -cimino 12 -sook 12 -issus 12 -shirtsleeves 12 -gangland-style 12 -food-based 12 -avseenko 12 -1,354 12 -knork 12 -gamboru 12 -islamist-backed 12 -61.8 12 -apple-tipster 12 -re-assigned 12 -mapes-crupi 12 -fitow 12 -gordimer 12 -tadry 12 -cashel 12 -bettendorf 12 -commentariat 12 -sidetrack 12 -honchos 12 -olympic-level 12 -sidles 12 -nasonti 12 -companionable 12 -drawdowns 12 -pounders 12 -khazem 12 -kristan 12 -caba 12 -brightly-lit 12 -cerebrovascular 12 -r-class 12 -lary 12 -vercruysse 12 -bositis 12 -pocket-size 12 -sarazen 12 -gearon 12 -nadra 12 -parter 12 -smelter 12 -cassiopeia 12 -congruent 12 -delly 12 -reinauer 12 -moccia 12 -superposition 12 -kibale 12 -kaman 12 -phillipson 12 -mccrackens 12 -mittelstadt 12 -drought-resistant 12 -petrow 12 -caetano 12 -worldâ 12 -armstrong-thorpe 12 -4.9-litre 12 -peak-hour 12 -kilmore 12 -ubaida 12 -kabler 12 -vadym 12 -1-800-577-tips 12 -hassenger 12 -altadena 12 -doodlers 12 -bodinus 12 -shutouts 12 -katon 12 -neufeld 12 -toilet-trained 12 -misgiving 12 -ckd 12 -oyongo 12 -jogela 12 -kavli 12 -photofit 12 -lamest 12 -soft-shelled 12 -mdpv 12 -ichikawa 12 -planitia 12 -rayven 12 -infection-control 12 -obear 12 -jautz 12 -omalanga 12 -bauzon 12 -hacking-related 12 -deppi 12 -boertje-obed 12 -louisville-duke 12 -ashan 12 -bridi 12 -propulsive 12 -barkie 12 -gulino 12 -desalegn 12 -samsonov 12 -garbage-strewn 12 -wackrow 12 -divincenzo 12 -butler-creagh 12 -somma 12 -hecla 12 -tinling 12 -cervera 12 -whale-hunting 12 -entrepreneurialism 12 -poels 12 -kwaik 12 -rasheeda 12 -geys 12 -68mins 12 -hand-me-down 12 -self-perpetuating 12 -wind-ups 12 -154lbs 12 -ice-bound 12 -89.6 12 -89.9 12 -wishard 12 -adepitan 12 -legwear 12 -naam 12 -lenk 12 -dingler 12 -pointed-toe 12 -pomeranz 12 -salwar 12 -monaco-based 12 -allooh 12 -pid 12 -foodspotting 12 -bouin 12 -2,230 12 -serzh 12 -sahaab 12 -sunhats 12 -qubits 12 -eveson 12 -polar-orbiting 12 -saska 12 -hearths 12 -transcriptions 12 -squinty 12 -feminista 12 -unattributed 12 -soundview 12 -rearrangement 12 -bushati 12 -pira 12 -billboard.com 12 -courtships 12 -mundill 12 -aevin 12 -hetzel 12 -veart 12 -tobler 12 -al-qaisi 12 -low-water 12 -swdt 12 -hosseiniamrae 12 -2,899 12 -two-round 12 -smallprint 12 -zerby 12 -non-africans 12 -mc10 12 -45-yard 12 -92-year 12 -waff 12 -waychoff 12 -thair 12 -kasik 12 -hebras 12 -consuelo 12 -safeco 12 -76mph 12 -shillcott 12 -leitner 12 -maznah 12 -hauptmann 12 -mihok 12 -ora.tv 12 -27-24 12 -derides 12 -beelzebub 12 -pre-payment 12 -i-say 12 -athari 12 -hypersexuality 12 -arizona-born 12 -barabas 12 -non-sectarian 12 -auto-enrolment 12 -o'briens 12 -retro-inspired 12 -ukti 12 -imperilled 12 -melanotan 12 -2006-09 12 -ontuesday 12 -bendeler 12 -ringfenced 12 -body-hugging 12 -swaminarayan 12 -ensnaring 12 -glyzelle 12 -lambaste 12 -pleming 12 -plights 12 -winnow 12 -peace-time 12 -brandies 12 -ertegun 12 -gibreel 12 -tasr 12 -ursetta 12 -disempowering 12 -haulena 12 -maclellan 12 -out-of-season 12 -gullick 12 -ganchos 12 -triblive.com 12 -blalock 12 -odedra 12 -dch 12 -vabre-tizac 12 -countersniper 12 -stinebrickner-kauffman 12 -civita 12 -demouh 12 -richell 12 -e-day 12 -roslan 12 -lamp-posts 12 -1,193 12 -galliott 12 -tikhonova 12 -fibrillating 12 -leisel 12 -going-away 12 -heeks 12 -2103 12 -hvidbro-mitchell 12 -chateaubriand 12 
-bohjalian 12 -nature.com 12 -kreindler 12 -simpkin 12 -tcr 12 -tcl 12 -sapeur 12 -anti-bailout 12 -khalouf 12 -head-turner 12 -dogwood 12 -suppositories 12 -disdainfully 12 -297,000 12 -hernadez 12 -macnair 12 -cayson 12 -ipy 12 -il-76 12 -aberfan 12 -polziec 12 -cephalexin 12 -roadsweeper 12 -chyulu 12 -kickabouts 12 -buntingford 12 -razu 12 -tigerlily 12 -palhares 12 -nackaerts 12 -gaughran 12 -mullery 12 -cydonia 12 -huel 12 -reddest 12 -photobox 12 -hardrict 12 -gaura 12 -satha-sambo 12 -stobbs 12 -jamaleldine 12 -mannerism 12 -furling 12 -thei 12 -cobweb 12 -ormoc 12 -chiarello 12 -undescribed 12 -comb-over 12 -extragalactic 12 -episcopalians 12 -houshang 12 -hornbuckle 12 -527,000 12 -codylily 12 -fj 12 -neir 12 -specular 12 -179.99 12 -egil 12 -f-words 12 -wyee 12 -valen 12 -blisse 12 -wice 12 -bjorkman 12 -serratia 12 -halimi 12 -kulah 12 -vidal-hall 12 -bohm 12 -botia 12 -view-style 12 -sangre 12 -moger 12 -pointz 12 -millilitre 12 -1712 12 -harpersville 12 -1,706 12 -quiwa 12 -sportlobster 12 -bioengineer 12 -muhtar 12 -tank-top 12 -gutmann 12 -gumby 12 -emaciation 12 -hillbrow 12 -head-shot 12 -hermer 12 -pillar-box 12 -bartlet 12 -erad3 12 -7.26 12 -sealase 12 -speckle 12 -fraternizing 12 -isis-style 12 -stypulkowski 12 -1,275 12 -bananarama 12 -selaron 12 -36d 12 -wicketless 12 -non-biodegradable 12 -zataari 12 -cloudbreak 12 -inter-island 12 -krishnamaya 12 -venkman 12 -bassendean 12 -coppins 12 -scherrer 12 -sentri 12 -southeasterly 12 -clore 12 -noshat 12 -cowhig 12 -tanumihardja 12 -hmo 12 -super-massive 12 -uc-davis 12 -17,900 12 -nutmegging 12 -tottington 12 -arli 12 -plovdiv 12 -drug-treatment 12 -coby 12 -byway 12 -soccer-crazy 12 -kareena 12 -diyat 12 -mnf 12 -vantablack 12 -ludford 12 -fáil 12 -rabiu 12 -gyula 12 -three-strikes 12 -rugby-mad 12 -naafi 12 -schlitz 12 -bosnjak 12 -heloise 12 -v4 12 -edenfield 12 -haszeldine 12 -antrax 12 -factcheck.org 12 -veratti 12 -itzhak 12 -détente 12 -colloseum 12 -kaaya 12 -wheal 12 -beastiality 12 -rothert 12 -luangrath 12 -114,000-ton 12 -dudson 12 -elwin 12 -gold-embossed 12 -superclasico 12 -bhagavan 12 -ilker 12 -v53 12 -bottom-right 12 -scheider 12 -servati 12 -courteille 12 -saldamando 12 -tênis 12 -reuss 12 -negobot 12 -spinelle 12 -xlv 12 -liberian-american 12 -circovirus 12 -2.5-acre 12 -concept_one 12 -jenart 12 -gebauer 12 -pre-ipo 12 -stroup 12 -abstinent 12 -newlife 12 -gametes 12 -vicenzo 12 -snowkiting 12 -viswakanth 12 -manho 12 -coromandel 12 -myredbook.com 12 -metastasize 12 -kuhlman 12 -shootin 12 -daryoush 12 -leached 12 -medina-mora 12 -90-run 12 -ashgrove 12 -transamerica 12 -pierzynski 12 -spiral-bound 12 -44.2 12 -hilditch 12 -chanukah 12 -mccarrick 12 -cheesed 12 -palmero 12 -catia 12 -altmeyer 12 -17-second 12 -28-acre 12 -jalali 12 -r-1 12 -jie-ae 12 -sotiris 12 -contretemps 12 -bug-eyed 12 -post-surgical 12 -kushiro 12 -milligram 12 -shoria 12 -budberg 12 -watagan 12 -siân 12 -== 12 -elsohly 12 -steinmann 12 -49b 12 -braddon 12 -road-side 12 -grenadine 12 -ifed 12 -reprioritize 12 -ivon 12 -self-absorption 12 -c.h. 
12 -1,700-year-old 12 -aelita 12 -wroblewski 12 -boichat 12 -dalma 12 -taoufik 12 -beatnik 12 -sanctify 12 -72.6 12 -wattpad 12 -myrie 12 -hoola 12 -lateran 12 -blackfield 12 -16th-placed 12 -blood-drenched 12 -reconvening 12 -volesky 12 -hombre 12 -southmoore 12 -nicaraguans 12 -untracked 12 -4,850 12 -d-arkansas 12 -british-australian 12 -dexmo 12 -jerrell 12 -zoola 12 -24-strong 12 -abood 12 -donati 12 -worobey 12 -siswosuwarno 12 -bhadresh 12 -6.65 12 -colledge 12 -qayum 12 -houlton 12 -stancheva 12 -fyvie 12 -ollestad 12 -tommi 12 -dimitroff 12 -expansively 12 -health-wise 12 -dungavel 12 -wsls 12 -ampney 12 -manoel 12 -ceac 12 -newly-refurbished 12 -endocrinologists 12 -dume 12 -dumo 12 -bucyrus 12 -nazaire 12 -lacieann 12 -drummy 12 -disposables 12 -30-some 12 -ehlert 12 -exeter-based 12 -solaire 12 -moderne 12 -zagoridis 12 -tweeted-about 12 -assarid 12 -afweyne 12 -rampaul 12 -babygrows 12 -broomhilda 12 -wierzbicki 12 -shpresa 12 -slma 12 -hekmat 12 -sundberg 12 -coursed 12 -isma'il 12 -5.90 12 -fashion-savvy 12 -depredations 12 -imura 12 -unbolted 12 -fw14 12 -krawitt 12 -harverson 12 -drugmakers 12 -waayaha 12 -staszko 12 -bykov 12 -geralyn 12 -laniakea 12 -strole 12 -tilling 12 -ololo 12 -now-ex 12 -skive 12 -over-cautious 12 -4-foot-11 12 -lingeveldt 12 -labuschagne 12 -mcstein 12 -tapner 12 -dver 12 -groupme 12 -bissix 12 -nostalgically 12 -netweather 12 -lampre-merida 12 -kem 12 -gilbride 12 -galardi 12 -pavan 12 -murder/suicide 12 -majora 12 -alsobrooks 12 -embroiling 12 -838 12 -capably 12 -17-times 12 -novotny 12 -nazish 12 -lancos 12 -7.03 12 -monchaux 12 -ready-to-drink 12 -re-balance 12 -forlani 12 -bradner 12 -leeswood 12 -suma 12 -crestor 12 -treharris 12 -cereus 12 -haglin 12 -four-year-deal 12 -messe 12 -twiddy 12 -alpas 12 -rambla 12 -máncora 12 -13 1/2 12 -dehnart 12 -tassled 12 -tassles 12 -caber 12 -stubley 12 -project-based 12 -cartoon-style 12 -berejiklian 12 -fredriksson 12 -ragout 12 -patrica 12 -songdowon 12 -anschlag 12 -jasmid 12 -exigencies 12 -million-acre 12 -matchzone 12 -kurin 12 -lurelle 12 -hipness 12 -v.m. 12 -march-3b 12 -klapow 12 -scots-born 12 -mastitis 12 -co-authoring 12 -47th-minute 12 -4.07 12 -4.01 12 -12,250 12 -boreas 12 -werker 12 -onjefu 12 -espa 12 -shanta 12 -makhaya 12 -kashef 12 -scoccimarro 12 -altschuler 12 -zeoli 12 -cristerna 12 -6:49 12 -ylisela 12 -mouna 12 -schlechter 12 -vetere 12 -hilotherapy 12 -homschek 12 -nuts-and-bolts 12 -eisenstein 12 -74-year 12 -grout-smith 12 -chmura 12 -proegler 12 -backrest 12 -ngefa 12 -riesch 12 -gunwan 12 -closed-minded 12 -196ft 12 -26mins 12 -boorn 12 -400-plus 12 -shalala 12 -e5 12 -payette 12 -irradiation 12 -shearwater 12 -arunkalaivanan 12 -callipers 12 -entailing 12 -mannino 12 -cooray 12 -asiedu 12 -bertelsmann 12 -brandindex 12 -center-stage 12 -petrolprices.com 12 -nowai 12 -doblin 12 -e-learning 12 -26p 12 -nsaid 12 -scrivener 12 -panthera 12 -iroko 12 -stick-up 12 -surveil 12 -rabasco 12 -marquesas 12 -millville 12 -hassnain 12 -black-listed 12 -uhlich 12 -exploitive 12 -toasties 12 -pro-gaza 12 -jarun 12 -pulborough 12 -peh 12 -manne 12 -machos 12 -eyebombing 12 -skymiles 12 -blackband 12 -mp4-30 12 -tree-like 12 -fermions 12 -1618 12 -baka 12 -23-point 12 -forty-something 12 -lightwater 12 -jordanne 12 -lamoureaux 12 -kuchuk 12 -brendel 12 -chavarria-medina 12 -etoile 12 -Ž 12 -zantow 12 -keysweeper 12 -wineglass 12 -8:16 12 -8:18 12 -patient-centred 12 -governer 12 -ha'apai 12 -kasyanov 12 -ober 12 -anju 12 -l.t. 
12 -2012-2012 12 -a330/a340 12 -fukasawa 12 -pierre-auguste 12 -vicino 12 -bna 12 -athough 12 -caldmore 12 -trackable 12 -cloverfield 12 -519,000 12 -sib 12 -nunziata 12 -amphinex 12 -guinier 12 -arteriovenous 12 -botin 12 -kepler-421b 12 -acid-tongued 12 -jabbering 12 -poohsticks 12 -gilesnan 12 -dentsu 12 -ludin 12 -ramezani 12 -baumbach 12 -honey-trap 12 -possesion 12 -ninkovic 12 -klasnic 12 -wip 12 -uc-berkeley 12 -gesture-controlled 12 -acceding 12 -relegates 12 -reheating 12 -cattiness 12 -merseyrail 12 -sheth 12 -lathuile 12 -tareck 12 -steffe 12 -karmakar 12 -39-second 12 -garbus 12 -destigmatize 12 -317,000 12 -marsano 12 -third-storey 12 -58,500 12 -evers-williams 12 -nuh 12 -ehrmann 12 -gulosh 12 -lalinksy 12 -sillett 12 -bhopari 12 -jeppe 12 -loofahs 12 -hard-and-fast 12 -joudia 12 -lisanti 12 -pick-axe 12 -fallas 12 -soong 12 -mcgough 12 -rereading 12 -salido 12 -maglio 12 -twinwood 12 -flanges 12 -sponging 12 -f**ked 12 -unicom 12 -diaphragms 12 -83.3 12 -melodramas 12 -2,316 12 -stovl 12 -client-9 12 -wilts. 12 -stanmeyer 12 -patau 12 -patan 12 -inheritors 12 -charamba 12 -huff-ricci 12 -silvan 12 -kidwelly 12 -filarial 12 -woollies 12 -israeli-based 12 -ilaria 12 -multi-layer 12 -zavjalovs 12 -codfish 12 -tip-toed 12 -teodor 12 -baldly 12 -front-bench 12 -ragaa 12 -halvorssen 12 -ryding 12 -plymstock 12 -muizelaar 12 -147ft 12 -sambal 12 -lifelessly 12 -schutter 12 -werther 12 -u.s.-supported 12 -ponton 12 -konjac 12 -rosman 12 -petare 12 -thutmose 12 -best-placed 12 -wonderlands 12 -nozomi 12 -citizenships 12 -trumpers 12 -hrynkiw 12 -fibre-glass 12 -bringrr 12 -legitimizing 12 -ioffe 12 -nazarene 12 -rayfield 12 -tabi 12 -anisha 12 -mood-altering 12 -nataasha 12 -savery 12 -ravil 12 -dog-shaped 12 -viloria 12 -35-pound 12 -muscarello 12 -rozario 12 -1621 12 -anti-perspirant 12 -killingholme 12 -riads 12 -i-15 12 -acro 12 -mazdas 12 -nahida 12 -smiedala 12 -480million 12 -peduto 12 -130-year 12 -coneys 12 -theater-goers 12 -krewe 12 -cartabia 12 -canley 12 -ashley-rae 12 -crudest 12 -drug-laced 12 -mariachis 12 -hif 12 -barz 12 -hitoshi 12 -leruth 12 -masterworks 12 -ham-handed 12 -charette 12 -tancosova 12 -4.28 12 -exulted 12 -chatteris 12 -lastarza 12 -felfie 12 -huyghe 12 -cholo 12 -novint 12 -vipassana 12 -79m 12 -sarwari 12 -p.a. 
12 -kietzman 12 -adalberto 12 -dallyn 12 -weske 12 -synthetically 12 -acteal 12 -tayto 12 -ohlson 12 -zookeys 12 -dures 12 -super-duper 12 -40-ton 12 -signspotting 12 -fully-formed 12 -schacht 12 -nine-season 12 -bjoern 12 -straya 12 -spring-summer 12 -2063 12 -soccer-mad 12 -inr 12 -unmerited 12 -well-thumbed 12 -handwash 12 -myfoxny 12 -fusillade 12 -ten-years 12 -recirculated 12 -planum 12 -lavarra 12 -polarise 12 -khowleh 12 -refrigerating 12 -caerwent 12 -roomate 12 -leckey 12 -86.8 12 -stow-on-the-wold 12 -collectivism 12 -faizi 12 -auletta 12 -co-designed 12 -paleocene 12 -gillooly 12 -moyra 12 -taloga 12 -mehtab 12 -micro-homes 12 -amway 12 -weight-training 12 -comeau 12 -basiji 12 -kprc-tv 12 -dwinells 12 -shobukhova 12 -73-year 12 -belleza 12 -2,256 12 -accreta 12 -dextrin 12 -salhi 12 -montagna 12 -wiggs 12 -self-raising 12 -undocked 12 -skinks 12 -antilla 12 -firelight 12 -pten 12 -turc 12 -vansolkema 12 -masrakh 12 -bentleigh 12 -8:37 12 -nazaroff 12 -pre-registered 12 -sayidat 12 -catalhoyuk 12 -rochas 12 -g63 12 -sharmistha 12 -bessant 12 -penalty-takers 12 -pyland 12 -miksad 12 -aspiotis 12 -+34 12 -backflipping 12 -repairmen 12 -kamari 12 -edwards-gust 12 -positionally 12 -millerchip 12 -teleconferences 12 -gajic 12 -hinaut 12 -groombridge 12 -warbling 12 -biid 12 -airpooler 12 -wao 12 -manneh 12 -bhoja 12 -abbreviate 12 -laskar 12 -natan 12 -qx1 12 -chumney 12 -neighbourliness 12 -beardwell 12 -vapourising 12 -west-style 12 -gwr 12 -lomb 12 -rubaiyat 12 -schectman 12 -stallholder 12 -ashleymadison 12 -incongruity 12 -gurbanguly 12 -ibs-c 12 -ghee-lan 12 -minella 12 -ex-banker 12 -penninghame 12 -perinçek 12 -farlin 12 -maltings 12 -anti-age 12 -cizek 12 -79.2 12 -malandina 12 -t&c 12 -back-handed 12 -far-ranging 12 -corruption-related 12 -bellville 12 -largent 12 -jiggy 12 -anthoine 12 -tantalize 12 -curlew 12 -eylea 12 -cashed-up 12 -dols 12 -regularise 12 -deh 12 -modise 12 -pruniaux 12 -zywicki 12 -spataro 12 -25-35 12 -borei 12 -prostitution-related 12 -mourier 12 --24 12 -trofimova 12 -nusbaum 12 -ex-scotland 12 -krunic 12 -hertzog 12 -ski-ing 12 -alconbury 12 -hiv-negative 12 -iskander 12 -ayse 12 -hillenbrand 12 -beed 12 -picone 12 -unchristian 12 -manoah 12 -driers 12 -suddaby 12 -obvs 12 -abushagur 12 -cribbar 12 -3d-printer 12 -heart-throbs 12 -delmarva 12 -kaos 12 -shaff 12 -dalkey 12 -unicat 12 -tei 12 -tough-on-crime 12 -crans-sur-sierre 12 -waus 12 -besetting 12 -drop-outs 12 -filoviruses 12 -bow-hunting 12 -dunnan 12 -peniche 12 -glossiness 12 -garajonay 12 -han-sik 12 -lesmahagow 12 -under-twos 12 -15x 12 -stainer 12 -pruitt-igoe 12 -hexacopter 12 -mcminimee 12 -whitcombe 12 -bleiweiss 12 -shumilova 12 -doily 12 -laurentic 12 -ponzi-style 12 -earthquake-damaged 12 -12.38 12 -dulieu 12 -thompson-arce 12 -anarchism 12 -paddleboat 12 -scull 12 -vieirinha 12 -murenzi 12 -baskerville 12 -annigoni 12 -holier-than-thou 12 -military-first 12 -two-timing 12 -shouty 12 -less-educated 12 -boudot 12 -fuglsang 12 -extravehicular 12 -compostable 12 -super-fans 12 -paver 12 -esdm 12 -kawika 12 -zeinat 12 -blameworthy 12 -three-figure 12 -pentothal 12 -ignagni 12 -visanich 12 -103million 12 -#poldi 12 -preborn 12 -wisee 12 -us-china 12 -vicuña 12 -khalifah 12 -rodell 12 -rush-era 12 -encapsulation 12 -hayabusa-2 12 -ski-jump 12 -elmer-laird 12 -joland 12 -elafonissi 12 -cervixes 12 -conson 12 -troyes 12 -z-boys 12 -over-estimate 12 -abolishes 12 -khune 12 -mirundi 12 -garan 12 -cylons 12 -fedrigo 12 -moeser 12 -heartedly 12 -pluckley 12 -denee 12 -triangular-shaped 12 
-achilleas 12 -back-nine 12 -easons 12 -satuday 12 -wicketkeeping 12 -phobos-grunt 12 -nonbeliever 12 -county-owned 12 -breed-specific 12 -deodorising 12 -keds 12 -reggiana 12 -lerena 12 -9:46 12 -peerj 12 --292 12 -election-winning 12 -dabaiba 12 -categorisation 12 -widdick 12 -ashrafi 12 -credit-worthy 12 -web-hosting 12 -over-ran 12 -eppp 12 -deutscher 12 -knaus 12 -triumphalist 12 -nasutoceratops 12 -syn-ake 12 -rheu 12 -relationship-building 12 -teppanyaki 12 -freescale 12 -pageanting 12 -katzmarzyk 12 -ankang 12 -voxie 12 -30-21 12 -30-20 12 -keela 12 -slimfast 12 -trajkov 12 -fire-breather 12 -worldview-3 12 -katonah 12 -stranglers 12 -rhiannan 12 -vanderschoot 12 -240m 12 -narragansett 12 -szabolcs 12 -aboulafia 12 -soon-to-launch 12 -bulgarian-born 12 -unpromising 12 -silloth 12 -2,630 12 -copperhead 12 -16,200 12 -wilcocks 12 -jlt 12 -gurnard 12 -weenie 12 -two-yard 12 -domalewski 12 -trestles 12 -kimberlite 12 -ironworks 12 -fork-lift 12 -per1 12 -46in 12 -horsefly 12 -kamlari 12 -gussie 12 -sandside 12 -teruggi 12 -ketteringham 12 -stomachaches 12 -bazinga 12 -over-managed 12 -havin-2 12 -family-size 12 -lapak 12 -egress 12 -techradar 12 -kochar 12 -nira 12 -ljudski 12 -lanford 12 -pepperidge 12 -ex-communist 12 -four-by-four 12 -alosi 12 -cbsnews.com 12 -dervishes 12 -102.5 12 -oduwa 12 -bargen 12 -motten 12 -8:53 12 -carlucci 12 -irt 12 -oflag 12 -4.2.2 12 -olgin 12 -bhang 12 -drug-use 12 -lewry 12 -ufo-shaped 12 -databank 12 -virani 12 -kiya 12 -8-22 12 -warbeck 12 -arinze 12 -sohar 12 -gaviscon 12 -photo-finish 12 -ghostbuster 12 -aliani 12 -macrumours 12 -12.37 12 -fonner 12 -sunshade 12 -polygraphed 12 -humus 12 -high-price 12 -accompaniments 12 -exhalation 12 -lazaar 12 -perlow 12 -shurmer 12 -509,000 12 -deltopia 12 -bahuguna 12 -arsinoe 12 -anahi 12 -tawnee 12 -glucosamine 12 -mazel 12 -sinfin 12 -gud 12 -chie 12 -adamsons 12 -repayable 12 -lyte 12 -hard-of-hearing 12 -post-wimbledon 12 -gasparino 12 -karns 12 -madalla 12 -tiebele 12 -747,000 12 -bluesy 12 -peewee 12 -881 12 -christan 12 -firstenberg 12 -trashorras 12 -granddaughter-in-law 12 -coody 12 -maulings 12 -jetsetting 12 -naheed 12 -homered 12 -buring 12 -holboll 12 -monsigny 12 -mohamedraza 12 -cleere 12 -most-recognisable 12 -benefield 12 -dismantlement 12 -two-sentence 12 -miller-mckenna 12 -samadhi 12 -tryk 12 -seven-wicket 12 -cammarelle 12 -wetmore 12 --42 12 -kropas 12 -caerau 12 -bristly 12 -clifftops 12 -par-5 12 -gudang 12 -squeegee 12 -thyssen 12 -sequentially 12 -sheperd 12 -70bn 12 -foster-care 12 -guite 12 -santucci 12 -nmas 12 -sommermeyer 12 -rogoff 12 -ninth-minute 12 -betabeat 12 -sportv 12 -dogue 12 -jacare 12 -unplaced 12 -janagle 12 -sennen 12 -fire-ravaged 12 -linaksita 12 -lawyered 12 -pre-accident 12 -vehle 12 -weinand 12 -battery-free 12 -cnrs 12 -bandas 12 -grifter 12 -indolent 12 -headlocked 12 -xtensafix 12 -ociepka 12 -thundamentals 12 -broz 12 -mayank 12 -attitudinal 12 -llullaillaco 12 -multiplatform 12 -mispronouncing 12 -al-brahmi 12 -maue 12 -smartprice 12 -wakehurst 12 -wolfsthal 12 -wonâ 12 -low-down 12 -bio-diesel 12 -richmondshire 12 -thornburgh 12 -36in 12 -gwpf 12 -23-years 12 -banguera 12 -laziale 12 -mozaffar 12 -bearce 12 -toumaniantz 12 -rcpch 12 -stroger 12 -struth 12 -anti-conservative 12 -myeongdong 12 -ted2013 12 -gaebler 12 -lemelin 12 -hagel-smith 12 -adelies 12 -isaksen 12 -ashorooq 12 -dáil 12 -banford 12 -sortor 12 -dalal 12 -venkat 12 -monogramming 12 -hennes 12 -tamped 12 -637,000 12 -irwin-hill 12 -mindrdr 12 -caulder 12 -davidi 12 -81,381,673 12 -75.2 12 
-á 12 -dateable 12 -tunable 12 -ostrin 12 -patane 12 -quzhou 12 -hinke 12 -coalfield 12 -widyartha 12 -3310 12 -u15 12 -nouel 12 -fleksy 12 -meninist 12 -wason 12 -energy-hungry 12 -luzzi 12 -rosaleda 12 -sindelar 12 -kalema-zikusoka 12 -eadweard 12 -cusps 12 -sen.-elect 12 -prioress 12 -out-of-the-ordinary 12 -esmeraldas 12 -llwynywermod 12 -cat-eye 12 -moissard 12 -brbora 12 -keywood 12 -khrais 12 -aeropuerto 12 -khapalwak 12 -early-bird 12 -a/w14 12 -laskett 12 -palinkas 12 -homekit 12 -icebridge 12 -chaus 12 -gerasimidis 12 -baseball/softball 12 -olmazu 12 -audenried 12 -castingcouch-x 12 -wipp 12 -iida 12 -fetu 12 -score-line 12 -demobilised 12 -bruyninckx 12 -bilau 12 -frappe 12 -juab 12 -uslu 12 -competa 12 -veix 12 -katongo 12 -wqad 12 -idioms 12 -recession-era 12 -grohmann 12 -rayworth 12 -v-1 12 -hoggett 12 -rossmo 12 -bendgate 12 -kneidinger 12 -video-conferencing 12 -albo 12 -almaribe 12 -snake-oil 12 -chopey 12 -dugarry 12 -vacationer 12 -entwisle 12 -valjean 12 -terasem 12 -old-money 12 -931 12 -walerysiak 12 -poromoko 12 -hopital 12 -milien 12 -hikmat 12 -tulkarem 12 -breaking-up 12 -saruman 12 -longfield 12 -1674 12 -pratap 12 -physic 12 -al-nejat 12 -coppard 12 -fanciable 12 -earthlike 12 -glovework 12 -fan-shaped 12 -u.s.-owned 12 -personage 12 -masao 12 -womenfolk 12 -four-months-old 12 -loose-knit 12 -al-khair 12 -shouryya 12 -unwaveringly 12 -sha'ath 12 -o'ryan 12 -deondra 12 -redbox 12 -andronici 12 -hiltons 12 -florke 12 -beyrle 12 -gerwing 12 -orosa 12 -riverisland.com 12 -kaluuya 12 -personal-conduct 12 -d.w. 12 -castar 12 -wisc 12 -dolmabahce 12 -1,138 12 -stolid 12 -fourth-fastest 12 -jemison 12 -charité 12 -#fatkini 12 -pulitzer-prize 12 -protectmarriage.com 12 -merryweather 12 -milligen 12 -tamiz 12 -propolis 12 -perforate 12 -6ft-long 12 -hurricane-strength 12 -revote 12 -rith 12 -dysart 12 -semi-intensive 12 -marrs 12 -run-scoring 12 -4:49 12 -post-ferguson 12 -trinite 12 -e-card 12 -cherkaoui 12 -lanikai 12 -balwant 12 -scarsella 12 -moneghetti 12 -kupu 12 -calisse 12 -soirée 12 -xyz 12 -abstains 12 -no-cost 12 -nikethamide 12 -hamlen 12 -mahankali 12 -zowie 12 -zemir 12 -dog-loving 12 -modcloth 12 -motor-vehicle 12 -bextor 12 -werkhoven 12 -tax-related 12 -restlessly 12 -scowcroft 12 -accross 12 -900km 12 -befalling 12 -oakleigh 12 -hydrodynamic 12 -arobieke 12 -hunagundi 12 -huzhou 12 -ex-priest 12 -northlink 12 -leda 12 -warndon 12 -illgner 12 -a56 12 -galeao 12 -60-pound 12 -spielrein 12 -cryengine 12 -tost 12 -karlstein 12 -dik 12 -goleby 12 -boatswain 12 -lintner 12 -saastamoinen 12 -lashonda 12 -hetero 12 -state-educated 12 -dowels 12 -army-issue 12 -donyo 12 -mous 12 -uncared 12 -wever 12 -stonking 12 -lewis-roberts 12 -al-askari 12 -dawn-to-dusk 12 -cusub 12 -actor/director 12 -belozoglu 12 -warlocks 12 -boguslawski 12 -doo-wop 12 -mini-breaks 12 -0.001 12 -back-foot 12 -argilos 12 -eight-fold 12 -latonya 12 -dirac 12 -adelante 12 -wreckless 12 -svitzer 12 -oleskiewicz 12 -richo 12 -longmen 12 -tedros 12 -gauthier-vaillancourt 12 -theon 12 -teegan 12 -5.17 12 -inaa 12 -serious-looking 12 -steinhardt 12 -something-for-nothing 12 -nasima 12 -mirabelli 12 -newell-skinner 12 -boosh 12 -shayden 12 -spivak 12 -kuester 12 -larmond 12 -metzl 12 -ormes 12 -laake 12 -watchkeeper 12 -holiday-season 12 -greenidge 12 -kuldeep 12 -floccari 12 -recaro 12 -wijesinha 12 -shut-out 12 -daisie 12 -santika 12 -misspoken 12 -trackway 12 -rushers 12 -0157 12 -ruhle 12 -anene 12 -zohn 12 -inyanga 12 -antoun 12 -post-edwardian 12 -omri 12 -nathan-turner 12 -most-talked 12 
-fortifies 12 -norullah 12 -chiriseri 12 -all-china 12 -contorts 12 -cranage 12 -mcbryde 12 -sinins 12 -alvelo 12 -dtz 12 -downhills 12 -pdas 12 -pinkies 12 -kenehan 12 -monchi 12 -d'eau 12 -zafran 12 -uro 12 -dolfi 12 -acle 12 -readily-available 12 -buffoonery 12 -implementations 12 -projectionist 12 -semonski 12 -golay 12 -0.295 12 -almodóvar 12 -youâ 12 -yusufiya 12 -bete 12 -keirl 12 -stavas 12 -6.89 12 -fairphone 12 -diomande 12 -bowdidge 12 -78.4 12 -five-month-long 12 -genoa-based 12 -khetkan 12 -9/5 12 -stoer 12 -wrathall 12 -perigord 12 -hsv 12 -perel 12 -thornback 12 -forgoes 12 -reshapes 12 -duodenoscope 12 -aeolis 12 -toleafoa 12 -50-room 12 -middles 12 -aconite 12 -unconquered 12 -psychedelia 12 -hmip 12 -first-week 12 -sigmar 12 -murli 12 -holter 12 -cleadon 12 -sitpack 12 -startles 12 -wanetta 12 -feynman 12 -towning 12 -2000-2002 12 -nisansala 12 -ebbeson 12 -ndlea 12 -skerritt 12 -pouliot 12 -indents 12 -hlavsa 12 -messick 12 -cliffview 12 -multi-tasker 12 -missey 12 -bupropion 12 -1,085 12 -multipacks 12 -sugar-coat 12 -hived 12 -ekstra 12 -13-second 12 -ovi 12 -forziano 12 -100-seat 12 -whas11 12 -crafter 12 -dog-sledding 12 -11,100 12 -huband 12 -saïd 12 -244m 12 -jayesh 12 -ballgobin 12 -chanas 12 -minimization 12 -nia-malika 12 -eulette 12 -legrottaglie 12 -lisan 12 -autoliners 12 -smartened 12 -micro-climate 12 -kranjska 12 -ulan 12 -vaticano 12 -tochigi 12 -high-dependency 12 -rocawear 12 -juarros 12 -lesabre 12 -muzzed 12 -his-and-her 12 -wcti 12 -hotcourses 12 -toleman 12 -bible-minded 12 -contraindications 12 -64.7 12 -headis 12 -platner 12 -tsege 12 -chubby-cheeked 12 -10-gallon 12 -pre-pharmacy 12 -no11 12 -porn-star 12 -sozopol 12 -fidgets 12 -wetering 12 -bestford 12 -mortlock 12 -sprayers 12 -cheonghaejin 12 -lakinski 12 -armijo 12 -275-pound 12 -dehiba 12 -urethral 12 -34.95 12 -69.5 12 -duckham 12 -tonelli 12 -loosed 12 -kilel 12 -crosser 12 -health-insurance 12 -air-defence 12 -rodrick 12 -hellblazer 12 -leeds-liverpool 12 -disingenuously 12 -5:36 12 -grimmy 12 -gutfeld 12 -mangone 12 -90.1 12 -absconder 12 -tesar 12 -sackable 12 -löw 12 -dreamily 12 -pizango 12 -wykeham 12 -crigler-najjar 12 -politest 12 -season-end 12 -cottons 12 -thiess 12 -county-level 12 -skiena 12 -1,116 12 -n-strike 12 -almina 12 -ejectable 12 -melk 12 -ra'ad 12 -suniga 12 -prettejohn 12 -four-word 12 -fee-free 12 -umbrella-shaped 12 -zady 12 -gestural 12 -crinkle 12 -eufaula 12 -rigdol 12 -jarle 12 -8x10 12 -2gether 12 -cottonmouths 12 -intergroup 12 -kingmakers 12 -elsegood 12 -imperiously 12 -woodruffe 12 -tollis 12 -outplaying 12 -archos 12 -mendell 12 -saughton 12 -j.l. 
12 -8.54 12 -bvlgari 12 -gt-r 12 -interpretative 12 -molter 12 -wickrematunge 12 -speekz 12 -kalsoom 12 -such-and-such 12 -iphone/ipad 12 -cidade 12 -poultney 12 -computer-savvy 12 -176million 12 -momofuku 12 -mbari 12 -tiraspol 12 -overcorrected 12 -layni 12 -al-samani 12 -priapus 12 -winteregg 12 -luxuriate 12 -dabrowska 12 -59million 12 -name-checking 12 -hurtwood 12 -bienen 12 -weidlich 12 -beon 12 -67.7 12 -67.8 12 -kicheche 12 -gravelled 12 -infectious-disease 12 -29-page 12 -israel-palestine 12 -tos 12 -bsi 12 -komla 12 -roseberry 12 -tomsky 12 -melds 12 -zip-ties 12 -motion-controlled 12 -3.64 12 -bleakly 12 -454g 12 -turville 12 -ickes 12 -graci 12 -bosher 12 -ashur 12 -akulic 12 -howroyd 12 -alerter 12 -schelte 12 -plaistowe 12 -ex-friend 12 -beirich 12 -brucknell 12 -riau 12 -lionised 12 -swetnam 12 -chomps 12 -nolle 12 -soviet-designed 12 -noblett 12 -topology 12 -mailloux 12 -qia 12 -uechtritz 12 -19kg 12 -master-slave 12 -calvados 12 -claytor 12 -ben-gals 12 -flumist 12 -blad 12 -u16s 12 -tinybeans 12 -three-finger 12 -eviscerating 12 -fludgate 12 -antol 12 -leclere 12 -kdsk 12 -tutbury 12 -coppi 12 -macur 12 -fox411 12 -schreffler 12 -mannings 12 -ghezzal 12 -prostaglandins 12 -agonise 12 -kinvara 12 -pelegrin 12 -kickstarter.com 12 -glace 12 -photosphere 12 -likeminded 12 -dach 12 -ueyanagi 12 -open-neck 12 -zhengsheng 12 -i.e 12 -dishonors 12 -kopicki 12 -panday 12 -458,000 12 -disbergers 12 -arendse 12 -111.55 12 -shakila 12 -hamilton-brown 12 -joycelyn 12 -envisaging 12 -assertively 12 -afif 12 -u20s 12 -dervan 12 -negad 12 -bere 12 -matheka 12 -kugow 12 -popsci.com 12 -hanzo 12 -placket 12 -family-man 12 -anti-aids 12 -alternative-energy 12 -superclub 12 -panik 12 -synaptic 12 -outsprinted 12 -iop 12 -findon 12 -mcminnville 12 -danna 12 -facer 12 -nephrotic 12 -demir 12 -mcguinn 12 -15-week-old 12 -trimethylaminuria 12 -kottasova 12 -deep-diving 12 -sorasart 12 -mclaurin 11 -cdc.gov 11 -mcerlane 11 -yeezys 11 -majd 11 -peremptory 11 -miiverse 11 -israeli-made 11 -cádiz 11 -ishida 11 -five-year-deal 11 -sharrak 11 -frights 11 -barlby 11 -witi 11 -akitas 11 -norc 11 -clenched-fist 11 -neuropsychological 11 -tap-to-pay 11 -haight-ashbury 11 -venlo 11 -knx 11 -59m 11 -dehua 11 -peiffer 11 -marmife 11 -thérèse 11 -typescript 11 -louzado 11 -sprit 11 -unhasu 11 -overestimation 11 -baena 11 -mackillop 11 -moisturize 11 -heletey 11 -biviano 11 -oier 11 -nuggett 11 -equinome 11 -laquinn 11 -syedna 11 -shandor 11 -cousar 11 -abortionists 11 -loxahatchee 11 -dieter-robinson 11 -medium-length 11 -eagers 11 -zettabytes 11 -bartick 11 -amidala 11 -1,318 11 -screened-in 11 -makha 11 -briarcliff 11 -gedge 11 -mandt 11 -brushless 11 -chortle 11 -nine-tenths 11 -530million 11 -laghat 11 -grattan 11 -omt 11 -kizuna 11 -furchtgott 11 -bealefeld 11 -lindpere 11 -wadeema 11 -seah 11 -englebert 11 -jauntily 11 -mappa 11 -chirikova 11 -mcwrap 11 -righter 11 -robertsbridge 11 -hft 11 -zolkwer 11 -otolaryngology 11 -niwattumrong 11 -driving-related 11 -adjusters 11 -240billion 11 -hemiplegia 11 -habul 11 -kendrea 11 -bruns 11 -sagmeister 11 -news4 11 -5-4-1 11 -conciousness 11 -millin 11 -endsleigh 11 -kochhar 11 -selectivity 11 -flusher 11 -.03 11 -toeppe 11 -invasiveness 11 -bodrov 11 -uncatchable 11 -zombified 11 -attentional 11 -436b 11 -below-market 11 -funches 11 -anti-religion 11 -1,865 11 -stromer 11 -lutsyshyna 11 -misurata 11 -much-respected 11 -cinquecento 11 -74.1 11 -hosford 11 -gustatory 11 -xinyu 11 -rafif 11 -1502 11 -katyal 11 -endeavoring 11 -ohene-gyan 11 -labrecque 11 
-stancliffe 11 -polair 11 -bronchitis-related 11 -two-hour-long 11 -fluoroquinolones 11 -350-page 11 -cepero 11 -u.s.-registered 11 -clulow 11 -life-jacket 11 -keelan 11 -coloreds 11 -11.09 11 -stress-busting 11 -4:07 11 -bb10 11 -fun. 11 -tiebreaks 11 -kiyotake 11 -jesup 11 -7:53 11 -lakeland.co.uk 11 -mr01 11 -39,500 11 -14-ton 11 -filmation 11 -rogin 11 -ercp 11 -marvient 11 -nahuel 11 -laraque 11 -kasmin 11 -mcgraw-hill 11 -114,500-tonne 11 -shout-outs 11 -witha 11 -groizard 11 -caño 11 -chihiraaico 11 -calzada 11 -20,000-strong 11 -261,000 11 -snicket 11 -south-eastwards 11 -sidbury 11 -broch 11 -bercovici 11 -brandram 11 -satirizes 11 -copper-colored 11 -shitake 11 -erraji 11 -re-stocking 11 -letterpress 11 -schavolt 11 -dolt 11 -pokal 11 -1,238 11 -1,234 11 -ivans 11 -ehmke 11 -sensaslim 11 -tmj 11 -banuelos 11 -ingenue 11 -mondial 11 -barronelle 11 -szechenyi 11 -fromer 11 -messerschmitts 11 -hallucinogens 11 -skysports 11 -3:33 11 -vermersch 11 -acupuncturists 11 -flyte 11 -nhaje 11 -leithinger 11 -henslowe 11 -pvet 11 -arnolds 11 -underachieved 11 -walnut-sized 11 -bwalya 11 -kartik 11 -mitral 11 -yamaoka 11 -pokroy 11 -ghada 11 -lanlard 11 -daboul 11 -vikander 11 -boulby 11 -troublemaking 11 -croppers 11 -466,000 11 -sunburst 11 -minneapolis-saint 11 -re-shaping 11 -#savebela 11 -degraff 11 -kupferman 11 -'97 11 -lacen 11 -waycross 11 --27 11 -872 11 -buffered 11 -hongkong 11 -oshine 11 -nonfamily 11 -willams 11 -ratagarama 11 -bastet 11 -aureole 11 -honest-to-goodness 11 -seifi 11 -gorelik 11 -krasner 11 -57040 11 -pen-knife 11 -delden 11 -ciaa 11 -stepping-stone 11 -estádio 11 -kasane 11 -sharkbanz 11 -374,000 11 -stepfathers 11 -retread 11 -tongans 11 -minifigure 11 -stemberg 11 -fullbacks 11 -over-25s 11 -1760s 11 -dionysopoulos 11 -clovermead 11 -oriskany 11 -j1407 11 -tcm.com 11 -visnakovs 11 -proofreader 11 -actress/singer 11 -floor-by-floor 11 -913,000 11 -sarl 11 -shikumen 11 -gunbower 11 -lehberger 11 -20oz 11 -class-based 11 -benbrika 11 -acidosis 11 -dudz 11 -sim-free 11 -eolas 11 -8-13 11 -516,000 11 -varvel 11 -#neknominate 11 -d'you 11 -wolf-whistled 11 -sby 11 -432,000 11 -stoat 11 -llano 11 -raylan 11 -marrafa 11 -alexandroaia 11 -unbelieving 11 -danton 11 -pyrah 11 -ivancev 11 -prizzi 11 -toilet-related 11 -chouly 11 -seven-judge 11 -comcare 11 -questionably 11 -wkrn-tv 11 -manang 11 -chiklis 11 -bigtime 11 -fanaroff 11 -dakotans 11 -melloni 11 -gonalons 11 -jokinen 11 -rickert 11 -snowed-in 11 -chalkbot 11 -abama 11 -bernadi 11 -ledoyen 11 -80percent 11 -savitz 11 -1615 11 -s2000 11 -ionizing 11 -three-speed 11 -hankered 11 -onigiri 11 -foolow 11 -tiree 11 -road-ready 11 -bourke-white 11 -eudora 11 -alfons 11 -dilmah 11 -90,000-square-foot 11 -suber 11 -missal 11 -stathis 11 -melanosomes 11 -zagazig 11 -m8120n 11 -three-song 11 -pre-oscars 11 -heusen 11 -kooren 11 -860million 11 -havengore 11 -slow-release 11 -zolotovsky 11 -dapa 11 -balmaceda 11 -dianetics 11 -non-contagious 11 -bridgett 11 -alesund 11 -ferruccio 11 -muftah 11 -religion-based 11 -glasby 11 -e.d. 11 -53,500 11 -paradisus 11 -occurence 11 -2:18 11 -2:14 11 -caponi 11 -summer-long 11 -fermilab 11 -keate 11 -kundan 11 -non-punitive 11 -8600 11 -sugarplum 11 -windowsills 11 -jtf 11 -10-episode 11 -bagdad 11 -non-poisonous 11 -taxonomy 11 -diggin 11 -vondrasek 11 -apichart 11 -protensa 11 -cuddeback 11 -preis 11 -tourism-related 11 -spring-inspired 11 -grogin 11 -crimesider 11 -nupur 11 -haramboure 11 -timbuk2 11 -weiqing 11 -chatperf 11 -petroff 11 -mildmay 11 -milly-anne 11 -kpax 11 -worcs. 
11 -dufka 11 -hotspurs 11 -mercs 11 -isopropyl 11 -montmajour 11 -gero 11 -yevloyev 11 -valori 11 -foppish 11 -rosko 11 -16.00 11 -tapscott 11 -uppermill 11 -203,000 11 -ostracise 11 -edgard 11 -ghosheh 11 -mudflow 11 -shukria 11 -nerja 11 -bramlett 11 -bilharzia 11 -euless 11 -kalettes 11 -klaudia 11 -moratoria 11 -deursen 11 -tourmobile 11 -2,059 11 -1,880 11 -medeva 11 -open-enrollment 11 -synths 11 -combes 11 -1521 11 -152m 11 -1,155 11 -debt-fuelled 11 -figueira 11 -kholodovskii 11 -ilavarasan 11 -34-6 11 -more-or-less 11 -renuka 11 -favouriting 11 -plaats 11 -az. 11 -heydari 11 -timeo 11 -one-seventh 11 -marly 11 -rogaski 11 -malpeso 11 -yolngu 11 -4:26 11 -scary-looking 11 -ondari 11 -telenor 11 -factsheet 11 -ruth-ann 11 -985ft 11 -valproic 11 -locs 11 -proceso 11 -faruqui 11 -43-yard 11 -rhines 11 -booziest 11 -antifungal 11 -roped-off 11 -wssc 11 -18.30 11 -2051 11 -2055 11 -sunswift 11 -zellers 11 -reenacts 11 -rudding 11 -schron 11 -salak 11 -accumulators 11 -lenthall 11 -cepheid 11 -63,500 11 -shrimper 11 -narre 11 -walleye 11 -clotilde 11 -taylour 11 -grigson 11 -neuroma 11 -steens 11 -gion 11 -dieleman 11 -sunport 11 -fawsley 11 -calvins 11 -merri 11 -florowski 11 -sowrey 11 -2,000-foot 11 -motion-based 11 -5-kilometer 11 -schofields 11 -villard 11 -alannah 11 -hnin 11 -tate-labianca 11 -blunderbuss 11 -1:03 11 -acrassicauda 11 -mobiles.co.uk 11 -niedzwiecki 11 -underequipped 11 -school-wide 11 -mbatha-raw 11 -watersheds 11 -vore 11 -syamsuddin 11 -cunanan 11 -two-disc 11 -nocturne 11 -day-to-night 11 -non-descript 11 -electroconvulsive 11 -top-division 11 -tenicka 11 -ex-apple 11 -dreifuss 11 -hourihan 11 -aahs 11 -filippos 11 -evaristo 11 -rien 11 -gaskill 11 -bose-einstein 11 -pitchmen 11 -junkyards 11 -flautist 11 -minister-level 11 -lorenco 11 -thur 11 -pre-pay 11 -przemyslaw 11 -meese 11 -cheis 11 -strike-force 11 -caddying 11 -ketchion 11 -isnâ 11 -renowitzky 11 -lukash 11 -shamrez 11 -fpd 11 -fpl 11 -958 11 -khrunova 11 -parador 11 -party-linked 11 -comet-chasing 11 -front-mounted 11 -tatford 11 -1,038 11 -chakravarti 11 -state-mankato 11 -48995 11 -j-village 11 -paranjpe 11 -mcgorry 11 -adalynn 11 -schnakenberg 11 -labanino 11 -1920x1080 11 -demarcate 11 -odrick 11 -wilson-johnson 11 -470million 11 -epe 11 -sopher 11 -trixibelle 11 -wakulla 11 -tiepolo 11 -bulworth 11 -mccole 11 -portsea 11 -siring 11 -povetkin 11 -nickle 11 -ever-decreasing 11 -polyglot 11 -.2013 11 -trythall 11 -svilar 11 -silchester 11 -w11 11 -67per 11 -missile-related 11 -iasi 11 -weissinger 11 -observer-reporter 11 -speedee 11 -parmentier 11 -malinka 11 -3drudder 11 -sweatsuit 11 -cash-for-access 11 -thousand-year-old 11 -pointes 11 -forston 11 -snel 11 -westcarr 11 -fiszer 11 -browhaus 11 -bríanán 11 -ispr 11 -canyoning 11 -64-page 11 -mainichi 11 -oshawa 11 -kinosh 11 -yogananda 11 -at-bats 11 -mawer 11 -3,646 11 -800-strong 11 -supercenters 11 -crawshaw 11 -parrinello 11 -overselling 11 -waldon 11 -tetrapod 11 -1506 11 -mirante 11 -mandola 11 -coro 11 -newbiggin-by-the-sea 11 -marino-fiandaca 11 -shulgin 11 -25,000-seat 11 -khair 11 -savran 11 -undernutrition 11 -much-reduced 11 -yellow-legged 11 -foreleg 11 -gloucs 11 -beeckman 11 -lidong 11 -velociraptors 11 -terminonaris 11 -shimi 11 -andorran 11 -wilhelms 11 -langberg 11 -obamacare-related 11 -23-3 11 -14-7 11 -mogi 11 -derzis 11 -absent-mindedly 11 -travelators 11 -addlespurger 11 -fellner 11 -misinform 11 -mires 11 -uninsurable 11 -bling-bling 11 -depresses 11 -glamourise 11 -curaçao 11 -quaff 11 -ervine 11 -chikhaoui 11 -taia 11 
-camilotti 11 -bigeye 11 -huberman 11 -giacopazzi 11 -distressful 11 -truswell 11 -kolling 11 -turvey 11 -athetoid 11 -vaporium 11 -haltom 11 -trichloroethylene 11 -weddady 11 -bion-m 11 -khalkhali 11 -manenti 11 -syrian-controlled 11 -benhaim 11 -bukar 11 -sinar 11 -ponemon 11 -schneeberg 11 -al-sahlawi 11 -bromham 11 -australia-wide 11 -cnn/u 11 -under-active 11 -gerets 11 -porche 11 -seet 11 -cheveley 11 -carvela 11 -kayan 11 -dysphoric 11 -telegraphs 11 -al-dalou 11 -kpcc 11 -legge-bourke 11 -longest-standing 11 -tallman 11 -jitsu 11 -marché 11 -smaltz 11 -jibed 11 -douai 11 -1,530 11 -derenalagi 11 -preparer 11 -handcraft 11 -tranny 11 -hodirevski 11 -mib 11 -untargeted 11 -re-appear 11 -schield 11 -thiepval 11 -terwilliger 11 -bitingly 11 -motility 11 -37-10 11 -matschie 11 -pushpins 11 -license-plate 11 -jeevan 11 -thomas-darrah 11 -devendra 11 -tardigrade 11 -hypothesise 11 -co-dependency 11 -bartkiw 11 -fransen 11 -8ft-long 11 -federally-funded 11 -immolations 11 -enborne 11 -tavanipupu 11 -beckstrom 11 -indiantown 11 -fluoxetine 11 -bajarin 11 -whisnant 11 -pype 11 -food-loving 11 -kholi 11 -sarabhai 11 -nanotips 11 -dong-a 11 -honky-tonk 11 -singer-actor 11 -medellín 11 -arbilla 11 -ghubash 11 -printmaker 11 -miljo 11 -paraplegia 11 -lowcostholidays 11 -dog-eating 11 -sharp-toothed 11 -jcr 11 -rubix 11 -andreae 11 -balli 11 -arlynn 11 -ex-google 11 -lindblad 11 -longo-ciprelli 11 -fit-out 11 -9.54 11 -8.36 11 -slave-like 11 -379,000 11 -shawarma 11 -chairmaster 11 -scroggins 11 -astonishes 11 -500-member 11 -fillyaw 11 -long-reigning 11 -woodhams 11 -iannicelli 11 -s-2 11 -tie-dyed 11 -reconnaisance 11 -aerocar 11 -420-acre 11 -936 11 -ogof 11 -vlaams 11 -denisov 11 -in-box 11 -slide-out 11 -kort 11 -latheron 11 -prevarication 11 -driver-less 11 -witehira 11 -mckeand 11 -down-and-dirty 11 -ex-client 11 -dingli 11 -imperiale 11 -dhiren 11 -ross-shire 11 -cybele 11 -sølveig 11 -70lb 11 -veras 11 -early-voting 11 -oldham-born 11 -mairwen 11 -giang 11 -3,069 11 -bellard 11 -threadless 11 -crathes 11 -mcduff 11 -under-the-table 11 -janghir 11 -carvounis 11 -zibakalam 11 -hifi 11 -itele 11 -109-year-old 11 -resentencing 11 -mid-western 11 -stabling 11 -hotted 11 -face-painting 11 -subang 11 -hepatology 11 -hagos 11 -methylated 11 -jyrki 11 -shrike 11 -fresca 11 -elachi 11 -radebe 11 -valeting 11 -modal 11 -comley 11 -motol 11 -ibragimov 11 -well-adapted 11 -babson 11 -shui-bian 11 -haseman 11 -dunstall 11 -kayange 11 -zucked 11 -single-wide 11 -bencher 11 -guilfoy 11 -loret 11 -lorem 11 -kfir 11 -salnikow 11 -annbriar 11 -attenuated 11 -zonkey 11 -osweiler 11 -okonjima 11 -scarman 11 -seperated 11 -tdcj 11 -tdcs 11 -lesters 11 -progenitors 11 -nezahualcoyotl 11 -sexcapades 11 -barschak 11 -anjuna 11 -uae-based 11 -bacteriological 11 -fincantieri 11 -chiddingly 11 -shickle 11 -arminak 11 -gammons 11 -i-395 11 -masonis 11 -bio-inspired 11 -festive-themed 11 -lafd 11 -airwave 11 -328million 11 -kawana 11 -renacci 11 -kothari 11 -126m 11 -65,000-strong 11 -irem 11 -data-roaming 11 -brevik 11 -ventersdorp 11 -adlakha 11 -dominicks 11 -8/13 11 -xiahn 11 -biotin 11 -unfasten 11 -tchoumitcheva 11 -refiner 11 -uzis 11 -woodberry 11 -cetkovska 11 -massera 11 -birgitte 11 -30mg 11 -regift 11 -kleinfontein 11 -bumming 11 -turboprops 11 -flaiz 11 -mamaroneck 11 -belleci 11 -:01 11 -75billion 11 -altuzarra 11 -boeve 11 -fishwives 11 -transparencies 11 -siskind 11 -mazloum 11 -liberations 11 -emporer 11 -superfortress 11 -chentouf 11 -middlesboro 11 -a-night 11 -aerialists 11 -maite 11 -copahue 11 
-non-local 11 -room-sized 11 -docu-drama 11 -darci 11 -ebbers 11 -car-hire 11 -janell 11 -shortland 11 -berryhill 11 -lockbox 11 -crystallization 11 -cycoped 11 -aalto 11 -hahns 11 -reasonably-priced 11 -kroy 11 -schepis 11 -83billion 11 -jawlines 11 -2,545 11 -984ft 11 -sigurd 11 -strums 11 -super-smart 11 -alvear 11 -shiman 11 -doyles 11 -kerkorian 11 -chuuk 11 -7:27 11 -visnich 11 -specially-modified 11 -damai 11 -non-suicidal 11 -ioactive 11 -littlebigplanet 11 -hemorrhoid 11 -jenney 11 -elleah-jayne 11 -thirlaway 11 -wdaf 11 -jafaari 11 -cabarets 11 -thumb-sized 11 -1800mhz 11 -americanisms 11 -assualt 11 -exfiltration 11 -hynd 11 -tsiklauri 11 -56680 11 -afose 11 -aveyron 11 -66mins 11 -sabbias 11 -simpatico 11 -runar 11 -55078 11 -glanford 11 -lamarcus 11 -lauryl 11 -pugfest 11 -depandi 11 -non-malignant 11 -parkey 11 -parken 11 -warehouseman 11 -cockfights 11 -multicar 11 -hermila 11 -braida 11 -cardio-pulmonary 11 -198th 11 -genevra 11 -fearsome-looking 11 -pharell 11 -bistros 11 -rangan 11 -einat 11 -blast-proof 11 -aurum 11 -massagee 11 -personalizes 11 -al-ikhbariya 11 -kerryn 11 -air-pot 11 -life-savers 11 -mini-tornado 11 -v-twin 11 -drubbed 11 -ndtv.com 11 -crowd-control 11 -wikileaks.org 11 -mirata 11 -cornball 11 -hezbollah-dominated 11 -pnina 11 -havaianas 11 -nachrichten 11 -rydal 11 -cobourg 11 -maiko 11 -maike 11 -georgian-style 11 -ellroy 11 -mexia 11 -gasteyer 11 -reisch 11 -eliminations 11 -tatafu 11 -mooncey 11 -jabakhanji 11 -flunkies 11 -knucklehead 11 -bithrey 11 -shergill 11 -undersecretary-general 11 -guiliani 11 -loveliness 11 -zygi 11 -southerndown 11 -2.4-mile 11 -steinberger 11 -behling 11 -2492 11 -smolan 11 -liscouski 11 -126.7 11 -greenbrook 11 -hindu-majority 11 -tholen 11 -kearn 11 -private-public 11 -canonize 11 -goffs 11 -navdeep 11 -outclasses 11 -camera-toting 11 -leiston 11 -horsforth 11 -harra 11 -pelin 11 -relinquishes 11 -130g 11 -tenyukh 11 -jep 11 -trollinger 11 -syreeta 11 -196million 11 -lemv 11 -770million 11 -hoglundi 11 -shaniece 11 -oncor 11 -obl 11 -obp 11 -lumpen 11 -langan 11 -pinho 11 -deknight 11 -velveteen 11 -sung-yoon 11 -mary-ann 11 -ben-zion 11 -mounter 11 -skirmishing 11 -worm-like 11 -technische 11 -carrolls 11 -vaginosis 11 -beaujolais 11 -51800 11 -28in 11 -cryptosporidiosis 11 -tuks 11 -zhigang 11 -tudwal 11 -hulugalle 11 -mugly 11 -ex-patriots 11 -moon-like 11 -65,000-a-year 11 -17-9 11 -villani 11 -carloto 11 -30percent 11 -counter-attacked 11 -boller 11 -bolles 11 -warrens 11 -niemi 11 -tusker 11 -scaredy 11 -thirlmere 11 -centerline 11 -non-italian 11 -abberline 11 -kony2012 11 -in-competition 11 -now-disbanded 11 -ktxa 11 -caltagirone 11 -govindji 11 -spodak 11 -rebtel 11 -standfield 11 -tomovic 11 -shanwei 11 -simband 11 -tickler 11 -supercenter 11 -heatedly 11 -proctors 11 -point-shaving 11 -cash-and-stock 11 -screeds 11 -deutschneudorf 11 -remixing 11 -anti-car 11 -anjan 11 -14-story 11 -killa 11 -maralunga 11 -sekhri 11 -220ft 11 -metts 11 -manoeuvrings 11 -quantitatively 11 -2,4 11 -sadri 11 -supply-chain 11 -papillion 11 -catfishing 11 -cletus 11 -kerrianne 11 -jarndyce 11 -raucci 11 -balinese-style 11 -widmouth 11 -jorja 11 -okarocha 11 -zeina 11 -1,076 11 -1,071 11 -1,079 11 -199mph 11 -7:18 11 -junnier 11 -sephardic 11 -dishi 11 -tigue 11 -nopparat 11 -habour 11 -grey-coloured 11 -webbs 11 -tola 11 -confusions 11 -mid-pacific 11 -reformulating 11 -scovilles 11 -five-stone 11 -cota-monroy 11 -sprayable 11 -high-kill 11 -youm 11 -15th-minute 11 -cinching 11 -fingolimod 11 -choquehuanca 11 -www.90min.com 11 
-earlimart 11 -flava 11 -celt 11 -celi 11 -advertorial 11 -britto 11 -nelmes 11 -kufra 11 -joselito 11 -gigantes 11 -lunel 11 -25-bed 11 -chabbott 11 -shotkoski 11 -psichiatrico 11 -frenchies 11 -anti-prostitution 11 -frontispiece 11 -hig 11 -criddle 11 -janiya 11 -600ml 11 -600mm 11 -ex-conservative 11 -fing 11 -pokuta 11 -gatecrashes 11 -seven-years 11 -24-18 11 -error-free 11 -10.08 11 -buterbaugh 11 -decinque 11 -mandujano 11 -gulledge 11 -metac 11 -mcgrevey 11 -roman-era 11 -orangina 11 -obstinacy 11 -soft-boiled 11 -adrenaline-fueled 11 -fortea 11 -brandel 11 -prepayment 11 -skamania 11 -joscelyn 11 -papania 11 -beppu 11 -passed-out 11 -glesne 11 -ornamented 11 -dariush 11 -gundry 11 -iheart 11 -wils 11 -55lb 11 -armonk 11 -u.s.pga 11 -kfw 11 -liszewski 11 -gamand 11 -korea-based 11 -portents 11 -kyriacos 11 -1723 11 -butterbeer 11 -1,714 11 -2.5-hour 11 -frensham 11 -rocksmith 11 -funicello 11 -ngobeni 11 -ul-qadri 11 -matlins 11 -ezzedine 11 -quarenghi 11 -capitoline 11 -montaigne 11 -blakkolb 11 -deep-freeze 11 -turncoats 11 -torsion 11 -40-man 11 -ninth-graders 11 -mcgonigal 11 -americanization 11 -delmo 11 -grzonka 11 -vaughan-salter 11 -1,390 11 -ascham 11 -towanda 11 -aguagua 11 -bellambi 11 -shirdi 11 -friedfeld 11 -3inches 11 -0.54 11 -guedes 11 -re-investigating 11 -ozala 11 -dundovic 11 -70-30 11 -quek 11 -cakeshop 11 -morlinghaus 11 -hooser 11 -power-brokers 11 -evangelize 11 -isai 11 -americium 11 -opiate-based 11 -2,345 11 -counterprotests 11 -raelene 11 -valadao 11 -unwholesome 11 -jutton 11 -pidgley 11 -salers 11 -ziba 11 -pattis 11 -wb-57 11 -nathen 11 -doorly 11 -hh-60 11 -kashifa 11 -jakell 11 -body-envy 11 -goldderby.com 11 -orginally 11 -lefeged 11 -cushion-cut 11 -pomodoro 11 -loïc 11 -chd 11 -chalybeate 11 -platero 11 -masr 11 -news-miner 11 -56040 11 -image-obsessed 11 -brisson 11 -karpen 11 -rhind 11 -neads 11 -talanoa 11 -walkthrough 11 -fopp 11 -pollin 11 -katyia 11 -marilinda 11 -makwana 11 -12-course 11 -margolyes 11 -whoopie 11 -trans-continental 11 -rockhurst 11 -teutul 11 -flinty 11 -nappen 11 -stephie 11 -inventoried 11 -ex-celtic 11 -belinte 11 -rwenzori 11 -368,000 11 -337,000 11 -leibold 11 -smallbone 11 -agnes-mariam 11 -so-yeon 11 -billion-strong 11 -eurojust 11 -yvana 11 -rainie 11 -chambord 11 -prokopova 11 -sheeley 11 -kazem 11 -front-burner 11 -an-noor 11 -varli 11 -hamri 11 -igcses 11 -dagnall 11 -fleder 11 -slow-down 11 -sajil 11 -kosenko 11 -premonitions 11 -notts. 11 -scratch-proof 11 -guard/security 11 -mothballing 11 -wonfor 11 -social-climbing 11 -kindergartener 11 -jayden-james 11 -nogent 11 -two-month-long 11 -jaba 11 -worthersee 11 -nemeti 11 -broad-daylight 11 -bad-mouthed 11 -magicicadas 11 -craven-walker 11 -eko 11 -phylum 11 -tailender 11 -minakhmetova 11 -supperclub 11 -andrius 11 -dirtied 11 -ebersole 11 -mudguard 11 -convective 11 -early-evening 11 -track-side 11 -sellitto 11 -bonsey 11 -kgalagadi 11 -271st 11 -wurie 11 -dizzyingly 11 -sachem 11 -inverkeithing 11 -yunupingu 11 -leazer 11 -falconers 11 -virtuosity 11 -twosies 11 -hole-in-the-wall 11 -top-order 11 -l.s. 
11 -woolery 11 -medium-haul 11 -ruksana 11 -centrefold 11 -bmn 11 -korps 11 -un-english 11 -verkhovna 11 -43770 11 -35-years 11 -dispensable 11 -kips 11 -legally-held 11 -llandre 11 -woolfenden 11 -insel 11 -macartney 11 -machuea 11 -carsyn 11 -sporborg 11 -muxiang 11 -rubber-coated 11 -skipjack 11 -ns 11 -n8 11 -longenecker 11 -philadephia 11 -boasson 11 -frydenberg 11 -1,417 11 -arab-dominated 11 -prokh 11 -14f 11 -gastonguay 11 -festo 11 -danila 11 -pechora 11 -millisievert 11 -gristedes 11 -stakelin 11 -80,000-seat 11 -gr8 11 -mcmachen 11 -peixoto 11 -cipriana 11 -heid 11 -mollo 11 -lopo 11 -leonids 11 -dissanyake 11 -deinocheirus 11 -69million 11 -raffish 11 -charveron 11 -loram 11 -77.8 11 -zorba 11 -77.6 11 -2006-2010 11 -anaesthetising 11 -ambri 11 -newmie 11 -vasti 11 -70-page 11 -incomprehensibly 11 -cash-poor 11 -vaida 11 -revisionists 11 -tatt 11 -haddenham 11 -khunying 11 -mardikian 11 -tangibly 11 -garold 11 -outsides 11 -serre 11 -tienanmen 11 -320m 11 -gratwick 11 -innerhofer 11 -high-placed 11 -lovey 11 -knocked-out 11 -peugeots 11 -sally-anne 11 -francesc 11 -dramatizing 11 -misapplication 11 -1227 11 -glaceau 11 -achraf 11 -glushakov 11 -forsey 11 -mortadella 11 -brett-pierce 11 -six-weeks 11 -insomniacs 11 -tijuana-based 11 -vojtko 11 -nantais 11 -homsi 11 -covetous 11 -lanhydrock 11 -célèbre 11 -re-boarded 11 -leprae 11 -tbn 11 -tbe 11 -selleneit 11 -hezb-e-islami 11 -knee-ligament 11 -ice-penetrating 11 -compered 11 -calbug 11 -skycall 11 -malalai 11 -warg 11 -portmeirion 11 -saheena 11 -thorbjoern 11 -peculiarity 11 -borochoff 11 -pinedo 11 -masahiro 11 -obstetrical 11 -münchen 11 -outranks 11 -record-holding 11 -spicier 11 -counterbalanced 11 -gaydamak 11 -kvoa-tv 11 -bevacqua 11 -mcmansions 11 -23-member 11 -benion 11 -hissan 11 -laniado 11 -eurobarometer 11 -fci 11 -planemaker 11 -stavoren 11 -tahlequah 11 -sivok 11 -brooklin 11 -robertson-smith 11 -darge 11 -morwell 11 -ndingeko 11 -chaperon 11 -orlu 11 -wibw 11 -394,000 11 -lti 11 -bratten 11 -p2i 11 -kdf 11 -17.30 11 -jarringly 11 -icebound 11 -highest-selling 11 -pawlby 11 -@cnnschools 11 -cley 11 -acloque 11 -katowice 11 -468,000 11 -insect-eating 11 -cayat 11 -kakkad 11 -meldish 11 -match-fitness 11 -water-well 11 -bailer-jones 11 -post-antibiotic 11 -procedurals 11 -gadhafi-era 11 -wedneday 11 -income-related 11 -laux 11 -trochesset 11 -jachles 11 -thoughout 11 -resend 11 -gozleveli 11 -40-years 11 -well-targeted 11 -tree-covered 11 -inner-sydney 11 -triboelectric 11 -by-the-book 11 -59-second 11 -high-viz 11 -furred 11 -buyuksarac 11 -coffin-like 11 -shijun 11 -fannan 11 -lickteig 11 -false-color 11 -zaidan 11 -insouciant 11 -localness 11 -khasal 11 -hynek 11 -berti 11 -100-per-cent 11 -elsemiek 11 -0.72 11 -0.79 11 -jieyu 11 -guffaw 11 -fascinations 11 -warfighters 11 -al-shamrani 11 -platin 11 -disruptor 11 -24cm 11 -engelbart 11 -as-yet-unidentified 11 -under-12 11 -henline 11 -npas 11 -perle 11 -pennarun 11 -350k 11 -belkovsky 11 -three-and-a-half-years 11 -reconsiders 11 -brasco 11 -undocking 11 -kubala 11 -mönchengladbach 11 -glebov 11 -entrepeneur 11 -bengalis 11 -ultra-short 11 -6:58 11 -dramatists 11 -5:57 11 -celyn 11 -billingslea 11 -lynes 11 -three-book 11 -vasquez-hernandez 11 -aeds 11 -speculum 11 -eastender 11 -piano-playing 11 -ana-maria 11 -akili 11 -penitents 11 -kolontar 11 -bilcliff 11 -lacey-bordeaux 11 -michaels-hoder 11 -60814 11 -c.r. 
11 -recently-launched 11 -strecker 11 -segeda 11 -168million 11 -kalibo 11 -self-reinforcing 11 -rrl 11 -yellow-and-blue 11 -sokolov 11 -v40 11 -halonen 11 -wollaton 11 -negombo 11 -ilderton 11 -bogar 11 -korolko 11 -kinase 11 -132-year 11 -!!!!!!!! 11 -mcclusky 11 -buttertubs 11 -preuss 11 -emmelie 11 -sonapur 11 -ecolodge 11 -3000bc 11 -air-supported 11 -tripr 11 -ponti 11 -higley 11 -blackwing 11 -birra 11 -seven-carat 11 -fattogram 11 -rodi 11 -gocompare.com 11 -destino 11 -gabbedey 11 -chetna 11 -worts 11 -vwp 11 -code.org 11 -25-stone 11 -spring-fed 11 -blanning 11 -urville 11 -penoyer 11 -yanaha 11 -rockview 11 -fornari 11 -accredits 11 -katanec 11 -viveiros 11 -crescenta 11 -infirmities 11 -15080 11 -editorialized 11 -oguna 11 -tripplehorn 11 -artist-in-residence 11 -grainge 11 -windstar 11 -x12 11 -high-tailed 11 -lazuli 11 -triaud 11 -cable-stayed 11 -caixin 11 -bollack 11 -kirven 11 -portugeezer 11 -zha 11 -thrupp 11 -sharry 11 -35-page 11 -rabo 11 -al-hariri 11 -buckworth 11 -khanaqin 11 -tile-based 11 -afonso 11 -bruguera 11 -repped 11 -cantley 11 -fasullo 11 -kosloff 11 -corsetry 11 -hyannisport 11 -baljinder 11 -musclefood.com 11 -dobrich 11 -barong 11 -sauced 11 -pedv 11 -umatilla 11 -shafique 11 -dna-based 11 -5ft2in 11 -vasiliev 11 -bail-outs 11 -34691 11 -'72 11 -nebulisers 11 -non-serb 11 -2,120 11 -2,125 11 -bedini 11 -10-foot-long 11 -tatang 11 -glass-sided 11 -just-published 11 -terrilynn 11 -747-200 11 -amyx 11 -amangiri 11 -emeka 11 -lee-potter 11 -home-owner 11 -tournon 11 -greenhead 11 -then-west 11 -400ad 11 -pari 11 -canavan-mcclung 11 -impedance 11 -neversink 11 -masseria 11 -three-paneled 11 -summerall 11 -defoliant 11 -15-ton 11 -galletti 11 -showreel 11 -mackley 11 -muffet 11 -damico 11 -sword-fighting 11 -on-point 11 -gosch 11 -judalet 11 -4-11 11 -pembridge 11 -morels 11 -noelene 11 -zettel 11 -sozzani 11 -dhanens 11 -mountainbase 11 -skyhook 11 -tanita 11 -phreaking 11 -gray-swain 11 -raphel 11 -27th-minute 11 -tsukimi 11 -qubeka 11 -nukemap 11 -51-second 11 -groveling 11 -motion-tracking 11 -re-victimized 11 -makeout 11 -seiger 11 -zui 11 -haycock 11 -impermanent 11 -irish-catholic 11 -74.99 11 -phablet-style 11 -apotheosis 11 -wet-look 11 -10.42 11 -mini-tour 11 -hand-blown 11 -pionk 11 -jenelle 11 -ninety-four 11 -leaf-peeping 11 -congruence 11 -powhatan 11 -gardner-serpollet 11 -laughlan 11 -white-ball 11 -derr 11 -20-feet 11 -human-robot 11 -krotov 11 -nembe 11 -gang-ridden 11 -lipka 11 -lancair 11 -5-httlpr 11 -virtusize 11 -eidinger 11 -traffic-choked 11 -zuckerburg 11 -overcomplicated 11 -coat-tails 11 -joongwon 11 -swidler 11 -mcgrandles 11 -orrb 11 -nhbc 11 -rakish 11 -crime-busting 11 -gahn 11 -kadeem 11 -losen 11 -t.w. 
11 -7:58 11 -cherry-red 11 -yevtushenkov 11 -reorientation 11 -itkin 11 -bronzers 11 -hadouken 11 -short-notice 11 -dellal 11 -crecelius 11 -in-swinging 11 -villacoublay 11 -llandough 11 -gingivitis 11 -under-qualified 11 -suski 11 -wedinos 11 -highest-achieving 11 -staghounds 11 -96.6 11 -britannic 11 -twitchers 11 -double-takes 11 -ryleigh 11 -mbsu 11 -head-start 11 -kelle 11 -3,000,000 11 -35k 11 -coleiro 11 -pook 11 -qaddafi 11 -electrosensitivity 11 -unlatched 11 -wrobel 11 -afropreneurs 11 -lactose-free 11 -six-to-eight 11 -99.999 11 -troyano 11 -magbee 11 -minorca 11 -lissette 11 -theatreland 11 -half-smile 11 -2.09 11 -chernach 11 -cordwell 11 -outside-half 11 -ranbir 11 -peridot 11 -binhai 11 -narcocorridos 11 -skanska 11 -gchat 11 -frappuccinos 11 -skou 11 -levys 11 -lakindu 11 -122million 11 -cuini 11 -bregazzi 11 -earby 11 -160mg 11 -dawsons 11 -trude 11 -albarran 11 -20-19 11 -duked 11 -dashad 11 -roxburghe 11 -llanfair 11 -ligi 11 -mckewen 11 -viscusi 11 -keziah 11 -dismore 11 -klaxon 11 -electrofishing 11 -whirlwinds 11 -tamil-dominated 11 -carpentier 11 -pupi 11 -mwah 11 -child-pornography 11 -gebremariam 11 -jewellry 11 -17-10 11 -78-foot 11 -flightstats.com 11 -ladybourn 11 -donncha 11 -friendfield 11 -nilda 11 -flavius 11 -rooghlawanay 11 -eye-rolls 11 -mackem 11 -civa 11 -steckmann 11 -messily 11 -carparazzi 11 -vesnin 11 -nuanes 11 -platt-lee 11 -ryback 11 -batlle 11 -506th 11 -sauté 11 -haheim 11 -parwaiz 11 -garley 11 -biofeedback 11 -alo 11 -alr 11 -berdymukhamedov 11 -rtr 11 -barbados-born 11 -playbooks 11 -perich 11 -jinhao 11 -professionalize 11 -cantalamessa 11 -gohir 11 -lynzey 11 -impost 11 -sierralta 11 -adayja 11 -chumocracy 11 -100.3 11 -skytrain 11 -dembie 11 -amandeep 11 -buenes 11 -leko 11 -surie 11 -umber 11 -rudenko 11 -endive 11 -pacaccio 11 -6.8-magnitude 11 -mozeliak 11 -road-safety 11 -1640s 11 -yokoyama 11 -bufano 11 -welterweights 11 -roughton 11 -putz 11 -room-mates 11 -firat 11 -match-fixers 11 -chaouchi 11 -atiyah 11 -kuijer 11 -22-week 11 -branscomb 11 -snowdog 11 -durán 11 -hossegor 11 -supercharging 11 -globs 11 -schlachet 11 -minda 11 -bricket 11 -charnwood 11 -abdications 11 -vnesheconombank 11 -mentorships 11 -hfpa 11 -gerstner 11 -creecy 11 -semi-clothed 11 -paolla 11 -chhayra 11 -colonizers 11 -lumineers 11 -denton-beaumont 11 -carion 11 -albertazzi 11 -badsey 11 -bebington 11 -1594 11 -unser 11 -1,185 11 -fallstreak 11 -avenham 11 -tomasello 11 -leftwich 11 -nba.com 11 -hesitations 11 -castucci 11 -12.04 11 -road-building 11 -chortling 11 -clendenin 11 -outdoes 11 -donnybrook 11 -coningham 11 -4/11 11 -frary 11 -sharlana 11 -watchwords 11 -splott 11 -sopko 11 -reallocating 11 -2009-q3 11 -upshur 11 -make-under 11 -autodata 11 -43-foot 11 -myfoxatlanta 11 -acclimatization 11 -50,000-acre 11 -renaghan 11 -gurgle 11 -50-knot 11 -aher 11 -deified 11 -fiddlers 11 -fonzie 11 -beat-down 11 -stop-go 11 -cybersquatting 11 -microblogger 11 -ex-managing 11 -stifler 11 -kirtland 11 -aonach 11 -defeatism 11 -56th-minute 11 -2550 11 -eze 11 -mingaladon 11 -doon 11 -70891 11 -persily 11 -dfl 11 -shuichi 11 -altfield 11 -bleattler 11 -hairbands 11 -muto 11 -braidwood 11 -maymo 11 -guofeng 11 -over-65 11 -dishwater 11 -redwings 11 -hupp 11 -wsil 11 -20,000-a-month 11 -gilleon 11 -no-indictment 11 -manningham 11 -customer-service 11 -w.a. 
11 -cyndee 11 -massachi 11 -pastuszczak 11 -copiously 11 -corbetts 11 -sweetlove 11 -bzp 11 -tuber 11 -keils 11 -portaloo 11 -well-acted 11 -microbiota 11 -steinlauf 11 -chilliwack 11 -golfo 11 -soufriere 11 -sidewalls 11 -calabar 11 -signorelli 11 -zenko 11 -cyclamen 11 -kastrinelis 11 -chilvers 11 -abscam 11 -despairingly 11 -ibrc 11 -cognizance 11 -advincula 11 -carcinomatosis 11 -maltman 11 -shucard 11 -slam-winning 11 -ogunyemi 11 -career-oriented 11 -allatt 11 -dayr 11 -tempos 11 -parented 11 -heavily-populated 11 -lesko 11 -shahbandar 11 -espinoza-perez 11 -mayrhofen 11 -ghantoot 11 -maurin 11 -credico 11 -music-themed 11 -ozan 11 -tauheedul 11 -styputkowska 11 -holes-in-one 11 -al-qirbi 11 -346.5 11 -pinheiro-fernandes 11 -darka 11 -catullo 11 -husaini 11 -morwane 11 -@kp24 11 -1,081 11 -no-fuss 11 -morrick 11 -commercialising 11 -malata 11 -gane 11 -gani 11 -quahog 11 -instagramers 11 -unfeminine 11 -arbuckle 11 -rocklea 11 -sperber 11 -hymnal 11 -kyler 11 -conceives 11 -konnikova 11 -sellar 11 -andro 11 -lablache-combier 11 -clothiers 11 -ds5 11 -whole-of-government 11 -darlin 11 -high-achiever 11 -southridge 11 -four-finger 11 -tendinosis 11 -evertsen-mostert 11 -smirnow 11 -28.50 11 -radoslav 11 -algirdas 11 -korkoya 11 -breath-holding 11 -roecliffe 11 -laramée 11 -cheslyn 11 -slaithwaite 11 -mancias 11 -pinocchios 11 -chinas 11 -mickesh 11 -petchatz 11 -quogue 11 -slow-paced 11 -kelpids 11 -firaxis 11 -purlantov 11 -unscramble 11 -proestos 11 -hanse 11 -larayedh 11 -rolands 11 -eritus 11 -braidford 11 -goheen-rengo 11 -inskeep 11 -next-highest 11 -minister-elect 11 -hhr 11 -near-silence 11 -wells-next-the-sea 11 -herlitz 11 -lezhnev 11 -hate-mongers 11 -tarpley 11 -4.58 11 -4.51 11 -4.52 11 -a-1 11 -pacifico 11 -coys 11 -kormaran 11 -family-related 11 -mcq 11 -plimsoll 11 -kaosam-ang 11 -roxene 11 -forager 11 -dege 11 -1620s 11 -peikar 11 -falder 11 -re-arming 11 -nuttal 11 -mizner 11 -isely 11 -zetz 11 -motion-picture 11 -benicia 11 -transpiring 11 -ld 11 -fro-ing 11 -barcap 11 -fusiform 11 -hata 11 -scribbler 11 -borderland 11 -hookworms 11 -adventuresome 11 -business-to-business 11 -alcaide 11 -breteuil 11 -khoza 11 -55264 11 -knightmare 11 -nigiri 11 -rochedale 11 -kalifa 11 -theismann 11 -semar 11 -sea-faring 11 -zanamivir 11 -haidari 11 -citylink 11 -stachelski 11 -gorongosa 11 -lepatner 11 -182nd 11 -flatform 11 -canfora 11 -ilit 11 -zorrilla 11 -52-second 11 -avdija 11 -lock-out 11 -13-night 11 -81.8 11 -anuradhapura 11 -terrazzo 11 -kapahi 11 -pelligrini 11 -fabinho 11 -requip 11 -carneys 11 -muhajir 11 -u.s.-produced 11 -finondo 11 -hatsune 11 -halkidiki 11 -manco 11 -gascogine 11 -post-white 11 -eley 11 -tomaz 11 -95f 11 -gender-related 11 -standard-examiner 11 -miesha 11 -shafighi 11 -albornoz 11 -enunciation 11 -vso 11 -gofmane 11 -mutley 11 -soay 11 -relabeled 11 -pathum 11 -tusa 11 -pictionary 11 -mitul 11 -eco-house 11 -suddeutsche 11 -compnay 11 -calorie-a-day 11 -merryll 11 -ansett 11 -esbl 11 -panders 11 -salvadorian 11 -rukhsana 11 -time-release 11 -skycaliber 11 -cya 11 -cyd 11 -earth-friendly 11 -bocquet 11 -ta-vuong 11 -urmila 11 -8-hour 11 -insee 11 -stieglitz 11 -bendell 11 -biello 11 -daemon 11 -graumann 11 -pinny 11 -outsmarting 11 -brougham 11 -uwagboe 11 -aidar 11 -140-mile 11 -smudgy 11 -three-yearly 11 -mari-simon 11 -f350 11 -morioka 11 -lacher 11 -stangrecki 11 -12.23 11 -plahares 11 -a-cup 11 -naep 11 -nachminovitch 11 -@tim_hume 11 -funnymen 11 -bini 11 -sebire 11 -cubitat 11 -chollima 11 -sponsons 11 -super-heated 11 -venturer 11 -32mm 11 
-mazzoni 11 -48lbs 11 -re-shot 11 -mcmenemy 11 -coniferous 11 -watergate-era 11 -collicutt 11 -supermom 11 -60-foot-wide 11 -besley 11 -891 11 -raggatt 11 -gusman 11 -britain-bound 11 -haroche 11 -weeing 11 -ostapiak 11 -nppf 11 -nonpublic 11 -chinoiserie 11 -excepting 11 -kemeter 11 -bahimi 11 -consolata 11 -mylvaganam 11 -academical 11 -602,000 11 -megliola 11 -bryanboy 11 -cossey 11 -aerophobia 11 -post-dinner 11 -lightning-caused 11 -fifita 11 -xterra 11 -cantrill 11 -lissani 11 -sady 11 -fruetel 11 -irredeemably 11 -boteas 11 -seventh-generation 11 -bentzen 11 -oaie 11 -freethy-swimm 11 -cedb 11 -seven-over-par 11 -hippocampal 11 -wkbw 11 -nickolaus 11 -warpaint 11 -shorthaired 11 -cq 11 -naing 11 -sanfords 11 -35,000-a-week 11 -buckbeak 11 -scrambler 11 -cell-mate 11 -groenewald 11 -dirnt 11 -tymoschuk 11 -bulacan 11 -fifth-biggest 11 -enza 11 -slrs 11 -focke-wulf 11 -genome-wide 11 -employee-owned 11 -ouro 11 -kilah 11 -fayah 11 -quran-burning 11 -vetulicolians 11 -hildene 11 -rodriguez-gonzalez 11 -dealy 11 -torkildson 11 -50.3 11 -roissy 11 -haycroft 11 -reichle 11 -mouratoglu 11 -herstal 11 -then-president-elect 11 -4trillion 11 -talent-spotted 11 -kampung 11 -raëlian 11 -sahakian 11 -brewitt 11 -equina 11 -darma 11 -heartworm 11 -titantic 11 -calman 11 -glinda 11 -esophagitis 11 -supp 11 -supa 11 -dynamited 11 -nobs 11 -titlis 11 -krum 11 -madgin 11 -posadas 11 -aliko 11 -zakher 11 -mutebi 11 -cissé 11 -hyphen 11 -cryptologist 11 -tankulic 11 -sendings 11 -mcgonagall 11 -543,000 11 -isbister 11 -moravia 11 -100-room 11 -childrenâ 11 -reborns 11 -dimitrova 11 -halfens 11 -juret 11 -kiddell 11 -schuldt 11 -bachleda 11 -nordoff 11 -meazza 11 -ivison 11 -31p 11 -manilal 11 -orange-nassau 11 -pahwa 11 -42.50 11 -peco 11 -ankita 11 -krocha 11 -zerhouni 11 -yasha 11 -gift-wrapping 11 -tregilgas-davey 11 -browser-based 11 -feloniously 11 -sharer 11 -sharee 11 -iba 11 -hillcroft 11 -110km/h 11 -nilo 11 -alaikum 11 -3,470 11 -hbcu 11 -ansarullah 11 -photocopiers 11 -xagent 11 -ledoux 11 -ballyhoo 11 -45-50 11 -representational 11 -dostoyevsky 11 -france-2 11 -4.78 11 -wrightsville 11 -nookie 11 -tholley 11 -rosaleen 11 -extended-release 11 -mem 11 -haskel 11 -self-enhancement 11 -rocketdyne 11 -19-strong 11 -yujun 11 -nabel 11 -whiplr 11 -self-diagnosis 11 -nadzeya 11 -henniker 11 -percussionists 11 -divested 11 -semeru 11 -standover 11 -fehintola 11 -evercore 11 -philatelic 11 -ganar 11 -jaymar 11 -suntanned 11 -snowploughs 11 -sonenclar 11 -avola 11 -evaporative 11 -25mg 11 -heavitree 11 -teresina 11 -full-floor 11 -annotate 11 -guandolo 11 -oppresses 11 -idrissa 11 -2,625 11 -jérôme 11 -salties 11 -defibrillation 11 -decemeber 11 -32694 11 -spatulas 11 -arrestable 11 -reentering 11 -tough-looking 11 -antinous 11 -rivaz 11 -three-alarm 11 -2-stroke 11 -brasky 11 -re-wire 11 -978 11 -97m 11 -valsecchi 11 -recherche 11 -mallards 11 -leiria 11 -quadros 11 -tongzhi 11 -stavris 11 -sene 11 -1663 11 -5.9-magnitude 11 -devotions 11 -medak 11 -signally 11 -hiv-prevention 11 -baly 11 -roughneck 11 -shadley 11 -jiggles 11 -worse-case 11 -chanaka 11 -bear-hugged 11 -stiver 11 -artiga 11 -rafle 11 -grandmother-to-be 11 -rodriguez-martinez 11 -brosso 11 -d.c.-area 11 -coly 11 -emerald-green 11 -ihat 11 -blood-boosting 11 -8:47 11 -siete 11 -benatar 11 -neices 11 -mirtazapine 11 -murto 11 -shaz 11 -adenubi 11 -kariongi 11 -stauskas 11 -ufton 11 -revin 11 -1,814 11 -mlambo 11 -callwood 11 -ezpeleta 11 -sideserf 11 -oversimplification 11 -ala'i 11 -dien 11 -57.4 11 -57.2 11 -eight-nation 11 
-half-conscious 11 -liaises 11 -rado 11 -radi 11 -yo-yoed 11 -underaged 11 -meka 11 -parafoil 11 -12.48 11 -rokstone 11 -penalver 11 -inus 11 -grovell 11 -scandale 11 -beitunya 11 -denimes 11 -xypolitos 11 -egbert 11 -182million 11 -10-kilowatt 11 -4:56 11 -glenning 11 -saqba 11 -vivacity 11 -neroy 11 -osan 11 -mescher 11 -norby 11 -genelle 11 -asmundson 11 -7:09 11 -shoalts 11 -heelers 11 -1520s 11 -clearfield 11 -high-cut 11 -mcweeny 11 -21lbs 11 -brinovec 11 -nistal 11 -kingston-upon-hull 11 -pecunies 11 -chagtai 11 -shaktarsk 11 -owa 11 -tectonically 11 -harbidge 11 -trew 11 -groundbreaker 11 -preselected 11 -kaikai 11 -bar-hopping 11 -she-cave 11 -mounia 11 -sbeineh 11 -tors 11 -harrison-longmate 11 -markiece 11 -15ft-high 11 -salesroom 11 -arxiv 11 -puffed-up 11 -ildiko 11 -abdulin 11 -encrustation 11 -32973 11 -tinitell 11 -1,289 11 -multi-drug 11 -apcoa 11 -microfibre 11 -ndrrmc 11 -courcheval 11 -farshad 11 -whitelam 11 -cornwallis 11 -killington 11 -high-collared 11 -krupsaw 11 -cornhusker 11 -knottenbelt 11 -jannati 11 -ravikumar 11 -4:52 11 -hellraising 11 -berrington 11 -srd 11 -torbjørn 11 -naushad 11 -delcourt 11 -450km 11 -blahniks 11 -0-100mph 11 -syriana 11 -tidman 11 -gretta 11 -schulder 11 -middlewich 11 -marimba 11 -bromides 11 -enochs 11 -5.03 11 -5.02 11 -cheffins 11 -handcrafting 11 -juxtapositions 11 -eshenbaugh 11 -xerxes 11 -patient.co.uk 11 -diviner 11 -52-48 11 -lambertville 11 -symptons 11 -gaw 11 -hattrick 11 -royal-themed 11 -hypernatremia 11 -re-edit 11 -b15 11 -godawa 11 -12-storey 11 -puniet 11 -eastment 11 -vassiljev 11 -mahlasela 11 -prostratin 11 -wannsee 11 -doncheff 11 -pesaturo 11 -karmon 11 -mobilises 11 -27-man 11 -foret 11 -ngobele 11 -mosers 11 -petrol-fuelled 11 -equateur 11 -leoneans 11 -feistiness 11 -totteridge 11 -desch 11 -knock-downs 11 -hertzfeld 11 -stationmaster 11 -self-immolate 11 -pneumothorax 11 -quick-reaction 11 -verticals 11 -famie 11 -wetangula 11 -coucill 11 -biryani 11 -pre-sales 11 -showstoppers 11 -olingos 11 -dujour 11 -presidential-style 11 -stefanoff 11 -cooladdi 11 -geordan 11 -shepparton 11 -sawo 11 -much-praised 11 -helden 11 -68s 11 -irwell 11 -50mins 11 -dunnett 11 -barnsdale-quean 11 -touromov 11 -eddings 11 -reiber 11 -eloisa 11 -tchimpounga 11 -2001-2003 11 -crunchier 11 -oversexed 11 -minack 11 -khosah 11 -tenancingo 11 -2003-2007 11 -1,623 11 -wanaka 11 -hte 11 -mcgrane 11 -bawa 11 -d'shawn 11 -u23 11 -bourse 11 -delenfer 11 -19,200 11 -standard-size 11 -superconductors 11 -mumdex 11 -6,350 11 -bormio 11 -gentilly 11 -ex-pupils 11 -kurland 11 -backbiting 11 -demoro 11 -villaseñor 11 -sempra 11 -svaneti 11 -bandurski 11 -bombmaking 11 -thaxton 11 -ionita 11 -mikkelson 11 -rufus-isaacs 11 -elloy 11 -yusuke 11 -raulinautis 11 -kalanter 11 -faugere 11 -achor 11 -sensate 11 -minestrone 11 -nagubuzi 11 -affectation 11 -stably 11 -meteo 11 -hape 11 -freeform 11 -vernick 11 -ordaining 11 -78,500 11 -belardo 11 -sarcomas 11 -mulcahay 11 -rufino 11 -nkomo 11 -11-a-side 11 -kawakubo 11 -beefburger 11 -huggan 11 -mercure 11 -non-racist 11 -mcelhinny 11 -vine-covered 11 -siri-like 11 -chasson 11 -mesoamerica 11 -molden 11 -yawer 11 -walsoken 11 -well-thought 11 -linklaters 11 -copper-infused 11 -over-taxed 11 -woronyj 11 -f/2 11 -chumps 11 -half-share 11 -donabedian 11 -menkhaus 11 -0.74 11 -appropriately-named 11 -expedia.co.uk 11 -racheli 11 -40-15 11 -doddery 11 -hutten 11 -3,000-meter 11 -darville 11 -7-14 11 -bellmore 11 -metropoulos 11 -redrado 11 -dooms 11 -bosanek 11 -fraud-related 11 -freeloader 11 -hawkmoths 11 
-carothers 11 -plunking 11 -viradouro 11 -7,000-capacity 11 -godkin 11 -bio-ethanol 11 -datena 11 -cyanogen 11 -aljezur 11 -unsparing 11 -18-16 11 -volendam 11 -v.s. 11 -gritti 11 -sukarni 11 -pacuare 11 -gorseinon 11 -chelford 11 -single-lane 11 -putina 11 -pingan 11 -mrj 11 -truffaut 11 -skorjanec 11 -gun-owning 11 -abrsm 11 -agent-in-charge 11 -tirah 11 -53685 11 -woodchuck 11 -leduc 11 -siobhan-marie 11 -2per 11 -torg 11 -1,126 11 -1,122 11 -fms 11 -gsb 11 -marcal 11 -encyclopaedic 11 -asghari 11 -short-handed 11 -matiszak 11 -golembesky 11 -promulgate 11 -mema 11 -redcoats 11 -thurkettle 11 -bettamer 11 -billion-year 11 -jutta 11 -stancombe 11 -urbikaite 11 -toubkal 11 -heydays 11 -zdziarski 11 -sorella 11 -nasseri 11 -gen1 11 -5:11 11 -widebody 11 -28f 11 -argiegrit01 11 -mid-foot 11 -chd8 11 -repeatable 11 -amido 11 -multicolour 11 -claussen 11 -springborg 11 -yiadom-boakye 11 -press-telegram 11 -qiyi 11 -lignum 11 -eyetribe 11 -reatequi 11 -genderless 11 -after-thought 11 -stawell 11 -siringas 11 -whylie 11 -huegills 11 -collierville 11 -hambling 11 -blackfan 11 -grizabella 11 -16-bedroom 11 -marble-floored 11 -giarratana 11 -rines 11 -orientate 11 -carfax 11 -doretta 11 -suartana 11 -augusztinyi 11 -turbinates 11 -zhongrong 11 -man-for-man 11 -zajkowski 11 -sazerac 11 -frediani 11 -asfour 11 -manigault-stallworth 11 -267mph 11 -ramler 11 -polokwane 11 -felsen 11 -maldef 11 -bed-stuy 11 -creosote 11 -stagliano 11 -bequette 11 -europe/africa 11 -baryshnikov 11 -kliuchevskoi 11 -lluis 11 -donnas 11 -i-cable 11 -concidine 11 -framwellgate 11 -tsurenko 11 -zicam 11 -toupees 11 -pocus 11 -33-28 11 -strausfeld 11 -gun-style 11 -multibillionaire 11 -hiom 11 -hackathons 11 -4-h 11 -washaway 11 -hernandez-harrison 11 -samaj 11 -tsaregradskaya 11 -then-state 11 -danzy 11 -hummocks 11 -trenker 11 -#newsnight 11 -mccrone 11 -5.21 11 -5.29 11 -mixbit 11 -neo-maoists 11 -100mg/5ml 11 -plate-sized 11 -zari 11 -kwasniewski 11 -aigner 11 -melanesia 11 -dybowski 11 -bailong 11 -xiaflex 11 -vertesi 11 -fan-owned 11 -382,000 11 -towelling 11 -66201 11 -nenni 11 -lingzhi 11 -stainton 11 -easels 11 -fuk 11 -vulgarities 11 -28-6 11 -forestalling 11 -farriers 11 -maraventano 11 -techboston 11 -anear 11 -45-mph 11 -orzo 11 -diet-busting 11 -spread-eagled 11 -kostyrko 11 -13-16 11 -annasophia 11 -bost 11 -bosi 11 -name-dropped 11 -outmanned 11 -tayo 11 -rajcevic 11 -somerset-based 11 -bugjuggler 11 -möhne 11 -wispa 11 -grid-style 11 -inflammable 11 -akrigg 11 -ayodeji 11 -sanah 11 -statesboro 11 -over-reach 11 -sightsee 11 -langsam 11 -el-shaar 11 -usg 11 -cheik 11 -allfrey 11 -charlie-marie 11 -eww 11 -santonja 11 -graet 11 -sulked 11 -morgado 11 -levington 11 -piacentini 11 -presets 11 -dinero 11 -serim 11 -wilson-ellis 11 -3-1-1 11 -withall 11 -tma-11m 11 -trend-setters 11 -tyshaune 11 -roffer 11 -sabio 11 -girone 11 -endemano 11 -srp 11 -hickie 11 -fast-expanding 11 -ripoff 11 -brotac 11 -#nyfw 11 -glyph 11 -t-zone 11 -rostenkowski 11 -mini-baccarat 11 -reals 11 -stemm 11 -sheering 11 -felshtinsky 11 -am/fm 11 -flache 11 -270-pound 11 -al-wefaq 11 -41611 11 -morphsuits 11 -pertuzumab 11 -al-taifi 11 -rashidi 11 -hinteregger 11 -7digital 11 -9:12 11 -non-avian 11 -polymorphic 11 -bachpan 11 -mandra 11 -koleda 11 -izegbune 11 -linga 11 -stokesley 11 -bollington 11 -cathal 11 -harvell 11 -nswrl 11 -chava 11 -zehra 11 -mandara 11 -whitenicious 11 -telemedicine 11 -tierney-jones 11 -most-shared 11 -swoape 11 -lefthander 11 -blunden 11 -frenzies 11 -melanne 11 -albermarle 11 -norment 11 -tannersville 11 
-joshan 11 -stationer 11 -breath-test 11 -maurizo 11 -camellias 11 -aldom 11 -kutno 11 -rbs/natwest 11 -sandy-related 11 -el-qedra 11 -clanking 11 -mochan 11 -galyardt 11 -tombling 11 -cigg-e 11 -fan-favorite 11 -51per 11 -cruelties 11 -notochord 11 -kulcinski 11 -highly-controversial 11 -unlicenced 11 -sclerae 11 -regenstein 11 -royden 11 -hair-free 11 -ship-breaking 11 -khidir 11 -938 11 -huevos 11 -lamour 11 -zanfardino 11 -vix 11 -lewell 11 -bothell 11 -falkenbergs 11 -hellwig 11 -lolli 11 -transmissibility 11 -saldiva 11 -95.3 11 -sirenomelia 11 -thielen 11 -banes 11 -zoltowski 11 -r&f 11 -ad-deen 11 -scholte 11 -susanville 11 -fallatah 11 -carel 11 -20inch 11 -short-back-and-sides 11 -proletarian 11 -puckers 11 -toormore 11 -behel 11 -boavista 11 -small-print 11 -cambuur 11 -foldit 11 -veith 11 -dionatan 11 -ellingwood 11 -despondently 11 -manasseh 11 -margalit 11 -bauby 11 -weerasethakul 11 -bête 11 -elephant-hunting 11 -overburden 11 -undof 11 -jamam 11 -ralitsa 11 -larner 11 -katu-tv 11 -dinsmoor 11 -third-term 11 -oil-rig 11 -performance-wise 11 -4:14 11 -tanika 11 -brinicles 11 -polius-curran 11 -60419 11 -sarikoudis 11 -passerelle 11 -kirsti 11 -wieweck 11 -biller 11 -dualib 11 -gamman 11 -kaeppeler 11 -hees 11 -c.difficile 11 -battice 11 -cojean 11 -curlin 11 -biggies 11 -megchelsen 11 -petterson 11 -hurlbert 11 -al-brittani 11 -8.47 11 -ardleigh 11 -westenhanger 11 -jaelen 11 -jigaboo 11 -59.2 11 -abor 11 -hilger 11 -sonso 11 -cigarette-style 11 -handspring 11 -altagracia 11 -usenet 11 -zhitomirskiy 11 -kowa 11 -wellham 11 -rémy 11 -kharja 11 -27p 11 -taarnby 11 -gambale 11 -beardshaw 11 -hadeel 11 -jero 11 -gendercide 11 -wptv.com 11 -balluga 11 -rotich 11 -husfelt 11 -103.5 11 -ermin 11 -merivale 11 -crapshoot 11 -20-foot-wide 11 -1:59 11 -1:51 11 -gawain 11 -mudgee 11 -chauke 11 -ostracod 11 -18-0 11 -18-4 11 -draftsman 11 -cve 11 -737-300s 11 -umoja 11 -clubmates 11 -ossur 11 -lezion 11 -tae-hwi 11 -desanto 11 -moeldoko 11 -multistep 11 -diddley 11 -baits 11 -gittings 11 -yenni 11 -teagin 11 -weblog 11 -2mm-thick 11 -zukas 11 -lujiazui 11 -euphemistic 11 -inda 11 -angeles-class 11 -rifc 11 -schäuble 11 -boyum 11 -epsteen 11 -zaps 11 -bactrian 11 -probo 11 -atcv-1 11 -elnett 11 -panipat 11 -arkansas-based 11 -defensive-minded 11 -chye 11 -thorung 11 -deports 11 -sundahl 11 -735,000 11 -salesmanship 11 -liyana 11 -lyda 11 -mikkilineni 11 -mahrus 11 -misplacement 11 -petaid 11 -intial 11 -notonegoro 11 -exorbitantly 11 -pipe-smoking 11 -1431 11 -better-educated 11 -56.3 11 -postsecret 11 -nohl 11 -ill-intentioned 11 -koepke 11 -firstbrook 11 -football-crazed 11 -leperruque 11 -mega-projects 11 -tacticians 11 -sedef 11 -statelet 11 -cathedral-like 11 -self-assemble 11 -informa 11 -tuvia 11 -rosalia 11 -interfacing 11 -relit 11 -kahneman 11 -pdfs 11 -double-standards 11 -5.97 11 -6872 11 -wplg-tv 11 -special-forces 11 -zderad 11 -gilgit-baltistan 11 -54-second 11 -kirchmaier 11 -h5 11 -ancic 11 -vice-chairwoman 11 -1294 11 -nexplanon 11 -13-member 11 -moga 11 -hoffmans 11 -unstuffy 11 -whorehouse 11 -white-walled 11 -chajnantor 11 -benjani 11 -sorok 11 -klooff 11 -___ 11 -subiaco 11 -colposcopy 11 -32-mile 11 -robbinsdale 11 -berre 11 -cherylee 11 -blacknose 11 -mierzejewski 11 -house-backed 11 -expletive-laced 11 -1,800-mile 11 -dubailand 11 -elbit 11 -sez 11 -peniel 11 -embargoed 11 -observatoire 11 -jeong-hyeop 11 -stxvlbfx 11 -cnnu 11 -circumcising 11 -lunney 11 -rastamouse 11 -funkier 11 -pronged 11 -battle-weary 11 -unbutton 11 -enthronement 11 -mccaleb 11 -cameronian 11 
-beloff 11 -willliams 11 -seppia 11 -velasquez-ramirez 11 -oft-cited 11 -chonghua 11 -kargbo 11 -zambezia 11 -wife-swapping 11 -appreciatively 11 -9:36 11 -9:37 11 -nanoscience 11 -2nite 11 -mckeith 11 -dutnall 11 -50-litre 11 -bahgat 11 -cross-training 11 -deportable 11 -watermen 11 -abeledo 11 -tiena 11 -sabmiller 11 -nacreous 11 -schifrin 11 -maer 11 -urinalysis 11 -elmiger 11 -harken 11 -geldman 11 -countersuing 11 -pink-and-white 11 -kon 11 -baryon 11 -cleckley 11 -goyard 11 -salsberg 11 -g-paws 11 -estradiol 11 -andre-browning 11 -zelle 11 -zella 11 -32cm 11 -perepilichnyy 11 -tampa-area 11 -wow-factor 11 -upmann 11 -2:22 11 -fazlalizadeh 11 -urethane 11 -nafisa 11 -satchell 11 -backslide 11 -crecente 11 -gurgled 11 -not-so-cool 11 -self-development 11 -#foreverfaster 11 -middens 11 -fbs 11 -catalyzing 11 -cwgc 11 -peya 11 -rosa-soares 11 -shelterboxes 11 -aulnay 11 -afrikaans-language 11 -bisutti 11 -upcycle 11 -3,117 11 -schlierenzauer 11 -matfen 11 -agbogbloshie 11 -denzinger 11 -far-north 11 -exultation 11 -o'gallagher 11 -precambrian 11 -2005-2008 11 -cosner 11 -dunlavey 11 -blacksell 11 -raleys 11 -eight-ball 11 -hej 11 -japaridze 11 -churyumov-gerasimenko 11 -sensation-seeking 11 -1337 11 -spanish-based 11 -keying 11 -bance 11 -1,131 11 -leazes 11 -ireton 11 -mvs 11 -teether 11 -teaforthree 11 -reinterpreting 11 -nymag.com 11 -kdvr-tv 11 -damasio 11 -fangirls 11 -symon 11 -niehaus 11 -ninth-round 11 -fast-fashion 11 -1,167 11 -ifrc 11 -endometrium 11 -veiling 11 -focha 11 -roman-style 11 -willougby 11 -20-km 11 -calaway 11 -4,409 11 -mokwami 11 -mcstuffins 11 -co-write 11 -strathspey 11 -4:31 11 -bucketful 11 -food-lovers 11 -daniza 11 -episcopou 11 -heelwork 11 -6.34 11 -6.37 11 -croll 11 -fadaghi 11 -1353 11 -walesonline 11 -shearson 11 -remastering 11 -rajai 11 -rochina 11 -drivetime 11 -munish 11 -fishell 11 -ah-64d 11 -sione 11 -tutting 11 -overemphasize 11 -9.22 11 -cilento 11 -dresch 11 -cusimano 11 -wealthinsight 11 -micheli 11 -kare11 11 -103-degree 11 -salekhard 11 -marwell 11 -9mph 11 -teleconferencing 11 -hasna 11 -abie 11 -bopped 11 -aseptic 11 -2,290 11 -2,299 11 -eda 11 -tatarusanu 11 -esmond 11 -interchanged 11 -bouilhaguet 11 -693 11 -kettlebells 11 -rt.com 11 -tatin 11 -aaronovitch 11 -melodious 11 -dashcams 11 -otim 11 -brain-to-brain 11 -vissa 11 -turbotax 11 -raisheem 11 -drip-feed 11 -novitzky 11 -chickasaw 11 -belligerently 11 -fizzes 11 -meekins 11 -btv 11 -minnewanka 11 -btg 11 -thrillist 11 -spell-check 11 -41c 11 -41m 11 -60.6 11 -shareen 11 -ballengée 11 -french-designed 11 -zab 11 -zar 11 -bryon-edmond 11 -sofía 11 -schwinge 11 -france-press 11 -ziada 11 -half-up 11 -28-1 11 -leckhampton 11 -ouzo 11 -hair-dryer 11 -radiowaves 11 -mebazaa 11 -ridd 11 -demoing 11 -aniseed 11 -99.6 11 -p45s 11 -orderliness 11 -humanising 11 -unworldly 11 -crunchies 11 -171.5 11 -hiroo 11 -zaslow 11 -zaida 11 -wisborough 11 -no-compromise 11 -marj 11 -soler-espinosa 11 -nonissue 11 -klohr 11 -non-statutory 11 -rigaut 11 -blandina 11 -scrase 11 -hyponatremia 11 -2007-11 11 -valdivieso 11 -lauzon 11 -carbon-dating 11 -zoet 11 -regally 11 -540k 11 -teynham 11 -noontime 11 -suprisingly 11 -larrinaga 11 -yamashita 11 -middle-schoolers 11 -curvacious 11 -explorative 11 -2007-2012 11 -ultra-maoist 11 -apax 11 -niemann 11 -43st 11 -119-109 11 -cortés 11 -chalkboards 11 -fredman 11 -mcbryan 11 -genscher 11 -vinals 11 -stripteases 11 -subagency 11 -birney 11 -near-surface 11 -ex-journalist 11 -layed 11 -jega 11 -pinkett-smith 11 -uña 11 -marias 11 -3,000-a-month 11 -dhelcy 11 
-one-thousandth 11 -pre-olympic 11 -mollified 11 -insurance.com 11 -ultra-luxurious 11 -yaets 11 -al-andalus 11 -dosomething.org 11 -hopps 11 -lowit 11 -second-born 11 -pittenger 11 -24,600 11 -tailings 11 -70th-minute 11 -double-agent 11 -umps 11 -side-lined 11 -multi-ton 11 -sentch 11 -220-acre 11 -inverurie 11 -levent 11 -agenda-driven 11 -trekkie 11 -ninjutsu 11 -k-state 11 -assisted-suicide 11 -d'errico 11 -un-brokered 11 -fanchini 11 -balamory 11 -ogburn 11 -multifamily 11 -u-form 11 -manful 11 -front-wing 11 -tenochtitlan 11 -neocam 11 -emerson-thomas 11 -morven 11 -eaglets 11 -no8 11 -no9 11 -scythes 11 -nog 11 -red-green 11 -undershorts 11 -myfoxny.com 11 -autoclave 11 -slimmeria 11 -veloz 11 -roll-down 11 -backswing 11 -kristall 11 -40-meter 11 -mufleh 11 -re-accommodated 11 -livesay 11 -by-word 11 -popslate 11 -mickie 11 -whyatt 11 -7-minute 11 -rivoli 11 -88.8 11 -88.1 11 -aqueveque 11 -amanita 11 -tennis-playing 11 -severability 11 -nonsmoking 11 -ilovaisk 11 -fomac 11 -oreb 11 -states-led 11 -jemaine 11 -shirakawa 11 -lowest-scoring 11 -overflew 11 -2-degree 11 -nau 11 -salesforce 11 -piscine 11 -jasinski 11 -perkier 11 -hellishly 11 -1,771 11 -withnall 11 -baiseitov 11 -jaaskinen 11 -gettler 11 -48billion 11 -doland 11 -abstracts 11 -barrafina 11 -a310 11 -stanlow 11 -wanzeler 11 -2:09 11 -2:06 11 -grabill 11 -undresses 11 -weirded 11 -re-running 11 -sinno 11 -chicxulub 11 -arclight 11 -stacia 11 -konoski 11 -potter-themed 11 -sefranka 11 -grippy 11 -pershmerga 11 -1966-76 11 -non-serbs 11 -lepelley 11 -hothi 11 -pleasted 11 -16b 11 -genalguacil 11 -pxe 11 -stokoe 11 -scowen 11 -54637 11 -manya 11 -23-16 11 -nget 11 -lehtinen 11 -a-side 11 -pre-manifesto 11 -modulation 11 -debussy 11 -fais 11 -reitan 11 -zohan 11 -neknominated 11 -theodoracopulos 11 -pasar 11 -4.89 11 -norridge 11 -synthesizing 11 -poudel 11 -katko 11 -mhp 11 -desmangles 11 -cheekiest 11 -matchbox-sized 11 -saines 11 -anti-second 11 -shuhel 11 -neasham 11 -liberalising 11 -fiske-harrison 11 -asa'ib 11 -1,899 11 -supercarrier 11 -5.9-inch 11 -tosa 11 -crouzon 11 -naimi 11 -nature-inspired 11 -recently-opened 11 -luray 11 -impaneled 11 -yisroel 11 -fou 11 -43.50 11 -re-purpose 11 -????? 
11 -vortexes 11 -kalikow 11 -intimidatingly 11 -melchiorri 11 -mothers-in-law 11 -pareja 11 -scram 11 -pittard 11 -kadikoy 11 -gocatch 11 -http://nbcmiami.com 11 -sanllehi 11 -hokies 11 -south-north 11 -grinches 11 -rupf 11 -mortification 11 -coffin-sized 11 -garanti 11 -jannine 11 -baleiwai 11 -pictou 11 -50,000-capacity 11 -tedd 11 -teds 11 -desensitizing 11 -hard-sided 11 -fifth-best 11 -iselin 11 -catchings 11 -chowdhry 11 -hong-kong 11 -0.4-inches 11 -enersen 11 -8.04 11 -annaliese 11 -ben-nejma 11 -fobb 11 -pre-schooler 11 -year-by-year 11 -primary-age 11 -resumé 11 -footings 11 -1560 11 -neediness 11 -belgian-moroccan 11 -#occupywallstreet 11 -blackmoor 11 -air-cooled 11 -oskarshamn 11 -slewed 11 -man-sized 11 -303,000 11 -joong-jin 11 -baseel 11 -gevinson 11 -63925 11 -vojislav 11 -cotman 11 -demoff 11 -mayam 11 -acabbo 11 -10-months 11 -wladow 11 -mitani 11 -irinn 11 -nakivale 11 -remotely-controlled 11 -langlands 11 -eu-imf 11 -rajapakse 11 -vantz 11 -v-formation 11 -91.6 11 -91.7 11 -gaidamachuk 11 -snozone 11 -33,000-a-year 11 -tra 11 -trw 11 -reysol 11 -futher 11 -meatiest 11 -speu 11 -syrad 11 -saidu 11 -cruncher 11 -bill-signing 11 -chinese-speaking 11 -badajoz 11 -maxtone-graham 11 -co-judge 11 -thipthorpe 11 -a-grades 11 -libeled 11 -kustodiev 11 -sukur 11 -gaffey 11 -29billion 11 -intermodal 11 -tadley 11 -gunvor 11 -caking 11 -olujosun 11 -sharpeville 11 -chelicerates 11 -kohat 11 -phanom 11 -headsmart 11 -interchanging 11 -chunlin 11 -capris 11 -hohenschönhausen 11 -willebrand 11 -potemkin 11 -non-catholic 11 -1,043 11 -gas-electric 11 -disko 11 -54.4 11 -begonia 11 -bester 11 -hensby 11 -trews 11 -raskar 11 -gladesville 11 -centreville 11 -muszynski 11 -hueypoxtla 11 -alphington 11 -nameberry 11 -sumburgh 11 -anabella 11 -50th-minute 11 -duprevil 11 -2wd 11 -436,000 11 -roames 11 -phelisanong 11 -zelinske 11 -beseiged 11 -al-khor 11 -calestous 11 -kutsher 11 -molestations 11 -deakins 11 -stradishall 11 -variola 11 -cosmopolitanism 11 -70-odd 11 -scandanavia 11 -obverse 11 -congregates 11 -pintada 11 -tarragon 11 -n.j 11 -fontcuberta 11 -pedagogy 11 -ehec 11 -eredivise 11 -pyongan 11 -outruns 11 -novitskiy 11 -cybercaliphate 11 -parlophone 11 -citizen-times 11 -aguinaldo 11 -rainbow-hued 11 -maegan 11 -eliaqium 11 -molseed 11 -yulee 11 -naiveté 11 -mcelhill 11 -lanzas 11 -122mph 11 -standpipes 11 -rowde 11 -mini-museum 11 -cross-dressed 11 -torchio 11 -brockhouse 11 -skyrise 11 -technologically-advanced 11 -reloads 11 -200-seat 11 -aphorism 11 -dognappers 11 -eyedrops 11 -kisook 11 -25-cent 11 -kwtx 11 -first-minute 11 -castmembers 11 -bioreactors 11 -shambrey 11 -salie 11 -pruhealth 11 -fertilising 11 -ragin 11 -30-point 11 -joselin 11 -mabhida 11 -85.8 11 -murk 11 -1975-1979 11 -shila 11 -anti-insurgent 11 -narrowness 11 -al-khalifah 11 -ellizeah 11 -warmed-up 11 -schwarzman 11 -cotner 11 -lyrica 11 -amartey 11 -superhighways 11 -23,300 11 -cobarubies 11 -backshall 11 -baxley 11 -17-week 11 -ge222 11 -inhalant 11 -green-screen 11 -slaight 11 -cockrel 11 -udm 11 -mccuiston 11 -i.s. 
11 -pronovost 11 -2,680 11 -susitna 11 -wenke 11 -top-paid 11 -radiosurgery 11 -short-beaked 11 -stratfield 11 -kistner 11 -bejar 11 -bejan 11 -dragonhead 11 -bessler 11 -panhandlers 11 -farmaner 11 -libertarian-minded 11 -shoko 11 -ponthir 11 -coast-stores 11 -ahlu 11 -40-somethings 11 -bojinov 11 -zeitels 11 -nebelung 11 -ferric 11 -lupak 11 -petcare 11 -mirje 11 -hardcastle 11 -shellis 11 -64-63 11 -advani 11 -cradle-to-grave 11 -scipio 11 -67280 11 -a458 11 -gollan 11 -jakym 11 -gresh 11 -gress 11 -barnardos 11 -giveforward 11 -barua 11 -quilligan 11 -alloway 11 -hymen 11 -vga 11 -honi 11 -sanaa-based 11 -fsl 11 -yaacoub 11 -humorists 11 -enviromission 11 -ali-walsh 11 -hauerslev 11 -off-breaks 11 -coring 11 -waynesburg 11 -administrate 11 -pattni 11 -nucleotides 11 -fdj.fr 11 -workcentre 11 -ditcheat 11 -off-script 11 -73.1 11 -73.9 11 -mckiddie 11 -stiches 11 -beitz 11 -disney-pixar 11 -56,000-capacity 11 -dieteman 11 -namatjira 11 -menagh 11 -31-mile 11 -i-400 11 -gadkari 11 -kenadee 11 -newsmaker 11 -dimmitt 11 -sorthia 11 -charanjeet 11 -56-day 11 -karaloukas 11 -weigelt 11 -rodríguez-vila 11 -ergot 11 -theodis 11 -pot-shot 11 -wardenclyffe 11 -chip-in 11 -paynesville 11 -scalability 11 -out-voted 11 -eighty-nine 11 -abess 11 -22-room 11 -meen 11 -labros 11 -birdland 11 -ptolemaic 11 -demaurice 11 -fcuk 11 -straggling 11 -police-issued 11 -chaffinches 11 -sheach 11 -rosati 11 -ayeshea 11 -20f 11 -billon 11 -34,500 11 -spyglass 11 -preheated 11 -lythronax 11 -miltary 11 -tomer 11 -donech 11 -1315 11 -semitrailers 11 -2-ranked 11 -2066 11 -roboworld 11 -pehl 11 -fuerza 11 -vampire-like 11 -nanocellulose 11 -weckwerth 11 -trena 11 -ilyasah 11 -thyssen-bornemisza 11 -sedille 11 -unadoptable 11 -shd 11 -hackable 11 -mangers 11 -drm-free 11 -coplen 11 -gfz 11 -fine-tooth 11 -rymell 11 -foodbabe.com 11 -rainger 11 -jamen 11 -yelchin 11 -georgiev 11 -hornberger 11 -zinzan 11 -regime-held 11 -heacock 11 -papafilippou 11 -hiroaki 11 -bolduc 11 -mueang 11 -wazzock 11 -seppelt 11 -kemlin 11 -lithman 11 -semray 11 -navin 11 -explainable 11 -490ft 11 -steabler 11 -1:33 11 -recasts 11 -varietals 11 -noncritical 11 -bydureon 11 -350-acre 11 -xianmei 11 -funnies 11 -winberg 11 -12-bed 11 -fejes 11 -clay-like 11 -amilcar 11 -hajisa 11 -al-ja 11 -metoclopramide 11 -dredges 11 -anti-semetic 11 -manzoni 11 -media-friendly 11 -fishfinger 11 -michon 11 -anti-capitalists 11 -fusco 11 -1,463 11 -kadiabioko 11 -balke 11 -puchalska 11 -haft-e-tir 11 -overbalanced 11 -tokushige 11 -coveritlive 11 -treasuring 11 -abdul-amir 11 -eyestrain 11 -peninsulas 11 -a-changing 11 -unhooking 11 -naumkin 11 -yeongam 11 -staffroom 11 -abdulmuttalab 11 -drea 11 -xui 11 -drager 11 -moneycorp 11 -www 11 -aracataca 11 -heisley 11 -who-tv 11 -82p 11 -824 11 -torley 11 -oxfords 11 -teitoi 11 -vitalis 11 -f*cking 11 -grosz 11 -minegishi 11 -tax-friendly 11 -randheli 11 -chaibou 11 -everdene 11 -fully-working 11 -stucco-fronted 11 -see/experience 11 -ogunrinde 11 -bow-ties 11 -dieumerci 11 -anhang 11 -icij 11 -joycarpet 11 -al-jouz 11 -dumpsite 11 -cesaris 11 -overtone 11 -kl-vs 11 -now-viral 11 -prenups 11 -castay 11 -german-led 11 -henie 11 -tear-filled 11 -saracini 11 -goals-against 11 -1271 11 -bialiatski 11 -ciofi 11 -obama-style 11 -riduan 11 -ultra-green 11 -back-tracking 11 -diamond-walker 11 -sozzled 11 -kalavrvta 11 -fellatio 11 -21-minute 11 -government-related 11 -okoya 11 -temar 11 -blasphemer 11 -watson-munro 11 -goair 11 -al-jutaili 11 -hofesh 11 -borsuk 11 -changaris 11 -uruapan 11 -hickam 11 -stoli 11 -kawasme 11 
-ivc 11 -keisuki 11 -brossart 11 -sliz 11 -phyllida 11 -cooppens 11 -argyria 11 -chateaus 11 -belal 11 -whitlow 11 -purkins 11 -impinged 11 -precuneus 11 -opcapita 11 -jambart 11 -cheetah-cub 11 -work-from-home 11 -wt 11 -frunder 11 -76998 11 -croaks 11 -e-numbers 11 -kelesova 11 -roundtree 11 -cananea 11 -disbursing 11 -leptomeningeal 11 -hero4 11 -bathhouses 11 -nabih 11 -mendhar 11 -malki 11 -uglie 11 -moranbong 11 -28-bedroom 11 -bolash 11 -stegman 11 -al-mulla 11 -repudiates 11 -soooooo 11 -al-ahli 11 -kfox14 11 -marlies 11 -61.3 11 -tuberose 11 -funnel-shaped 11 -buccleuch 11 -al-dāghistāni 11 -willl 11 -wille 11 -bahloul 11 -berridge 11 -erfoud 11 -nonresident 11 -bont 11 -hayfield 11 -kyriakos 11 -scandanavian 11 -byaruhanga 11 -augments 11 -36cm 11 -talb 11 -tesori 11 -1736 11 -twerp 11 -emili 11 -carthaginian 11 -aydar 11 -jaret 11 -marinol 11 -1619 11 -shoe-shaped 11 -2:41 11 -preston-werner 11 -kingfish 11 -xuelin 11 -power-assisted 11 -belgium-born 11 -ajdar 11 -105mph 11 -instructables 11 -archosaurs 11 -shamika 11 -miscalculating 11 -stacee 11 -tskj 11 -al-zindani 11 -ever-higher 11 -farve 11 -joique 11 -piave 11 -jigs 11 -ferron 11 -shafqat 11 -sellotaped 11 -crossbred 11 -longlevens 11 -phileas 11 -ozama 11 -22-years 11 -cordsen 11 -hatvany 11 -freedive 11 -yesterdays 11 -heyes 11 -lariah 11 -deceives 11 -divination 11 -demilitarizing 11 -haeflinger 11 -jhelum 11 -hols 11 -hox 11 -rummages 11 -ciolos 11 -pickling 11 -sison 11 -hindustani 11 -1,123 11 -pozzuoli 11 -6-ounce 11 -kalinoski 11 -uveitis 11 -kashin 11 -nbc7 11 -chamblin 11 -bourgogne 11 -vocalisation 11 -re-cut 11 -aribert 11 -amando 11 -bhushan 11 -jackfield 11 -gold-tooled 11 -ugine 11 -ma60 11 -trisler 11 -56077 11 -wsyx 11 -lortab 11 -buco 11 -113th-minute 11 -fitzy 11 -schrani 11 -wheel-y 11 -laubhan 11 -yanmar 11 -protoplanets 11 -230-pound 11 -perspire 11 -2-litre 11 -figueiras 11 -scott-lee 11 -udaltsov 11 -tandon 11 -brodgar 11 -cheesiest 11 -withernsea 11 -zurlo 11 -paradises 11 -sifu 11 -beti 11 -desultory 11 -ensheathing 11 -quiffed 11 -press-herald 11 -kamdesh 11 -89.8 11 -breatharian 11 -bolingbroke 11 -pavers 11 -reattaching 11 -bonifacio 11 -al-wakrah 11 -ribbentrop 11 -scroguard 11 -good-will 11 -eyeworks 11 -vizio 11 -70st 11 -harmonized 11 -tri-county 11 -schwarm 11 -hardtalk 11 -ngts 11 -incandela 11 -durmaz 11 -angeleno 11 -voting-age 11 -lauffer 11 -fakhara 11 -mcdermitt 11 -tripadvisor.com 11 -gaykamangu 11 -moini 11 -44lbs 11 -highcross 11 -motorcare 11 -epilator 11 -indodrill 11 -ravilious 11 -fardelin 11 -expound 11 -lancastrians 11 -hour-by-hour 11 -uruguyan 11 -walvis 11 -164th 11 -agression 11 -duplantis 11 -tvr 11 -tvi 11 -touchi-peters 11 -gaudium 11 -time-zone 11 -gerchick 11 -wafd 11 -atheistic 11 -five-ton 11 -buerger 11 -baillargeon 11 -ii-birkenau 11 -empathising 11 -13mph 11 -kolber 11 -oruzgan 11 -sniffy 11 -ifco 11 -kiddi 11 -weetjens 11 -delahoy 11 -youporn 11 -tarlap 11 -scrubber 11 -mabley 11 -najm 11 -sexter 11 -kxas 11 -sakhpal 11 -andreone 11 -26-year-olds 11 -m1a1 11 -memnon 11 -35-strong 11 -324million 11 -qb2 11 -whdh-tv 11 -stoet 11 -wodonga 11 -taliban-affiliated 11 -genx 11 -heho 11 -1265 11 -lizasuain 11 -kythera 11 -millibars 11 -swiss-italian 11 -ashfeldt 11 -bhajis 11 -spacefaring 11 -nightshift 11 -highly-fancied 11 -lapper 11 -cavanda 11 -g.w. 
11 -frenkiel 11 -eight-car 11 -ticagrelor 11 -benat 11 -crickhowell 11 -8per 11 -gemar 11 -xuzhou 11 -jeong-ho 11 -dc3 11 -justicia 11 -capetown 11 -teletext 11 -al-aytan 11 -dahuk 11 -bowman-cryer 11 -sbinet 11 -muzquiz 11 -maranatha 11 -burruto 11 -courier-post 11 -mooi 11 -myrosinase 11 -mötley 11 -anti-constitutional 11 -wrigleyville 11 -neveah 11 -562,000 11 -rebe 11 -southhampton 11 -hongbo 11 -erspamer 11 -bulaoro 11 -sarubbi 11 -kaun 11 -leonce 11 -california-san 11 -smp 11 -dinyal 11 -yandell 11 -gismondi 11 -43,875 11 -@triplej 11 -reponsible 11 -upper-stage 11 -cordiality 11 -165lb 11 -demesa 11 -richards-hill 11 -bujega 11 -katainen 11 -schuurman 11 -briceno 11 -6,360 11 -shytles 11 -zits 11 -10.17 11 -hellp 11 -itime 11 -sobaihi 11 -fekir 11 -bira 11 -deejay 11 -keyme 11 -ben-hur 11 -talica 11 -democratising 11 -porkchop 11 -havlicek 11 -1982-83 11 -laffey 11 -spinsters 11 -kerrang 11 -movie-like 11 -slavery-like 11 -neo-liberal 11 -104billion 11 -ghillies 11 -asllani 11 -allemagne 11 -gha 11 -single-engined 11 -elsayed 11 -broiler 11 -undermanned 11 -kulunga 11 -clubhouses 11 -topco 11 -noska 11 -nhat 11 -homosassa 11 -asaduzzaman 11 -jurien 11 -grandfield 11 -cravener 11 -ebbinghaus 11 -konterman 11 -#blackbrunchnyc 11 -9:04 11 -vunisa 11 -jagerbomb 11 -albertsons 11 -stagnates 11 -honeycomb-like 11 -vezo 11 -80k 11 -naccache 11 -rudnevs 11 -nbcla 11 -mcglade 11 -stagnone 11 -tiddler 11 -mikal 11 -livvy 11 -scoutmasters 11 -comet-like 11 -usurper 11 -cente 11 -asheboro 11 -equestria 11 -athenians 11 -shambling 11 -wildernesses 11 -koco.com 11 -nehalem 11 -mcinery 11 -coquillette 11 -nakib 11 -nympheas 11 -73kg 11 -parnia 11 -3min 11 -org.uk 11 -mayger 11 -0.49 11 -czechoslovak 11 -lalitpur 11 -keratoconus 11 -ninety-eight 11 -absentmindedly 11 -koban 11 -eggbuckland 11 -ondimba 11 -34-acre 11 -presa 11 -zinni 11 -arbabzadeh 11 -karoto 11 -commiserating 11 -laidley 11 -hymas 11 -pennsylvania-born 11 -skydeck 11 -norwich-based 11 -nbpa 11 -swallowers 11 -scotten 11 -20-29 11 -20-23 11 -einsatzgruppen 11 -government-affiliated 11 -sachse 11 -liberté 11 -113m 11 -foot-washing 11 -disney-inspired 11 -macmichael 11 -augustinian 11 -humidities 11 -esri 11 -geoamey 11 -m.k. 
[... diff continues: several thousand deleted entries from a word-frequency vocabulary file, one `-<token> <count>` deletion line per token in the original diff (flattened here during extraction); the counts descend from 11 to 10 across this span ...]
-orthostatic 10 -rockness 10 -norlin 10 -moz 10 -defanged 10 -chestful 10 -ministrations 10 -radboud 10 -hillwood 10 -first 10 -#tacklekeown 10 -fram 10 -lucchino 10 -aerolab 10 -meriel 10 -ahri 10 -slogs 10 -punley 10 -nofx 10 -13-18 10 -prisoners-of-war 10 -curvan 10 -healthy-living 10 -mis-sell 10 -molecatcher 10 -playschool 10 -ytterbium 10 -magyar 10 -tononi 10 -habilaj 10 -goondiwindi 10 -nashef 10 -non-national 10 -imasuen 10 -vaguest 10 -grandfatherhood 10 -jedburgh 10 -tylea 10 -geophagy 10 -animal-friendly 10 -phanthavongsa 10 -hackenberg 10 -ianuale 10 -mra4 10 -cravers 10 -sailes 10 -tadamon 10 -kvia 10 -put-upon 10 -f4j 10 -slicers 10 -flaxen 10 -bias-motivated 10 -koger 10 -mumbaikars 10 -rat-a-tat 10 -guadalcanal 10 -brettell 10 -maschietto 10 -manaton 10 -mackozdi 10 -ardel 10 -bosozoku 10 -fedigan 10 -igcse 10 -otaku 10 -moggridge 10 -ksnw 10 -ramillies 10 -orlebar 10 -biggarenn 10 -overlayed 10 -cdpp 10 -sjögren 10 -re-cast 10 -sgp 10 -parringtoni 10 -mobile-device 10 -golbourne 10 -itumeleng 10 -league-leaders 10 -badly-burned 10 -vigneau 10 -1,604 10 -strip-club 10 -vowell 10 -baud 10 -yemer 10 -macchione 10 -13mm 10 -placebo-controlled 10 -sackett-hutcheson 10 -connerton 10 -surveilance 10 -tanasio 10 -culpably 10 -brindabella 10 -church-affiliated 10 -133.7 10 -widely-reported 10 -regolith 10 -ncw 10 -nch 10 -braybrooke 10 -blachman 10 -ponorovskaya 10 -50-an-hour 10 -516.55 10 -19-3 10 -districting 10 -hippolite 10 -140th 10 -wackier 10 -koning 10 -kligman 10 -chavo 10 -foriel-destezet 10 -fluffier 10 -boff 10 -trentino 10 -portesham 10 -berriew 10 -loweth 10 -claremore 10 -foreland 10 -circleville 10 -130-metric-ton-configuration 10 -114-114 10 -kutsan 10 -pepito 10 -bodycons 10 -lucrecia 10 -kosmos 10 -sharmarke 10 -emrys 10 -uncleared 10 -mulvany 10 -kepler-21b 10 -segelson 10 -butt-zeeshan 10 -émigré 10 -illicitencounters.com 10 -1461 10 -wendesday 10 -orangey 10 -thebump.com 10 -colarusso 10 -ever-shifting 10 -33-yard 10 -husson 10 -lidegaard 10 -seleção 10 -slowboat 10 -anikow 10 -jelinek 10 -mildenstein 10 -kohala 10 -wral-tv 10 -rond 10 -jackendoff 10 -bundick 10 -2004-2008 10 -tullis 10 -menulog 10 -ourself 10 -fadhil 10 -auterac 10 -set-to 10 -desautels 10 -bikov 10 -seba 10 -wingsuiter 10 -universität 10 -truckle 10 -rakyat 10 -exo-skeleton 10 -kanawha-charleston 10 -hogstedt 10 -glenroy 10 -futbolmania 10 -kesey 10 -hillocks 10 -naugatuck 10 -unpakt 10 -1378 10 -theodorus 10 -31-year-olds 10 -sipan 10 -autellus 10 -to-the-point 10 -bilsons 10 -gursky-doyen 10 -kuttab 10 -pinebrook 10 -chidyausiku 10 -andreadis 10 -meadham 10 -kaupo 10 -gidus 10 -.17 10 -155lbs 10 -windproof 10 -mackowiak 10 -tunepics 10 -asaf 10 -busuttil 10 -q13fox.com 10 -besombes 10 -mucosa 10 -ghalley 10 -soliola 10 -rakower 10 -rathdowney 10 -re-discover 10 -harton 10 -1513 10 -instagram-style 10 -ogwen 10 -hardhat 10 -montevallo 10 -sa'er 10 -nohad 10 -pingree 10 -overd 10 -deker 10 -tongue-eating 10 -brontes 10 -mcmansion 10 -klemetti 10 -nurnburg 10 -pouzin 10 -american-british 10 -ryk 10 -vbacs 10 -m57 10 -m5s 10 -s'arenal 10 -screw-top 10 -ibera 10 -bench-press 10 -isis-inspired 10 -taq 10 -altstatt 10 -taa 10 -gérald 10 -bittorf 10 -haymaker 10 -chlaniow 10 -misappropriate 10 -rastafarianism 10 -nenshi 10 -heer 10 -heen 10 -tawang 10 -weaponizing 10 -darold 10 -7:49 10 -7:48 10 -7:43 10 -lodh 10 -sacculina 10 -ortak 10 -popland 10 -rojana 10 -lacerating 10 -100-percent 10 -croxley 10 -wchs 10 -kenniff 10 -now-demolished 10 -eaddy 10 -hosny 10 -chooky 10 -diatomic 10 -kefir 10 
-levo 10 -leve 10 -31,600 10 -condescendingly 10 -smart-casual 10 -bakre 10 -loadvideowithkey 10 -holocaust-era 10 -lincicome 10 -quicktrim 10 -austerfield 10 -blash 10 -heli-pad 10 -9/11-like 10 -beqiri 10 -59.6 10 -abos 10 -pucallpa 10 -nuit 10 -shadravan 10 -tramlines 10 -ebv 10 -thrace 10 -deadheads 10 -enflamed 10 -multi-city 10 -krish-veeramany 10 -lengle 10 -well-produced 10 -kirkby-in-ashfield 10 -b-grade 10 -mayet 10 -kerres 10 -ds-lite 10 -dilbert 10 -wsav 10 -sara-jayne 10 -crypto-currency 10 -uh-huh 10 -reshef 10 -basest 10 -makurin 10 -1:56 10 -110.7 10 -avdeyev 10 -18-9 10 -pre-financial 10 -phar 10 -megalomaniacal 10 -13/10 10 -rodway 10 -brt 10 -brl 10 -brb 10 -re-interviewing 10 -first-aider 10 -dorff 10 -handyside 10 -caliper 10 -yassan 10 -archambault 10 -torvik 10 -sanctimony 10 -kornacki 10 -rabble-rousers 10 -106.5 10 -kupwara 10 -tin-foil 10 -yolken 10 -amnesiac 10 -elnour 10 -ooops 10 -newly-constructed 10 -anklebone 10 -lccs 10 -fossilisation 10 -dosimeter 10 -kwgn 10 -takao 10 -levie 10 -mikhaylov 10 -jis 10 -legendarily 10 -phonepayplus 10 -dcpcu 10 -siggins 10 -naiyer 10 -25hours 10 -hutchful 10 -cadieux 10 -lashio 10 -aidi 10 -uscg 10 -homogenized 10 -champagne-coloured 10 -lesch 10 -incorrectness 10 -avella 10 -behner 10 -boursicot 10 -klutz 10 -977-count 10 -almanor 10 -marysa 10 -lamberski 10 -adeeba 10 -anthropoid 10 -'82 10 -galatico 10 -trustingly 10 -fixations 10 -abdominoplasty 10 -solow 10 -webbers 10 -moustakas 10 -mahalla 10 -dietl 10 -caplina 10 -anti-press 10 -alessandrini 10 -copay 10 -tzekov 10 -dado 10 -petaling 10 -setsuko 10 -constanza 10 -pignotti 10 -garban 10 -scott-rogers 10 -hefazat 10 -steeve 10 -lamina 10 -dragtimes 10 -minamisanriku 10 -rallo 10 -city/highway 10 -15-course 10 -6,000-acre 10 -zebov 10 -804/438 10 -humours 10 -asmallworld 10 -donka 10 -kershner 10 -sankoh 10 -delran 10 -al-suleiman 10 -shontz 10 -bucha 10 -dynasty-style 10 -cupit 10 -2010â 10 -ariano 10 -sawtooth 10 -urko 10 -peder 10 -turnell 10 -hand-me-ups 10 -3-1/2 10 -weidong 10 -cheesecloth 10 -28-inch 10 -canlla 10 -ozolins 10 -categorising 10 -severally 10 -non-performing 10 -mizanskey 10 -high-rate 10 -1,639 10 -seu 10 -se1 10 -schmidt-jones 10 -34per 10 -revile 10 -bessarabia 10 -pattens 10 -1981-82 10 -too-tight 10 -girgis 10 -zitzewitz 10 -festoon 10 -hastily-arranged 10 -melvins 10 -sky1 10 -zambito 10 -mzamane 10 -patraeus 10 -of-the-moment 10 -gotingco 10 -2,425 10 -facetune 10 -better-quality 10 -lactose-intolerant 10 -gangotri 10 -ciotti 10 -liberal-national 10 -vadar 10 -cocktail-making 10 -nmn 10 -front-door 10 -salen 10 -48cm 10 -unboxers 10 -doubtlessly 10 -shorabak 10 -greeuw 10 -passanger 10 -trout-fishing 10 -vice-patron 10 -talca 10 -turds 10 -waxkirsh 10 -anthonie 10 -warble 10 -greubel 10 -constitutionalism 10 -zele 10 -quinzio 10 -arbib 10 -neas 10 -i7 10 -38per 10 -venning 10 -vg-10 10 -aryanna 10 -grandjean 10 -gyri 10 -somic 10 -hesitance 10 -arequipa 10 -stemware 10 -molinker 10 -amalur 10 -kol 10 -kow 10 -,22 10 -ruardean 10 -breashears 10 -soetoro-ng 10 -khozissova 10 -leiba 10 -sandulli 10 -sun-seeking 10 -mälaren 10 -560-mile 10 -mytilene 10 -hill-top 10 -crime-solving 10 -semin 10 -perivale 10 -rockefellers 10 -skeates 10 -novermber 10 -l'amour 10 -perthes 10 -krosnick 10 -eisenbise 10 -antechinus 10 -delmer 10 -vicary-smith 10 -giacomelli 10 -sharpless 10 -hsct 10 -cartner 10 -beetroots 10 -tasmin 10 -giuffre 10 -harby 10 -apollinare 10 -delta-v 10 -boehringer 10 -tonally 10 -jacinda 10 -piggot 10 -tunde 10 -quenching 10 -moogan 10 
-jesumary 10 -goleniowska 10 -luper 10 -jessee 10 -1,309 10 -taelor 10 -rotenberry 10 -persecutor 10 -73rd-minute 10 -kufahl 10 -re-affirmed 10 -raindance 10 -fout 10 -sho-rack 10 -jali 10 -azzouz 10 -28.93 10 -holdcroft 10 -24hr 10 -dipuccio 10 -siebel 10 -kettani 10 -dairying 10 -grantees 10 -self-fulfillment 10 -regrettability 10 -meat-packing 10 -4ft-high 10 -mdikane 10 -sanath 10 -ben-tor 10 -westrup 10 -alcota 10 -machlin 10 -tasmanian-born 10 -petrocelli 10 -14-room 10 -psychiatrically 10 -solidarite 10 -parabola 10 -freddo 10 -spinningfields 10 -uncorking 10 -futilely 10 -71.8 10 -artifical 10 -sebastianelli 10 -mvd 10 -dazeem 10 -longtail 10 -muntadhir 10 -phyliss 10 -green-wood 10 -kuhar 10 -pericardium 10 -fetterman 10 -1,873 10 -ivy-league 10 -kimberlee 10 -flavourful 10 -norvell 10 -kaewkumnerd 10 -trouville 10 -vgtrk 10 -˜the 10 -fuzzier 10 -shagged 10 -leshin 10 -picerno 10 -vodden 10 -tiru 10 -chinstrap 10 -moulins 10 -importations 10 -3,375 10 -streich-kest 10 -harger 10 -mulgrave 10 -c.e. 10 -thornfield 10 -rouseff 10 -eker 10 -dolphin-like 10 -volunteer-run 10 -fraîche 10 -helvenston-wettengel 10 -ribeira 10 -melbourn 10 -dilks 10 -4:33 10 -dry-erase 10 -brokofsky 10 -13th-minute 10 -lovecraft 10 -almondbury 10 -maundrell-merritt 10 -triangulated 10 -olsi 10 -lybrido 10 -emana 10 -harum 10 -spittey 10 -promulgating 10 -kotzin 10 -rajat 10 -dukureh 10 -lopresti 10 -still-missing 10 -ghazaryan 10 -ebbesmeyer 10 -4,500-acre 10 -sinnix 10 -depastino 10 -9.21 10 -9.24 10 -#jadapose 10 -.341 10 -exigent 10 -pedi 10 -11-season 10 -etches 10 -curtseys 10 -rimvydas 10 -borotra 10 -wendie 10 -avrohom 10 -pof 10 -4151 10 -desdunes 10 -tanta 10 -sex-based 10 -untag 10 -bizilj 10 -wachs 10 -babykins 10 -oyl 10 -400ml 10 -sölden 10 -razor-toothed 10 -astringent 10 -taste-tested 10 -genuineness 10 -down-on-their-luck 10 -mcmonigal 10 -532,000 10 -ballyfermot 10 -xlii 10 -canynge 10 -blurton 10 -whirlow 10 -harmin 10 -fiefdoms 10 -murlough 10 -imhoff 10 -latto 10 -superiore 10 -testbed 10 -italian-flagged 10 -yobbery 10 -demystifying 10 -meziche 10 -1984-85 10 -laabs 10 -non-speaking 10 -ayza 10 -mantoux 10 -adulterating 10 -insidiously 10 -belang 10 -seven-seat 10 -off-form 10 -lead-off 10 -krabbe 10 -bulik 10 -clearlake 10 -super-slimmer 10 -12th-placed 10 -megson 10 -lensman 10 -debaucherous 10 -jurkiewicz 10 -half-french 10 -neophytou 10 -vilani 10 -biphenyls 10 -shabeeha 10 -klimke 10 -aitzaz 10 -obfuscating 10 -samed 10 -fitbug 10 -four-wicket 10 -qawalish 10 -naratto 10 -coathanger 10 -smiliest 10 -danceable 10 -epitomising 10 -amanullah 10 -kaupas 10 -divinyls 10 -msia 10 -under-insured 10 -castromata 10 -#mufc 10 -warnell 10 -sirait 10 -kizza 10 -ardeno 10 -pedrad 10 -indisposed 10 -overstretching 10 -ludmilla 10 -canarelli 10 -nycb 10 -foreskins 10 -wxmi 10 -lyngstad 10 -llangennith 10 -taedong 10 -kwara 10 -carolina-chapel 10 -parcell 10 -switchback 10 -cyclonic 10 -eurogamer 10 -neidpath 10 -tewari 10 -863,000 10 -senrab 10 -blanding 10 -treadaway 10 -ionisation 10 -niijima 10 -62nd-minute 10 -1413 10 -sennelager 10 -1,028 10 -shewring 10 -glitzier 10 -lfo 10 -lfs 10 -glutz 10 -wjhl 10 -janel 10 -samore 10 -raybole 10 -worob 10 -7inches 10 -gatz 10 -stamp-sized 10 -top-eight 10 -sippel 10 -gizela 10 -saltman 10 -patsches 10 -walsenburg 10 -vinall 10 -berocca 10 -anti-hillary 10 -ipic 10 -squirreling 10 -keytar 10 -babushka 10 -moncer 10 -stackers 10 -uwa 10 -crashgate 10 -lamalera 10 -boehnhardt 10 -jama'are 10 -brundtland 10 -wireline 10 -jawaher 10 -abuelas 10 -julbord 10 
-liddelow 10 -turbyfill 10 -saqr 10 -daydreamed 10 -travertine 10 -orchy 10 -immobilize 10 -hoppo 10 -flexion 10 -asama 10 -golder 10 -shiyan 10 -bydgoszcz 10 -gingerella 10 -4per 10 -ferhani 10 -unitas 10 -vidra 10 -kojak 10 -danushi 10 -wineman 10 -40pc 10 -fluharty 10 -krumpf 10 -krumpe 10 -flakey 10 -126-page 10 -fogging 10 -baca-lucero 10 -interoffice 10 -socials 10 -magnano 10 -ambedkar 10 -in-jokes 10 -manrakhan 10 -brother-like 10 -self-identification 10 -azzi 10 -229m 10 -23kg 10 -wrasse 10 -teetotaller 10 -bredas 10 -bartkova 10 -marishane 10 -erazo 10 -sconces 10 -6946 10 -whittlesey 10 -torridon 10 -gurganus 10 -desk-based 10 -preformed 10 -falardeau 10 -maggard 10 -debrahlee 10 -dadswell 10 -meyde 10 -nox 10 -co-ord 10 -philharmonia 10 -narelle 10 -screen-based 10 -19minutes 10 -vacantly 10 -743 10 -bomb-detecting 10 -ionospheric 10 -mardis 10 -azarmehr 10 -0300 1234 999 10 -poseurs 10 -fetishists 10 -four-line 10 -years.the 10 -half-block 10 -sub-set 10 -malou 10 -samosa 10 -tourmalet 10 -mage 10 -88.3 10 -tokmakjian 10 -disembowelling 10 -montasser 10 -selectee 10 -tounisi 10 -wych 10 -altamont 10 -campora 10 -hhc 10 -soft-land 10 -bracketing 10 -lss 10 -two-and-a-quarter 10 -verweij 10 -halterman 10 -beer-battered 10 -doublets 10 -thomas-rasset 10 -anglais 10 -alexy 10 -trevan 10 -cronshaw 10 -taht 10 -disillusioning 10 -counterfeiter 10 -gurunath 10 -horng 10 -mujaheddin 10 -multigrain 10 -preternaturally 10 -rime 10 -bickmore 10 -e.e. 10 -currituck 10 -bhanji 10 -frideric 10 -troggs 10 -2:02 10 -stoneleigh 10 -small-unit 10 -amritpal 10 -7.89 10 -tiggywinkles 10 -dongxian 10 -beechey 10 -bodin 10 -now-fiance 10 -krespanis 10 -mcbreen 10 -pre-assigned 10 -three-round 10 -ousts 10 -teetzel 10 -holowczyc 10 -al-sayyed 10 -sulola 10 -bluecross 10 -songkhla 10 -madelung 10 -91.3 10 -pyromallis 10 -5300 10 -bussereau 10 -dandini 10 -islamic-style 10 -tanco 10 -pandaman 10 -winstock 10 -huacachina 10 -siddiq 10 -fur-free 10 -franco-british 10 -spaceweather.com 10 -wrongdoer 10 -cartouche 10 -certitude 10 -coaltion 10 -ruddle 10 -vel 10 -safdie 10 -petya 10 -faustin 10 -dahlonega 10 -boiling-hot 10 -tanchon 10 -#icantbreathe 10 -2nd-r 10 -streetside 10 -pasay 10 -man-of-war 10 -rorting 10 -orzechowski 10 -furber 10 -mapmakers 10 -npg 10 -salps 10 -medium-lift 10 -sudders 10 -aguelhok 10 -woodway 10 -teymourian 10 -maimouna 10 -voronkov 10 -misconstrue 10 -shuhei 10 -jung-soo 10 -jupiler 10 -caril 10 -#afc 10 -117million 10 -phonelines 10 -appdata 10 -atrophied 10 -five-judge 10 -anti-kremlin 10 -gender-selective 10 -dendrobium 10 -bayraktar 10 -nightlight 10 -intranasal 10 -overenthusiastic 10 -mukantabana 10 -1,147 10 -lockless 10 -micro-controller 10 -escaper 10 -warden-controlled 10 -damanhur 10 -bogoyavlensky 10 -proyas 10 -gastel 10 -pooftahs 10 -kalkan 10 -wappinger 10 -turned-out 10 -near-flawless 10 -out-of-place 10 -pachyrhinosaurus 10 -lizette 10 -ball-bearings 10 -sallyann 10 -configuring 10 -isobella 10 -heche 10 -trespasses 10 -cyrille 10 -srecko 10 -bulgasem 10 -milka 10 -pro-wrestling 10 -35,000-year-old 10 -galleried 10 -alpi 10 -seaney 10 -speedball 10 -6.13 10 -bozorgchami 10 -1333 10 -adawiya 10 -dbhd 10 -x-raying 10 -ultra-cool 10 -moralists 10 -espino 10 -afghan-american 10 -icaew 10 -two-by-fours 10 -experimenters 10 -sharp-shooter 10 -redline 10 -nwitimes.com 10 -progressivism 10 -meathead 10 -pandita 10 -approximates 10 -160-page 10 -cummer 10 -venlafaxine 10 -anghaie 10 -frechen 10 -tasing 10 -dolerites 10 -platinum-haired 10 -∙ 10 -barbe 10 -canuck 10 
-69mph 10 -chrome-plated 10 -axeman 10 -mbayo 10 -kose 10 -out-ukip 10 -medal-winner 10 -118mph 10 -singapore-registered 10 -wide-legged 10 -unloving 10 -malamala 10 -shrewder 10 -mayas 10 -32-man 10 -rukavina 10 -zolciak 10 -irini 10 -self-presentation 10 -allmendinger 10 -40,800 10 -wuwei 10 -shanelle 10 -pro-v 10 -kushnir 10 -widely-regarded 10 -political-military 10 -2hr 10 -perez-tano 10 -beesmer 10 -kady 10 -cra 10 -wtxf 10 -patronises 10 -trx 10 -much-larger 10 -huntersville 10 -sahli 10 -keemala 10 -hormone-related 10 -leidenfrost 10 -disengages 10 -dulac 10 -cinema-style 10 -text-speak 10 -falloon 10 -furneaux 10 -karaduman 10 -piaui 10 -conveyors 10 -ksk 10 -xb 10 -816,000 10 -94.2 10 -atomiser 10 -sukup 10 -gaffed 10 -wood-robinson 10 -punzenberger 10 -cringely 10 -fresh-cut 10 -aytach 10 -al-karbouli 10 -french-built 10 -keithley 10 -restoin 10 -360-degrees 10 -vasovagal 10 -scinetists 10 -king-woolfork 10 -demi-god 10 -dolorosa 10 -qfc 10 -seasalter 10 -matonga 10 -fortune-telling 10 -mokshda 10 -newzbin 10 -obregon 10 -republican-held 10 -vasilieva 10 -fluffiest 10 -8,820 10 -journal-news 10 -38mmm 10 -changsa 10 -yalla 10 -nerone 10 -lip-service 10 -schoolcraft 10 -1476 10 -ahla 10 -shore-based 10 -foot-high 10 -deutschmark 10 -fiftysomething 10 -greider 10 -formulates 10 -resales 10 -tiddlers 10 -breezily 10 -retno 10 -barranco 10 -reiff 10 -1976-77 10 -drug-maker 10 -khobaib 10 -fonio 10 -arisce 10 -malkowski 10 -searl 10 -marinella 10 -sakin 10 -blomkvist 10 -fraxel 10 -ardiansyah 10 -tullett 10 -yatton 10 -istana 10 -garrathy 10 -persico 10 -beachcombers 10 -christenson 10 -edwardsville 10 -136-page 10 -laer 10 -mcleese 10 -father-of-ten 10 -eurodisney 10 -williton 10 -tig 10 --85 10 -97,500 10 -tilman 10 -55-acre 10 -top-100 10 -taehwa 10 -11,000-acre 10 -kooperatif 10 -7f 10 -f22 10 -endplate 10 -andrii 10 -l8r 10 -12-yards 10 -repairers 10 -125lb 10 -chlorofluorocarbons 10 -dameion 10 -counteroffer 10 -koomey 10 -quasi-government 10 -achellam 10 -real-name 10 -2-14 10 -toking 10 -deedee 10 -targiniq 10 -1000ft 10 -130lb 10 -516th 10 -drought-affected 10 -draff 10 -producer-director 10 -drumhead 10 -subgenres 10 -anti-paparazzi 10 -mysupermarket.co.uk 10 -175cm 10 -imperceptibly 10 -jamrozek 10 -ibeacon 10 -ite 10 -intertoto 10 -mantegna 10 -blistery 10 -foreknowledge 10 -capacocha 10 -greatful 10 -bioprocessing 10 -left-handedness 10 -smithline 10 -siguiri 10 -refurbishes 10 -nig 10 -geochemists 10 -vds 10 -oustanding 10 -shanel 10 -velib 10 -iza 10 -111.5 10 -deke 10 -fortino 10 -transducers 10 -raofi 10 -crannog 10 -boom-and-bust 10 -controvery 10 -85.5 10 -shawbury 10 -navida 10 -maaz 10 -tenereillo 10 -by-law 10 -nicobar 10 -belletti 10 -maced 10 -late-1970s 10 -pangasius 10 -tepidly 10 -sarli 10 -recut 10 -nowy 10 -b-cell 10 -piccalilli 10 -garabedian 10 -broadbandchoices.co.uk 10 -theatricality 10 -gelowicz 10 -savannah-based 10 -taittinger 10 -daneshmand 10 -pearsons 10 -24-6 10 -callus 10 -familys 10 -pamm 10 -manukainiu 10 -unislamic 10 -ruki 10 -shell-like 10 -udp 10 -vasileios 10 -greedier 10 -vampirism 10 -jianbin 10 -yayanos 10 -bauza 10 -iancu 10 -interglacial 10 -snorer 10 -hamoudi 10 -vyktoriah 10 -less-experienced 10 -nicoya 10 -khudi 10 -kordale 10 -reflectography 10 -r-fla 10 -nahmias 10 -lawreen 10 -higher-up 10 -jordanian-born 10 -994 10 -neubert 10 -lokelani 10 -16.25 10 -parabolas 10 -hanksville 10 -lobeke 10 -harville 10 -sannjay 10 -cinemedia 10 -sukru 10 -babila 10 -kennan 10 -linekar 10 -toolkits 10 -spitzers 10 -termas 10 -pro-lifers 10 
-64,500 10 -112-year-old 10 -dart-throwing 10 -matsunaga 10 -accreted 10 -j-class 10 -newsam 10 -bluffton 10 -duggie 10 -floraholland 10 -10-foot-high 10 -katic 10 -maillardet 10 -rejectionist 10 -5ft7in 10 -portola 10 -73.2 10 -highly-contagious 10 -altinozu 10 -woodinville 10 -positivesingles 10 -human-machine 10 -jallianwala 10 -junge 10 -jodass 10 -russian-u.s. 10 -dilger 10 -pustules 10 -levodopa 10 -doogie 10 -signora 10 -fitzharris 10 -lorton 10 -goals-per-game 10 -slim-fit 10 -captian 10 -colle 10 -jewglass 10 -antiporek 10 -gwithian 10 -five-year-long 10 -poshtels 10 -tishman 10 -doctrinaire 10 -tristano 10 -158million 10 -gleaner 10 -exam-cheating 10 -mccains 10 -oxcart 10 -folonis 10 -sabeti 10 -ndoj 10 -famille 10 -yachvili 10 -cavaleiro 10 -rossall 10 -musleh 10 -zzyzx 10 -somos 10 -supran 10 -monda 10 -poikolainen 10 -laino 10 -hawksbill 10 -perma-tan 10 -hyphens 10 -winbush 10 -jousts 10 -tepes 10 -influxes 10 -eyelock 10 -uv-a 10 -poopers 10 -ecotality 10 -riseley 10 -petrucelly 10 -floured 10 -.303 10 -undemonstrative 10 -mizz 10 -d'astoli 10 -crowdcube 10 -mifsud 10 -gapminder 10 -momock 10 -myfreeimplants.com 10 -clevelanders 10 -spirograph 10 -jonelle 10 -veselý 10 -bird-watchers 10 -controller-free 10 -soraida 10 -half-blind 10 -zhenrong 10 -t11 10 -t12 10 -hoenig 10 -abdelmoumene 10 -hopscotched 10 -choukri 10 -rimmerman 10 -oberste 10 -flatforms 10 -dongguk 10 -pomerleau 10 -un-pc 10 -parsimonious 10 -confiscates 10 -1687 10 -kurta 10 -amedi 10 -yinka 10 -four-try 10 -6,295 10 -10/12 10 -ilissos 10 -stavert-lee 10 -navia 10 -sahd 10 -ingleside 10 -zapatista 10 -depressurised 10 -copsin 10 -f015 10 -loone 10 -fzs 10 -yun-mi 10 -53billion 10 -1,242 10 -herby 10 -3.93 10 -gin-based 10 -triggermen 10 -tpd 10 -faffing 10 -mcgruff 10 -srdjan 10 -aquilops 10 -creedy 10 -cleankill 10 -wiland 10 -blue-shirted 10 -opposable 10 -world-beater 10 -banteay 10 -27,300 10 -hamtramck 10 -chellis 10 -tias 10 -16th-minute 10 -ultra-strict 10 -4,000-mile 10 -muhith 10 -lesterland 10 -nihat 10 -three-inch-long 10 -morphine-like 10 -enna 10 -264million 10 -etoiles 10 -hiroko 10 -fugates 10 -blecker 10 -qijiang 10 -engima 10 -stasse 10 -30-ton 10 -1,462 10 -rha 10 -woolpack 10 -microbeads 10 -minbic 10 -five-count 10 -laurinavicius 10 -2-l 10 -2-8 10 -rogien 10 -solangi 10 -20-meter 10 -bodyfelt 10 -indispensible 10 -lous 10 -loui 10 -braggadocio 10 -machine-gunner 10 -'23 10 -jon-jo 10 -jarmain 10 -breakwaters 10 -khadjimuradov 10 -dezenhall 10 -evanescence 10 -kovalevskaya 10 -genistein 10 -tsunami-hit 10 -gimpo 10 -skriver 10 -1454 10 -1,064 10 -1,068 10 -tarkutis 10 -fashion-loving 10 -twin-track 10 -tomo 10 -anachronisms 10 -annuals 10 -krys 10 -relearned 10 -kaepernicking 10 -3,738 10 -undertrained 10 -ambush-style 10 -two-week-long 10 -lcz696 10 -primers 10 -hwan 10 -wherley 10 -athearn 10 -exorcising 10 -r-ariz 10 -grrr 10 -match-changing 10 -henery 10 -hubbs 10 -madrid-barajas 10 -titillate 10 -mirror-image 10 -slim-down 10 -hospitalize 10 -slow-witted 10 -dashti 10 -#rugeleymassfeed 10 -bord 10 -exner 10 -semi-detatched 10 -shestakov 10 -mallyon 10 -human-created 10 -edvald 10 -tasty-looking 10 -nyongo 10 -canal-side 10 -rugasira 10 -newly-purchased 10 -us-run 10 -blairon 10 -blundells 10 -illiquid 10 -co-eds 10 -cheeta 10 -almouthanna 10 -p-3c 10 -widdicombe 10 -lygon 10 -soa 10 -dongmei 10 -zonana 10 -arbitrariness 10 -ridgers 10 -peecook 10 -endowing 10 -torii 10 -hzo 10 -a130 10 -130km/h 10 -polycephaly 10 -roeding 10 -sketty 10 -vyssa 10 -margreitter 10 -waffled 10 -10.36 10 
-pleva 10 -cristiana 10 -mooing 10 -kuklachev 10 -emelie 10 -celexa 10 -offland 10 -skull-shaped 10 -beach-goer 10 -oskars 10 -fakhoury 10 -bioclimatic 10 -anam 10 -wl 10 -chorlton-cum-hardy 10 -ardyss 10 -winchelsea 10 -mesenchymal 10 -empathizes 10 -sledders 10 -counterpunch 10 -nabilla 10 -14-32 10 -tetiaroa 10 -48.2 10 -crisostomo 10 -felsman 10 -ankunda 10 -colourised 10 -elver 10 -parabon 10 -handman 10 -non-classical 10 -61.4 10 -dartmouth-hitchcock 10 -belligerents 10 -dreyfoos 10 -jenny-anne 10 -six-mile-wide 10 -hazm 10 -satins 10 -ingrate 10 -vegetarian-friendly 10 -dauz 10 -bahamonde 10 -osinski 10 -duangjay 10 -onabulé 10 -svay 10 -snaptu 10 -odón 10 -malkov 10 -naivetà 10 -critcism 10 -re-liberation 10 -danny-boy 10 -schoolsrugby.co.uk 10 -1613 10 -motion-control 10 -gratings 10 -vorarlberg 10 -econ 10 -contagem 10 -2:48 10 -alic 10 -near-future 10 -robika 10 -mitiga 10 -yosuke 10 -worldpay 10 -breathalyzers 10 -shakeout 10 -vitt 10 -27-page 10 -grieff 10 -unthreatening 10 -hamas-led 10 -never-seen 10 -ziemendorf 10 -papier-mache 10 -7,500-a-week 10 -swers 10 -subsections 10 -meekin 10 -windmeyer 10 -accident-free 10 -confessionals 10 -k&l 10 -non-deployed 10 -oakden 10 -bergsten 10 -healthfully 10 -gasser 10 -food-wise 10 -syrian-lebanese 10 -superwind 10 -beerburrum 10 -viands 10 -fantasizes 10 -braigo 10 -computer-security 10 -starscream 10 -lance-star 10 -speed-up 10 -1322 10 -byeong-hun 10 -o'donohoe 10 -stakele 10 -jcs 10 -yeagley 10 -salvans 10 -re-adopted 10 -winick 10 -latino-american 10 -sedway 10 -mlc 10 -mlt 10 -multifaith 10 -assuaging 10 -pries 10 -unsaveable 10 -tse-tung 10 -ruhnert 10 -zeevi 10 -bas-relief 10 -21-week 10 -geo-politics 10 -bohai 10 -gutherie 10 -sight-saving 10 -cardie 10 -graham-cumming 10 -16cm 10 -cadley 10 -khyese 10 -podiatrists 10 -re-focus 10 -sasima 10 -zipf 10 -mansbach 10 -a-frame 10 -zyna 10 -shock-jock 10 -bastianich 10 -azorian 10 -lysergic 10 -upstairs/downstairs 10 -mantids 10 -carfagna 10 -restaurant-goers 10 -mid-decade 10 -ampersand 10 -gellert 10 -rapperport 10 -apple.pro 10 -#bgt 10 -naturopath 10 -6.56 10 -pelini 10 -ufologists 10 -hippolyte 10 -jaiwei 10 -quadriplegics 10 -89.7 10 -jackson-vanik 10 -hilbert 10 -9.81 10 -9.82 10 -grohe 10 -coseley 10 -tetyana 10 -wan-tung 10 -sen-sgt 10 -kludze 10 -ambika 10 -a.m.-10 10 -alien-looking 10 -ocean-view 10 -chaudhury 10 -vaynerchuk 10 -imc 10 -sassie 10 -poussaint 10 -kalaouz 10 -celebuzz 10 -jiggly 10 -scofflaws 10 -alesia 10 -layar 10 -el-sayed 10 -french-kissing 10 -relapsing-remitting 10 -pirg 10 -wheel-well 10 -rochemback 10 -suleimanova 10 -el-essawy 10 -duomax 10 -bowtie 10 -lleyn 10 -supped 10 -waxwings 10 -reinstitute 10 -wasicek 10 -luthier 10 -nicanor 10 -paneer 10 -maraini-melehi 10 -e-reading 10 -blaby 10 -nurmagomedov 10 -loboda 10 -widdersheim 10 -aven 10 -trills 10 -qbeak 10 -edenhofer 10 -kaniel 10 -spaz 10 -http://www.socialstudies.org/ 10 -armthorpe 10 -hawkings-byass 10 -defendent 10 -al-azazy 10 -35-mile 10 -slc16a11 10 -leaf-covered 10 -120-room 10 -connellsville 10 -50-odd 10 -petrovsky 10 -atsdr 10 -super-stardom 10 -endpoints 10 -abelardo 10 -seigenthaler 10 -jiu-jitsu 10 -zabala 10 -rind 10 -petunia 10 -canada-wide 10 -rafaele 10 -palimony 10 -duport 10 -osterley 10 -5100 10 -1998-2003 10 -unmovable 10 -shtanski 10 -2/4 10 -captive-born 10 -19,000-a-year 10 -hendershot 10 -simoni 10 -stoneton 10 -ndukwu 10 -schizophrenics 10 -15-25 10 -autonoma 10 -milovich 10 -kiwan 10 -212million 10 -4,877 10 -pbq 10 -li-ion 10 -550lb 10 -bhawan 10 -cortinas 10 
-directgov 10 -rp-1 10 -progamme 10 -mistretta 10 -threshers 10 -voltron 10 -hoffine 10 -hysteroscopy 10 -tongaat 10 -temperamentally 10 -zealotry 10 -hard-work 10 -drumroll 10 -200,000-strong 10 -tura 10 -dcc 10 -shinbone 10 -vardar 10 -three-phase 10 -qaisar 10 -lates 10 -al-zour 10 -next-to-last 10 -lunkina 10 -self-governed 10 -beenleigh 10 -klub 10 -none-the-wiser 10 -bieker 10 -adaptions 10 -342,500 10 -fredricsen 10 -qaeda-style 10 -estafeta 10 -personology 10 -30miles 10 -2,000-square-foot 10 -setzers 10 -ecarriage 10 -renfroe 10 -emptor 10 -barbaresi 10 -kift 10 -mullensiefen 10 -amauri 10 -suicide-related 10 -marchanti 10 -al-qaboun 10 -sigmon 10 -orthodoxies 10 -guay 10 -chiseling 10 -esimit 10 -dreier 10 -kihlgren 10 -lloydstsb 10 -sair 10 -esler 10 -boisterously 10 -http://www.samaritans.org/ 10 -sowden 10 -angaelos 10 -pantsuits 10 -chiarella 10 -half-scale 10 -soterious 10 -koh-lanta 10 -zapopan 10 -domenick 10 -domenica 10 -70,500 10 -tentpoles 10 -lower-key 10 -benedito 10 -anti-bush 10 -soit 10 -slimon 10 -nurden 10 -gomersal 10 -kidbrooke 10 -never-give-up 10 -hodgetts 10 -oil-filter 10 -gerben 10 -fg 10 -megabit 10 -1-800 10 -pictrued 10 -bellavia 10 -ferreri 10 -salone 10 -renou 10 -pelvic-floor 10 -kallman 10 -khedar 10 -nordhagen 10 -bearcats 10 -mobile-only 10 -branfield 10 -marxen 10 -p34 10 -castillejo 10 -babycenter 10 -greymouth 10 -shoulder-high 10 -lamaze 10 -baddams 10 -lovell-clarke 10 -tano 10 -sadists 10 -iccat 10 -hudson-wilkin 10 -tieless 10 -jouppi 10 -forgas 10 -fransico 10 -heubusch 10 -grok 10 -australia-bound 10 -tonio 10 -heldman 10 -1,704 10 -paik 10 -mansha 10 -crab-like 10 -mozzies 10 -hair-removal 10 -dececco 10 -riotously 10 -45-piece 10 -zarebska 10 -remotely-operated 10 -anti-robot 10 -ganglia 10 -646,000 10 -kolkiewicz 10 -7.21 10 -itter 10 -perepilichny 10 -polamalu 10 -deraas 10 -kindah 10 -#feelingnuts 10 -recalde 10 -cotabato 10 -north-eastwards 10 -fruit-flavoured 10 -nottis 10 -all-access 10 -bonar 10 -habitations 10 -ex-miners 10 -oregonlive 10 -undersupply 10 -omilami 10 -nuseir 10 -minihan 10 -cannabis-smoking 10 -egyptian-mediated 10 -gilleney 10 -sheller 10 -jocasta 10 -openrov 10 -anti-depression 10 -mumtalakat 10 -jegat 10 -thedford 10 -mistraal 10 -unframed 10 -stilled 10 -llamazares 10 -cyberworld 10 -giniel 10 -werts 10 -slidel 10 -blackbrook 10 -taverners 10 -reagh 10 -hmd 10 -own-goals 10 -shinrikyo 10 -47billion 10 -hula-hoop 10 -ceballo 10 -buckfastleigh 10 -ktvx 10 -filor 10 -20-22 10 -rewatching 10 -wilentz 10 -teats 10 -uh-1 10 -latarche 10 -harringtons 10 -hackerspace 10 -st.andrews 10 -sub-postmaster 10 -demus 10 -ahlawy 10 -toria 10 -mne 10 -esrb 10 -makana 10 -86-acre 10 -l-39 10 -ozcelik 10 -falcodore 10 -gangnam-gu 10 -explosives-sniffing 10 -leave-in 10 -battier 10 -aromasin 10 -sylvio 10 -litwin 10 -vd 10 -vy 10 -berchiche 10 -govinda 10 -aorangi 10 -576,000 10 -nikooei 10 -jinno 10 -time-limit 10 -tamisiocaris 10 -48-foot 10 -haidary 10 -tauscher 10 -gkfw 10 -pruner 10 -bialys 10 -hall-davis 10 -3,538 10 -natoli 10 -81.6 10 -r-rating 10 -wallonia 10 -crisped 10 -cat-face 10 -lindesay 10 -bilik 10 -not-so-great 10 -verdier 10 -proteges 10 -bloise 10 -marel 10 -halman 10 -wagners 10 -ulriksen-schulte 10 -chbosky 10 -30-seat 10 -ploughshare 10 -tasende 10 -zadeh 10 -bondage-type 10 -bodley 10 -aftenposten 10 -zombie-themed 10 -tebo 10 -craighead 10 -neonatology 10 -pelty 10 -bizzle 10 -gop-backed 10 -sleepiq 10 -scuccia 10 -laserlight 10 -trism 10 -mizkan 10 -registrable 10 -chronopoulos 10 -guardroom 10 -pelz 
10 -linkoping 10 -sweeby 10 -over-awed 10 -qubilah 10 -spearpoint 10 -halfaya 10 -makda 10 -orjan 10 -guatamala 10 -foxp 10 -abar 10 -kufr 10 -adult-themed 10 -dagong 10 -10ks 10 -footway 10 -sandham 10 -6.6-magnitude 10 -gurnett 10 -vtr 10 -animal-related 10 -musekeweya 10 -brickx 10 -unvetted 10 -keumgang 10 -forewarn 10 -antartica 10 -blue-footed 10 -plumbs 10 -melih 10 -46per 10 -t-34 10 -erector 10 -amref 10 -r-l 10 -skysat-2 10 -disaster-prone 10 -swfs 10 -water-bearing 10 -1,288 10 -1,282 10 -1,287 10 -truffled 10 -juntunen 10 -22.00 10 -12.80 10 -bw 10 -kerlen 10 -schmidle 10 -ghaefelipour 10 -bli 10 -directioner 10 -warp-speed 10 -fredinburg 10 -husband-wife 10 -kurier 10 -ruby-red 10 -foretz 10 -aktabantay 10 -auchernack 10 -schiavelli 10 -mairin 10 -azare 10 -tonawanda 10 -swished 10 -missile-launching 10 -niwaka 10 -rockcliffe 10 -gudula 10 -iken 10 -frauenkirche 10 -feather-trimmed 10 -fotuhi 10 -triggertrap 10 -mondal 10 -borssom 10 -17-24 10 -17-21 10 -113-year-old 10 -isotonic 10 -415million 10 -petersohn 10 -kryptoglanis 10 -17km 10 -devery 10 -blacketer 10 -carolus 10 -betsen 10 -djemaa 10 -indigenization 10 -atoning 10 -takuya 10 -gsp 10 -junod 10 -hammell 10 -drac 10 -jigme 10 -187.5 10 -rothen 10 -krajina 10 -norick 10 -urkel 10 -snuffs 10 -claressa 10 -zielke 10 -upstages 10 -bakir 10 -failand 10 -akc 10 -presgrove 10 -nr-1 10 -macculloch 10 -105-inch 10 -tricorn 10 -dunball 10 -fronteras 10 -bodibase 10 -almquist 10 -crofters 10 -caruth 10 -ipab 10 -lionhead 10 -yuji 10 -62billion 10 -iddesleigh 10 -debronkart 10 --9:30 10 -vafamehr 10 -lusted-after 10 -freelove 10 -freuds 10 -de'von 10 -steyning 10 -quintao 10 -cincinnati/northern 10 -62,400 10 -china-africa 10 -megha 10 -marsiglia 10 -fiandaca 10 -i-era 10 -f.a.m.e. 10 -kasi 10 -headworth 10 -my1styears.com 10 -bitinstant 10 -micromax 10 -foggiest 10 -next-to-nothing 10 -zufi 10 -jairam 10 -ex-argentina 10 -checked-baggage 10 -sequim 10 -kite-surfing 10 -newsradio 10 -veal-whitting 10 -40,000-50 10 -etruria 10 -mastaba 10 -3:42 10 -jayci 10 -19f 10 -wieder 10 -chlorogenic 10 -19-member 10 -otegi 10 -moskos 10 -2.77 10 -cerrejon 10 -no-questions-asked 10 -priscah 10 -boutonniere 10 -gakirah 10 -mccarey 10 -grayed 10 -huffs 10 -forces-afghanistan 10 -2,007 10 -bragdon 10 -tarbes 10 -firehose 10 -matewan 10 -moussaka 10 -c/2014 10 -xiaolin 10 -sujey 10 -gerenscer 10 -rapper/actor 10 -re-occur 10 -zebu 10 -neetu 10 -rsph 10 -xti 10 -21lb 10 -astern 10 -nkoloso 10 -mashaie 10 -no-sugar 10 -zurawik 10 -metawatch 10 -hafs 10 -child-trafficking 10 -casualness 10 -ramsbotham 10 -tahhan 10 -balaton 10 -khulood 10 -berthay 10 -asahara 10 -gunderman 10 -hypnose 10 -newberg-dundee 10 -jarreau 10 -turkish-israeli 10 -prolactin 10 -chups 10 -sanna 10 -goosby 10 -1,605 10 -interrupters 10 -tax-exemption 10 -micronation 10 -fepex 10 -lvmpd 10 -great-grandma 10 -nandi 10 -homegirl 10 -amorelli 10 -gleidson 10 -mitica 10 -mass-production 10 -tungurahua 10 -rooftopper 10 -deeley-brewer 10 -vitaminwater 10 -sziy 10 -koranda 10 -paracetemol 10 -andras 10 -wash-out 10 -six-day-a-week 10 -wahroonga 10 -huculak-kimmel 10 -19cm 10 -pleurobot 10 -anatoli 10 -republican-majority 10 -minxia 10 -22-23 10 -0.68 10 -0845 790 9090 10 -moecke 10 -storay 10 -78per 10 -sorient 10 -meimei 10 -brotha 10 -ca-7 10 -fernhurst 10 -miito 10 -sakuragi 10 -antiguan 10 -goullet 10 -pulkownik 10 -weedkillers 10 -flintlock 10 -qaeda-trained 10 -yongle 10 -gordons 10 -4.02 10 -harkema 10 -pople 10 -a21 10 -post-transplant 10 -32,600 10 -lynbrook 10 -houblon 10 
-nasiriyah 10 -gallaga 10 -breadstick 10 -iniki 10 -sigworth 10 -hitchings 10 -houlder 10 -kairos 10 -matshikiza 10 -ribbeck 10 -lluy 10 -appsense 10 -jonni 10 -banaris 10 -rutba 10 -barrett-jackson 10 -gafsa 10 -homewrecking 10 -saltzman 10 -stringybark 10 -bouland 10 -digital-music 10 -harmsworth 10 -simonian 10 -homann 10 -tinhte.vn 10 -breckneall 10 -k8200 10 -3,141 10 -3,140 10 -wohler 10 -yelcick 10 -militarize 10 -zamel 10 -re-filed 10 -non-va 10 -justin-jinich 10 -far-post 10 -e9 10 -gagnaire 10 -labrum 10 -jividen 10 -heavy-hitting 10 -nalip 10 -djebbour 10 -potmesil 10 -conflation 10 -14-and-a-half 10 -thrill-o-meter 10 -asx 10 -anti-circumcision 10 -shahbagh 10 -cavalierly 10 -fued 10 -jinzhou 10 -26s 10 -beyda 10 -spireites 10 -hachim 10 -oles 10 -zhikharev 10 -kamla 10 -itoje 10 -clothworkers 10 -costley 10 -peyman 10 -chellie 10 -neuritis 10 -bottley 10 -bhagwati 10 -majchrzak 10 -unbundling 10 -najlaa 10 -griesch 10 -plotnick 10 -428,000 10 -shifters 10 -chortled 10 -overfished 10 -roadies 10 -mortdecai 10 -krenz 10 -al-saghir 10 -cabinetmaker 10 -state-to-state 10 -reider 10 -kiobel 10 -lordships 10 -ogd 10 -comapny 10 -five-0 10 -vyvyan 10 -four-ball 10 -kinetoscope 10 -jakari 10 -clinton-obama 10 -1614 10 -seki 10 -gangbanger 10 -zivotofskys 10 -qumsiyeh 10 -jordache 10 -kc-97 10 -magnitude-3 10 -instanbul 10 -baltusrol 10 -kurri 10 -cator 10 -aurel 10 -aurea 10 -bruzzese 10 -half-fit 10 -whippersnapper 10 -scratchers 10 -fist-fight 10 -sandhoe 10 -tunicia 10 -acculturation 10 -curmudgeons 10 -bateel 10 -kercheval 10 -lacefield 10 -mantles 10 -elfar 10 -silsden 10 -r/c 10 -career-minded 10 -mallonee 10 -europe1 10 -pre-fame 10 -95-79 10 -ex-jockey 10 -rambo-style 10 -llewlyn-bowen 10 -emulsifier 10 -libidinous 10 -krizan-wilson 10 -blanchet 10 -lymphohistiocytosis 10 -tanque 10 -bihari 10 -chaverri 10 -drys 10 -twerkers 10 -tal-y-bont 10 -limbe 10 -stensgaard 10 -babatz 10 -paderina 10 -ringworm 10 -cityspire 10 -70-pound 10 -strongbox 10 -bishopthorpe 10 -superspy 10 -lerette 10 -tiga 10 -37.04 10 -meheux 10 -cawthray 10 -serotype 10 -reagans 10 -spotleson 10 -invitee 10 -emmalyn 10 -naresh 10 -tucson-based 10 -carlaw 10 -ragano 10 -biopen 10 -out-compete 10 -101mph 10 -inpe 10 -magalena 10 -gauzere 10 -over-35s 10 -desensitization 10 -17in 10 -goda 10 -re-gain 10 -now-abandoned 10 -soudan 10 -cargiant 10 -phentermine 10 -tosin 10 -arizpe 10 -tenga 10 -huguenots 10 -teachout 10 -lemina 10 -howzat 10 -inundations 10 -halowski 10 -ship-building 10 -al-saadoon 10 -y-front 10 -farmfoods 10 -bahader 10 -mielnikiewicz 10 -catch-phrase 10 -25,600 10 -locavore 10 -ophardt 10 -abiodun 10 -outgun 10 -aquatalia 10 -ajumogobia 10 -hemdan 10 -massengill 10 -tredalo 10 -warwicks 10 -miesbach 10 -wild-haired 10 -11.26 10 -career-wise 10 -or7 10 -wkmg-tv 10 -k'abel 10 -band-tailed 10 -adman 10 -passtime 10 -abridge 10 -renesas 10 -stunell 10 -clymo 10 -panyee 10 -longhirst 10 -attard 10 -raphaël 10 -yoshimasa 10 -59-20 10 -goonj 10 -michaeli 10 -nue 10 -floriana 10 -jackson-related 10 -ex-father-in-law 10 -cheshire-based 10 -latam 10 -mokk 10 -joos 10 -pushkarev 10 -moremi 10 -bonzo 10 -vikramjeet 10 -rens 10 -osso 10 -monsheimer 10 -news-post 10 -smet 10 -dicamillo 10 -wilner 10 -oyen 10 -schriever 10 -tomczak 10 -jedda 10 -patriarchias 10 -hermens 10 -2,311 10 -berberich 10 -rabidly 10 -piersol 10 -750-mile 10 -schalit 10 -ooooh 10 -ydb 10 -trabia 10 -bagou 10 -staniel 10 -2880 10 -mozingo 10 -nootbaar 10 -bellissima 10 -bixby 10 -morfoot 10 -zapper 10 -komertz 10 -government-brokered 
10 -celiberti 10 -ceara 10 -podair 10 -18,300 10 -carthaginians 10 -nahayan 10 -lesly 10 -warsash 10 -buchwald 10 -abuhafs 10 -desa 10 -sub-type 10 -rubashkin 10 -abdicates 10 -entombing 10 -post-al-assad 10 -customises 10 -freeloading 10 -jirsch 10 -sidemen 10 -sooriyabandara 10 -faizah 10 -pacoima 10 -straight-six 10 -21bn 10 -332-94 10 -gyno 10 -musham 10 -amélie 10 -ruscak 10 -zim 10 -koistinen 10 -pongsudhirak 10 -polydactyly 10 -gounis 10 -campaign-finance 10 -phonies 10 -dominions 10 -dolbeer 10 -17.49 10 -12-12 10 -sayafi 10 -d.hedral 10 -alterna 10 -www.ultimo.co.uk 10 -berteau 10 -enslow 10 -kepler-10c 10 -clovers 10 -barata 10 -high-yield 10 -subdivide 10 -marauder 10 -up-armored 10 -nkonzo 10 -flexiseq 10 -step-ups 10 -careworn 10 -badung 10 -stern-looking 10 -dissembling 10 -femco 10 -?!?!? 10 -aquavault 10 -subsystem 10 -jefri 10 -bullecourt 10 -li'l 10 -illya 10 -montsegur 10 -onaiyekan 10 -quad-bike 10 -police-related 10 -hiya 10 -308million 10 -guanine 10 -duro 10 -garteh 10 -dalei 10 -chamorro 10 -windlesham 10 -thida 10 -53.3 10 -adult-onset 10 -d-cup 10 -moderate-income 10 -golden-i 10 -catman 10 -hotels4u 10 -nanford 10 -870million 10 -igt 10 -1,699 10 -diaz-canel 10 -asanish 10 -sucdi 10 -positing 10 -early-round 10 -coulding 10 -spiker 10 -spikey 10 -nobuo 10 -prickles 10 -4.21 10 -ex-seleka 10 -samudra 10 -scoones 10 -bazzarre 10 -smirl 10 -servette 10 -#freethenipple 10 -milquetoast 10 -destructible 10 -mothball 10 -tuckwell-smith 10 -mooncakes 10 -6:27 10 -mullaley 10 -wade-jones 10 -shenkar 10 -anapol 10 -shakhter 10 -polidori 10 -fierce-looking 10 -bezbatchenko 10 -age-gap 10 -yuill 10 -toucet 10 -tellspec 10 -waddles 10 -65-pound 10 -shian 10 -braam 10 -blowholes 10 -smitkova 10 -pleasantness 10 -family-operated 10 -118-110 10 -ex-professionals 10 -nteff 10 -lycka 10 -neuendorf 10 -pro-smoking 10 -rover.com 10 -peach-colored 10 -circumvention 10 -long-suppressed 10 -baldanza 10 -hasting 10 -laura-jane 10 -aimal 10 -re-engined 10 -jagdish 10 -51a 10 -51g 10 -staib 10 -rabbinic 10 -after-effect 10 -duchek 10 -man-handled 10 -amu 10 -public-school 10 -lazare 10 -special-education 10 -hoensbroek 10 -lascala 10 -wordsection1 10 -orb-web 10 -oppen 10 -huerfano 10 -dinnigan 10 -high-low 10 -devillebichot 10 -giemulla 10 -gdf11 10 -gest 10 -cenek 10 -tene 10 -4,191 10 -halaweh 10 -warners 10 -conductance 10 -not-so-friendly 10 -five-and-dime 10 -milkybar 10 -talanian 10 -vapestick 10 -mhenni 10 -40-44 10 -mialet 10 -dubis 10 -dubie 10 -pepy 10 -interweaving 10 -warren-madden 10 -41-31 10 -ashikali 10 -longbows 10 -naturalis 10 -adhi 10 -z100 10 -nasiri 10 -re-hearing 10 -phineas 10 -tabart 10 -kollars 10 -mask-wearing 10 -mattsson 10 -360º 10 -1634 10 -shaalan 10 -samimi 10 -354,000 10 -hobos 10 -cage-fighter 10 -vangelis 10 -khelife 10 -mrwebi 10 -trustedsec 10 -ehlen 10 -jammy 10 -snookered 10 -eylward 10 -19lbs 10 -shavolian 10 -omnipotence 10 -love-triangle 10 -enlarger 10 -8:34 10 -stickle 10 -seven-speed 10 -akanat 10 -5:22 10 -caftan 10 -non-essentials 10 -hénin 10 -tae-hee 10 -red-brown 10 -pidcock 10 -mertilla 10 -ficarra 10 -cut-down 10 -eyoma 10 -jupiter-like 10 -wajda 10 -tuter 10 -esmat 10 -ex-mi5 10 -forster-tuncurry 10 -benepe 10 -unbridgeable 10 -freighted 10 -kn-08 10 -sutchi 10 -prunty 10 -up-beat 10 -mid-song 10 -rehear 10 -prabhjot 10 -ondecker 10 -24-second 10 -toing 10 -lanne-mirrlees 10 -c.l. 
10 -relaxed-looking 10 -100-ton 10 -awais 10 -feigen 10 -front-on 10 -bruxelles 10 -barton-upon-humber 10 -abyssinian 10 -loyon 10 -parries 10 -halmahera 10 -koppler 10 -papaioannou 10 -decipherable 10 -nundy 10 -almon 10 -boggle 10 -mamana 10 -st-jean-sur-richelieu 10 -kulm 10 -pressurize 10 -trematode 10 -lomi 10 -90,000-per-week 10 -tree-trimming 10 -sulzer 10 -580,000-a-year 10 -arcangel 10 -mandón 10 -treesort 10 -symiczek 10 -melgaard 10 -scheid 10 -non-retired 10 -spitler 10 -abualkhair 10 -high-waist 10 -79.6 10 -79.7 10 -woodpile 10 -belkhair 10 -cheese-eating 10 -ten-day-old 10 -baker-masson 10 -opd 10 -ope 10 -opc 10 -autothysis128s 10 -rancheros 10 -4mins 10 -reckers 10 -benito-kowalski 10 -freedia 10 -kehua 10 -nbcnews 10 -leasowes 10 -sourpuss 10 -modish 10 -enppi 10 -haxby 10 -gun-show 10 -kamlesh 10 -pliosaurus 10 -fornicating 10 -ockham 10 -mcevatt 10 -unmasks 10 -wermuth 10 -zaney 10 -single-drug 10 -invalided 10 -poetics 10 -fumicino 10 -kierah 10 -ten-piece 10 -tyrin 10 -tyrik 10 -exserohilum 10 -coire 10 -48.1 10 -35-stone 10 -toque 10 -1.5-2 10 -kleptomaniac 10 -niese 10 -sordo 10 -cryptocurrencies 10 -doxil 10 -privatizations 10 -teegarden 10 -despoiled 10 -al-azm 10 -sujoe 10 -kontinental 10 -notam 10 -bubbler 10 -sterett 10 -alperovitch 10 -wuaki.tv 10 -1,359 10 -swf 10 -shandwick 10 -saphire 10 -rosewarne 10 -karlito 10 -thembi 10 -halitosis 10 -hewling 10 -660lbs 10 -agzarian 10 -untrusting 10 -kiselyov 10 -leathley 10 -45.1 10 -gwynne-james 10 -reginaldo 10 -hagiography 10 -22-pound 10 -dennery 10 -orange-and-black 10 -newschannel5 10 -114-year-old 10 -elite-level 10 -kapital 10 -zippered 10 -t8 10 -8wgal 10 -congaree 10 -floppiness 10 -acquisti 10 -micro-pigs 10 -botanics 10 -falenski 10 -amagasa 10 -ghali 10 -ghale 10 -catteries 10 -out-run 10 -transtromer 10 -binford 10 -statler 10 -gbp 10 -chauvet 10 -sogn 10 -neo-baroque 10 -ineptness 10 -sanoussi 10 -directly-elected 10 -muhlestein 10 -nery 10 -alwash 10 -spaceguard 10 -yabroud 10 -kavir 10 -sgts 10 -rochefoucauld 10 -1,099 10 -1,098 10 -1,091 10 -guest-worker 10 -atvod 10 -almyra 10 -lox 10 -poskitt 10 -loiterers 10 -bova 10 -bovo 10 -ml866 10 -quiffs 10 -action/adventure 10 -top-50 10 -rojansky 10 -homophones 10 -picken 10 -pickel 10 -mso-font-pitch 10 -narco-terrorist 10 -hermann-texas 10 -poarch 10 -1,767 10 -wellfleet 10 -dri 10 -pro-separatist 10 -foxp2 10 -18-ton 10 -compadres 10 -stanzas 10 -cherry-picker 10 -dampers 10 -chrin 10 -gleidman 10 -bezjak 10 -dačić 10 -anti-colonial 10 -al-berjawi 10 -stylites 10 -aditi 10 -muzyka 10 -overnights 10 -sniffle 10 -mascall 10 -3t 10 -legear 10 -delwar 10 -rands 10 -yasuni 10 -keiearra 10 -tilmanstone 10 -macfan 10 -7-foot-tall 10 -michalik 10 -1,674 10 -al-senoussi 10 -tonquinisha 10 -fastness 10 -elongates 10 -airguard 10 -31-24 10 -sugar-coating 10 -most-populous 10 -block-booked 10 -banting 10 -xliii 10 -61-page 10 -51-yard 10 -gigging 10 -nethercot 10 -yassine 10 -ej200 10 -9:44 10 -consolo 10 -1,585 10 -tee-shot 10 -180km 10 -trebah 10 -lukman 10 -twitter-related 10 -nazaré 10 -freeskiing 10 -280-mile 10 -4000m 10 -all-you-can-drink 10 -24.00 10 -futers 10 -primm 10 -grayscale 10 -dutheil 10 -abundances 10 -bermudian 10 -karl-johan 10 -maly 10 -khalilzada 10 -guehenno 10 -chlorpyrifos 10 -model/actress 10 -sesena 10 -reimburses 10 -saikia 10 -3,599 10 -koti 10 -glamour.com 10 -public-facing 10 -lussick 10 -minich 10 -smith-horak 10 -somerfield 10 -rhiannah 10 -french-trained 10 -l'hospitalet 10 -gut-level 10 -petabytes 10 -gumbinger 10 -kérastase 10 
-natividad 10 -coatzacoalcos 10 -halai 10 -transshipment 10 -desmarais 10 -micus 10 -hessdalen 10 -eljarh 10 -al-moayad 10 -fumigating 10 -soubriquet 10 -giugno 10 -mattinson 10 -hegglin 10 -mamontov 10 -14000 10 -cabezas 10 -2,636 10 -plaskett 10 -eluned 10 -silverjet 10 -bloodworth 10 -danlos 10 -type-45 10 -sherbert 10 -blaker 10 -errr 10 -rieckenberg 10 -osmium 10 -brownouts 10 -ridley-thomas 10 -carnsew 10 -nityananda 10 -al-sunna 10 -anastos 10 -leveson-style 10 -crystallises 10 -dujmovits 10 -italy-uruguay 10 -sophola 10 -lingvall 10 -ystradgynlais 10 -#whoruprotecting 10 -conchos 10 -life-plus-20-year 10 -dimambro 10 -al-sherif 10 -lovel 10 -off-shoots 10 -nirl 10 -keiding 10 -glenlivet 10 -1997-2010 10 -carolingian 10 -museet 10 -hazing-related 10 -rataic 10 -wellbutrin 10 -slowey 10 -eddine 10 -ex-liberal 10 -comi 10 -transer 10 -al-raghie 10 -lotfy 10 -40-storey 10 -#muslimlivesmatter 10 -jennilyn 10 -ravishingly 10 -34-years-old 10 -lihau 10 -ginning 10 -yolkers 10 -sabetta 10 -incident-free 10 -re-told 10 -cfg 10 -egyptian-canadian 10 -sabiston 10 -benczur 10 -pavlik 10 -submunitions 10 -rockel 10 -contiki 10 -wann 10 -reentered 10 -lahl 10 -spiegler 10 -pinon 10 -pinos 10 -zuckerbergs 10 -kidwill 10 -maleman 10 -swelter 10 -ridha 10 -erudition 10 -zor 10 -rathborne 10 -pile-ups 10 -pyretta 10 -genney 10 -huntington-ashland 10 -12.31 10 -well-practised 10 -cranebrook 10 -pithovirus 10 -gaffin 10 -boquillas 10 -wyzykowski 10 -helicycle 10 -post-16 10 -starteens 10 -duologue 10 -slamka 10 -decanting 10 -head-spinning 10 -garbs 10 -lessy 10 -pietrzak 10 -understates 10 -razorbills 10 -manias 10 -d.m. 10 -zadrozny 10 -jyrobike 10 -ledsham 10 -trotty 10 -regen 10 -chix 10 -kurr 10 -earthed 10 -multi-hull 10 -szechuan 10 -donelson 10 -maricela 10 -hapilabs 10 -fogerty 10 -self-funders 10 -enshrinement 10 -gedis 10 -leye 10 -ponda 10 -jayprakash 10 -12-game 10 -money-losing 10 -reggaetoneros 10 -speigner 10 -superted 10 -preta 10 -two-pound 10 -werrett 10 -ice2sea 10 -vandersteen 10 -adlard 10 -hinwaii 10 -intermarriages 10 -unpronounceable 10 -mangabey 10 -bugal 10 -log-on 10 -icefjord 10 -sunderbans 10 -makeblock 10 -alt-country 10 -sighthill 10 -ristroph 10 -25-18 10 -parred 10 -mayfair-based 10 -neuromodulation 10 -bsl 10 -thingvellir 10 -bintan 10 -badjeck 10 -cambiano 10 -gumsuri 10 -design-wise 10 -spillways 10 -burholt 10 -eddins 10 -ducker 10 -saulius 10 -hotson 10 -nonaccidental 10 -myfoxphilly.com 10 -stun-gun 10 -laksi 10 -snake-arm 10 -grimanis 10 -casen 10 -deherrera 10 -malinda 10 -lennox-gastaut 10 -take-it-or-leave-it 10 -679,000 10 -#gmb 10 -sua 10 -prohibitionists 10 -steffans 10 -kcnc-tv 10 -dfm 10 -wprost 10 -laundy 10 -shorthanded 10 -otsuchi 10 -tyreese 10 -tinkov 10 -cattanio 10 -proficiently 10 -robinett 10 -warber 10 -millionsaved 10 -battaglia 10 -congresswomen 10 -blancmange 10 -lower-wage 10 -tujunga 10 -bora-bora 10 -hottug 10 -b.k. 
10 -69191 10 -aadil 10 -moloh 10 -arbeiter 10 -bandai 10 -ankeet 10 -kenis 10 -325million 10 -ragen 10 -feistiest 10 -razor-wire 10 -tinke 10 -tunur 10 -season-best 10 -self-referential 10 -lant 10 -subunits 10 -all-australian 10 -toretto 10 -smerconish 10 -conowingo 10 -columbia-based 10 -claybrook 10 -immune-compromised 10 -app.net 10 -castellaneta 10 -unipolar 10 -degnan 10 -1993-2001 10 -vinesh 10 -pre-budget 10 -tubandt 10 -qatar-owned 10 -bettis 10 -gundel 10 -militarisation 10 -22,000-a-week 10 -houmous 10 -lmb 10 -naughtier 10 -caubergs 10 -african-themed 10 -mease 10 -189733 10 -cryptographer 10 -phytosaur 10 -nitties 10 -amerasian 10 -mohinder 10 -pirret 10 -elenin 10 -mahmoudi 10 -yumen 10 -girlhood 10 -grundmann 10 -proscribing 10 -1,785 10 -1,788 10 -on-the-road 10 -lacson 10 -dpd 10 -susli 10 -lagiard 10 -5.1-magnitude 10 -jerran 10 -responsibilty 10 -smith-williams 10 -renationalisation 10 -guilding 10 -etzel 10 -wreyford 10 -derain 10 -makeup-free 10 -ceren 10 -macdonalds 10 -donkelaar 10 -mazar-i-sharif 10 -turati 10 -schizoid 10 -sintef 10 -biohackers 10 -blinged 10 -mulvi 10 -tutumlu 10 -vondrich 10 -end-terrace 10 -haldenby 10 -glendower 10 -korena 10 -í 10 -cillizza 10 -salience 10 -220c 10 -pervitin 10 -caicara 10 -1ghz 10 -ramkissoon 10 -creepshots 10 -reguarly 10 -mullineux 10 -kimberle 10 -shark-diving 10 -folkingham 10 -v.i.p. 10 -hidcote 10 -geek.com 10 -c-difficile 10 -1,364 10 -1,369 10 -blenner 10 -huhn 10 -50metres 10 -antena 10 -tetrodotoxin 10 -carnuntum 10 -ferarri 10 -baynton 10 -bargain-hunter 10 -lowinger 10 -kronick 10 -apakan 10 -nf2 10 -shebeen 10 -salla 10 -balise 10 -ampie 10 -hample 10 -111-run 10 -#superbowl47 10 -pelser 10 -rabah 10 -salvor 10 -badal 10 -bexington 10 -dittmeyer 10 -kujoe 10 -simia 10 -anim 10 -songshan 10 -yanchuk 10 -mankading 10 -q-warrior 10 -nabba 10 -yorick 10 -beaut 10 -sulks 10 -alomari 10 -down-and-outs 10 -anti-west 10 -buie 10 -luxford 10 -ronn 10 -spyderco 10 -fixed-line 10 -not-so-happy 10 -uhrman 10 -dural 10 -non-international 10 -partyers 10 -qbpc 10 -snobbishness 10 -baczyk 10 -ha-na 10 -impinging 10 -ar-15-style 10 -ticketek 10 -guenterberg 10 -thalmann 10 -perking 10 -2001-2009 10 -pig-headed 10 -24mph 10 -enalapril 10 -mokhiniso 10 --100 10 -in-the-moment 10 -wearsiders 10 -incubates 10 -el-awa 10 -scorches 10 -eo40 10 -micco 10 -galápagos 10 -sweet-tasting 10 -alekseyev 10 -bundeswehr 10 -51mins 10 -hared 10 -work-shy 10 -sheilas 10 -brenes 10 -varibike 10 -popchips 10 -qimr 10 -sylvanian 10 -8,000-a-year 10 -homebrand 10 -thibout 10 -clywd 10 -do-wells 10 -public-records 10 -petz 10 -tripler 10 -reexamined 10 -iter 10 -streaming-music 10 -krajian 10 -gothenberg 10 -al-cambodi 10 -ettington 10 -mckinnis 10 -brodskaya 10 -yolanthe 10 -oin 10 -oia 10 -chilavert 10 -splutters 10 -79.95 10 -devins 10 -hagenbeck 10 -gerrish 10 -in-cabin 10 -convo 10 -1671 10 -chesbro 10 -northen 10 -shagroon 10 -dampier 10 -kiam 10 -glengarry 10 -headlam 10 -styron 10 -shrum 10 -anusha 10 -deicorp 10 -redistributes 10 -speeded-up 10 -delavar 10 -empire-building 10 -foudakis 10 -bavarian-style 10 -sankare 10 -vit 10 -viz 10 -22-stone 10 -jorn 10 -pankowska 10 -cosplaying 10 -pongolle 10 -watercooler 10 -nine-weeks-old 10 -hahaah 10 -elnaggar 10 -hadrons 10 -kob4 10 -cdg 10 -hulkower 10 -collier-brewer 10 -ahmann 10 -hruda 10 -shiniest 10 -schull 10 -mouallem 10 -sadako 10 -altair 10 -income-tax 10 -lorina 10 -hallinan 10 -wavell 10 -trevener 10 -lafollette 10 -javelins 10 -kliff 10 -brigadiers 10 -5-foot-long 10 -kazutaka 10 
-nephropathy 10 -wicomico 10 -rael 10 -tumescent 10 -uber-rich 10 -overstimulated 10 -daves 10 -fairytale-like 10 -self-tan 10 -150,000-per-week 10 -acxiom 10 -wildfell 10 -kozakova 10 -baron-cohen 10 -830million 10 -109million 10 -5:01 10 -5:02 10 -5:04 10 -long-neglected 10 -janaway 10 -minghella 10 -rossmiller 10 -!?! 10 -maggy 10 -trinita 10 -abdelmonen 10 -pallansch 10 -rebak 10 -gorlovka 10 -sytner 10 -yaffa 10 -cordey 10 -spearey 10 -hebb 10 -leeck 10 -heberlein 10 -pen-like 10 -munsu 10 -fietek 10 -ossian 10 -paser 10 -hallencreutz 10 -potbellied 10 -bevacizumab 10 -77mins 10 -lucznikowska 10 -al-ghouta 10 -bassin 10 -blucher 10 -penniman 10 -obama-romney 10 -transference 10 -esporlas 10 -mycar 10 -distinctive-looking 10 -cave-dwelling 10 -homeserve 10 -jedidiah 10 -mohit 10 -5,050 10 -hangup 10 -ariely 10 -vajazzles 10 -skimped 10 -multifocal 10 -moumtzis 10 -peregrines 10 -churrascaria 10 -bijindo 10 -tamborine 10 -otsego 10 -borah 10 -shorish-shamley 10 -scrappage 10 -nerandzic 10 -levinsohn 10 -kinzel 10 -bildeston 10 -susumu 10 -unite4 10 -drog 10 -handoko 10 -jangid 10 -lerum 10 -mazurek 10 -al-aswad 10 -pikk 10 -llopis 10 -welegedara 10 -tir 10 -mesoamerican 10 -fotokite 10 -segues 10 -dorko 10 -rhymer 10 -gausepohl 10 -heavily-bearded 10 -abdul-jalil 10 -a.m.-7 10 -bandarin 10 -bibring 10 -ferrari-driving 10 -dirar 10 -sunbaker 10 -#askhermore 10 -tv135 10 -tragus 10 -five-yard 10 -ruess 10 -aranburu 10 -zhurbin 10 -ampsurf 10 -modin 10 -abdulfattah 10 -fox29 10 -procreating 10 -macgowan 10 -10,000-plus 10 -pule 10 -haugum 10 -deyu 10 -scotcher 10 -mendi 10 -korea-u.s. 10 -scherz 10 -gocman 10 -65,000-tonne 10 -strombolian 10 -procures 10 -948 10 -frbs 10 -simintov 10 -ipplepen 10 -jetsmarter 10 -o'doul 10 -super-rare 10 -dewis 10 -orum 10 -miglorino 10 -pfft 10 -njoku 10 -astringency 10 -halsman 10 -lcl 10 -matthau 10 -risman 10 -#mycalvins 10 -twlight 10 -nourry 10 -americo 10 -konradsen 10 -moonoo 10 -regine 10 -sveriges 10 -nonsteroidal 10 -hat-wearing 10 -reordered 10 -perturbation 10 -morgan-thomas 10 -laleham 10 -bt3030 10 -13-part 10 -mom-of-three 10 -perron 10 -mealor 10 -sub-cultures 10 -carbonic 10 -huong 10 -fangirl 10 -basia 10 -kazimi 10 -keles 10 -deputize 10 -narino 10 -hand-feed 10 -ramalinga 10 -nyree 10 -expenses-paid 10 -betc 10 -wakeford 10 -deford 10 -blockley 10 -trouton 10 -ceva 10 -toshihiko 10 -paneth 10 -yakovenko 10 -lamarche 10 -al-nusrah 10 -60,000-plus 10 -zuleika 10 -ravanelli 10 -marples 10 -yanic 10 -aegyo 10 -tablet-style 10 -61st-minute 10 -heriot-watt 10 -hitch-hiker 10 -bluefields 10 -165cm 10 -corbi 10 -untapable 10 -gacesa 10 -ctvrtnicek 10 -clemencia 10 -naison 10 -officiator 10 -5-gallon 10 -mainella 10 -regulus 10 -homeport 10 -okaz 10 -ostrowska 10 -segun 10 -chaga 10 -hyppolite 10 -spot-check 10 -9:07 10 -devonta 10 -greylock 10 -mso-generic-font-family 10 -froing 10 -tukur 10 -lenzen 10 -nyumbani 10 -sayyari 10 -matau 10 -pe.com 10 -much-talked 10 -addow 10 -getzin 10 -metastasised 10 -godber 10 -turi 10 -1-15 10 -sundee 10 -torchlit 10 -dellisa 10 -benquerenca 10 -fon 10 -2000-2006 10 -mahl 10 -parraz 10 -cummock 10 -winkli 10 -bohun 10 -hyper-masculine 10 -sensi 10 -snugride 10 -melville-shreeve 10 -labourlist 10 -kravanis 10 -dineley 10 -cuprinol 10 -meale 10 -face-paint 10 -mehjoo 10 -yaobang 10 -allot 10 -monzer 10 -misrule 10 -flagellation 10 -r-massachusetts 10 -kweli 10 -kornilov 10 -mk-3475 10 -soeoth 10 -out-of-context 10 -third-wicket 10 -burped 10 -z-list 10 -ponzi-schemer 10 -nayfack 10 -obabiyi 10 -bulloch 10 
-lindiwe 10 -over-generous 10 -joeleen 10 -christianmingle.com 10 -dettling 10 -kilwinning 10 -non-biting 10 -massac 10 -harrys 10 -scabbard 10 -lieutenant-governor 10 -glyphs 10 -adlon 10 -narco-terrorists 10 -re-enlistment 10 -bernholdt 10 -cookney 10 -conejo 10 -rambam 10 -thrustssc 10 -ervs 10 -biggovernment.com 10 -llandysul 10 -coptics 10 -filicide 10 -gorki 10 -roils 10 -chainrai 10 -fraiman 10 -meenan 10 -gadea 10 -semiprecious 10 -92p 10 -burned-down 10 -whinfrey 10 -steinhaus 10 -derpy 10 -inter-fraternity 10 -sumners 10 -schuller 10 -striping 10 -desormeaux 10 -daviot 10 -69.4 10 -acute-onset 10 -chillsner 10 -hendrickx 10 -footstool 10 -haeckel 10 -web-freedom 10 -mossler 10 -tua 10 -looses 10 -seven-woman 10 -mathlouthi 10 -kamilah 10 -bingeman 10 -ganong 10 -mss 10 -apopo 10 -douthat 10 -colonist 10 -glamsquad 10 -grimms 10 -madikizela 10 -carbo 10 -schroyer 10 -asbi 10 -voorwerp 10 -blane 10 -gushi 10 -bfs 10 -furkert 10 -ukiah 10 -wideville 10 -long-service 10 -kukui 10 -enmore 10 -loose-lipped 10 -yanelli 10 -nealey 10 -1,119 10 -r6 10 -wtrf 10 -txt 10 -drafty 10 -low-salt 10 -turbolenza 10 -duplexes 10 -dilly 10 -muhieddine 10 -figure-skimming 10 -frogh 10 -halberd 10 -twin-aisle 10 -over-exercising 10 -pratten 10 -all-court 10 -nishizawa 10 -microcar 10 -dymaxion 10 -windley 10 -667,000 10 -sahba 10 -victimise 10 -cheo 10 -iturbide 10 -broths 10 -sandigo 10 -easy-to-follow 10 -double-paned 10 -wac 10 -missned 10 -coller 10 -porntip 10 -senaki 10 -warren-beck 10 -u.s.-north 10 -scherwitz 10 -deblay 10 -nature-lover 10 -autumn-winter 10 -braulio 10 -moncrief 10 -theradome 10 -railwaymen 10 -gunwalking 10 -abbey-style 10 -conviviality 10 -barik 10 -canieatit.co.uk 10 -andriansyah 10 -valbona 10 -baronoene 10 -ssmk 10 -self-protective 10 -brigand 10 -tzorvas 10 -bold-faced 10 -zikhali 10 -sulistyo 10 -urself 10 -tuitions 10 -@ids_mp 10 -1589 10 -20mins 10 -27mm 10 -lietzau 10 -hannam 10 -crowes 10 -storie 10 -erlandson 10 -five-metre-long 10 -61.6 10 -goulds 10 -catto 10 -1:41 10 -1:42 10 -1:44 10 -mullally 10 -tatem 10 -hypotension 10 -voetbal 10 -hubertus 10 -woolstencroft 10 -muscovy 10 -kartick 10 -urbex-sw 10 -rear-guard 10 -mccullins 10 -bsm 10 -ricalton 10 -high-carbohydrate 10 -thirwall 10 -shiney 10 -layaways 10 -cancelo 10 -abdull 10 -sepulvado 10 -adcocks 10 -3.66 10 -brennon 10 -sybi 10 -hualien 10 -galliers 10 -kreuziger 10 -leight 10 -gasparac 10 -better-qualified 10 -keevill 10 -deep-set 10 -medair 10 -9.08 10 -orsato 10 -menken 10 -76.6 10 -aveni 10 -chernikoff 10 -crf19 10 -revitalift 10 -hard-to-please 10 -bodyline 10 -cataphiles 10 -gandron 10 -b2b 10 -ghika 10 -cammie 10 -granato 10 -toates 10 -cerney 10 -non-schoolies 10 -337-page 10 -espite 10 -consolos 10 -anderson-dixon 10 -weingart 10 -tomotaka 10 -skyprowler 10 -talavera 10 -al-nouri 10 -1427 10 -agonist 10 -52-inch 10 -vampiric 10 -zoje 10 -para-equestrian 10 -euphanerops 10 -f-18e 10 -olyphant 10 -lav 10 -bandhavgarh 10 -adblock 10 -courtni 10 -meesha 10 -taza 10 -micoperi 10 -analyzer 10 -blair-ford 10 -heaver 10 -todo 10 -qmul 10 -dth 10 -updos 10 -first-placed 10 -utt 10 -mufc 10 -muttram 10 -then-chancellor 10 -father/son 10 -empathizing 10 -photogs 10 -forgemasters 10 -kelce 10 -toned-down 10 -stokely 10 -375mm 10 -suncor 10 -third-country 10 -crocodilian 10 -beri 10 -bogunovich 10 -mogra 10 -chevins 10 -marysue 10 -ksar 10 -foderingham 10 -gerke 10 -0.81 10 -sawan 10 -coxan 10 -duking 10 -silkwood 10 -16-story 10 -tsunami-ravaged 10 -cytoplasm 10 -guereca 10 -#notbuyingit 10 -wooton 10 
-54-46 10 -queue-en-brie 10 -wedgeworth 10 -clued-up 10 -cleggy 10 -knickerbox 10 -popovski 10 -merchandiser 10 -10.85 10 -birchgrove 10 -non-oil 10 -sub-region 10 -braunwalder 10 -beltrame 10 -thornes 10 -9:21 10 -nbs 10 -komsomolets 10 -orionid 10 -pd-l1 10 -sparos 10 -aningaaq 10 -nerys 10 -slower-paced 10 -pre-assembled 10 -pancam 10 -shenkin 10 -430-mile 10 -rookmangud 10 -bertelli 10 -pocantico 10 -long-period 9 -edgeways 9 -giessen 9 -oraya 9 -gruenewald 9 -lafayette-ede 9 -one-on-ones 9 -macha 9 -equilateral 9 -snowbirds 9 -zottoli 9 -stapel 9 -poker-straight 9 -grossmann 9 -kiesling 9 -pepitone 9 -jinbo 9 -yiddo 9 -nailfie 9 -usoni 9 -munde 9 -bogo 9 -unadopted 9 -peppe 9 -verbruggen 9 -clefts 9 -wighton 9 -dymond 9 -#askislamicstate 9 -250-room 9 -29-24 9 -nonusers 9 -bioarchaeologist 9 -lawing 9 -mobcast 9 -knp 9 -snowbanks 9 -17.95 9 -omero 9 -gilden 9 -bromine 9 -christofias 9 -gravel-voiced 9 -unnap 9 -camaguey 9 -atik 9 -ninety-three 9 -oludamola 9 -numatic 9 -bebop 9 -sturla 9 -take-charge 9 -rossie 9 -rushlau 9 -takhalov 9 -indian-owned 9 -700mhz 9 -runa 9 -jon-allan 9 -one-foot 9 -54-mile 9 -nemcovsky 9 -2:33 9 -2:34 9 -genre-bending 9 -armatage 9 -then-missing 9 -kubaisi 9 -newcome-baker 9 -sorocaba 9 -r-wyoming 9 -499-page 9 -semi-darkness 9 -morecombe 9 -xristina 9 -tattoed 9 -georgious 9 -flower-like 9 -dodeen 9 -mikvah 9 -3,495 9 -tranquilise 9 -disneyfication 9 -moo-jin 9 -wop 9 -gainariu 9 -double-tap 9 -monohull 9 -312-pound 9 -goddio 9 -milinovich 9 -ambrosiano 9 -haulover 9 -dominicana 9 -gorniak 9 -doona 9 -2004-2011 9 -2004-2010 9 -21.25 9 -coega 9 -parkhouse 9 -wellfield 9 -baisar 9 -todorova 9 -wannabee 9 -warmley 9 -datingdirect.com 9 -flyin 9 -sciarpelletti 9 -tacko 9 -post-prison 9 -d-ca 9 -zarkadakis 9 -corum 9 -noncitizen 9 -noura 9 -decorous 9 -gambaru 9 -glassblowers 9 -buswell-robinson 9 -montas 9 -red-and-yellow 9 -locally-grown 9 -el-farrah 9 -hard-drives 9 -jemblung 9 -reeders 9 -negreanu 9 -paskins 9 -alalcomenaeus 9 -schwanke 9 -ohain 9 -dighton 9 -stephanz 9 -stephani 9 -petrol-driven 9 -64kg 9 -26-21 9 -nerdo 9 -.00 9 -riveters 9 -cochetel 9 -winterland 9 -korengal 9 -world-leader 9 -demuren 9 -nantlle 9 -9,205 9 -corruption-free 9 -aarij 9 -brustholm 9 -silver-screen 9 -cococay 9 -miksys 9 -a-h 9 -now-debunked 9 -scotcen 9 -yardsticks 9 -mihevc 9 -sven-göran 9 -geo-strategic 9 -rock-paper-scissors 9 -1,173 9 -beaulier 9 -niemira 9 -polisher 9 -edgett 9 -mohon 9 -712,000 9 -lauter 9 -recheck 9 -damapong 9 -three-days 9 -zoellner 9 -matchesfashion.com 9 -pazyryk 9 -anti-climate 9 -rachins 9 -imminence 9 -hargrave 9 -4:03 9 -downslope 9 -mosson 9 -brother-sister 9 -nicotiana 9 -ludendorff 9 -extra-vehicular 9 -septembers 9 -85-63 9 -portaledges 9 -heinonen 9 -code-of-conduct 9 -olens 9 -olena 9 -steet 9 -frickley 9 -stabile 9 -ghodse 9 -velassaru 9 -atlante 9 -thriftiness 9 -leitsinger 9 -already-strained 9 -scotiabank 9 -todhunter 9 -mbeli 9 -2,199 9 -kawhmu 9 -multi-room 9 -orexigen 9 -wolbachia 9 -jin-ah 9 -9.19 9 -greenport 9 -aixam 9 -birdy 9 -worriedly 9 -8.70 9 -cubbington 9 -still-burning 9 -vapers 9 -vusi 9 -bravia 9 -rort 9 -schillaci 9 -do-not-call 9 -ledisi 9 -henblas 9 -squiggly 9 -gruss 9 -gambits 9 -0-8 9 -0-9 9 -natgeo 9 -aasen 9 -baros 9 -entwhistle 9 -cybersquatter 9 -berlin-born 9 -hav304 9 -quantifies 9 -home-building 9 -587,000 9 -117lbs 9 -beaufoy 9 -mapmaker 9 -curcio 9 -merly 9 -stille 9 -lundblad 9 -chiwayo 9 -galbiati 9 -bumbershoot 9 -dinokeng 9 -37th-minute 9 -close-call 9 -second-eldest 9 -al-hindi 9 -kleve 9 -tn1 9 
-ten-person 9 -dearlove 9 -ultra-feminine 9 -tume 9 -4,230 9 -2,730 9 -forbears 9 -hyoscine 9 -impurity 9 -abdulle 9 -citronella 9 -beaner 9 -de-cluttering 9 -salles 9 -al-azzawi 9 -17-room 9 -supeno 9 -beres 9 -propellor 9 -lankapuvath 9 -2,175 9 -harlock 9 -2-year-olds 9 -rustamova 9 -dinorah 9 -jiving 9 -two-hours 9 -multiplatinum-selling 9 -previously-unknown 9 -sponseller 9 -enflame 9 -booze-soaked 9 -plant-eater 9 -imagineering 9 -solemia 9 -tma 9 -ktrs 9 -re-invigorate 9 -ishack 9 -kodjovi 9 -abu-sir 9 -kdlt 9 -womenâ 9 -rod-like 9 -marjory 9 -suresch 9 -darras 9 -3:39 9 -lombroso 9 -shrode 9 -www.takingthekids.com 9 -rastafari 9 -50-41 9 -chan-o-cha 9 -7 9 -time-being 9 -zuni 9 -slimpod 9 -pindling 9 -adriel 9 -krason 9 -edmontosaurus 9 -sikhanyiso 9 -,000,000 9 -public-opinion 9 -sleepwalkers 9 -47,800 9 -isoprene 9 -afspa 9 -begrudged 9 -swidlicki 9 -carbon-dioxide 9 -agaba 9 -righ 9 -sidewall 9 -mazandaran 9 -va2 9 -neklyaev 9 -agnifilo 9 -rhinestone-studded 9 -gotenna 9 -stone-and-a-half 9 -carolyne 9 -ghadi 9 -movie-maker 9 -4-week-old 9 -rojecki 9 -lashimba 9 -396,906 9 -pierce-arrow 9 -tousle-haired 9 -edgehill 9 -detik.com 9 -baldridge 9 -alevis 9 -marsh-welton 9 -beaschler 9 -musampa 9 -minoru 9 -77lbs 9 -scabbing 9 -ormesby 9 -black-haired 9 -kafir 9 -jamie-leigh 9 -feints 9 -bellringer 9 -galluzzo 9 -140p 9 -1,015 9 -straten 9 -brown-outs 9 -klonopin 9 -diabetes-related 9 -sankarlal 9 -fernie 9 -hainer 9 -painshill 9 -2.4-inch 9 -rousson 9 -komova 9 -hurries 9 -meekings 9 -famly 9 -morpho 9 -woll 9 -haripur 9 -arvelo 9 -corretjer 9 -scramjets 9 -eskew 9 -chef-owner 9 -dingui 9 -short-barreled 9 -#thinspiration 9 -factory-farmed 9 -otsu 9 -mofa 9 -rifaximin 9 -fictionally 9 -joani 9 -drotleff 9 -vaille 9 -gwladys 9 -renewable-energy 9 -gorre 9 -single-humped 9 -300-person 9 -germy 9 -tattenham 9 -fare-paying 9 -megamillions 9 -geotag 9 -million-euro 9 -merryn 9 -15,900 9 -abdelmajid 9 -sabuco 9 -sidda 9 -71-day 9 -breathalyzed 9 -re-hire 9 -vena 9 -loco-motion 9 -sharni 9 -5,160 9 -160km/h 9 -rediscovers 9 -5,000-ton 9 -roncin 9 -templo 9 -kyung-eun 9 -thread-like 9 -well-ventilated 9 -trelenberg 9 -dronenburg 9 -700-4 9 -edeka 9 -al-jazari 9 -treasure-hunting 9 -29-man 9 -manana 9 -rezoning 9 -rangrez 9 -nlc 9 -nlb 9 -debasement 9 -coupler 9 -skeletorus 9 -rothenburg 9 -nerheim 9 -werhahn 9 -six-tenths 9 -tangents 9 -khalaji 9 -35,600 9 -chetnole 9 -batar 9 -barga-milbury 9 -hawkswell 9 -insulin-producing 9 -road-kill 9 -rangin 9 -re-fuelling 9 -tazarib 9 -realnetworks 9 -vielma 9 -33-years-old 9 -ryders 9 -zadan 9 -mnangagwa 9 -341,000 9 -one-and-only 9 -glum-looking 9 -durov 9 -smith-payne 9 -rentz 9 -wriggly 9 -orda 9 -saljic 9 -doeschate 9 -ilsley 9 -sarka 9 -bandaranaike 9 -ferof 9 -sarrouj 9 -booby-traps 9 -silty 9 -manisa 9 -nose-to-tail 9 -self-perceived 9 -flumenbaum 9 -fiszman 9 -mcgeechan 9 -action-thriller 9 -speedways 9 -six-race 9 -minifigs 9 -milepost 9 -helical 9 -yendell 9 -due-process 9 -dexia 9 -agt 9 -disease-resistant 9 -bannigan 9 -rearmed 9 -136million 9 -berks. 
9 -nowakowski 9 -omm 9 -reconstitution 9 -super-healthy 9 -re-authorization 9 -clacking 9 -steininger 9 -saizar 9 -beverley-jane 9 -third-string 9 -newitt 9 -nasib 9 -chatlines 9 -mom-of-four 9 -molehills 9 -homestretch 9 -natero-armento 9 -ilfc 9 -perevalnoe 9 -bayang 9 -witcherley 9 -brierly 9 -zhiqiao 9 -piggly 9 -markkula 9 -wltx 9 -3/7 9 -gun-shaped 9 -weicker 9 -devyn 9 -bianna 9 -porthminster 9 -gorod 9 -slim-line 9 -kunstmuseum 9 -kwanzaa 9 -monch 9 -over-abrupt 9 -longus 9 -20,000-capacity 9 -bilinguals 9 -trusteeship 9 -cassandre 9 -manche 9 -pawlowicz 9 -garling 9 -insurrections 9 -pyrosome 9 -atthe 9 -montalbano 9 -dartsight 9 -cohabitating 9 -gaoli 9 -luciferin 9 -then-military 9 -birchim 9 -kube 9 -twiddle 9 -jeannemarie 9 -merce 9 -swindlehurst 9 -dongdaemun 9 -keyc 9 -fuchsias 9 -delacy 9 -skyros 9 -lekki 9 -waza-ari 9 -bortell 9 -bunagana 9 -douce 9 -aitmarri 9 -sanders-campfield 9 -badboy 9 -tahmina 9 -harrison-bentzen 9 -louden 9 -gloor 9 -dégagé 9 -10,000-acre 9 -outwork 9 -ushioda 9 -2,640 9 -.23 9 -.20 9 -folkie 9 -jamaica-born 9 -adtrap 9 -fmcg 9 -baronial 9 -hriz 9 -okmeydani 9 -maf 9 -29-story 9 -2363 9 -re-constructive 9 -half-ape 9 -10-night 9 -kemsley 9 -thursday-sunday 9 -unstaffed 9 -10am-2pm 9 -mexicano 9 -greavsie 9 -invercargill 9 -donaire 9 -azt 9 -mermoz 9 -sugenth 9 -1,400,000 9 -pipkin 9 -ghufron 9 -shaqueel 9 -dusit 9 -slimness 9 -4:22 9 -suvorov 9 -dungen 9 -lady-like 9 -townhill 9 -lordkipanidze 9 -fulp 9 -ramazzotti 9 -heinz-harald 9 -self-financed 9 -tuition-free 9 -eustachian 9 -luder 9 -bodenham 9 -kachoria 9 -vocca 9 -6.26 9 -6.27 9 -harvy 9 -1346 9 -verbeek 9 -jaa 9 -michigan-born 9 -clocky 9 -ramakrishnan 9 -rahayu 9 -egberto 9 -militantly 9 -cranio 9 -harbour-front 9 -s.n. 9 -corkhill 9 -8.16 9 -commercial-grade 9 -bedrick 9 -teamo 9 -gun-suicide 9 -seatguru.com 9 -stott-bumsted 9 -5gs 9 -broadwalk 9 -allaway 9 -agyeman 9 -ireland-related 9 -aggrieve 9 -campanile 9 -eem 9 -een 9 -padak 9 -sahady 9 -whingers 9 -teetotaler 9 -mockney 9 -ayanna 9 -56per 9 -onneley 9 -jasika 9 -evanna 9 -glossier 9 -saddlebag 9 -wysocki 9 -holycombe 9 -nunlee 9 -alexandrina 9 -highfalutin 9 -cyle 9 -68p 9 -hypopituitarism 9 -romily 9 -gemologist 9 -270m 9 -32mph 9 -torabi 9 -andruw 9 -laa-laa 9 -anagrams 9 -faiyum 9 -9-millimeter 9 -rammell 9 -big-boy 9 -berck 9 -domain-name 9 -tarango 9 -1:05 9 -gronoff 9 -longtoushan 9 -2ib 9 -zoo-like 9 -cur 9 -cud 9 -niagra 9 -warda 9 -shafiei 9 -shaghai 9 -3,009 9 -pebody 9 -philp-parsons 9 -kunst 9 -håkensmoen 9 -caliphs 9 -sunderland-born 9 -pivonka 9 -fullabrook 9 -plage 9 -28-21 9 -lupi 9 -thigh-gap 9 -namco 9 -nordhausen 9 -awl 9 -tree-trunk 9 -wilcken 9 -lizann 9 -commedia 9 -#rupertsfault 9 -excepts 9 -brij 9 -up-for-grabs 9 -hendrickse 9 -181st 9 -enrika 9 -maracanã 9 -overcompensating 9 -16.5-11 9 -valeter 9 -nekzad 9 -wan-ifra 9 -105.3 9 -105.5 9 -peguy 9 -javea 9 -pakal 9 -armalite 9 -meader 9 -added-time 9 -high-neck 9 -qed 9 -sheered 9 -avelar 9 -mulveyhill 9 -commision 9 -roundtables 9 -self-objectification 9 -lovespace 9 -wajahat 9 -banitskas 9 -relentlessness 9 -xinhau 9 -guerry 9 -2008-now 9 -maxmin 9 -jochum 9 -maruster 9 -39-page 9 -kooza 9 -zytiga 9 -makélélé 9 -leptis 9 -rinconada 9 -newcastle-based 9 -cristin 9 -spidi 9 -12-fold 9 -32-team 9 -mctigue 9 -murfet 9 -revolving-door 9 -pacini 9 -savants 9 -covach 9 -osieck 9 -ex-drug 9 -mottisfont 9 -al-baghdadiya 9 -cleanings 9 -hougdahl 9 -saarbruecken 9 -sixtieth 9 -flowchart 9 -stellwagen 9 -3,220 9 -whiz-bang 9 -sightsavers 9 -fourth-leading 9 -markac 9 
-counter-surveillance 9 -sex-selection 9 -rharouity 9 -strength-training 9 -risk-assessed 9 -evia 9 -sandbergs 9 -laarne 9 -59.9 9 -boheme 9 -etkin 9 -tiankai 9 -620million 9 -bonino 9 -bremridge 9 -ewens 9 -doo-doo 9 -ozeki 9 -al-najar 9 -.2014 9 -.2010 9 -jørgensen 9 -shot-putter 9 -915,000 9 -senhor 9 -alte 9 -1999-2001 9 -1999-2007 9 -dilating 9 -safecast 9 -sundin 9 -enslaves 9 -vittore 9 -4,995 9 -4,999 9 -mudders 9 -resealing 9 -price-cutting 9 -double-points 9 -over-looked 9 -lugg 9 -formalizes 9 -earnie 9 -ballard-hudson 9 -cross-pollination 9 -radar-guided 9 -malinke 9 -al-muhajireen 9 -cholangitis 9 -floorspace 9 -seth-smith 9 -hosier 9 -hodan 9 -once-respected 9 -begbies 9 -faragher 9 -2,380 9 -six-room 9 -359,000 9 -javell 9 -u.s.-eu 9 -vaandering 9 -jeanne-claude 9 -sharqieh 9 -7,350 9 -kaillie 9 -wpvi-tv 9 -mother-and-daughter 9 -scardinos 9 -unworried 9 -hans-jorg 9 -dupont-aignan 9 -miscast 9 -barsham-rolfe 9 -self-hate 9 -thorgalsen 9 -jeune 9 -bodyparts 9 -ulmer 9 -renacimiento 9 -robot-assisted 9 -1,172 9 -genclerbirligi 9 -walmart-owned 9 -tuusula 9 -right-hand-drive 9 -befuddle 9 -justgiving.com 9 -booba 9 -babycastles 9 -coast-based 9 -marisela 9 -spf15 9 -nibiru 9 -baguette-cut 9 -colwin 9 -earthquake-devastated 9 -190m 9 -orfevres 9 -ghostswimmer 9 -texeira 9 -zawacki 9 -jelawat 9 -x-boxes 9 -pq 9 -lenten 9 -going-to-the-sun 9 -fel 9 -sora 9 -widny 9 -glute 9 -nenthead 9 -meretz 9 -av80r 9 -latitudinal 9 -tranquillised 9 -cottingley 9 -14-8 9 -murugan 9 -80,000-seater 9 -allgood 9 -tkacik 9 -boco 9 -uncultivated 9 -krak 9 -tauriel 9 -drummond-hay 9 -byfords 9 -yelp.com 9 -cnn/youtube 9 -elenz 9 -410million 9 -wilden 9 -dalen 9 -928gt 9 -rianna 9 -baverman 9 -babilonia 9 -eat24 9 -dyas 9 -businessman-turned-politician 9 -raisher 9 -ucr 9 -binoche 9 -sanghvi 9 -tumpey 9 -newboys 9 -eco-credentials 9 -yorke-davies 9 -stachurski 9 -senhao 9 -sorrenti 9 -wharfedale 9 -w-a-t-e-r 9 -margaretha 9 -1-foot 9 -overfly 9 -kelty 9 -nkamba 9 -zussman 9 -jeydon 9 -8.0.2 9 -biedrzycki 9 -rabbitts 9 -mikveh 9 -judaean 9 -predominates 9 -predominated 9 -bukal 9 -roko 9 -3,125 9 -3,122 9 -gikomba 9 -bricknell 9 -car-related 9 -60-65 9 -gretz 9 -ordnanceman 9 -ghislain 9 -luzern 9 -gorshkov 9 -friedl 9 -malborough 9 -wachiraporn 9 -planeterrella 9 -sonko 9 -intersnack 9 -meedendorp 9 -archaeologically 9 -empaneled 9 -pink-haired 9 -bresi-ando 9 -dussen 9 -antigens 9 -quarter-pounder 9 -mis-shapen 9 -laterooms.com 9 -non-pharmacological 9 -sarko 9 -peosta 9 -hatoum 9 -doheny 9 -bailyn 9 -rubén 9 -transom 9 -rinaudo 9 -pozonsky 9 -1,536 9 -self-disgust 9 -#blackoutblackfriday 9 -mih 9 -super-car 9 -biasi 9 -cooper-harris 9 -re-infected 9 -srichand 9 -leader-in-waiting 9 -gangnam-style 9 -summer-signing 9 -leatha 9 -clo 9 -cla 9 -get-out-of-jail 9 -vtox 9 -pataskala 9 -semion 9 -hokum 9 -pentax 9 -lynchpins 9 -imaginate 9 -aisikaier 9 -macatoo 9 -tollbooth 9 -ministerial-level 9 -book-ended 9 -indesit 9 -brahm 9 -anticoagulants 9 -kilajyan 9 -funk-haslam 9 -nesn 9 -colcci 9 -nightscape 9 -momat 9 -maxwells 9 -o'ahu 9 -monaco-style 9 -drogue 9 -munua 9 -kalonge 9 -tor-ivar 9 -rough-looking 9 -sinisterly 9 -unflagging 9 -agrigoroaei 9 -700-strong 9 -inyama 9 -pompom 9 -jacorey 9 -overdeveloped 9 -satiating 9 -class-c 9 -pruvic 9 -ati 9 -bidco 9 -payhembury 9 -reaveley 9 -piron 9 -irock 9 -i-270 9 -conatzer 9 -170ft 9 -xiaoxiao 9 -tumon 9 -sawani 9 -dehumanise 9 -daubert 9 -yang-gon 9 -6.06 9 -cocoa-nomics 9 -strangward 9 -moleman 9 -eftychiou 9 -moxen 9 -hryvnia 9 -haston 9 
-mccurdy-quintana 9 -2075 9 -anicich 9 -rasello 9 -godspell 9 -sea-front 9 -maternal-fetal 9 -dermott 9 -8.34 9 -ohman 9 -neruja 9 -crawshawbooth 9 -overeater 9 -demodectic 9 -leanest 9 -foch 9 -hayward-maher 9 -title-holders 9 -kolesnikov 9 -30-member 9 -setaniye 9 -hindon 9 -westphal 9 -self-promoter 9 -gavigan 9 -gullane 9 -vinnicombe 9 -u.s-mexico 9 -inflation-linked 9 -sperl 9 -black-faced 9 -then-20-year-old 9 -clydach 9 -harbourmaster 9 -lenience 9 -expositions 9 -thigh-length 9 -certifiable 9 -iannucci 9 -piquancy 9 -newson-smith 9 -pardis 9 -saia 9 -heat-sensing 9 -kabbani 9 -76,500 9 -zhaojie 9 -poizeau 9 -1972-73 9 -childfund 9 -62.63 9 -auric 9 -price-gouging 9 -blainville 9 -ansi 9 -appaloosas 9 -1,273 9 -1,274 9 -elaziz 9 -neamt 9 -dreyzehner 9 -waaf 9 -hongping 9 -eighth-place 9 -peripherally 9 -nanavati 9 -full-month 9 -ignorantly 9 -agnero 9 -love-fest 9 -lubrano 9 -ebola-reston 9 -418,000 9 -gehry-designed 9 -five-room 9 -colour-blocking 9 -mescaline 9 -rannveig 9 -glampers 9 -salkida 9 -rowbottom 9 -megafon 9 -fechter 9 -fearmongering 9 -celebrity-style 9 -splay 9 -1045 9 -e18 9 -1,455 9 -1,451 9 -esthwaite 9 -mcklevis 9 -modad 9 -asiyalova 9 -kmtv 9 -high-need 9 -deline 9 -phenylketonuria 9 -homero 9 -taxpayer-subsidized 9 -chik-v 9 -broadnax 9 -gansbaai 9 -deep-fry 9 -coreyography 9 -anoushka 9 -agios 9 -boegli 9 -581,000 9 -l'isle 9 -tadić 9 -25-meter 9 -hemeyou 9 -counter-intuitively 9 -ravenously 9 -twinkle-toed 9 -wiseguy 9 -qdoba 9 -culpas 9 -poliwood 9 -stourport-on-severn 9 -jalpaiguri 9 -aptly-titled 9 -pealing 9 -heart-racing 9 -satjawat 9 -revolutionizes 9 -bhoomika 9 -swaters 9 -ravjani 9 -heligan 9 -medrobotics 9 -1445 9 -1,053 9 -vitalija 9 -telemovie 9 -amsr 9 -kerberos 9 -deloughrey 9 -clean-water 9 -elusiveness 9 -deanda 9 -courtnay 9 -bionym 9 -rosalba 9 -mischief-making 9 -córdova 9 -eru 9 -hajnajafi 9 -fryent 9 -craffonara 9 -succes 9 -forteviot 9 -taste-test 9 -mehlberg 9 -bill-payers 9 -28-strong 9 -uzb 9 -newspace 9 -wilczek 9 -hurtt 9 -roddey 9 -acdc 9 -bullet-shaped 9 -600-a-month 9 -delaplane 9 -austrian-owned 9 -cocaine-trafficking 9 -fitiao 9 -kleeberger 9 -brasfield 9 -talacrest 9 -texana 9 -tea-towels 9 -175-acre 9 -racqueman 9 -zaitouneh 9 -bomper 9 -hewitts 9 -92ft 9 -over-stayers 9 -dunraven 9 -eight-ton 9 -ibstock 9 -tierpark 9 -sihasak 9 -shapovalov 9 -d-san 9 -130km 9 -alner 9 -largess 9 -centaurus 9 -snn 9 -self-controlled 9 -rapporteurs 9 -0-7 9 -daschke 9 -self-righteously 9 -burros 9 -samso 9 -outward-facing 9 -kaufer 9 -ditcher 9 -disc-like 9 -vigouroux 9 -80-1 9 -l10 9 -hours-old 9 -napes 9 -ex-us 9 -142.9 9 -enqing 9 -chmielewski 9 -#bedofshame 9 -vacquier 9 -awerial 9 -vallon 9 -whitener 9 -alness 9 -dollar-for-dollar 9 -late-20s 9 -dippold 9 -prew 9 -samuda 9 -startribune 9 -mis-firing 9 -marblehead 9 -megafight 9 -berghdal 9 -deddington 9 -vilet 9 -rhib 9 -zammett 9 -mechoulam 9 -ashesi 9 -umtiti 9 -lyricists 9 -someren 9 -21km 9 -plesch 9 -alura 9 -addictiveness 9 -reichsmarks 9 -housefull 9 -adamov 9 -lpa 9 -kinabatangan 9 -latchford 9 -li-fi 9 -simpson-lee 9 -dibona 9 -lafourche 9 -cavin 9 -spag 9 -69010 9 -cobley 9 -then-manchester 9 -pembrolizumab 9 -barboursville 9 -bosche 9 -tolmachevy 9 -communiquà 9 -us-versus-them 9 -cheerleading-style 9 -2:57 9 -montilla 9 -farak 9 -jennet 9 -vaezi 9 -tirath 9 -vigoda 9 -sandee 9 -narratively 9 -once-ruling 9 -colosimo 9 -cynk 9 -recurrences 9 -lucilla 9 -encoder 9 -counter-punching 9 -pitie-salpetriere 9 -pill-popping 9 -jarvez 9 -nitisinone 9 -advanced-stage 9 -deans-dundas 9 
-hour-glass 9 -thingiverse 9 -high-grossing 9 -Ötztal 9 -line-standers 9 -vijender 9 -ventre 9 -mourniho 9 -c-h 9 -newbiggin 9 -kennerly 9 -mid-price 9 -shunsuke 9 -healthfulness 9 -jazzercise 9 -orienting 9 -christian-based 9 -hi-five 9 -128,500 9 -kiedyk 9 -tilefish 9 -kiddo 9 -tadevsz 9 -issoufou 9 -under-five 9 -6,000-plus 9 -hoonah 9 -bingaman 9 -giraavaru 9 -carlstadt 9 -e-retail 9 -burruchaga 9 -hobyo 9 -aparna 9 -breakfasting 9 -acid-free 9 -at-a-glance 9 -hipwell 9 -ripperda 9 -catalfu 9 -oppman 9 -urfan 9 -gladiolus 9 -veroni 9 -v.a. 9 -flyable 9 -atitlan 9 -criswell 9 -juszkiewicz 9 -liat 9 -two-yearly 9 -ashforth 9 -a53 9 -okulski 9 -over-familiar 9 -coad 9 -mahalo 9 -steering-wheel 9 -rustenberg 9 -#music 9 -metinvest 9 -jencks 9 -imbuing 9 -front-right 9 -scotland-based 9 -benjaminsen 9 -dual-lens 9 -farnes 9 -florey 9 -jonie 9 -sleights 9 -clairvoyants 9 -herzfelder 9 -x5s 9 -riserva 9 -rosales-martinez 9 -self-medicates 9 -ex-pros 9 -beskau 9 -15,450 9 -whelk 9 -stockland 9 -grapeshot 9 -newtownbutler 9 -wahoos 9 -shallop 9 -iannone 9 -rumbaugh 9 -ezekwesili 9 -powdering 9 -enchantress 9 -al-kassar 9 -evidence-tampering 9 -annulling 9 -in-goal 9 -330lbs 9 -preachings 9 -accordions 9 -40-over 9 -polytunnel 9 -murtada 9 -23g 9 -23a 9 -astypalaia 9 -alman 9 -forwent 9 -mazin 9 -veilleux 9 -131.7 9 -qobani 9 -gaertner 9 -selfie-style 9 -kugannesan 9 -11-18 9 -otaiba 9 -puspendu 9 -hieatt 9 -jeu 9 -ciolino 9 -bald-headed 9 -0.57 9 -bondage-themed 9 -gimball 9 -1,237 9 -sightseer 9 -sentinal 9 -28,800 9 -pullitzer 9 -sex-segregated 9 -9.78 9 -jayaraman 9 -eight-night 9 -nikolaenko 9 -alleles 9 -miquelon 9 -correlating 9 -hasim 9 -dawdled 9 -estaban 9 -welney 9 -covering-up 9 -oby 9 -obs 9 -chalus 9 -risheng 9 -insitu 9 -kipruto 9 -schoefield 9 -tanjug 9 -unproved 9 -ecmwf 9 -give-away 9 -muzikante 9 -newbuild 9 -flophouse 9 -mclouglin 9 -unequalled 9 -spoliation 9 -vyrnwy 9 -2,790 9 -ticknall 9 -mitri 9 -engelberg 9 -yohe 9 -kelemen 9 -consulate-general 9 -cameroonians 9 -12000 9 -schenkel 9 -10-17 9 -banjos 9 -fosu-mensah 9 -mud-caked 9 -emailer 9 -clean-skins 9 -anti-genocide 9 -lomen 9 -tiquie 9 -micro-moments 9 -claimline 9 -entrenching 9 -3,048 9 -bragger 9 -pspca 9 -trifled 9 -9,995 9 -mayzee 9 -ilyushin-76 9 -virago 9 -killara 9 -turkish-flagged 9 -maharjan 9 -fahed 9 -ishtar 9 -matriarchs 9 -firearms-related 9 -kennestone 9 -102-87 9 -leverhulme 9 -clean-tech 9 -talukdar 9 -outwear 9 -lafell 9 -cilybebyll 9 -quattrociocche 9 -lovegood 9 -chatburn 9 -mackinday 9 -rane 9 -ranh 9 -ranj 9 -matriach 9 -wirraway 9 -labuan 9 -vamping 9 -self-quarantine 9 -lay-out 9 -3:32 9 -elroy 9 -earlsfield 9 -pavoncello 9 -dyrholm 9 -postlewaite 9 -fitness-wise 9 -victoriano 9 -hustles 9 -heils 9 -1,477 9 -ruhlman 9 -margiocchi 9 -flippy 9 -bi-product 9 -alshehri 9 -supermen 9 -hizballah 9 -pseudo-scientific 9 -geox 9 -blix 9 -97.9 9 -undervalues 9 -detlev 9 -greenspun 9 -league-best 9 -lap-dancer 9 -autotune 9 -ex-dictator 9 -dwane 9 -bromyard 9 -family-focused 9 -suffolk-born 9 -sea-life 9 -niketan 9 -beems 9 -wuc 9 -1,077 9 -subjugating 9 -staindrop 9 -siegmann 9 -loubet 9 -nossa 9 -austerity-hit 9 -bobbit 9 -broomloan 9 -narev 9 -nareg 9 -mikhaela 9 -fédération 9 -drennan 9 -rubane 9 -metson 9 -izambard 9 -cammeray 9 -sapien 9 -ju3 9 -etv 9 -changlin 9 -diggins 9 -garafalo 9 -19-stone 9 -camby 9 -colostrum 9 -sludgy 9 -instrumentals 9 -progestogen 9 -kalms 9 -draftsmen 9 -1580s 9 -severiano 9 -charvet 9 -digressions 9 -snow-laden 9 -ihub 9 -emulators 9 -al-ajami 9 -youd 9 -wolferton 9 
-theodent 9 -swingin 9 -zabrze 9 -relaxin 9 -visca 9 -aksu 9 -cantania 9 -boardriders 9 -re-living 9 -hemagglutinin 9 -gastineau 9 -katzenstein 9 -earwaker 9 -eathan 9 -hunsinger 9 -anti-roma 9 -allgier 9 -lahontan 9 -gayness 9 -cellou 9 -4-d 9 -shvut 9 -hunga 9 -sazan 9 -yong-ho 9 -www.anthonynolan.org 9 -louch 9 -hair-styling 9 -bilgi 9 -hapuna 9 -husqvarna 9 -spaetzel 9 -intoning 9 -shorter-range 9 -kamani 9 -wagnor 9 -jordan-smith 9 -bonenberger 9 -akihiko 9 -critical-care 9 -24-10 9 -hudd 9 -thirst-quenching 9 -zelenitsky 9 -gekkos 9 -front-flip 9 -gambians 9 -benjamin-muthiah 9 -sergeant-major 9 -cugat 9 -potage 9 -r18 9 -eelpout 9 -sherlyn 9 -detaille 9 -highly-popular 9 -demotic 9 -comedy/musical 9 -deneve 9 -six-block 9 -swearword 9 -sujatha 9 -costal 9 -simek 9 -maluleke 9 -#jesuisahmed 9 -moms-to-be 9 -krystall 9 -glitterlips 9 -bachor 9 -jukkasjarvi 9 -plagiarise 9 -republican-run 9 -stroop 9 -rollerblades 9 -30-foot-long 9 -d'afrique 9 -palestine-general 9 -ninety-two 9 -herran 9 -huntspill 9 -sourouzian 9 -darel 9 -sepia-toned 9 -wishfull 9 -unclench 9 -forbis 9 -rosenman 9 -bylot 9 -cockroach-infested 9 -myrlie 9 -wardrobing 9 -boor 9 -todman 9 -tensely 9 -gold-framed 9 -moshling 9 -gypsier 9 -davi 9 -europe-v-facebook 9 -f12berlinetta 9 -tamr 9 -saint-salvy 9 -greenlands 9 -thelin 9 -104.5 9 -caav 9 -1726 9 -srey 9 -1,713 9 -closely-related 9 -kutub 9 -e.b. 9 -scabiei 9 -sipe 9 -waycot 9 -schreefel 9 -dealmakers 9 -geodesy 9 -sholing 9 -mortalities 9 -kawaya 9 -royles 9 -vukcevic 9 -big-busted 9 -napoleone 9 -oxygen-depleted 9 -boghian 9 -13-yard 9 -geile 9 -leehom 9 -weprin 9 -ayuni 9 -21.0 9 -much-trumpeted 9 -psoe 9 -conglomeration 9 -kedah 9 -ex-oil 9 -1,393 9 -circumspection 9 -goram 9 -psb 9 -flatt-blevins 9 -23lbs 9 -three-engine 9 -dog-owning 9 -sun-bleached 9 -mcharg 9 -dausman 9 -flash-mob 9 -mini-league 9 -fraker 9 -maclachlans 9 -sonos 9 -catoctin 9 -2010-13 9 -gitu 9 -escalettes 9 -58.1 9 -under-30 9 -castanares 9 -morroco 9 -rasco 9 -campbellton 9 -zulte-waregem 9 -nienstedt 9 -amboise 9 -anole 9 -scaccia 9 -batchelder 9 -servis 9 -cogle 9 -art-loving 9 -zibo 9 -lucasz 9 -logano 9 -hubschman 9 -@realdonaldtrump 9 -bear-like 9 -electricity-generating 9 -krubera 9 -carella 9 -r16 9 -wmaq 9 -wmap 9 -mzee 9 -slackening 9 -12-team 9 -ostracize 9 -gold-tone 9 -24.75 9 -louisiana-based 9 -brucker 9 -hyperekplexia 9 -batre 9 -obamcare 9 -152.5 9 -baheerathan 9 -chuanfu 9 -tinglin 9 -misidentify 9 -aforesaid 9 -housesitting 9 -book-length 9 -newz 9 -doveton 9 -tailio 9 -zbeeb 9 -723,000 9 -five-planet 9 -padrino 9 -umayr 9 -one-pot 9 -anti-isil 9 -insolvencies 9 -anglos 9 -196mph 9 -tube-web 9 -gloger 9 -dunthorne 9 -apj 9 -locomotor 9 -drawling 9 -well-studied 9 -ratnayake 9 -wndu 9 -stormiest 9 -hardbacks 9 -desormes 9 -flagellate 9 -barnacled 9 -holdenville 9 -sigurgeirsson 9 -tahir-akinyele 9 -emaan 9 -hiitgirl 9 -tony-nominated 9 -quayum 9 -1stfone 9 -zhengyang 9 -one-night-stand 9 -kurbanova 9 -afpak 9 -jamella 9 -abdilal 9 -spertus 9 -localization 9 -one-in-a-billion 9 -tibaudo 9 -puréed 9 -2039 9 -prosecuter 9 -9.93 9 -oil-filled 9 -zuo 9 -lidos 9 -lettley 9 -mayotte 9 -smiley-face 9 -php 9 -phw 9 -orien 9 -aguillar 9 -15.72 9 -marcellin-little 9 -netheravon 9 -regretsy 9 -masaad 9 -bargy 9 -hadnot 9 -akra 9 -standiford 9 -whisperers 9 -sahabi 9 -nihilist 9 -firey 9 -majumder 9 -tilke 9 -23-stone 9 -ciollo 9 -pickbourne 9 -yasiel 9 -53-hour 9 -nva 9 -anadan 9 -qataa 9 -jeffries-tipton 9 -brain-based 9 -belliss 9 -re-kindled 9 -Éclat 9 -stu_fraser 9 -z-man 9 
-kedarnath 9 -penally 9 -21-point 9 -county-wide 9 -3:12 9 -naturalism 9 -schlub 9 -mulbah 9 -aurigema 9 -roughsedge 9 -risperidone 9 -dalmazzi 9 -scheveningen 9 -osseointegration 9 -beget 9 -anwr 9 -gombeau 9 -zulkifli 9 -nicety 9 -salarymen 9 -heterochromia 9 -umbers 9 -zazou 9 -23,000-strong 9 -idoorcam 9 -limas 9 -air-dry 9 -bio-containment 9 -shader 9 -85per 9 -industrial-size 9 -papalii 9 -standardizing 9 -restructures 9 -remescar 9 -arm-waving 9 -skowron 9 -trenka 9 -mossburg 9 -songjiang 9 -chitosan 9 -dfps 9 -jamira 9 -lisovicz 9 -pethers 9 -70mins 9 -stay-focused 9 -mahlon 9 -kleptocracy 9 -bandol 9 -koech 9 -umkhonto 9 -owonla 9 -s-92 9 -asola-fatehpur 9 -esthechoc 9 -narkle 9 -debit/credit 9 -blargan 9 -marcelin 9 -straussy 9 -popularization 9 -tikki 9 -kafirs 9 -gamor 9 -gruevski 9 -shamash 9 -tuttles 9 -flemings 9 -milk-based 9 -grh 9 -microfracture 9 -jahessye 9 -day-release 9 -codifying 9 -al-kabira 9 -kleargear 9 -gazetteer 9 -gittens-bishop 9 -roussow 9 -liquitabs 9 -tarsem 9 -first-strike 9 -frohardt-lane 9 -cappiello 9 -grote 9 -kashour 9 -marghani 9 -edale 9 -once-daily 9 -90,000-a-year 9 -rugby-style 9 -swearwords 9 -astras 9 -bhutta 9 -fair-goers 9 -touchpads 9 -crickley 9 -debited 9 -trami 9 -baliker 9 -taintor 9 -firehole 9 -49th-minute 9 -johnsrud 9 -binladenism 9 -pawa 9 -bone-crushing 9 -highly-qualified 9 -bow-legged 9 -lugli 9 -jetwing 9 -kappahl 9 -then-editor 9 -majer 9 -slayers 9 -two-touch 9 -sex-tape 9 -prescribers 9 -122m 9 -battison 9 -mony 9 -machine-like 9 -al-sakat 9 -moyglare 9 -ftl 9 -costigan 9 -one-seat 9 -rech 9 -2117 9 -sunnah 9 -caterpillar-like 9 -raafat 9 -redinel 9 -julani 9 -53mph 9 -czajkowski 9 -chinasmack 9 -wingo 9 -vande 9 -quickbird 9 -cosme 9 -masiulis 9 -fritillary 9 -alkhamissi 9 -penha 9 -anstruther 9 -6.43 9 -non-japanese 9 -appelhans 9 -patzes 9 -mouthparts 9 -schiergen 9 -moyano 9 -@illumivato 9 -kien 9 -maxinutrition 9 -kazuki 9 -1306 9 -golba 9 -portelli 9 -belgammel 9 -roncal 9 -30-storey 9 -taroom 9 -tomalin 9 -eave 9 -schaer 9 -mariinsky 9 -marriya 9 -sex-mad 9 -throbbed 9 -kmiecik 9 -10.65 9 -helmi 9 -doulas 9 -andary 9 -gloe 9 -prototyped 9 -friendswood 9 -spasmodic 9 -knakal 9 -adli 9 -forcados 9 -anjana 9 -zinged 9 -fitty 9 -energy-producing 9 -hoedspruit 9 -tuanpai 9 -vproud 9 -aerovironment 9 -great-grand 9 -24-day 9 -kisch 9 -less-known 9 -parajet 9 -disney/pixar 9 -2,983 9 -gip 9 -brachycephaly 9 -caudill 9 -broaches 9 -ios6 9 -maltz 9 -fcv 9 -ex-firefighter 9 -datong 9 -sholtis 9 -boardings 9 -waterbeach 9 -united/continental 9 -eco-tourists 9 -schoenborn 9 -rispoli 9 -plixi 9 -margulis 9 -aalund 9 -14-under-par 9 -argyrou 9 -lekeshia 9 -beedle 9 -grabow 9 -gangstas 9 -nivin 9 -diena 9 -2,501 9 -ochieng 9 -eazy-e 9 -repasky 9 -strait-jacket 9 -tonni 9 -1705 9 -1,730 9 -1,738 9 -chebbi 9 -orginal 9 -9.94 9 -bebee 9 -woai 9 -radkey 9 -icily 9 -prasutagus 9 -skitzo 9 -generalov 9 -rathaus 9 -marylisa 9 -outland 9 -skelley 9 -poorly-lit 9 -seven-part 9 -al-alawi 9 -sengal 9 -spurling 9 -terri-ann 9 -vish 9 -agliotti 9 -dot-to-dot 9 -single-dose 9 -record-holders 9 -300sl 9 -f1-style 9 -lanyon 9 -anti-revolutionary 9 -starland 9 -ardoz 9 -sealants 9 -herringswell 9 -then-fiancà 9 -pqa 9 -3,188 9 -aurengzeb 9 -waidbacher 9 -cultivator 9 -parvis 9 -typographic 9 -qalat 9 -billion-member 9 -cadsden 9 -ouedraogo 9 -blood-flow 9 -isoc 9 -meningitidis 9 -chicago-bound 9 -multi-country 9 -delmonte 9 -feedstock 9 -canker 9 -lilly-may 9 -bosquet 9 -ishtiaq 9 -cherrystone 9 -eichler 9 -costumers 9 -guerrieri 9 -kentaro 9 -poggi 
9 -raiky 9 -difference-maker 9 -keam 9 -20-34 9 -weighed-in 9 -4.18 9 -liem 9 -privileging 9 -half-sibling 9 -fandangueira 9 -democractic 9 -slavering 9 -bassenthwaite 9 -cluelessness 9 -bingguo 9 -esq. 9 -guest-edit 9 -karlson 9 -rozonda 9 -6:56 9 -6:54 9 -6:59 9 -epoc 9 -carny 9 -ground-attack 9 -floribeth 9 -arcade-style 9 -fungie 9 -cfda/vogue 9 -pingping 9 -jacole 9 -longet 9 -tyszczuk 9 -sweet-faced 9 -bouguereau 9 -castagnozzi 9 -kemps 9 -kushayb 9 -then-house 9 -bunu 9 -two-bath 9 -jello 9 -petrine 9 -timoshenko 9 -voreqe 9 -pavlichenko 9 -shajul 9 -boroughbridge 9 -extra-strength 9 -ventrella 9 -montejano 9 -sievwright 9 -bindra 9 -arx 9 -neather 9 -hanauma 9 -familiy 9 -dauriac-stoebe 9 -inguinal 9 -1-0aug 9 -lab126 9 -streamwood 9 -ionio 9 -hardest-hitting 9 -explicable 9 -neofytou 9 -27f 9 -smurthwaite 9 -zelboraf 9 -renovates 9 -hostelries 9 -reintroductions 9 -auldhouse 9 -mccraley 9 -chameau 9 -hebun 9 -304,000 9 -tsum 9 -weiher 9 -10-11p 9 -acebal 9 -leib 9 -overfilling 9 -luii 9 -kob-tv 9 -norseman 9 -146th 9 -fuhrerbunker 9 -3doodler 9 -pierre-val 9 -patchway 9 -maslow 9 -well-tolerated 9 -npcs 9 -kyok-sik 9 -tv-watching 9 -fuschini 9 -mcdonad 9 -last-resort 9 -detzner 9 -oximeter 9 -grandpre 9 -xiaogang 9 -emv 9 -40-year-olds 9 -cruysberghs 9 -schweiger 9 -anti-sodomy 9 -league-based 9 -adriaens 9 -pavlica 9 -viravong 9 -108.4 9 -reorganising 9 -autism-related 9 -#feelnoshame 9 -girardo 9 -mangalitsa 9 -dauman 9 -subsidises 9 -cinemascope 9 -yola 9 -buxton-henderson 9 -asmina 9 -protection-from-abuse 9 -tuinen 9 -woelbing 9 -altimas 9 -lobsterman 9 -furosemide 9 -sywak 9 -heeler 9 -defaces 9 -alrayes 9 -chewers 9 -galekovic 9 -kiddos 9 -everglade 9 -vidale 9 -badly-needed 9 -pet-sitting 9 -moakley 9 -scruffier 9 -48k 9 -husayn 9 -lammie 9 -+43 9 -eye-gaze 9 -bystrom 9 -40-member 9 -ludvig 9 -panaghita 9 -lorinczy 9 -accrues 9 -941,000 9 -edzard 9 -big-breasted 9 -non-mexican 9 -46-room 9 -egoism 9 -mahnaz 9 -1,435 9 -jatinder 9 -pierre-henry 9 -sheremet 9 -32-degree 9 -9.28 9 -seat-belts 9 -fernandina 9 -wakeboarder 9 -hornbeck 9 -motaz 9 -kinmel 9 -wnyt 9 -pro-cockfighting 9 -8210 9 -mesto 9 -anami 9 -shwopping 9 -jevtovic 9 -123million 9 -pre-warned 9 -appling 9 -2-pound 9 -overvalue 9 -glasier 9 -poll-takers 9 -'70 9 -aleck 9 -2,126 9 -deerwood 9 -storekeepers 9 -mega-rocket 9 -cevert 9 -defames 9 -light-like 9 -aycc 9 -sobeck 9 -australian-run 9 -ikumelo 9 -ukccis 9 -hedden 9 -wire-tapped 9 -gobblers 9 -jumiati 9 -11.38 9 -f250 9 -porchetta 9 -boardmasters 9 -twic 9 -nunchaku 9 -resturant 9 -daggering 9 -gewanter 9 -frascona 9 -over-regulated 9 -kitemark 9 -institutet 9 -imperials 9 -front-loader 9 -i20 9 -bellport 9 -klinenberg 9 -372,000 9 -barnoldswick 9 -kindertransport 9 -jojoba 9 -now-legendary 9 -poundcafe 9 -pomeranians 9 -al-firansi 9 -kazdin 9 -true-colour 9 -smartphone-based 9 -vanrooyen 9 -unbuilt 9 -soapboxes 9 -cloud-to-ground 9 -remoter 9 -lattera 9 -b-roll 9 -5trillion 9 -http://www.socialstudies.org/standards/strands/ 9 -1516 9 -jandara 9 -player/coach 9 -barnaba 9 -cosmic-impact 9 -2,880 9 -air-gap 9 -grittiness 9 -branly 9 -170,000-a-week 9 -box-shaped 9 -isw 9 -morson 9 -jesse-cole 9 -mucks 9 -florentin 9 -escarpments 9 -anti-paedophile 9 -narberth 9 -poppelsdorf 9 -bascule 9 -saliers 9 -l+r 9 -cryobank 9 -wrap-effect 9 -rigaudis 9 -endobarrier 9 -1:8 9 -10.49 9 -then-league 9 -cl&p 9 -teter 9 -eiland 9 -fibia 9 -abramenkova 9 -paviotti 9 -metes 9 -pennypack 9 -duprey 9 -facepalm 9 -kisai 9 -4,000-word 9 -derb 9 -switched-on 9 -goc 9 
-missile-carrying 9 -sokolsky 9 -snips 9 -jinsha 9 -fininvest 9 -hugues 9 -mounsey 9 -rubalcava 9 -s.w.a.t. 9 -israeli-lebanese 9 -wahaha 9 -monastry 9 -i-road 9 -soleh 9 -sonification 9 -g-a-y 9 -chetwynd 9 -93.4 9 -93.1 9 -sonenshine 9 -feret 9 -hagg 9 -crispello 9 -party-planner 9 -tasmiyah 9 -paraben-free 9 -bus-stop 9 -age-rated 9 -mulanje 9 -gahl 9 -cancelada 9 -crimean-congo 9 -pibworth 9 -featherdale 9 -rosamunde 9 -scrummager 9 -crisis-torn 9 -cresskill 9 -pada 9 -padi 9 -mutallab 9 -jewel-like 9 -obagi 9 -veilstone 9 -attala 9 -trapps 9 -langs 9 -watson-gladwish 9 -stranieri 9 -charborough 9 -sulfaro 9 -naziri 9 -wipeouts 9 -54mins 9 -licit 9 -long-nosed 9 -yoshikazu 9 -hathwar 9 -lairy 9 -7.19 9 -2,147,483,647 9 -diara 9 -mid-16th 9 -96.9 9 -kuperwasser 9 -micro-bloggers 9 -holwell 9 -mum-of-five 9 -christopoulos 9 -caygill 9 -khantitham 9 -annalynne 9 -tefal 9 -efrem 9 -mclucas 9 -swan-horton 9 -osen 9 -bat-eared 9 -unfccc 9 -hairnets 9 -a74 9 -orrca 9 -madalena 9 -three-monthly 9 -four-to-five 9 -ugandan-born 9 -trombino 9 -garaging 9 -wunstell 9 -29mm 9 -radcliffe-on-trent 9 -luxembourger 9 -kiira 9 -schefler 9 -al-tahrir 9 -mainok 9 -kristalina 9 -110-story 9 -eitc 9 -new-borns 9 -treasurys 9 -tucson-area 9 -1,685 9 -no-trespass 9 -kimjongilia 9 -quitman 9 -plott 9 -toolies 9 -jungmann 9 -draughon 9 -guss 9 -under-employment 9 -walkerville 9 -garbeff 9 -throwings 9 -freyr 9 -accommodative 9 -repugnance 9 -first-season 9 -double-century 9 -saied 9 -curalate 9 -toplessness 9 -middow 9 -nizzear 9 -technocracy 9 -non-answer 9 -bandova 9 -jaumann 9 -leistner 9 -reevaluation 9 -6:39 9 -kangerlussuaq 9 -human-produced 9 -lechal 9 -traduced 9 -asiya 9 -thickett 9 -sea-coaling 9 -jindabyne 9 -377ft 9 -amisfield 9 -winker 9 -2,003 9 -2,002 9 -frothed 9 -wolkenstein 9 -humidity-controlled 9 -phoenixville 9 -gearshift 9 -malcolm-jamal 9 -in-world 9 -krajinovic 9 -totall 9 -emerita 9 -showaddywaddy 9 -kyu 9 -lacalle 9 -kushal 9 -synthesise 9 -jadco 9 -ald 9 -shemanovsky 9 -cornaro 9 -severus 9 -allouache 9 -mcglennan 9 -mirpur 9 -upraised 9 -belgian-made 9 -industrializing 9 -acquavella 9 -rcvs 9 -strelzin 9 -crockford 9 -magnitude-2 9 -kozlowska 9 -diffenbaugh 9 -kitcat 9 -lassegue 9 -herm 9 -steel-framed 9 -hypervigilant 9 -callinan 9 -longthorne 9 -brostrom 9 -byung-se 9 -timestamps 9 -gambinos 9 -245-pound 9 -laurencekirk 9 -1,205 9 -winuk 9 -one-dollar 9 -father-of-14 9 -shahrkhani 9 -marsudi 9 -greenville-spartanburg 9 -amalgamate 9 -mungiki 9 -touro 9 -verulam 9 -refusenik 9 -moden 9 -twickets 9 -delforce 9 -omotesando 9 -fourth-straight 9 -360s 9 -talamante 9 -re-establishes 9 -ezi-cig 9 -belly-flopped 9 -2,260 9 -olduvai 9 -two-member 9 -académie 9 -prchal 9 -chipset 9 -kotze 9 -faes 9 -tustian 9 -nepenthes 9 -flight-test 9 -mcshan 9 -akarevuro 9 -bavaro 9 -'36 9 -strigamia 9 -micro-sensors 9 -lightflask 9 -nine-bed 9 -takagi 9 -fisheria 9 -non-starters 9 -benyk 9 -anti-theist 9 -chambri 9 -27cm 9 -bodine 9 -oxygenate 9 -colorings 9 -third-flight 9 -petrenko 9 -phidias 9 -afterword 9 -franklins 9 -muggeridge 9 -738m 9 -worchester 9 -veinwave 9 -salceda 9 -leonhard 9 -115-foot 9 -threatre 9 -anki 9 -whizz-kidz 9 -celli 9 -celle 9 -sangiang 9 -diiorio-sterling 9 -ajaz 9 -stanwyck 9 -stavola 9 -ostrikov 9 -jered 9 -tya 9 -tyc 9 -bau 9 -darning 9 -subjee 9 -collado 9 -supergran 9 -hilder 9 -mccullouch 9 -10,141 9 -totenkopf 9 -re-structuring 9 -isopod 9 -iale 9 -counterweights 9 -white-shirted 9 -adriane 9 -1,181 9 -boryszczuk 9 -disbands 9 -taddei 9 -ansara 9 -irradiate 9 
-beatles-themed 9 -hinchcliffe 9 -rendón 9 -17-months-old 9 -embera 9 -12.02 9 -pitiable 9 -nixzmary 9 -photo-messaging 9 -practicising 9 -lavric 9 -rist 9 -nichopoulos 9 -wheat-free 9 -4-day-old 9 -freckly 9 -cheesemakers 9 -kittila 9 -tarth 9 -nikolett 9 -telangana 9 -terephthalate 9 -dutch-style 9 -friaa 9 -fourest 9 -indoor-outdoor 9 -wharfside 9 -tecla 9 -loll 9 -putumayo 9 -sutheran 9 -mappin 9 -easy-listening 9 -co-incidence 9 -shawal 9 -nonthaburi 9 -nigellissima 9 -swavesey 9 -mullinar 9 -kinnar 9 -protégée 9 -nucleotide 9 -self-designed 9 -guarulhos 9 -salzano 9 -bodymetrics 9 -clamorous 9 -11.17 9 -11.12 9 -11.13 9 -wiist 9 -lapsset 9 -bulgar 9 -beardilizer 9 -fairbridge 9 -al-shoroeiya 9 -hamhung 9 -points-scoring 9 -yediot 9 -anoka 9 -non-decision 9 -biblioteca 9 -canavesio 9 -gülpen 9 -reichling 9 -vallées 9 -reguero 9 -holophone 9 -konstantinov 9 -government-regulated 9 -majal 9 -rogeberg 9 -casemates 9 -bordt 9 -abdal 9 -bare-handed 9 -achtung 9 -chasseur 9 -shafter 9 -white-painted 9 -spirtos 9 -once-in-a 9 -deep-ocean 9 -dogecoins 9 -smartglasses 9 -champi 9 -bromford 9 -civvies 9 -@twitter 9 -oxcarts 9 -20-21 9 -holiday-maker 9 -bisiar 9 -leuckel 9 -23-strong 9 -evader 9 -jambon 9 -jungle-clad 9 -self-closing 9 -banana-shaped 9 -19-inch 9 -five-state 9 -hermopolis 9 -knottingley 9 -w/my 9 -japhet 9 -selvakumar 9 -knockin 9 -tfm 9 -ningshi 9 -navagio 9 -murong 9 -2,320 9 -service-oriented 9 -highlines 9 -lapenta 9 -hirokazu 9 -education-related 9 -goldendoodle 9 -inter-national 9 -ryoji 9 -5,140 9 -caruthers 9 -victoriabeckham.com 9 -palach 9 -dogvacay.com 9 -#happy 9 -shelf-stacking 9 -chipwrecked 9 -go-it-alone 9 -karpati 9 -double-yolker 9 -calumny 9 -slappers 9 -whitland 9 -hot-house 9 -decked-out 9 -underskin 9 -kathimerini 9 -elsternwick 9 -warrior-king 9 -talina 9 -asplund 9 -0.005 9 -six-axis 9 -olcott 9 -senakiewicz 9 -ex-glamour 9 -slobodianik 9 -kirkbie 9 -iordanskaya 9 -ellekhlifi 9 -16-11 9 -16-15 9 -uarm 9 -melannie 9 -fox23 9 -nazi-like 9 -pietrzyk 9 -taylar 9 -itemize 9 -unshackle 9 -milanello 9 -misiu 9 -tamazashvili 9 -dallol 9 -fetlock 9 -liquid-cooled 9 -andresol 9 -saathoff 9 -medulla 9 -calabresi 9 -schuppan 9 -kiting 9 -bradying 9 -moondance 9 -crestline 9 -carrick-a-rede 9 -katakai 9 -greenmount 9 -ugnano 9 -flea-infested 9 -sudanese-born 9 -36-month 9 -prison-style 9 -waterwheel 9 -120-member 9 -coloane 9 -holetown 9 -feelisch 9 -niloufer 9 -reanalysis 9 -granata 9 -wlodarski 9 -chryslers 9 -1742 9 -sugimoto 9 -bisharat 9 -bundgaard 9 -alsina 9 -melsky 9 -geetha 9 -ums 9 -spoonable 9 -sinama 9 -ribas 9 -intermixed 9 -fractals 9 -resizing 9 -scrap-metal 9 -midlanders 9 -hagans 9 -wonderstrike 9 -arruebarrena 9 -asanas 9 -one-note 9 -odorous 9 -pycon 9 -visvanathan 9 -76.3 9 -cyberlocker 9 -american-run 9 -kozel 9 -festerman 9 -thien 9 -lading 9 -fiordland 9 -choksy 9 -exocets 9 -studd 9 -4258 9 -bridleway 9 -moulsdale 9 -epernay 9 -one-another 9 -trans-neptunian 9 -recruitments 9 -1,917 9 -pansiri 9 -moring 9 -re-sentence 9 -climate-changing 9 -oiliness 9 -neubacher 9 -easygym 9 -nit-picking 9 -anthamatten 9 -2230 9 -omekongo 9 -saltpeter 9 -mercedes-powered 9 -youcam 9 -daudia 9 -alborz 9 -bertinelli 9 -elfering 9 -butterfly-shaped 9 -108-year 9 -abadia 9 -mikoliunas 9 -landbanking 9 -prusac 9 -gyrfalcon 9 -ricasa 9 -medaeng 9 -early-life 9 -pelkie 9 -necaxa 9 -danieli 9 -mucker 9 -union-busting 9 -orifices 9 -1,593 9 -nem 9 -neu 9 -news-tribune 9 -bantu 9 -pratama 9 -flavourless 9 -oboist 9 -403,000 9 -nerva 9 -abari 9 -fels 9 -wonderstruck 9 
-6:19 9 -tastemaker 9 -carra 9 -neuville 9 -marauds 9 -neta 9 -6,000-word 9 -bilsland 9 -pot-related 9 -surfactants 9 -timestamp 9 -malet 9 -maleh 9 -malen 9 -schurr 9 -willmott-brown 9 -possessor 9 -muslim-only 9 -waker 9 -waked 9 -mulvihill 9 -finegold 9 -védrines 9 -capkin 9 -pentawere 9 -choquequirao 9 -hato 9 -palm-lined 9 -saulo 9 -chiad 9 -ulli 9 -vane-tempest-stewart 9 -mundos 9 -freeze-up 9 -scrimshaw 9 -softworks 9 -ane 9 -blasphemers 9 -14-fold 9 -48-mile 9 -mcgilligan 9 --170 9 -um!brands 9 -oil-covered 9 -n'gog 9 -sivarajah 9 -temping 9 -statment 9 -front-left 9 -o'boyle 9 -teoh 9 -grokhovsky 9 -berbick 9 -vashisht 9 -papendick 9 -british-controlled 9 -kayak.co.uk 9 -sandgate 9 -soosalu 9 -mogridge 9 -fette 9 -respectably 9 -snoozy 9 -ithyphallic 9 -niculae 9 -well-constructed 9 -marban 9 -diontae 9 -shijaiyah 9 -weibu 9 -zigong 9 -kahli 9 -junco 9 -13.56 9 -makau 9 -cofounders 9 -pbt 9 -maryanna 9 -888,000 9 -dangerous-looking 9 -4.98 9 -cazenove 9 -kassir 9 -30-minutes 9 -at72 9 -exenatide 9 -bennell-smith 9 -laggards 9 -hand-carried 9 -manimal 9 -tishara 9 -lindeberg 9 -merkle 9 -suprachiasmatic 9 -pen-name 9 -g-tech 9 -84-year 9 -maruyama 9 -life-risking 9 -adolfsson 9 -disinclination 9 -hamze 9 -eighths 9 -single-serving 9 -cernuda 9 -al-ibadi 9 -wing-davey 9 -eight-course 9 -mersenne 9 -washakie 9 -hogrogian 9 -tér 9 -flip-book 9 -67,060 9 -anagain 9 -41cm 9 -mirwais 9 -grubman 9 -face-time 9 -bcl 9 -dispossessing 9 -hernandez-orta 9 -natch 9 -frentzen 9 -then-princess 9 -19.25 9 -ibrihim 9 -al-jaabari 9 -werschler 9 -unobserved 9 -tajoura 9 -shadwick 9 -monsalvatge 9 -boxun 9 -hirtzel 9 -suffolk-based 9 -450-acre 9 -peterstone 9 -kolorov 9 -12.21 9 -12.26 9 -schultze 9 -pouryan 9 -flus 9 -rahmaty 9 -pga.com 9 -near-by 9 -riddlesden 9 -ix-xini 9 -valerius 9 -arcos 9 -batsuit 9 -cantlay 9 -nickel-cadmium 9 -apsara 9 -ashville 9 -satirise 9 -crtv 9 -shachtman 9 -innuendoes 9 -fatcatinthehat 9 -colo-colo 9 -rso 9 -#bendgate 9 -figaniak 9 -ellis-van 9 -tocqueville 9 -haole 9 -marquail 9 -gte 9 -rayer 9 -60-bed 9 -black-backed 9 -loddington 9 -trivett 9 -single-wing 9 -tinnie 9 -dipjar 9 -7:23 9 -denisova 9 -beaudesert 9 -poisioning 9 -koufax 9 -schnur 9 -mbegu 9 -camis 9 -toporoff 9 -23per 9 -nidaa 9 -ligotti 9 -badged 9 -hashimzada 9 -busman 9 -braggies 9 -holoprosencephaly 9 -pritchard-jones 9 -providencia 9 -re-loaded 9 -kebbi 9 -konashenkov 9 -rohullah 9 -gas-fueled 9 -shupe 9 -dissociation 9 -lungu 9 -mindaugas 9 -large-calibre 9 -newly-rich 9 -d.o. 
9 -snow-hit 9 -squibs 9 -17-7 9 -fentinol 9 -watermelon-sized 9 -snowsuit 9 -ddp 9 -totp 9 -prep-school 9 -collaborationist 9 -gasthaus 9 -paratroop 9 -cotopaxi 9 -25-20 9 --39 9 -dc-cik 9 --31 9 -werdum 9 -chole 9 -abdulkarim 9 -midwood 9 -2,745 9 -peruses 9 -sanfino 9 -1,000-word 9 -creveld 9 -otas 9 -talbert 9 -entertainingly 9 -auster 9 -sayida 9 -200-lb 9 -nshimyumuremyi 9 -ehpd 9 -kwambura 9 -demare 9 -a-20 9 -vedovotto 9 -no-gays 9 -irom 9 -contepomi 9 -d'abernon 9 -affutu 9 -36224 9 -1976-1983 9 -pack-a-day 9 -teleported 9 -wellston 9 -overstimulate 9 -chandrasekaran 9 -flash-forward 9 -porthmeor 9 -miltoncross 9 -hurricane-ravaged 9 -rope-like 9 -ozell 9 -sterkfontein 9 -mearth 9 -martinolich 9 -halferty 9 -selita 9 -wath 9 -legally-owned 9 -kloof 9 -hydroview 9 -swifty 9 -fromagerie 9 -hatty 9 -ingleburn 9 -flat-packed 9 -metre-wide 9 -soteros 9 -red-and-blue 9 -15,750 9 -self-monitor 9 -treepeople 9 -cageless 9 -18.00 9 -citizenm 9 -melisandre 9 -bjog 9 -obliviousness 9 -putson 9 -5.68 9 -stamatin 9 -bareilly 9 -rinschler 9 -six-alarm 9 -aboutarik 9 -gomez-pomar 9 -modiface 9 -ismailis 9 -sarah-jayne 9 -usie 9 -khatra 9 -65-70 9 -mazariego 9 -azikiwe 9 -machester 9 -thirkell 9 -gci 9 -jolin 9 -greengart 9 -daligault 9 -soloed 9 -sarte 9 -rhsc 9 -ten-storey 9 -orb-weaving 9 -sonography 9 -22km 9 -conflict-related 9 -thalys 9 -mokhles 9 -unzips 9 -eliya 9 -newahun 9 -bamenda 9 -beere 9 -cash-and-carry 9 -money-lending 9 -squally 9 -belt-fed 9 -bonsafo 9 -chamani 9 -scampie 9 -gdps 9 -mandira 9 -nigris 9 -ranee 9 -globulin 9 -koloshi 9 -castigates 9 -départ 9 -airboats 9 -aduke 9 -crang 9 -lathmar 9 -winterhart 9 -zutell 9 -inghilleri 9 -maqbool 9 -chambéry 9 -smedingoff 9 -lancy 9 -googlenet 9 -harassmap 9 -murr-ma 9 -duale 9 -ibirapuera 9 -mccalebb 9 -handscomb 9 -varasano 9 -acos 9 -isobars 9 -ibisworld 9 -nyall 9 -al-shayah 9 -boulger 9 -poltrona 9 -naseby 9 -hayan 9 -marginalising 9 -bo01 9 -shanghaiist.com 9 -olbrich 9 -navlet 9 -khazana 9 -braais 9 -zolbert 9 -162,500 9 -auto-tuned 9 -wallah 9 -priestman 9 -scofidio 9 -temporally 9 -groulx 9 -1,646 9 -selke 9 -opti 9 -bifurcation 9 -cartoonishly 9 -luatua 9 -rajkot 9 -shurvell 9 -red-glowing 9 -lazic 9 -nullity 9 -ceibal 9 -karl-theodor 9 -then-england 9 -obligates 9 -righties 9 -outsourcery 9 -20-50 9 -light-flyweight 9 -robotham 9 -slipmatt 9 -geeing 9 -avaricious 9 -amundson 9 -thence 9 -stockholmers 9 -veeder 9 -recants 9 -brignull 9 -9:59 9 -noori 9 -al-keeb 9 -weinan 9 -barenboim 9 -attarian 9 -cazenave 9 -lutetia 9 -ritualistically 9 -67th-minute 9 -253mph 9 -chowdury 9 -108-year-old 9 -macmahon 9 -gem-set 9 -australian-first 9 -sweger 9 -kattenhorn 9 -yachtmaster 9 -erskine-hill 9 -shirvington 9 -netrebko 9 -achan 9 -internationally-recognized 9 -façades 9 -santiaguito 9 -pengpeng 9 -256gb 9 -eggy 9 -138ft 9 -echosounder 9 -beauharnais 9 -schrage 9 -boneyards 9 -non-coeliac 9 -rugby-loving 9 -shutoff 9 -robot-like 9 -ten-pin 9 -solar-panel 9 -gharavi 9 -boozed 9 -screwy 9 -loanhead 9 -rehteah 9 -mulpuru 9 -non-cash 9 -oeste 9 -geppert 9 -38st 9 -woodcarver 9 -surbey 9 -m&p 9 -malloch-brown 9 -leather-trimmed 9 -gaile 9 -sathya 9 -cuba-florida 9 -4-pound 9 -renovator 9 -keisoglu 9 -4,145 9 -106mph 9 -tillerson 9 -kamaz 9 -70-meter 9 -n.s.a. 
9 -street-fighting 9 -webdale 9 -wamu 9 -karth 9 -scansoriopteryx 9 -storeman 9 -mesh-wielding 9 -vostock 9 -zip-lock 9 -wiliam 9 -headly 9 -sandars 9 -serafin 9 -ahmimed 9 -dustman 9 -oroumieh 9 -c&t 9 -vezina 9 -13.75 9 -ophadell 9 -self-parody 9 -delawareonline 9 -ghost-hunting 9 -npia 9 -vaccinators 9 -setting-up 9 -ohl 9 -ohm 9 -executable 9 -bellifemine 9 -cinereous 9 -polmont 9 -cretaceous-tertiary 9 -seán 9 -pse&g 9 -sumiati 9 -166m 9 -helvin 9 -1668 9 -mealy-mouthed 9 -especial 9 -vmd 9 -hand-shaped 9 -mannamead 9 -croitor 9 -lafarge 9 -kantoh 9 -garbowsky 9 -beutlers 9 -perran 9 -sarvas 9 -dyer-lake 9 -borosak 9 -adopt-a-highway 9 -115lbs 9 -sipes 9 -21-12 9 -bowlful 9 -boselli 9 -vassileva 9 -ahimsa 9 -nuseirat 9 -stagecoaches 9 -carwin 9 -mulago 9 -kaiseki 9 -lyari 9 -taque 9 -rosinlof 9 -52-0 9 -quillan 9 -reverse-engineered 9 -27-25 9 -#stoptheparade 9 -gloried 9 -flogger 9 -mega-watt 9 -h1-b 9 -sankar 9 -biumi 9 -serhat 9 -sarnies 9 -desirous 9 -peramaki 9 -lalish 9 -pagakis 9 -21,000-a-year 9 -schonebelen 9 -spottorno 9 -ixtepec 9 -1980-81 9 -kaila 9 -disavows 9 -waskiewicz 9 -1954-55 9 -gulberg 9 -voglina 9 -antifoaming 9 -wieden 9 -zentrum 9 -chest-length 9 -enas 9 -ndebele 9 -ehle 9 -yahiaoui 9 -dayne 9 -kf 9 -sandefur 9 -leap-frogging 9 -scrim 9 -moorcrest 9 -hyoun 9 -macrame 9 -aiws 9 -240lbs 9 -voile 9 -piñatas 9 -jailene 9 -roofies 9 -quinteros 9 -debruge 9 -nailed-on 9 -protuberance 9 -over-claimed 9 -55.45 9 -geep 9 -demagogues 9 -sunit 9 -chlorate 9 -outmanoeuvred 9 -30-a-day 9 -hirayama 9 -flutist 9 -one-pieces 9 -psychos 9 -evertonfc.com 9 -coprolites 9 -bacai 9 -inaugurating 9 -probationers 9 -pronatura 9 -500,000-strong 9 -tokes 9 -abuchian 9 -morfa 9 -pedroscope 9 -panjabi 9 -dissapearance 9 -slota 9 -slote 9 -behind-the 9 -deterministic 9 -coggins 9 -fruitier 9 -no3 9 -sherina 9 -appg 9 -vulcanology 9 -katara 9 -joberate 9 -smeltz 9 -overextend 9 -fozard 9 -belissimo 9 -sheran 9 -wrangel 9 -zodiacal 9 -warabe 9 -greenwhich 9 -sanook 9 -serfs 9 -waitz 9 -diggity 9 -steratore 9 -humouring 9 -uncirculated 9 -atencio 9 -ameeta 9 -customizes 9 -mayura 9 -@dan_down 9 -polyphony 9 -mckenny 9 -ajani 9 -19km/h 9 -jtrig 9 -filthiest 9 -ex-sheffield 9 -beaurain 9 -bear-baiting 9 -wset 9 -tiesler 9 -denegri 9 -4.5-hour 9 -sabq 9 -g-men 9 -public-service 9 -durston 9 -searcys 9 -ranchi 9 -threadgold 9 -112.5 9 -32-day 9 -white-tiled 9 -mind-control 9 -eleonore 9 -4:58 9 -rabbiting 9 -merrit 9 -fabro 9 -guanacaste 9 -zeynalov 9 -time-strapped 9 -bulletstorm 9 -folch 9 -riyanti 9 -104-story 9 -g21 9 -fischetti 9 -bluebonnets 9 -ilinykh 9 -mariyam 9 -oderinwale 9 -dipu 9 -bank-notes 9 -aeneid 9 -healesville 9 -fister 9 -davida 9 -rasa 9 -tamudo 9 -holmul 9 -ever-presents 9 -dollie 9 -jains 9 -jaina 9 -bodelwyddan 9 -altachiara 9 -shia-sunni 9 -allahs 9 -crin 9 -nothum 9 -masriya 9 -khaldun 9 -50-70 9 -sinfonia 9 -monkland 9 -post-midnight 9 -nazione 9 -uncultured 9 -franconia 9 -drew-ashlyn 9 -annalisa 9 -kemnal 9 -vouchercloud.com 9 -ozer 9 -geed 9 -haridwar 9 -perdoni 9 -jieddo 9 -arguement 9 -jaydee 9 -elmos 9 -glia 9 -supraglacial 9 -leguizamon 9 -divisible 9 -january-february 9 -japanese-built 9 -valyiskaya 9 -50-years 9 -al-yaqoubi 9 -lll 9 -iah 9 -re-assessment 9 -golinger 9 -meygen 9 -1,655 9 -vese 9 -grappa 9 -kawamura 9 -garr 9 -garp 9 -shugg 9 -ristovski 9 -20ft-deep 9 -stubbly 9 -sukhoi-25 9 -djurgardens 9 -insta-model 9 -super-imposed 9 -invulnerability 9 -shakara 9 -yudy 9 -modiak 9 -hosers 9 -clifton-brown 9 -steamrolling 9 -hyppia 9 -fertilize 9 -bezanson 9 
-berdimuhamedow 9 -king-in-waiting 9 -gryaznevich 9 -ticket-buying 9 -1.5-acre 9 -ribes 9 -huu 9 -askey 9 -romie 9 -miniaturisation 9 -kwik-e-mart 9 -:p 9 -becontree 9 -kingsolver 9 -nanometre 9 -fibulae 9 -20miles 9 -bewl 9 -kodomoroid 9 -16-person 9 -superpipe 9 -e-smoking 9 -tory-ukip 9 -sherpao 9 -maor 9 -pplkpr 9 -entangling 9 -dishong 9 -harads 9 -iñigo 9 -sometimes-violent 9 -munyon 9 -mcausland 9 -winkleigh 9 -15-day-old 9 -zweibrucken 9 -sones 9 -bilalov 9 -16-7 9 -16-4 9 -1,620 9 -1,622 9 -five-bathroom 9 -viso 9 -aurigny 9 -andino 9 -egede 9 -abqaiq 9 -havill 9 -330m 9 -speights 9 -standard-definition 9 -blagoveshchensk 9 -al-zahawi 9 -hanisch 9 -weibrecht 9 -cockapoo 9 -cladek 9 -mahajan 9 -fire-safety 9 -spaser 9 -rousselet 9 -steffes 9 -10-seat 9 -santhiago 9 -shonk 9 -white/black 9 -fales 9 -lucite 9 -fridriksson 9 -democracy-building 9 -dress-code 9 -glatter 9 -sauven 9 -p.l. 9 -penstone 9 -ledburn 9 -loincloth 9 -d-calif 9 -military-inspired 9 -mais 9 -skowhegan 9 -permissibility 9 -flesh-toned 9 -matryoshka 9 -arevelo 9 -enthoven 9 -barreleye 9 -bullguard 9 -burda 9 -go-lucky 9 -fidelino 9 -leavened 9 -@thetwofairies 9 -sehong 9 -55cm 9 -camshaft 9 -lyu 9 -is-controlled 9 -behoove 9 -whufc.com 9 -water-skier 9 -gunnlaugsson 9 -bodi 9 -masinagudi 9 -verlon 9 -ksu 9 -self-financing 9 -137.43 9 -noiva 9 -mantelpieces 9 -ogunkoya 9 -fetisov 9 -maurizia 9 -hillyard 9 -wantee 9 -vierra 9 -paskett 9 -mehravar 9 -geo-located 9 -beeso 9 -breiter 9 -ammah 9 -miskicked 9 -conformation 9 -ruch 9 -pro-breastfeeding 9 -cleveland-hopkins 9 -113kg 9 -anapa 9 -lippert/heilshorn 9 -alcs 9 -stanthorpe 9 -cupecoy 9 -non-athletes 9 -10ft-long 9 -definately 9 -lashkar-e 9 -krarup 9 -meridians 9 -heliopause 9 -dolphinholme 9 -evelyne 9 -okeanos 9 -mcghaw 9 -red-velvet 9 -qilu 9 -wlox 9 -endres 9 -deadsocial 9 -malouf 9 -voldheim 9 -jihottie 9 -omokoh 9 -korbyn 9 -towball 9 -20,000-plus 9 -mangi 9 -roshid 9 -cakehead 9 -36-page 9 -jahr 9 -etlinger 9 -homeroom 9 -91f 9 -sirohi 9 -leisure.com 9 -hairiest 9 -dad-of-four 9 -annaleise 9 -bike-riding 9 -polysaccharides 9 -istruct 9 -selo 9 -frontpage 9 -lawdar 9 -winther 9 -ockenden 9 -shahwan 9 -horno 9 -misael 9 -gamet 9 -aneeta 9 -carrousel 9 -sexualizes 9 -onur 9 -smoochy 9 -fragos 9 -orrong 9 -stargel 9 -bruco 9 -hoelting 9 -talwar 9 -rydinghurst 9 -peschke 9 -pulitzers 9 -beezow 9 -banga 9 -katan 9 -félix 9 -tweeze 9 -moutoussamy 9 -mrn 9 -shetye 9 -zwart 9 -athanasiou 9 -tax-dodgers 9 -see-sawing 9 -gyimah 9 -alvictus 9 -korody 9 -zyrees 9 -sélys 9 -kite-flying 9 -five-pointer 9 -electro-fishing 9 -wilkinson-tancock 9 -aurimas 9 -norridgewock 9 -budds 9 -wadding 9 -videalert 9 -1573 9 -flowrider 9 -nut-handling 9 -flatliner 9 -slackened 9 -ipad-style 9 -harpeet 9 -bellworthy 9 -ziesel 9 -3:13 9 -kaplanyan 9 -hemlington 9 -feigns 9 -davinci 9 -disentanglement 9 -lueders 9 -corridos 9 -aiya 9 -overfeed 9 -allievi 9 -m33 9 -spam-fighting 9 -clime 9 -aarushi 9 -whimpered 9 -lytle 9 -three-in-one 9 -esar 9 -eifuku 9 -141.6 9 -palfest 9 -auxilium 9 -szilvia 9 -pasek 9 -spittal 9 -by-stander 9 -30-12 9 -bathmat 9 -559,000 9 -baled 9 -uxua 9 -aligarh 9 -kapello 9 -dreamin 9 -banham 9 -hsueh 9 -malocclusion 9 -nickless 9 -plainmoor 9 -alexandalexa.com 9 -markisa 9 -mini-neptune 9 -roust 9 -jasem 9 -gursahani 9 -activator 9 -decino 9 -annest 9 -t-they 9 -snoopybabe 9 -kasang 9 -bm-21 9 -aguirre-sacasa 9 -oum 9 -pagliuca 9 -desano 9 -grundboeck 9 -boutcher 9 -tindyebwa 9 -roughhousing 9 -miliary 9 -singalongs 9 -trampolinist 9 -community-acquired 9 
-[thousands of removed lines from a machine-generated word-frequency vocabulary file: one "token count" entry per line, counts of 9 followed by counts of 8]
8 -vido 8 -vide 8 -vids 8 -akif 8 -miscommunications 8 -shapiros 8 -ndong 8 -450billion 8 -franey 8 -hyper-speed 8 -avenal 8 -tyninghame 8 -photo-shopping 8 -niblock 8 -wellstar 8 -25-piece 8 -nicolosi 8 -15-months 8 -germa 8 -cyndy 8 -spaceship-style 8 -clothianidin 8 -wedekind 8 -mobilology 8 -pukhov 8 -zero-fat 8 -kapow 8 -2020vision 8 -miserable-looking 8 -tekkers 8 -buzios 8 -horrisberger 8 -caipirinhas 8 -foulbrood 8 -abbrev 8 -overscheduled 8 -iic 8 -feebleness 8 -re-assure 8 -scott-falber 8 -kibriah 8 -kepley 8 -myat 8 -powergrid 8 -diduca 8 -corkery 8 -burgoo 8 -made-over 8 -chewits 8 -sex-changing 8 -temkin 8 -trainer-coach 8 -father/daughter 8 -bundled-up 8 -vatan 8 -equivocated 8 -weaklings 8 -zulily 8 -2,454 8 -aa/populus 8 -pop-cultural 8 -carnelian 8 -budkov 8 -guallpa 8 -nli 8 -passangers 8 -victim-impact 8 -be11 8 -pbgc 8 -cornum 8 -pompeu 8 -ikiebe 8 -mushrow 8 -bio-based 8 -kefauver 8 -undersides 8 -khaddam 8 -innerleithen 8 -spartakas 8 -claviere 8 -130.9 8 -cosmographia 8 -phatically 8 -chairlifts 8 -nambiar 8 -non-diabetic 8 -neo-luddite 8 -railena 8 -hajrah 8 -ex-chicago 8 -akian 8 -czarue 8 -@wdjstraw 8 -tailcoats 8 -edenbrow 8 -river-like 8 -wyle 8 -semi-professionally 8 -uncharitable 8 -teichrob 8 -pushpin 8 -immune-suppressing 8 -escutcheon 8 -réunion 8 -odometers 8 -denulder 8 -non-exclusive 8 -sayler 8 -mcculley 8 -35bn 8 -cfos 8 -ramjeet 8 -harkening 8 -nine-deck 8 -011-52/624 8 -rakoczy 8 -muenchow 8 -audetat 8 -half-dollar 8 -,14 8 -overpromising 8 -kandahari 8 -russian-french 8 -liysa 8 -craniectomy 8 -honestjohn.co.uk 8 -disturbers 8 -publicly-available 8 -omi 8 -isopropanol 8 -musaqaleh 8 -pointlessness 8 -chavvy 8 -trevathan 8 -hoggers 8 -heavy-looking 8 -water-treatment 8 -unwatchable 8 -tripindex 8 -chisako 8 -swiss-mediated 8 -chorzow 8 -pruvedenti 8 -non-fans 8 -ak-47-wielding 8 -transfixing 8 -2:17 8 -586,000 8 -100metre 8 -hurston 8 -faren 8 -dehavilland 8 -bashkiria 8 -schlock 8 -reviva 8 -sharjeel 8 -jta 8 -lezley 8 -escitalopram 8 -kiersten 8 -lemann 8 -carders 8 -grab-and-go 8 -race-winner 8 -croc-infested 8 -antiquaries 8 -amarildo 8 -wageuzi 8 -mini-sub 8 -take-ons 8 -zuffi 8 -francophile 8 -mysterious-looking 8 -pettifer 8 -superintelligent 8 -plages 8 -low-turnout 8 -roid 8 -rois 8 -artegon 8 -herpetology 8 -v.v.s. 
8 -1991-1994 8 -electrically-charged 8 -gillnet 8 -yavala 8 -akimoto 8 -gengler 8 -#marriageequality 8 -deskins 8 -energy-drink 8 -foredeck 8 -pastafarian 8 -8,530 8 -pre-load 8 -nedal 8 -nedas 8 -eucerin 8 -scarpati 8 -piccoli 8 -russum 8 -standards-based 8 -kook 8 -simpsonville 8 -@realtracymorgan 8 -al-zahra 8 -corse 8 -aquarists 8 -levos 8 -upendo 8 -tibenham 8 -sacan 8 -lafemina 8 -daphna 8 -aloke 8 -sclera 8 -pamberi 8 -tgif 8 -alyza 8 -wgcl-tv 8 -heraldry 8 -hospenthal 8 -datawind 8 -fratoni 8 -4.94 8 -sadomasochist 8 -gnostic 8 -kossuth 8 -1,553 8 -novell 8 -iannitelli 8 -caborn-waterfield 8 -timeform 8 -liversedge 8 -yueng 8 -ryden 8 -woerthersee 8 -suspensory 8 -chugach 8 -suboptimal 8 -kepler-444 8 -ofori 8 -self-ruled 8 -kluber 8 -palce 8 -afreeca 8 -llerenas 8 -apps4africa 8 -non-sterile 8 -no-parking 8 -dahmane 8 -royalcollection.org.uk 8 -league-wide 8 -panteliadis 8 -salsify 8 -flattest 8 -clermont-ferrand 8 -1,152 8 -fuel-saving 8 -nyasa 8 -unacceptability 8 -domotor 8 -streptococci 8 -rutler 8 -micklegate 8 -blabbing 8 -metsaranta 8 -shameen 8 -bomb-like 8 -planet-wide 8 -internalise 8 -malcolm-hutton 8 -ahsoak 8 -khone 8 -tchuto 8 -istat 8 -stiff-person 8 -bigend 8 -rypien 8 -markopoulos 8 -eilah 8 -tiririca 8 -familar 8 -saint-vil 8 -backplate 8 -4:21 8 -4:28 8 -idiot-proof 8 -rock-like 8 -barcenas 8 -five-letter 8 -oleander 8 -maslowskaya 8 -alsh 8 -redshanks 8 -mujwa 8 -quebecers 8 -jovana 8 -ankersen 8 -faruque 8 -tskhadadze 8 -attachable 8 -buckenham 8 -soon-taek 8 -abandi 8 -skyliners 8 -majembeni 8 -alevi 8 -sun-sentinal 8 -contrada 8 -contrade 8 -apoptosis 8 -foxwell 8 -ujjwal 8 -claymation 8 -goudier 8 -groes 8 -teargassed 8 -adjamian 8 -ground-dwelling 8 -under-24s 8 -vallebuona 8 -aeman 8 -14 1/2 8 -unconstitutionality 8 -sorrowfully 8 -stanley-dougherty 8 -yaen-koen 8 -super-light 8 -kurnell 8 -zaripov 8 -money-related 8 -europa-park 8 -abbasid 8 -dine-in 8 -hulley 8 -intralace 8 -amrullah 8 -leafield-based 8 -claudy 8 -freebase 8 -22042 8 -yangshuo 8 -viren 8 -mccombs 8 -taxi-hiring 8 -hemangiomas 8 -alava 8 -dayron 8 -kopa 8 -yuksel 8 -nay-nay 8 -balmedie 8 -alexandrino 8 -near-live 8 -qadar 8 -nine-second 8 -roselyne 8 -filles 8 -27in 8 -steffel 8 -charrington 8 -showstudio 8 -injury-blighted 8 -laboratory-made 8 -cilwendeg 8 -ravenblade 8 -ilija 8 -pathfinders 8 -kirschbaum 8 -zhaoyuan 8 -schoolbook 8 -domino-like 8 -poledica 8 -henwick 8 -1:01 8 -1:02 8 -ejectives 8 -cuv 8 -cuc 8 -redipuglia 8 -bowlsbey 8 -weatherbys 8 -134.7 8 -water-stressed 8 -tsi 8 -wargames 8 -kttv 8 -hayama 8 -daichi 8 -headguard 8 -tamaruke 8 -oetken 8 -abbess 8 -milonas 8 -tomson 8 -dazzler 8 -3:17 8 -spdt 8 -goffey 8 -undeservedly 8 -cut-backs 8 -fuel-injected 8 -non-crime 8 -atlanta-journal 8 -edan 8 -lamptey 8 -co-guardianship 8 -204mph 8 -over-extended 8 -roerdink 8 -just-concluded 8 -ramarajaha 8 -scadpads 8 -karate-kicked 8 -.04 8 -kosmicki 8 -conservative-themed 8 -lecaroz 8 -cedres 8 -arcata 8 -howle 8 -lobjoie 8 -asco 8 -bainbridge-flor 8 -1067 8 -telavi 8 -mainegeneral 8 -brightwells 8 -shondaland 8 -goldstaub 8 -bay-area 8 -fitzhenry 8 -omondi 8 -majorettes 8 -wittgrove 8 -pactual 8 -screwup 8 -testamentary 8 -floorplans 8 -wuli 8 -shippon 8 -cormoran 8 -menna 8 -coindesk 8 -monograms 8 -cristia 8 -beckstead 8 -kazlausks 8 -miyama 8 -blank-firing 8 -sarangani 8 -steel-making 8 -900-a-month 8 -marriotts 8 -carmaggedon 8 -aquaduck 8 -kurtzberg 8 -morton-hooper 8 -three-michelin 8 -safarali 8 -p/2013 8 -esteros 8 -1,039 8 -1,034 8 -campbell-tiech 8 -skybridge 8 
-interdictions 8 -hrabowski 8 -toxicant 8 -shoreside 8 -pussybow 8 -cpi-w 8 -lybrel 8 -prospekt 8 -ac360 8 -28-17 8 -23.07 8 -calabash 8 -shale-gas 8 -catafalque 8 -victorinox 8 -sabillon 8 -panduwinata 8 -schutters 8 -fedorok 8 -fedorov 8 -800s 8 -male/female 8 -balzac 8 -engeldinger 8 -most-anticipated 8 -relle 8 -qeiyafa 8 -winterset 8 -prodanovic 8 -zizou 8 -sirine 8 -shipka 8 -yume 8 -montaigu 8 -modfather 8 -venzo 8 -42-35 8 -amurri 8 -yoshinari 8 -sheinwald 8 -mayo-smith 8 -parassols 8 -cartama 8 -holbox 8 -1999-2004 8 -brinkley-cook 8 -riobe 8 -sape 8 -anier 8 -channel-surfing 8 -busines 8 -shirota 8 -damonte 8 -akwa 8 -mantaring 8 -halbower 8 -probationer 8 -cypriot-registered 8 -rodion 8 -roubaud 8 -sixth-century 8 -pro-slavery 8 -peppercorns 8 -bulkeley 8 -sapphic 8 -non-celebrity 8 -swishy 8 -hcmc 8 -canlis 8 -samurai-style 8 -balanescu 8 -547,000 8 -37ft 8 -lhx1 8 -hellotel 8 -ex-emmerdale 8 -dcps 8 -cercle 8 -reinecke 8 -zybutz 8 -brande 8 -eyeshot 8 -endows 8 -#lebroning 8 -neandertals 8 -mucci 8 -dinner-table 8 -80metres 8 -potala 8 -hard-top 8 -ill-educated 8 -saincome 8 -tishreen 8 -reincarnations 8 -chandi 8 -marianos 8 -stemwinder 8 -400,00 8 -117,500 8 -trakdot 8 -potler 8 -plectrumelectrum 8 -travel-size 8 -kennford 8 -maddern 8 -caprile 8 -antonov-26 8 -roediger 8 -less-than-ideal 8 -rossiiskaya 8 -smuggest 8 -felkel 8 -mickleover 8 -lgb&t 8 -precobs 8 -gigawatt 8 -190g 8 -redhouse 8 -seargent 8 -oscar-winners 8 -34-mile 8 -hickmans 8 -wiercioch 8 -cup/europa 8 -pz 8 -ducusin 8 -azita 8 -gluts 8 -mafa 8 -sheumack 8 -biskie 8 -shahrzad 8 -gamonal 8 -12th-floor 8 -meghrabi 8 -sportsnation 8 -gelati 8 -iurie 8 -friskier 8 -17th-floor 8 -niccole 8 -hugley 8 -14-9 8 -mogg 8 -comoro 8 -seo77 8 -misson 8 -sablon 8 -bagana 8 -bojorquez 8 -saralee 8 -epple 8 -shamshak 8 -ogemaw 8 -stuffer 8 -three/four 8 -backseats 8 -snapback 8 -singal 8 -liqui 8 -above-knee 8 -gitau 8 -jornet 8 -marchis 8 -levkoff 8 -perry-class 8 -gurmeet 8 -canacona 8 -exposà 8 -protectant 8 -al-rahimi 8 -jankulovski 8 -ucb 8 -liska 8 -clercq 8 -low-voltage 8 -parangaricutiro 8 -onetruefan 8 -geoeye 8 -núñez 8 -dragarov 8 -2,691 8 -seruyan 8 -fomalhaut 8 -238billion 8 -gaetz 8 -razieh 8 -mdm 8 -pavé 8 -hedge-funder 8 -jaywalkers 8 -celi-moreno 8 -fastpass 8 -palazzos 8 -mesotherapy 8 -grammar-school 8 -plomin 8 -breckinridge 8 -king5.com 8 -ramiz 8 -herewith 8 -mantofa 8 -scraggs 8 -foreign-sounding 8 -yoshiko 8 -yoshiki 8 -flatscreens 8 -typhoon-ravaged 8 -gretl 8 -37-storey 8 -pij 8 -oaklee 8 -tartuffe 8 -982 8 -987 8 -geileskey 8 -saumarez 8 -kemple 8 -453,000 8 -isel 8 -biomes 8 -sakaida 8 -ethnics 8 -buyagift 8 -micrometeorites 8 -masrour 8 -sumzero 8 -gisburn 8 -boofy 8 -digianfilippo 8 -emma-jean 8 -betbright 8 -gloddy 8 -mujava 8 -out-perform 8 -kaliq 8 -serviettes 8 -paraphrases 8 -off-centre 8 -pupusas 8 -scammell 8 -bucktown 8 -druian 8 -16.24 8 -louviere 8 -dramatic-looking 8 -nsr 8 -precipitates 8 -retinoic 8 -06/08/2012 8 -zia-ul-haq 8 -khara 8 -mii 8 -mim 8 -bisecting 8 -klebsiella 8 -molting 8 -darebin 8 -guintoli 8 -nolting 8 -ex-captain 8 -megahed 8 -coghill 8 -tuschinski 8 -bezler 8 -grau 8 -metabolisers 8 -y-chromosomal 8 -hisd 8 -before-viewing 8 -mojahedin-e 8 -overzealousness 8 -agaves 8 -hallums 8 -winmarleigh 8 -redington 8 -mukund 8 -bilyeu 8 -goli 8 -six-pound 8 -realness 8 -rot-weiss 8 -deann 8 -octopod 8 -20-member 8 -holly-sue 8 -grocery-store 8 -enumclaw 8 -buddle 8 -yeses 8 -four-tier 8 -wanzer 8 -dillenburger 8 -ikuo 8 -tassimo 8 -vicenzino 8 -dionicio 8 -velaterapia 8 
-childbirths 8 -phocuswright 8 -marczak 8 -hersch 8 -abominations 8 -baliszewski 8 -protists 8 -loralai 8 -e&y 8 -brownite 8 -slamat 8 -gratteri 8 -mceverything 8 -zang 8 -lithia 8 -strictness 8 -arfon 8 -elderberries 8 -tocumen 8 -cobre 8 -phospholipids 8 -rcapital 8 -kazak 8 -beetlecopter 8 -21g 8 -ggotjebi 8 -33-story 8 -tiziano 8 -unawatuna 8 -ollivant 8 -dwelt 8 -nicastri 8 -heermance 8 -edilia 8 -gwalior 8 -military-installed 8 -12-inches 8 -imbed 8 -first-ball 8 -64-acre 8 -duddridge 8 -subcontracting 8 -poorly-trained 8 -hkd$ 8 -kermani 8 -hosey 8 -manpad 8 -overcash 8 -349.99 8 -microneedles 8 -9.57 8 -kaytlen 8 -profiteroles 8 -frunet 8 -ynclan 8 -gilheaney 8 -thousand-yard 8 -sonn 8 -keanan 8 -al-tawhid 8 -california-nevada 8 -240-pound 8 -67billion 8 -mosele 8 -muhajireen 8 -liangjiahe 8 -templer 8 -moderate-to-severe 8 -cluniac 8 -greif 8 -then-california 8 -molson 8 -shoreway 8 -c220 8 -zaghloul 8 -capful 8 -aasia 8 -garrisoned 8 -eataly 8 -sandhills 8 -classique 8 -grb130427a 8 -tolpuddle 8 -oclock 8 -rutnam 8 -buckthorn 8 -joing 8 -chapmans 8 -hyperparathyroidism 8 -ebihara 8 -altimeters 8 -xiangmin 8 -co-signatory 8 -milroy 8 -27kg 8 -1,000-tonne 8 -alphira 8 -rexhepi 8 -railton 8 -mcmahan 8 -ghanaja 8 -curreri 8 -journal-review 8 -silk-lined 8 -fauquier 8 -bootlegged 8 -ojong 8 -cologna 8 -hueber 8 -keepin 8 -back-pedalling 8 -non-subscribers 8 -third-trimester 8 -3,067 8 -biv 8 -stinziano 8 -copperbox 8 -wolske 8 -wolsky 8 -gingerism 8 -bulluck 8 -sherafiyah 8 -medically-oriented 8 -dagvadorj 8 -brasileirao 8 -palestino 8 -colcannon 8 -kita 8 -kith 8 -46c 8 -wboc 8 -flashpackers 8 -hastings-on-hudson 8 -knacker 8 -gaensbauer 8 -100-odd 8 -maggies 8 -insupportable 8 -keyanna 8 -u.s.-built 8 -samho 8 -svetloe 8 -andrew-jaja 8 -hand-picking 8 -bajur 8 -scalzo 8 -Úbeda 8 -grand-father 8 -ngwenya 8 -5-foot-tall 8 -kattouf 8 -kheow 8 -manawatu 8 -pendeen 8 -kipple 8 -cayacos 8 -figure-skating 8 -boulmer 8 -orthodontics 8 -campobello 8 -llanwrtyd 8 -delfosse 8 -once-impoverished 8 -battered-woman 8 -bisects 8 -solley 8 -3-acre 8 -13-metre 8 -insoll 8 -caouette 8 -china-watcher 8 -shahbag 8 -consequence-free 8 -bogong 8 -z-cars 8 -godmen 8 -derartu 8 -khayat 8 -pirra 8 -16-hours 8 -augstein 8 -koene 8 -hyperstimulation 8 -graça 8 -strasser 8 -mistable 8 -petrosaudi 8 -re-gifting 8 -layard 8 -ruiz-gaviria 8 -lubricates 8 -over-rates 8 -thacher 8 -kaewkamnerd 8 -basejumper 8 -lukangol 8 -weinger 8 -dagar 8 -culpan 8 -re-purposing 8 -orofino 8 -auto-excommunicate 8 -ulosevich 8 -well-mapped 8 -thesmokinggun.com 8 -statters 8 -sn2014j 8 -tontitown 8 -eastport 8 -dissonant 8 -rahmati 8 -frankenstorm 8 -edgson 8 -evans-thomas 8 -chocolate-box 8 -patriarca 8 -over-runs 8 -bocce 8 -coachload 8 -roopkund 8 -hafted 8 -satiri 8 -parsisson 8 -1004 8 -mukisa 8 -priscella 8 -nmb 8 -scheman 8 -ribblehead 8 -mostefa 8 -muxworthy 8 -38-0 8 -anchieta 8 -maynooth 8 -woerner 8 -lopez-diaz 8 -feghaly 8 -http://nbcchicago.com 8 -mathes 8 -old-man 8 -waterlily 8 -reisinger 8 -amaero 8 -fine-arts 8 -golubovskis 8 -bijlert 8 -geving 8 -snokhous 8 -nuclear-test-ban 8 -arkyd 8 -darrelle 8 -scholtz-klink 8 -two-and-a-half-mile 8 -modelers 8 -pace-setter 8 -23-ton 8 -dishforth 8 -cookhouse 8 -locarno 8 -miter 8 -habibur 8 -ginty 8 -1267 8 -atase 8 -mafia-busting 8 -grana 8 -c/sgt 8 -story-book 8 -sakr 8 -brajkovic 8 -33-storey 8 -ilc2s 8 -sub-alpine 8 -iren 8 -prizing 8 -straight-arm 8 -20kw 8 -harpooning 8 -giroir 8 -dussehra 8 -.011 8 -hakhovich 8 -to-and-fro 8 -agulhas 8 -marylanders 8 -haidt 8 -5ghz 8 
-almuhajir 8 -injury-related 8 -destabilises 8 -custom-tailored 8 -horseriding 8 -lykins 8 -llanllwni 8 -best-funded 8 -@lindsaylohan 8 -500,000,000 8 -1740s 8 -lionsraw 8 -ambush-protected 8 -1728 8 -raymonde 8 -dirndls 8 -leakiest 8 -124.99 8 -desir 8 -desio 8 -flimsy-looking 8 -al-huda 8 -roved 8 -rationalising 8 -queimada 8 -pimpin 8 -mehler 8 -worldviews 8 -graffis 8 -koentjoro 8 -rules-based 8 -rices 8 -obama-putin 8 -hermit-like 8 -olé 8 -kumeroa 8 -soopers 8 -10.23 8 -10.29 8 -semmes 8 -manard 8 -wowforreeel 8 -sheinis 8 -webmasters 8 -sollinger 8 -gendy 8 -consuelos 8 -socio-demographic 8 -ussi 8 -tomohiro 8 -nyassi 8 -taimur 8 -anti-reflective 8 -spf30 8 -messaoudi 8 -barboianu 8 -adrianus 8 -39-stone 8 -dices 8 -73,500 8 -ajoy 8 -popejoy 8 -mainframes 8 -2,500-1 8 -yoopers 8 -stagehands 8 -scenes-of-crime 8 -half-british 8 -price-sensitive 8 -7,995 8 -#tcot 8 -tenofovir 8 -kyokushin-kan 8 -n-tv 8 -thousand-plus 8 -colloquialisms 8 -vorhees 8 -lleida 8 -zaborovska 8 -aghanistan 8 -ibitoye 8 -ariyawathie 8 -sushi-ya 8 -yazigi 8 -khabur 8 -128.5 8 -pro-enterprise 8 -8,000-12 8 -traversi 8 -fan-ownership 8 -200-person 8 -ankle-ligament 8 -circ 8 -khl 8 -kho 8 -kha 8 -oszek 8 -apolito 8 -techno-savvy 8 -glenturret 8 -d.o.m. 8 -nikchemny 8 -sarthe 8 -life-line 8 -ewbank 8 -800-a-month 8 -ack 8 -cayey 8 -swiss-german 8 -herzfeld 8 -jubliee 8 -4,000-6 8 -hypermach 8 -109.4 8 -sousan 8 -immodestly 8 -rathmell 8 -sirr 8 -dragsholm 8 -2:56 8 -2:54 8 -2:53 8 -2:52 8 -2:51 8 -torrens 8 -championes 8 -h.p. 8 -zig-zagged 8 -7.70 8 -26-years 8 -radric 8 -eleven-month-old 8 -gefitinib 8 -ottenberg 8 -vorilhon 8 -huestis 8 -taravati 8 -lanker 8 -past-due 8 -jayvon 8 -furrah 8 -oakland-based 8 -karcher 8 -legography 8 -ultraviolent 8 -irureta 8 -eufemiano 8 -foregen 8 -handsomest 8 -mclay 8 -tippeligaen 8 -magazine-like 8 -eagle-eye 8 -nishijima 8 -graffiato 8 -41,500 8 -still-smoldering 8 -birder 8 -snorsky 8 -risberg 8 -sczcesny 8 -non-parents 8 -madiha 8 -chiyangwa 8 -1,200-square-foot 8 -murwald 8 -today.the 8 -d'aigle 8 -isci 8 -millimetre-wave 8 -potegal 8 -mosadiq 8 -blushers 8 -suchowacki 8 -dhami 8 -paralleling 8 -gay-pride 8 -tapan 8 -habit-forming 8 -trabzon 8 -lily-ella 8 -wero 8 -idehill 8 -imma 8 -lassin 8 -coag 8 -bequerels 8 -discernable 8 --200 8 -mahali 8 -matchfixing 8 -mk3 8 -muray 8 -bare-knuckled 8 -shopkick 8 -re-gained 8 -unventilated 8 -nishino 8 -cemfjord 8 -latently 8 -well-compensated 8 -floret 8 -@cristiano 8 -batirashvili 8 -yurovsky 8 -mcgirt 8 -wakeskater 8 -predisposing 8 -hoofing 8 -lock-step 8 -magnifica 8 -bike-mounted 8 -adrenocortical 8 -topiramate 8 -semana 8 -siprut 8 -unfriends 8 -early-nineties 8 -rockledge 8 -kortrijk 8 -kammenos 8 -purna 8 -winterkorn 8 -thorneywork 8 -redbourn 8 -okechukwu 8 -tetrads 8 -@iamkellybrook 8 -siles 8 -alousi 8 -plettenberg 8 -ankle-high 8 -non-enforcement 8 -tax-evasion 8 -daulat 8 -integrations 8 -association-trained 8 -lashano 8 -priest-in-charge 8 -schill 8 -misandry 8 -perminova 8 -columbaria 8 -lochmore 8 -re-enroll 8 -obaze 8 -amunyoko 8 -bioimpedance 8 -14-inch-tall 8 -terengganu 8 -jerusalem-based 8 -piveteau 8 -straw-coloured 8 -cheesebrough 8 -downloaders 8 -2,500-strong 8 -1,200-ton 8 -graphic.jpg 8 -ram-raid 8 -epa-approved 8 -westminster-based 8 -macready 8 -acushnet 8 -103f 8 -mwepu 8 -1305 8 -milioti 8 -illes 8 -molchan 8 -sambhaji 8 -possessiveness 8 -lema 8 -bush-mccain 8 -start-finish 8 -9.79 8 -dummerston 8 -9.73 8 -224-foot-long 8 -amplifon 8 -accursed 8 -kawasmeh 8 -chadds 8 -readjustments 8 -011-52/755 
8 -26,600 8 -eight-tier 8 -pjk 8 -geekfest 8 -rasheen 8 -barwala 8 -6:08 8 -6:07 8 -treon 8 -langar 8 -5ks 8 -narrabri 8 -goslings 8 -pickpocketed 8 -lashley 8 -maceio 8 -elwahabi 8 -sonicstar 8 -guardbot 8 -typo-laden 8 -mludzinski 8 -mascola 8 -viray 8 -pie-scraper 8 --0.7 8 -force-wide 8 -apgar 8 -quattrocchi 8 -zahra'u 8 -1,900-acre 8 -portioned 8 -minutest 8 -broadis 8 -superman-style 8 -wojack 8 -shackley 8 -auxillary 8 -salicylates 8 -girlband 8 -320-year 8 -ledingham 8 -unirea 8 -donside 8 -fadipe 8 -unspecific 8 -gleno 8 -beachbody 8 -plummetted 8 -liberates 8 -stone-age 8 -higher-paid 8 -rocket-shaped 8 -groenefeld 8 -fluffs 8 -yeguas 8 -ef-0 8 -personages 8 -torimi 8 -assalamu 8 -wakatobi 8 -tusked 8 -derik 8 -lakhanpal 8 -kryten 8 -64.99 8 -katsalapov 8 -db10 8 -cross-shaped 8 -1,256 8 -1,251 8 -campina 8 -10-foot-wide 8 -shate 8 -ofari 8 -wurman 8 -regling 8 -kronforst 8 -yogendra 8 -37-hour 8 -party-girl 8 -korra 8 -log-burning 8 -trifles 8 -mayzes 8 -sahid 8 -brillant 8 -out-of-hospital 8 -virage 8 -syr 8 -kirt 8 -44f 8 -ebraham 8 -307million 8 -deco-inspired 8 -morwenstow 8 -@england 8 -granddads 8 -cressy 8 -portending 8 -455ft 8 -cherwenka 8 -azin 8 -hishamuddin 8 -dirigibles 8 -niemiec 8 -siirt 8 -fleurieu 8 -intercut 8 -7-mile 8 -bielby 8 -ecofarm 8 -buncich 8 -bhf-funded 8 -silenzi 8 -mslo 8 -hurrey 8 -fredou 8 -yuxin 8 -rondu 8 -tangos 8 -bridleways 8 -zemdegs 8 -one-lane 8 -killl 8 -sanita 8 -come-to-jesus 8 -1021 8 -geffner 8 -e3g 8 -brogrammer 8 -pooh-pooh 8 -1,940 8 -nyle 8 -qaa 8 -u.s.-european 8 -25-28 8 -2008-2013 8 -bleyer 8 -97.2 8 -97.7 8 -langmead 8 -electrophysiology 8 -tomizawa 8 -cross-examinations 8 -larizadeh 8 -ignasius 8 -havrilla 8 -forones 8 -demotivated 8 -thiruvananthapuram 8 -attahiru 8 -54.95 8 -barnstormed 8 -gendarmeria 8 -grivna 8 -ninth-largest 8 -1,078 8 -double-homicide 8 -hujama 8 -motasim 8 -two-tee 8 -tdap 8 -lolland-falster 8 -spurtle 8 -post-wwii 8 -tamicare 8 -3,260 8 -multi-spectral 8 -kristinsson 8 -011-52/998 8 -belmas 8 -plx4032 8 -dunluce 8 -now-fiancee 8 -evolvable 8 -muronets 8 -giovannini 8 -moonee 8 -36,600 8 -compounders 8 -crêpe 8 -suicidepreventionlifeline.org 8 -@bbcr4today 8 -top-rating 8 -israeli-gaza 8 -4bc 8 -anti-alcohol 8 -ety 8 -pandacam 8 -marketshare 8 -relph 8 -fabbrini 8 -xynthia 8 -kesling 8 -ezair 8 -wrighton 8 -nalini 8 -heitmans 8 -re-sits 8 -abdon 8 -wkrg-tv 8 -neponset 8 -twin-rotor 8 -torrox 8 -bonthron 8 -1204 8 -1206 8 -120k 8 -120c 8 -giorgis 8 -hypno-programmed 8 -goldfrapp 8 -daffron 8 -mobed 8 -25-goal 8 -jevons 8 -dismounting 8 -660million 8 -hevener 8 -lockergnome.com 8 -2136 8 -2130 8 -d-listers 8 -nollybooks 8 -50.07 8 -ba.com 8 -22-under 8 -in-tune 8 -chandrasekhar 8 -laser-sighted 8 -kava 8 -4,000-strong 8 -flightdeck 8 -fresh-squeezed 8 -forsdick 8 -bidvest 8 -micklefield 8 -besemann 8 -lysakowska 8 -car-jacked 8 -slm 8 -bailee 8 -mcinulty 8 -snawder 8 -plenoptic 8 -byrnecut 8 -singsong 8 -cheesemaker 8 -tiquicheo 8 -romarco 8 -afghan-born 8 -kind-of 8 -mornin 8 -great-great-great-great-great 8 -mazzeh 8 -40mg 8 -anthologies 8 -fyffe 8 -one-length 8 -freebird 8 -35-man 8 -gubb 8 -baronets 8 -24-19 8 -gazarik 8 -bindeshwar 8 -fatau 8 -roboz 8 -11,920 8 -asman 8 -bahujan 8 -retallack 8 -worldview-2 8 -carmello 8 -apalachee 8 -jestin 8 -thfc 8 -morganroth 8 -novak-garcia 8 -reservatrol 8 -public-interest 8 -lidgate 8 -morganton 8 -deevy 8 -kajsa 8 -addam 8 -awearness 8 -shenon 8 -14-28 8 -14-20 8 -match-defining 8 -d'acampo 8 -teleporter 8 -balala 8 -mabo 8 -al-dustour 8 -zador 8 -weizman 8 
-takizawa 8 -lookfantastic.com 8 -intentionality 8 -kinnings 8 -glesni 8 -ebbets 8 -renominated 8 -academicals 8 -gustwiller 8 -santiago-serrano 8 -kautikari 8 -renno 8 -o'reggio 8 -beezy 8 -snarkiness 8 -farouki 8 -aquafina 8 -elvina 8 -hamamatsu 8 -draginova 8 -sheetrock 8 -cofre 8 -queerspace 8 -outlives 8 -bindel 8 -chulpayev 8 -swayamsevak 8 -oesin 8 -atac 8 -atap 8 -glasses-wearing 8 -electrically-powered 8 -gremont 8 -pahl 8 -1,712 8 -tuberculin 8 -diebolt 8 -lvov 8 -latin-inspired 8 -fine-scale 8 -kutum 8 -magatte 8 -seested 8 -non-graduate 8 -163.5 8 -lasa 8 -crinoline 8 -7.58 8 -7.59 8 -scorchie 8 -pierre-hugues 8 -super-computer 8 -ryuichi 8 -koraun 8 -8,914 8 -195lbs 8 -nurhasyim 8 -multi-stakeholder 8 -picaridin 8 -moviestarplanet 8 -159million 8 -fortnam 8 -hofgartner 8 -kelkoo 8 -agriculturally 8 -ugalde 8 -20-cent 8 -over-medicated 8 -ball-handling 8 -haise 8 -fostanes 8 -lockhurst 8 -laberge 8 -wencel 8 -artificially-induced 8 -tenison 8 -7,650 8 -greetland 8 -psd 8 -0.52 8 -non-metropolitan 8 -zumper 8 -londonistan 8 -29in 8 -takoradi 8 -overgrazing 8 -gbao 8 -resealable 8 -fagenson 8 -ryad 8 -isak 8 -850th 8 -mollusk 8 -gubler 8 -eldfell 8 -winful 8 -gits 8 -bucket-load 8 -cressage 8 -2008-2014 8 -2008-2018 8 -neuroenhancement 8 -goeschel 8 -shedden 8 -4-hour 8 -al-sakkaf 8 -170-ft 8 -latchkey 8 -mattina 8 -finger-printed 8 -woo-hoo 8 -cominotto 8 -gondwanaland 8 -shimandale 8 -starriest 8 -mazieres 8 -fireproofing 8 --220 8 -hathout 8 -guangshan 8 -spearfish 8 -trebarwith 8 -james-lee 8 -gildersleeve 8 -jrotc 8 -neurobehavioral 8 -embezzler 8 -meat-based 8 -thaxted 8 -yichun 8 -5.5-inches 8 -prpa 8 -duelled 8 -rusroshi 8 -waclawiak 8 -maleness 8 -leppink 8 -850billion 8 -110.15 8 -lecher 8 -knowles-dixon 8 -detemines 8 -six-furlong 8 -supertrees 8 -leifer 8 -domina 8 -belle-vue 8 -rasiej 8 -sleepaway 8 -sinkings 8 -blazejowski 8 -karpel 8 -cunliffe-copeland 8 -sawbridgeworth 8 -oogjes 8 -edhi 8 -mukunda 8 -chardonnays 8 -ciutadella 8 -h-1 8 -wewege 8 -former-president 8 -najafian 8 -kalpesh 8 -13-fold 8 -week-on-week 8 -upper-deck 8 -belgiki 8 -aways 8 -madonnari 8 -guéckédou 8 -pet-owners 8 -ravenelle 8 -apk 8 -apm 8 -ap7 8 -harpocrates 8 -sea-coalers 8 -kirkstall 8 -tidier 8 -housebuilder 8 -texan-born 8 -seban 8 -over-emphasis 8 -rpx 8 -toothsome 8 -bergmonch 8 -15-and-a-half 8 -existance 8 -anobii 8 -micro-gravity 8 -grand-final 8 -magpas 8 -pronin 8 -siew 8 -unalterably 8 -thaipusam 8 -al-kholi 8 -cobos 8 -tumarkin 8 -seeing-eye 8 -macdonnell 8 -chiyoda-ku 8 -gefreiter 8 -mid-water 8 -tobi-jayne 8 -1210 8 -caisley 8 -debove 8 -moteab 8 -healthywage 8 -ferments 8 -26-second 8 -mutes 8 -zaoralova 8 -stéfano 8 -torness 8 -migs 8 -ryelands 8 -hybridisation 8 -penny-farthing 8 -briesen 8 -re-touched 8 -thresh 8 -junya 8 -carnese 8 -markfield 8 -tansu 8 -ennals 8 -speechly 8 -money-grubbing 8 -thawatchai 8 -masaai 8 -bujak 8 -pre-entitlement 8 -non-natural 8 -plasterers 8 -renomination 8 -vote-winner 8 -ciljan 8 -opening-night 8 -5017 8 -mao-style 8 -d-ny 8 -ktuu-tv 8 -bilmes 8 -mallia 8 -gleeks 8 -bio-medical 8 -230kg 8 -light-reflecting 8 -661 8 -onizuka 8 -63mins 8 -artley 8 -163mph 8 -tintori 8 -maykop 8 -mukoro 8 -hardeman 8 -@millerbode 8 -highly-talented 8 -denmon 8 -wickramasingha 8 -slezic 8 -legitimated 8 -barzelay 8 -dalvi 8 -barsby-finch 8 -recertified 8 -shovell 8 -re-inspected 8 -#royalprank 8 -sivivatu 8 -munisteri 8 -transworld 8 -2-month 8 -kemish 8 -globe-trotter 8 -professionalised 8 -dykgraaf 8 -apptivity 8 -gusen 8 -sanitisers 8 -bms 8 -hormozgan 8 
-19.89 8 -nocerina 8 -tsutomu 8 -poppin 8 -vestguard 8 -opala 8 -al-musawi 8 -hairbrushes 8 -al-ga 8 -vilnai 8 -capshaw 8 -clubbs 8 -kazuyuki 8 -tenure-track 8 -undammed 8 -clarksons 8 -naku 8 -naka 8 -racketeers 8 -zymatic 8 -sheril 8 -ouca 8 -normal-size 8 -flareups 8 -zaineb 8 -1,419 8 -zuppiger 8 -rmr 8 -rmi 8 -pinkowski 8 -nishikawa 8 -ponomusic 8 -rampantly 8 -colloidal 8 -gender-biased 8 -kiryienka 8 -vansittart 8 -regorafenib 8 -tariff-free 8 -lacquerie 8 -potchefstroom 8 -gema 8 -ninjago 8 -begraj 8 -nassef 8 -croton 8 -eugster 8 -grb 8 -un-mandated 8 -casteels 8 -8,850 8 -polytheists 8 -mcnee 8 -kayseri 8 -lope 8 -leutwiler 8 -bindloss 8 -bickerdike 8 -bingil 8 -scillies 8 -anastasiou 8 -eastney 8 -morgia 8 -bobsledders 8 -hindman 8 -huijbregts 8 -odalisque 8 -77.3 8 -vocalization 8 -tunceli 8 -rakigjija 8 -panufnik 8 -squarespace 8 -ambro 8 -410ad 8 -stablised 8 -subspecialists 8 -thick-cut 8 -du-ri 8 -madwoman 8 -lamon 8 -micras 8 -facsimiles 8 -ibtimes 8 -mid-series 8 -rozek 8 -omfg 8 -vasta 8 -besirevic 8 -witchmarks 8 -takahiro 8 -85mins 8 -11.53 8 -11.54 8 -11.58 8 -slatted 8 -snow-related 8 -narcy 8 -shipload 8 -apha 8 -smolinski 8 -shuzo 8 -cave-ins 8 -u.t. 8 -jean-christian 8 -sea-doo 8 -duplantier 8 -rosenkavalier 8 -kindersley 8 -pitztal 8 -dbl 8 -db1 8 -db2 8 -absented 8 -architectures 8 -triple-jumper 8 -foell 8 -ghazzawi 8 -durántez 8 -sixtus 8 -meowseph 8 -nasta 8 -gosai 8 -salt-n-pepa 8 -cisplatin 8 -jehane 8 -nine-stone 8 -chungaung 8 -thometz 8 -schipplock 8 -hollibaugh 8 -mao-era 8 -wing-span 8 -dromedaries 8 -14-seater 8 -10,000-seat 8 -sagram 8 -scannán 8 -nanometer 8 -sunbird 8 -forsee 8 -best/worst 8 -paatelainen 8 -song-writer 8 -liffey 8 -khlystov 8 -sentara 8 -action-movie 8 -aurangzeb 8 -211m 8 -lapwings 8 -prostatic 8 -museum-quality 8 -gamoke 8 -10-a-month 8 -milnes 8 -vandi 8 -bigfin 8 -angstrom 8 -payables 8 -herpa 8 -teganya 8 -seacoastonline 8 -hande 8 -ecologies 8 -ship-2 8 -ship-1 8 -pelloux 8 -tuneup 8 -lunga 8 -farshid 8 -delmon 8 -adrenaline-inducing 8 -reserach 8 -endia 8 -portella 8 -glibness 8 -10th-floor 8 -kleinwort 8 -581d 8 -eissa 8 -extra-ordinary 8 -giarrusso 8 -posnansky 8 -familiarizing 8 -f.e.a.r 8 -huckster 8 -mehlhase 8 -anangu 8 -1980s-era 8 -francheska 8 -w.o. 
8 -elliston 8 -zoophilia 8 -turban-wearing 8 -m'naghten 8 -turrell 8 -laurance 8 -purepulse 8 -tambien 8 -5.84 8 -5.88 8 -cotylocara 8 -trash-free 8 -vivino 8 -6/5 8 -caplehorn 8 -adla 8 -baldia 8 -schwendel 8 -legowo 8 -rfb 8 -masker 8 -lickies 8 -parentis 8 -test-run 8 -melany 8 -pollicita 8 -nabilah 8 -7,451 8 -gramling 8 -academician 8 -hillfields 8 -sohl 8 -colons 8 -sedinger 8 -unhelpfully 8 -endears 8 -phyland 8 -loakes 8 -arquilla 8 -chace 8 -othon 8 -1996-1997 8 -saloom 8 -tintypes 8 -flowy 8 -raso 8 -undergound 8 -cardrona 8 -terisia 8 -lts 8 -ltv 8 -zookal 8 -ilyich 8 -kbak 8 -rahela 8 -snow-free 8 -kaner 8 -cheapair 8 -180-pound 8 -alaotran 8 -galella 8 -in-principle 8 -62.9 8 -full-calorie 8 -tardec 8 -cyberterrorists 8 -utsler 8 -hamson 8 -newstands 8 -power-share 8 -pre-hearing 8 -26,800 8 -pop-tarts 8 -7.33 8 -hilkey 8 -nacke 8 -yourshaw 8 -sigsworth 8 -byre 8 -631,000 8 -8:59 8 -mooty 8 -38g 8 -3,299 8 -vist 8 -tamarack 8 -faceboook 8 -mohammedie 8 -todung 8 -hanappi 8 -hawaiian-born 8 -ucpf 8 -autism-like 8 -110-meter 8 -kerker 8 -antioxidant-rich 8 -nationally-recognized 8 -wilcoxson 8 -sumrall 8 -avians 8 -azuma 8 -22-15 8 -dissembled 8 -misstravel 8 -0.70 8 -off-spring 8 -bulat 8 -overregulation 8 -bethune-cookman 8 -pocket-friendly 8 -half-mile-long 8 -mid-wilshire 8 -self-hypnosis 8 -settees 8 -kjell 8 -player/manager 8 -48kg 8 -larza 8 -168lb 8 -granuloma 8 -kansan 8 -under-10 8 -yubari 8 -lifebuoy 8 -whle 8 -gomphothere 8 -496,000 8 -alka-seltzer 8 -stratford-on-avon 8 -karason 8 -then-welterweight 8 -pacte 8 -croxson 8 -camelina 8 -palamberis 8 -wasp-18 8 -cozzoni 8 -1129 8 -1120 8 -blade-shaped 8 -waliur 8 -rickel 8 -skytran 8 -tongue-twisting 8 -all-too-often 8 -nitrocharge 8 -goreti 8 -dufort 8 -pamella 8 -eells 8 -falabella 8 -park-goers 8 -6:52 8 -hokhlov 8 -reengaging 8 -revelries 8 -ultra-skinny 8 -rossetto 8 -68,500 8 -camera-mounted 8 -sulphates 8 -terrorist-type 8 -re-lit 8 -disowns 8 -trematon 8 -battah 8 -garrotted 8 -legal-looking 8 -spiral-shaped 8 -roble 8 -neuf 8 -kincorth 8 -golfin 8 -bobble-head 8 -followed-up 8 -microbialites 8 -b-52h 8 -affronts 8 -anglo-australian 8 -pollot 8 -polloi 8 -10.38 8 -osmany 8 -republika 8 -three-weeks 8 -one-bath 8 -martialled 8 -dhakota 8 -lewisbest 8 -nfl-funded 8 -knightsmith 8 -bromich 8 -bonorong 8 -tellaro 8 -pengiran 8 -nikkah 8 -Åhléns 8 -vega-maldonado 8 -7:38 8 -ethicall 8 -bomberos 8 -yarralumla 8 -1969-70 8 -e.w. 
8 -naturopathic 8 -malapascua 8 -non-alignment 8 -old-guard 8 -laburnum 8 -dailer 8 -straka 8 -tax-efficient 8 -balmier 8 -neustift 8 -knightscope 8 -xkr 8 -sacrarium 8 -pover 8 -saitoti 8 -81mins 8 -chirri 8 -kacary 8 -shandra 8 -modiselle 8 -alphonsus 8 -kinno 8 -#shopping 8 -curelaru 8 -journal/marist 8 -sukkarieh 8 -irish-based 8 -sathyavagiswaran 8 -mias 8 -naoma 8 -cappa 8 -netcare 8 -317million 8 -kossoff 8 -t42 8 -linpeng 8 -scallywag 8 -narodowy 8 -outsole 8 -tamarac 8 -taihe 8 -baren 8 -26,794 8 -emg 8 -dreamscape 8 -paster 8 -serwa 8 -scialfa 8 -non-payers 8 -kolkilic 8 -entropic 8 -picture-led 8 -warchus 8 -deafeningly 8 -callaways 8 -mirqab 8 -hogan-gary 8 -26,900 8 -34-foot 8 -brockhoff 8 -bernath 8 -kristopik 8 -tung-kwok 8 -lembeh 8 -lilani 8 -on-body 8 -moon-shaped 8 -austrian-made 8 -abrahamsson 8 -ferder 8 -sieno 8 -elthorne 8 -fionan 8 -lasius 8 -blackheads 8 -eldoret 8 -sutkiewicz 8 -nulph 8 -ntuli 8 -pleasuredrome 8 -1,291 8 -hard-to-access 8 -no-look 8 -ulugun 8 -zero-based 8 -zod 8 -adonai 8 -dms.facebook.posttofb 8 -insan 8 -harandi 8 -incontestable 8 -double-click 8 -azide 8 -limca 8 -guidolin 8 -47s 8 -shivaratri 8 -irreverently 8 -house-price 8 -ponsonby 8 -nhan 8 -manadon 8 -overflying 8 -pre-conference 8 -pawpaw 8 -raba 8 -alizada 8 -chewie 8 -astakho 8 -3400 8 -knappenberger 8 -daiza 8 -lenska 8 -yosano 8 -metamorphose 8 -noonans 8 -brossier 8 -snow-affected 8 -meredyth 8 -bleeker 8 -rimu 8 -efrat 8 -114-year 8 -12,000-a-year 8 -spareroom.co.uk 8 -flashdancer 8 -rosada 8 -bozanic 8 -exchangers 8 -kucsma 8 -over-fished 8 -kinmen 8 -ogunquit 8 -ercot 8 -coban 8 -kopecky 8 -piccardi 8 -addana 8 -houben 8 -eichhorn 8 -12.39 8 -vaporiser 8 -griffith-williams 8 -gallian 8 -yesudhas 8 -hannon-dalby 8 -linotype 8 -korkoryah 8 -claassens 8 -sandero 8 -backe 8 -cmpd 8 -slepian 8 -trunkster 8 -10,923 8 -145.50-a-year 8 -whe 8 -3-5-1-1 8 -cooked-up 8 -onetouch 8 -semenenko 8 -philipstown 8 -mahmudullah 8 -over-zealously 8 -shahrour 8 -decaro 8 -kisangani 8 -rossell 8 -donnelly-martin 8 -nabu 8 -pavlakis 8 -catanzarite 8 -ktvt-tv 8 -paleozoic 8 -stolberg 8 -6.93 8 -kiprop 8 -officer-in-charge 8 -scharnhorst 8 -oximetry 8 -laeng 8 -roopnarine 8 -whiners 8 -sachaberry 8 -jimihatt 8 -mccurtain 8 -budich 8 -non-ferrous 8 -anatabine 8 -45bn 8 -drumnadrochit 8 -merga 8 -dovegate 8 -chirpily 8 -rashies 8 -cootamundra 8 -0207 938 6364 8 -ingrouille-kidd 8 -nasvi 8 -16,000-a-year 8 -cosmoprof 8 -consultees 8 -dsquared 8 -oskal 8 -bavis 8 -ortmann 8 -vondra 8 -rudie 8 -anti-bounce 8 -gazzaley 8 -dvorsky 8 -sundgren 8 -algernon 8 -damara 8 -30-story 8 -cressel 8 -hibernia 8 -gangster-like 8 -airily 8 -montenapoleone 8 -rickwood 8 -myring 8 -rean 8 -sorga 8 -allafrica.com 8 -kary 8 -herry 8 -baldizan 8 -video-recording 8 -pornhub.com 8 -kreiss 8 -anti-mursi 8 -bursitis 8 -pralines 8 -iruke 8 -dcxa 8 -shr 8 -ont 8 -afl.com.au 8 -battlelines 8 -takotsubo 8 -@thornetravel 8 -conjectured 8 -bobbibrown.co.uk 8 -chronowing 8 -edla 8 -lightwriter 8 -3:41 8 -zum 8 -laleh 8 -mokalu 8 -tahitians 8 -chmait 8 -re-releases 8 -arti 8 -jaspars 8 -redeye 8 -schaechter 8 -10.41 8 -chumbawamba 8 -linan 8 -lleras 8 -breathability 8 -once-loved 8 -nioxin 8 -275m 8 -12mp 8 -everything-goes 8 -cat-calls 8 -halfhearted 8 -crowngate 8 -daksha 8 -mangini 8 -mychael 8 -post-victory 8 -semirostrum 8 -inarguable 8 -veronelli 8 -birkitt 8 -gronigen 8 -caplis 8 -one-drop 8 -bolaise 8 -derm 8 -air-borne 8 -berchesi 8 -ballers 8 -chappies 8 -brogden 8 -abita 8 -ivies 8 -kveta 8 -armorer 8 -1800flowers 8 
-world-champion 8 -maggiano 8 -zelada 8 -donizetti 8 -schwolert 8 -verema 8 -atangana 8 -defaulters 8 -contrivance 8 -hyman-knight 8 -@billcosby 8 -electrostatically 8 -gadsen 8 -leang 8 -leana 8 -crescendos 8 -acapella 8 -vocativ.com 8 -financially-stricken 8 -well-bred 8 -kocsis 8 -dundee-based 8 -barcelo 8 -sangus 8 -locational 8 -raheny 8 -psychobitches 8 -heggarty 8 -668,000 8 -least-liked 8 -cashdan 8 -taio 8 -fan-made 8 -carter-stephenson 8 -nesheiwat 8 -ates 8 -sulu'ape 8 -english-speaker 8 -mittermeier 8 -boryeong 8 -safoora 8 -1,752 8 -fromberg 8 -in-boxes 8 -sanie 8 -ultrabike 8 -zoomable 8 -skiffle 8 -33kg 8 -abdel-latif 8 -baker-brown 8 -aerosolized 8 -avongate 8 -biteman 8 -control-alt-delete 8 -asterisks 8 -left-overs 8 -96.4 8 -zippr 8 -hirings 8 -refits 8 -mussler 8 -zizmor 8 -perreira 8 -wamwayi 8 -warlencourt 8 -regionalism 8 -thornberg 8 -buttu 8 -finwood 8 -anti-stress 8 -yinon 8 -feastival 8 -kahyk 8 -glass-roofed 8 -kösen 8 -gressum 8 -korrel 8 -1,935 8 -1,937 8 -,2 8 -steelmen 8 -737-800s 8 -jagdeep 8 -stratagem 8 -anti-corporate 8 -pataky 8 -klipin 8 -reelect 8 -redfish 8 -abbatoir 8 -kranji 8 -korea-us 8 -bell-bottom 8 -skov 8 -campey 8 -fertitta 8 -1-a 8 -1-9 8 -musleah 8 -hamling 8 -kegg 8 -20-18 8 -re-signs 8 -embroided 8 -silverfish 8 -dry-docked 8 -ligo 8 -mashkevich 8 -a39 8 -schnoozer 8 -fully-dressed 8 -menegos 8 -114p 8 -114m 8 -jongjit 8 -foremothers 8 -112,500 8 -ma. 8 -weleda 8 -glam-rock 8 -marrinyama 8 -44-foot 8 -fena 8 -849,000 8 -78f 8 -l-plates 8 -carenero 8 -eyelets 8 -asta 8 -sferrazza 8 -al-udeid 8 -derangement 8 -muslim-christian 8 -buffet-style 8 -subito 8 -treponema 8 -richardt 8 -lechelt 8 -mrhandcuffs 8 -fiser 8 -oyamel 8 -herberman 8 -nokomis 8 -creepshot 8 -run-throughs 8 -aspiro 8 -well-aimed 8 -goldbart 8 -planciunene 8 -glycation 8 -rosatom 8 -research-and-development 8 -amendolia 8 -kyo 8 -arifin 8 -bakeoff 8 -sachdev 8 -umphres 8 -means-test 8 -tyrel 8 -lyman-alpha 8 -kreutz 8 -girardot 8 -rta 8 -clukey 8 -sweden-based 8 -biavati 8 -dukezong 8 -figeroa 8 -devilment 8 -nixdorf 8 -cellutome 8 -granshaw 8 -snaffling 8 -tarim 8 -towline 8 -sengupta 8 -roundshaw 8 -shiotani 8 -moure-eraso 8 -gabled 8 -4,188 8 -rangwala 8 -kammy 8 -methedrone 8 -nelin 8 -popworld 8 -100.5 8 -circumbinary 8 -go-anywhere 8 -welbourne 8 -horas 8 -ex-public 8 -uchiyama-lee 8 -offed 8 -manderley 8 -benkirane 8 -cycmanick 8 -rolain 8 -konger 8 -kcbs-tv 8 -antecessor 8 -prague-based 8 -haram-related 8 -literatours 8 -mougins 8 -aakash 8 -decepticons 8 -scribd 8 -yunque 8 -five-country 8 -buriganga 8 -joleen 8 -illogan 8 -qesem 8 -milligans 8 -odo 8 -ishim 8 -quikscat 8 -orenbuch 8 -abbassi 8 -swinbrook 8 -marqueshi 8 -eligio 8 -cantref 8 -1626 8 -three-cornered 8 -alali 8 -chongqinq 8 -globalfoundries 8 -al-qureshi 8 -manasfi 8 -farriella 8 -cultists 8 -mallee 8 -mallen 8 -kuwait-based 8 -klebb 8 -62e 8 -800-a-night 8 -newly-completed 8 -bio-alcamid 8 -ex-criminals 8 -turned-up 8 -three-weekly 8 -zindziswa 8 -cbsdfw 8 -8:06 8 -8:02 8 -276-acre 8 -xiaoshan 8 -zither 8 -moccasin 8 -tradewinds 8 -thrombocytopenia 8 -hi-hat 8 -masiello 8 -photocard 8 -ajaj 8 -asir 8 -back-garden 8 -laughy 8 -wyant 8 -wyand 8 -gallatinov 8 -cinnaminson 8 -hardingham 8 -cameroid 8 -tricorders 8 -sunai 8 -sonjia 8 -thirty-five-year-old 8 -argetoaia 8 -seliga 8 -hot-bed 8 -sarbanes 8 -ioanna 8 -improbability 8 -current-generation 8 -one-and-a-quarter 8 -chupar 8 -chipciu 8 -doughten 8 -schönbrunn 8 -head-hunters 8 -1595 8 -1591 8 -yochelson 8 -ladling 8 -lumpsucker 8 -fergburger 8 
-109mph 8 -ex-nanny 8 -280lbs 8 -taddeo 8 -bodysurfing 8 -farmstays 8 -right-brain 8 -bagua 8 -enes 8 -3:08 8 -tokay 8 -rudling 8 -earsplitting 8 -coshes 8 -camera-carrying 8 -finchingfield 8 -crespos 8 -morishita 8 -bayambang 8 -540ft 8 -14-ounce 8 -badly-decomposed 8 -tartu 8 -accessions 8 -toeman 8 -heme 8 -hofsetter 8 -manâ 8 -monthan 8 -flood-prevention 8 -l'ormarins 8 -milliyet 8 -carpet-bombing 8 -cluysenaar 8 -sawarka 8 -rq-180 8 -bad-conduct 8 -yalgi 8 -92-page 8 -corll 8 -manzoor 8 -na-dene 8 -now-trademark 8 -sturckow 8 -sawtell 8 -podz 8 -semicircular 8 -ratcliffe-on-soar 8 -leutze 8 -eighty-eight 8 -pre-valentine 8 -0-15 8 -3,700-mile 8 -jttf 8 -staluppi 8 -11.19 8 -11.16 8 -ses-8 8 -flordia 8 -53-second 8 -bre-x 8 -brixworth 8 -penndot 8 -low-gi 8 -179,932.32 8 -small-plane 8 -sobieski 8 -signo 8 -dermatillomania 8 -balram 8 -already-high 8 -spearses 8 -1961-1963 8 -meres 8 -upperclassman 8 -eye-care 8 -jarram 8 -pipe-dream 8 -himidi 8 -hommemystere 8 -4,280 8 -microcirculation 8 -250mg 8 -rowbarge 8 -placek 8 -open-back 8 -imojis 8 -flunk 8 -whittome 8 -feltheimer 8 -jassy 8 -ziade 8 -fraternized 8 -misdirecting 8 -sagres 8 -rockpod 8 -kanevsky 8 -people-watch 8 -suleimani 8 -2150 8 -chuandong 8 -consecrate 8 -hayre 8 -erasamus 8 -turfing 8 -cyberpsychology 8 -hezbollah-led 8 -sessanio 8 -trial-run 8 -thermae 8 -tfr 8 -sheikh-husseyin 8 -cu-boulder 8 -creevy 8 -manute 8 -strashevskaya 8 -evans-woodward 8 -2,324 8 -2,325 8 -colorized 8 -emoji-filled 8 -divvying 8 -over-commercialization 8 -jamara 8 -headen 8 -paulistano 8 -point-new 8 -hagues 8 -stap 8 -coba 8 -72per 8 -rapha 8 -taison 8 -hate-mail 8 -reprobate 8 -maruca 8 -yingling 8 -choosers 8 -hoyas 8 -unflatteringly 8 -berko 8 -counterprogramming 8 -noiseless 8 -led-flash 8 -moisyadi 8 -tanichev 8 -birdsville 8 -manahawkin 8 -prayut 8 -5.43 8 -onieva 8 -petaflop 8 -counter-sue 8 -neutralizes 8 -544,000 8 -khqa 8 -halsted 8 -0.006 8 -0.003 8 -lowery-gale 8 -paragons 8 -lebanese-based 8 -witched 8 -diyaa 8 -tatlot 8 -inswinger 8 -524,000 8 -cbs8 8 -double-arm 8 -euro-area 8 -airwolf 8 -edoumou 8 -kohut 8 -deep-fat 8 -541,000 8 -touched-up 8 -jackelin 8 -gazmin 8 -numerologist 8 -plié 8 -meggiorini 8 -irabu 8 -seenauth 8 -parara 8 -shagufta 8 -50mb 8 -affinia 8 -straight-ahead 8 -tikasingh 8 -sabr 8 -make-a-rail 8 -shawty 8 -backgarden 8 -darky 8 -liberalise 8 -harpster 8 -sanjurjo 8 -faggione 8 -wuertly 8 -usian 8 -bazlington 8 -suro 8 -delphiniums 8 -toscana 8 -shapal 8 -1,089 8 -kuehneotherium 8 -3,106 8 -recirculate 8 -yoked 8 -bidzina 8 -bancorp 8 -teliasonera 8 -faena 8 -maryfield 8 -terrytown 8 -11-feet 8 -cherkasov 8 -one4all 8 -rutstein 8 -gorostieta 8 -sothern 8 -pachacamac 8 -schmader 8 -verite 8 -hartburn 8 -10.57 8 -you-name-it 8 -corephotonics 8 -easy-to-access 8 -500-yard 8 -sashays 8 -dorigo 8 -admonishments 8 -bumpier 8 -spaceline 8 -lesan 8 -three-team 8 -quetzaltenango 8 -lumina 8 -sirico 8 -phala 8 -umw 8 -ume 8 -happyness 8 -leintwardine 8 -sauder 8 -long-sightedness 8 -henleys 8 -r-n.y. 
8 -skaife 8 -f-you 8 -dixieland 8 -gammapix 8 -widmar 8 -boscobel 8 -betties 8 -errata 8 -once-pristine 8 -benedettini 8 -1lbs 8 -kleefisch 8 -lifeflight 8 -76.4 8 -pevensey 8 -fame-obsessed 8 -moralizing 8 -blankenstein 8 -01494 8 -motroc 8 -izaak 8 -advise-and-assist 8 -harish 8 -hinterlands 8 -15-rated 8 -sinko 8 -#wimbledon 8 -midshires 8 -lonchakov 8 -burgstrum 8 -manvi 8 -gremillion 8 -brilli 8 -1,910 8 -elsebet 8 -federalized 8 -weary-looking 8 -1,300,000 8 -quebradillas 8 -kingussie 8 -medium-resolution 8 -taximeter 8 -hartstine 8 -gorgeousness 8 -berivan 8 -coursesmart 8 -al-warraq 8 -donatelli 8 -siuslaw 8 -remixer 8 -1.175 8 -cyber-bullied 8 -half-drunk 8 -toups 8 -haagen 8 -rowcliffe 8 -janmohamed 8 -supong 8 -materialising 8 -huub 8 -coxley 8 -golden-hued 8 -tabula 8 -gadget-obsessed 8 -member-states 8 -a-e 8 -giveth 8 -northbridge 8 -father-of-12 8 -1,598 8 -nex 8 -testiculo 8 -aboolian 8 -reconciles 8 -andouille 8 -deison 8 -reappraise 8 -gain-line 8 -pasquotti 8 -producer/director 8 -heart-strings 8 -netz 8 -sundback 8 -backpages 8 -flexural 8 -knave 8 -kwok-yung 8 -heavy.com 8 -bizkit 8 -stand-ups 8 -jacopo 8 -mams 8 -qiyia 8 -furball 8 -pomalidomide 8 -32,000-a-year 8 -funnywoman 8 -frogspawn 8 -yanar 8 -david-and-goliath 8 -waken 8 -paratroops 8 -0820 8 -keech 8 -heusgen 8 -bagandans 8 -hatt 8 -okies 8 -pollitt 8 -mispronounce 8 -sauli 8 -venezuela-based 8 -sandtoft 8 -pirola 8 -dryathlon 8 -homira 8 -washita 8 -sofrito 8 -guendogan 8 -retout 8 -faster-moving 8 -durando 8 -ano 8 -flykly 8 -rurayya 8 -sirio 8 -milverton 8 -wauford 8 -strothers 8 -obara 8 -@onedirection 8 -hannett 8 -back-page 8 -mi-2a 8 -s-76c 8 -prediabetic 8 -frisoli 8 -iannacone 8 -45-pound 8 -dausey 8 -melodia 8 -givon 8 -yestin 8 -gery 8 -shunin 8 -lte-a 8 -egill 8 -belbruno 8 -corbeau 8 -tannenholz 8 -whitehorn 8 -amercia 8 -clinked 8 -frap 8 -gravities 8 -ak-47-type 8 -51-49 8 -wcyb 8 -peregruzka 8 -keegan-james 8 -81.9 8 -uranium-enrichment 8 -legaue 8 -ironfist 8 -josephoartigasia 8 -supercilious 8 -carbis 8 -canteloupe 8 -hirwaun 8 -7-time 8 -w.f. 
8 -kahle 8 -pitaya 8 -bluck 8 -shirks 8 -per-mile 8 -aquarobics 8 -hhonors 8 -klingons 8 -tightwad 8 -232million 8 -prestowitz 8 -bottlers 8 -agfa 8 -blakely-berry 8 -kassin 8 -dopers 8 -fresh-baked 8 -marie-charline 8 -imbibe 8 -turn-up 8 -rowdiness 8 -oft-stated 8 -gujjrar 8 -1641 8 -chiltington 8 -haliwa-saponi 8 -vsp 8 -sapna 8 -impington 8 -medoc 8 -administrating 8 -coes 8 -pre-empts 8 -1997-2000 8 -goldfinches 8 -much-hated 8 -ambulanceman 8 -track-record 8 -verzasca 8 -insect-like 8 -industrial-age 8 -alysa 8 -anxiety-filled 8 -scottsbluff 8 -airfarewatchdog 8 -d'alauro 8 -team-spirit 8 -litz 8 -costica 8 -aconitum 8 -dugger 8 -toileting 8 -8:29 8 -flimsier 8 -menstruate 8 -nesbeth 8 -59.50 8 -loebsack 8 -tbhq 8 -sviridov 8 -teslas 8 -ex-school 8 -dangly 8 -realview 8 -teleost 8 -third-time 8 -doubler 8 -baliban 8 -emson 8 -cyp 8 -canadair 8 -privett 8 -benninger 8 -g77 8 -tiesi 8 -zacarra 8 -moochers 8 -pixmania 8 -arlesey 8 -hongtao 8 -raitanen 8 -ayacucho 8 -euro-skeptic 8 -taboada-hall 8 -100whf 8 -maricourt 8 -zwally 8 -72,216 8 -113.10 8 -maestra 8 -45,000-a-year 8 -kappel 8 -proofreading 8 -satirically 8 -mattallana-galvas 8 -klien 8 -huaroani 8 -doppleganger 8 -bi-directional 8 -appeal-democrat 8 -tankards 8 -jinjuu 8 -thurgaland 8 -battista-frazee 8 -graveney 8 -blackrod 8 -mahar 8 -sintra 8 -creizman 8 -daniyaal 8 -sonnier 8 -12.27 8 -wolf-whistle 8 -epigenetics 8 -gbamin 8 -self-satisfaction 8 -tenanted 8 -larbi 8 -highly-provocative 8 -neo-grunge 8 -rct 8 -cheez-its 8 -wallon 8 -158,400 8 -chenonceau 8 -lebanese-american 8 -straphangers 8 -space-craft 8 -niggers 8 -18-64 8 -enunciate 8 -familiarised 8 -oligodendrocytes 8 -traditional-looking 8 -barn-storming 8 -www.organdonation.nhs.uk 8 -399.99 8 -7:22 8 -kittrell 8 -tractel 8 -steamier 8 -sonneman 8 -ruffian 8 -suskin 8 -lyst 8 -pullers 8 -malene 8 -stretchable 8 -revuebar 8 -@dwill_ 8 -camil 8 -gerleve 8 -eithne 8 -quickboat 8 -bletsch 8 -#breaktheinternet 8 -pinnies 8 -diplock 8 -nidar 8 -newly-fitted 8 -lamex 8 -beckwith-veroni 8 -barigye 8 -shrops. 
8 -movida 8 --7.3 8 -upbraiding 8 -mandler 8 -basheer 8 -pinitol 8 -firepit 8 -4,030 8 -waru 8 -nakaji 8 -janashvili 8 -wentwood 8 -lindley-highfield 8 -rheagan 8 -clemencies 8 -canoy 8 -buzz-worthy 8 -wool-blend 8 -escentual 8 -coleton 8 -socio-moral 8 -britvic 8 -25-24 8 -25-23 8 -chamberland 8 -ajala 8 -silverlake 8 -british-lebanese 8 -crowdwish 8 -yubi 8 -alga 8 -499.99 8 -falagan 8 -hochberg 8 -salmonella-tainted 8 -ebola-themed 8 -haschel 8 -daery 8 -w-7 8 -blue-skinned 8 -gating 8 -3billion-a-year 8 -70ad 8 -akkus 8 -deep-red 8 -stupefy 8 -kanwar 8 -chauan 8 -t206 8 -cz 8 -hiddencash 8 -834,000 8 -______ 8 -medill 8 -bonpoint 8 -newly-adopted 8 -2,346 8 -non-fasting 8 -overambitious 8 -doghouses 8 -reprivatisation 8 -al-ameen 8 -mid-2006 8 -mid-2003 8 -anti-landmine 8 -400metres 8 -cybercriminal 8 -elimelech 8 -aquilino 8 -once-successful 8 -gongquan 8 -moneymail 8 -murmurings 8 -coliforms 8 -akoud 8 -concrete-lined 8 -violist 8 -northbrook 8 -embon 8 -arenburg 8 -respectfulness 8 -slurps 8 -gerakaris 8 -hand-wash 8 -pellinen 8 -stanley-jones 8 -lactase 8 -single-carriageway 8 -mechler 8 -614,000 8 -0.025 8 -fitspiration 8 -maddens 8 -220kg 8 -kurbanjan 8 -mamiya 8 -boons 8 -kharal 8 -corones 8 -pre-easter 8 -alkaloids 8 -iwdg 8 -westheimer 8 -frenchtown 8 -ultimo.co.uk 8 -gynecologic 8 -dwon 8 -flower-covered 8 -gcp 8 -clubman 8 -platybelodon 8 -noutene 8 -glamorization 8 -475million 8 -subcamp 8 -neuropsychology 8 -haskovo 8 -sps-alpha 8 -murphrey 8 -leininger 8 -nambucca 8 -rinky-dink 8 -fraules 8 -kordan 8 -conlay 8 -airlinereporter.com 8 -egri 8 -sabawi 8 -wakhan 8 -mcfee 8 -solan 8 -1496 8 -bethersden 8 -lampang 8 -arzt 8 -chairi 8 -three-carat 8 -retainers 8 -promettes 8 -sjoholm 8 -gsubramaniam 8 -gants 8 -acre-feet 8 -coralia 8 -coralie 8 -chiva 8 -heartrate 8 -kuczynski 8 -pflugrad 8 -cava-poo-chon 8 -pakefield 8 -al-muadami 8 -timpanogos 8 -bin-liners 8 -hooders 8 -super-sexy 8 -33,600 8 -olianne 8 -wogs 8 -callie-louise 8 -heavily-fortified 8 -macrinus 8 -lms 8 -mouflon 8 -anvaripour 8 -pssi 8 -milliwatts 8 -ralfe 8 -tischendorf 8 -leatherheads 8 -sapwell 8 -neatest 8 -reimpose 8 -trebetherick 8 -americanised 8 -dingess 8 -liberatore 8 -lolcats 8 -hunedoara 8 -benchmates 8 -greenness 8 -post-delivery 8 -kulakov 8 -300mm 8 -al-nabaa 8 -ziplines 8 -franny 8 -vanzandt 8 -dowsell 8 -oohing 8 -kersten 8 -giabiconi 8 -diamante-encrusted 8 -odfjell 8 -baillieston 8 -bancessi 8 -tosser 8 -tvws 8 -laudy 8 -all-south 8 -1,975 8 -1,973 8 -bergamini 8 -body-boarding 8 -blandly 8 -firmenich 8 -ridgelines 8 -quannel 8 -sullenly 8 -doppelgänger 8 -academy-award 8 -jacci 8 -kawolski 8 -ibk 8 -1,649 8 -orgeon 8 -flanby 8 -haipeng 8 -scandolera 8 -sagaki 8 -14-strong 8 -three-pack 8 -osaila 8 -arko 8 -begikhani 8 -45-55 8 -rnzaf 8 -dickins 8 -phased-in 8 -monarchical 8 -sellheim 8 -4.72 8 -svedskas 8 -a/s 8 -gpda 8 -9:58 8 -off-ramps 8 -1-888-407-4747 8 -bio-fuels 8 -19-day-old 8 -makawa 8 -defecates 8 -38-yard 8 -body-shaping 8 -sznewajs 8 -caray 8 -hearkening 8 -hongo 8 -11mph 8 -seventh-highest 8 -#feelthegame 8 -lahti 8 -huaca 8 -a.m.-2 8 -ofatumumab 8 -tarentaise 8 -attenboroughi 8 -matajudíos 8 -itched 8 -sharpling 8 -wingrave 8 -walshes 8 -symposiums 8 -bitchiest 8 -jetsetters 8 -street-wear 8 -100-tonne 8 -longwith 8 -mulrennan 8 -horinek 8 -rebekka 8 -plutonium-powered 8 -natron 8 -teenie 8 -mariestad 8 -loboc 8 -meditators 8 -75lbs 8 -runwell 8 -ttya 8 -campiagn 8 -tigran 8 -bonafede 8 -gogonasus 8 -wouldnâ 8 -lagasca 8 -pharro 8 -nemesysco 8 -phrao 8 -halkett 8 -jl-2 8 
-arthus-bertrand 8 -drainbow 8 -kvitfjell 8 -disadvantaging 8 -stuckler 8 -sytchampton 8 -rossettini 8 -bosleys 8 -guiterrez 8 -triple-e 8 -shuker 8 -oversea 8 -dassin 8 -overington 8 -ardizzone 8 -agujero 8 -gachet 8 -lineside 8 -no-calorie 8 -time4sleep 8 -mangia 8 -comittee 8 -17,000-a-year 8 -synesthesists 8 -#askacop 8 -al-drissi 8 -joi 8 -post-separation 8 -aaa-rated 8 -72-ounce 8 -langsdorff 8 -avenges 8 -richeldi 8 -leparmentier 8 -micromappers 8 -head-coaching 8 -92.7 8 -whelpley 8 -cabuk 8 -oliberte 8 -hoyn 8 -nonato 8 -rucka 8 -b-cells 8 -morbelli 8 -less-invasive 8 -rumor-mongering 8 -fedotov 8 -ohr 8 -6.21 8 -overeager 8 -cardelus 8 -hunty 8 -deceitfully 8 -anti-tech 8 -arrested.the 8 -frostier 8 -foxhunter 8 -swagg 8 -percovich 8 -dibello 8 -hruby-mills 8 -1667 8 -shoe-making 8 -stereotactic 8 -agrigento 8 -co-presenting 8 -zavorotnyi 8 -hopa 8 -al-omran 8 -times-capped 8 -snarks 8 -ruminant 8 -pickersgill 8 -kon-tiki 8 -murphree 8 -criminological 8 -26-3 8 -5-years-old 8 -zeyno 8 -alternators 8 -crosthwaite 8 -tree-top 8 -banik 8 -vatansever 8 -rakovich 8 -rock-strewn 8 -giaimo 8 -micrometre 8 -chenggen 8 -45,600 8 -f2012 8 -brickie 8 -adult-film 8 -trovato 8 -cgh 8 -caravisio 8 -doxaras 8 -eradicates 8 -buboes 8 -pan-democrats 8 -ller 8 -merbein 8 -brookyln 8 -deandrea 8 -flusurvey 8 -1,810 8 -mikolajczak 8 -thawadi 8 -pruchnicki 8 -olstead 8 -ceili 8 -fojas 8 -minamisoma 8 -155m 8 -cosenza 8 -tracylee 8 -lockouts 8 -grammel 8 -adell 8 -nyquist 8 -whittlebury 8 -rienau 8 -smoove 8 -tsentserensky 8 -warnica 8 -12.41 8 -walstad 8 -hawthornes 8 -vagos 8 -al-aziz 8 -ronne 8 -kq 8 -intracellular 8 -bily 8 -sherak 8 -martinek 8 -indentified 8 -sperisen 8 -passà 8 -basco-porkolab 8 -souveny 8 -marsy 8 -chubster 8 -half-n 8 -9.39 8 -gladness 8 -a-ji 8 -656ft 8 -4:51 8 -4:57 8 -pross 8 -spartobranchus 8 -fixates 8 -miscalculate 8 -three-metre-long 8 -32kg 8 -abates 8 -hartunian 8 -darwins 8 -claye 8 -mikimoto 8 -tech3 8 -scheft 8 -shinawi 8 -26-minute 8 -khanke 8 -congeals 8 -okonjo 8 -schrot 8 -fahlman 8 -veley 8 -camidge 8 -boingboing 8 -lazarenko 8 -2,149 8 -2,140 8 -chimbalanga 8 -dumez 8 -mccarthys 8 -chanttelle 8 -myfoxdc 8 -onishi 8 -age-inappropriate 8 -sw1x 8 -bashers 8 -sn0501 8 -8.82 8 -k7 8 -drought-parched 8 -pen-pal 8 -jenae 8 -al-batsh 8 -sharmaine 8 -besiris 8 -18,426 8 -persistant 8 -penet 8 -wyldfire 8 -spear-like 8 -stracke 8 -paleo-indians 8 -crawshaws 8 -rabley 8 -carpinteria 8 -two-inches 8 -labral 8 -totalis 8 -okieze 8 -waitt 8 -istrategylabs 8 -meadowland 8 -ex-team-mate 8 -dja 8 -summersville 8 -hazazi 8 -stuggle 8 -meril 8 -skycruiser 8 -fintry 8 -tuanku 8 -dickan 8 -eight-state 8 -wafic 8 -sidronio 8 -ajang 8 -25pc 8 -kavalan 8 -gallia 8 -drizzles 8 -breakthough 8 -invisalign 8 -gleed 8 -˚ 8 -sabb 8 -pachon 8 -jode 8 -brownout 8 -stegemann 8 -uvda 8 -low-rated 8 -30-percent 8 -19-22 8 -hylton-reid 8 -embryologists 8 -eatonton 8 -handcycling 8 -tuneberg 8 -banwell-moore 8 -kamdyn 8 -osushi 8 -angest 8 -#boom 8 -vidas 8 -joint-highest 8 -colloid 8 -de-emphasize 8 -arrigoni 8 -imodium 8 -osezua 8 -quaich 8 -#feelsgood 8 -no-alcohol 8 -merrie 8 -123.9 8 -srr 8 -dorji 8 -2,364 8 -kimm 8 -gourock 8 -azia 8 -fishtailing 8 -zhiliang 8 -cameleon3 8 -winward 8 -coggin 8 -pochek 8 -esfahan 8 -barracudas 8 -mcsweegan 8 -chafes 8 -mckenney 8 -crye-leike 8 -3million-a-year 8 -hannin 8 -prateek 8 -computable 8 -sztokmanska 8 -graubünden 8 -drought-ridden 8 -union-united 8 -zenon 8 -ansaar 8 -grochowski 8 -weight-management 8 -burham 8 -newby-fraser 8 -flyway 8 
-crankcase 8 -msgs 8 -10b 8 -marquita 8 -5,000-a-night 8 -aquanaut 8 -rajabzadeh 8 -pixel-per-inch 8 -ordona 8 -tepuyes 8 -cocaine-fueled 8 -riphagen 8 -dimwitted 8 -lubera 8 -vous 8 -adrain 8 -sopot 8 -140,000-a-year 8 -tindy 8 -kanyuch 8 -3,995 8 -gowadia 8 -lanel 8 -puka 8 -smartness 8 -half-pint 8 -hunnings 8 -u15s 8 -gac 8 -holofernes 8 -radzokota 8 -minimisation 8 -money-no-object 8 -arcturos 8 -chabane 8 -7,950 8 -three-axis 8 -armon 8 -moratoriums 8 -double-drop 8 -potentates 8 -multi-terrain 8 -rains-wedan 8 -ultra-hd 8 -non-fan 8 -donlan 8 -nonbiological 8 -wendts 8 -768,000 8 -darussalam 8 -jacquemetton 8 -hagedorn 8 -sl55 8 -garw 8 -life-without-parole 8 -long-snouted 8 -grianna 8 -cannabis-derived 8 -fitwet 8 -platitude 8 -ohamov 8 -multiple-launch 8 -kylix 8 -scrunch 8 -benguigui 8 -freedland 8 -ortez 8 -redbacks 8 -mistenur 8 -fintech 8 -kaolin 8 -warland 8 -prenger 8 -kereight 8 -bjerregaard 8 -gildernew 8 -intermarry 8 -remonde 8 -sengis 8 -uejf 8 -armstrongs 8 -howerter 8 -nyiramasuhuko 8 -stettin 8 -00wartherapy00 8 -superhabitable 8 -vika 8 -tavassoli 8 -jhaghra 8 -demorand 8 -snapchatters 8 -pesters 8 -needletail 8 -ourrad 8 -biljana 8 -parinello 8 -waterhead 8 -fielke 8 -5.77 8 -rawlingson 8 -protoporphyrin 8 -devis 8 -barner 8 -oakervee 8 -zit 8 -remediated 8 -woodforth 8 -32-31-33 8 -arvanitidis 8 -rooker 8 -security-camera 8 -628-nautical 8 -drang 8 -sarwat 8 -waterparks 8 -lifestraw 8 -kuskopf 8 -cazalet 8 -3news 8 -liriano 8 -gillion 8 -2001-2006 8 -broden 8 -ogwuche 8 -sidan 8 -cubical 8 -arbury 8 -breaking-news 8 -farsley 8 -backstretch 8 -methomyl 8 -one-upping 8 -hobbit-themed 8 -230-foot 8 -gewürztraminer 8 -ils 8 -university-based 8 -fandy 8 -radravious 8 -testro 8 -wince-inducing 8 -teetotallers 8 -fenerbache 8 -willdajack 8 -2,400-a-month 8 -donawa 8 -ytl 8 -numbat 8 -fn-6 8 -control-freak 8 -quinsy 8 -cardana 8 -cluequest 8 -gorgodze 8 -washington-baltimore 8 -widely-followed 8 -wpsd 8 -rootscamp 8 -travelistic 8 -saich 8 -nae 8 -vinz 8 -amirlak 8 -clatworthy 8 -2,700-acre 8 -salat 8 -lammars 8 -49-7 8 -latiqwa 8 -mga 8 -visger 8 -harmonix 8 -honour-based 8 -supertyphoon 8 -gate-crashing 8 -72s 8 -aughton 8 -ridgebacks 8 -velar 8 -francescana 8 -0.92 8 -sallalich 8 -8 8 -poelnitz 8 -bisenius 8 -yalalova 8 -africapitalism 8 -julyan 8 -re-lay 8 -sadgrove 8 -2002-2010 8 -fl. 
8 -al-muqdad 8 -molodin 8 -sluman 8 -robichaux 8 -urbanspoon 8 -terpstra 8 -gelama 8 -chato 8 -burde 8 -mapoon 8 -520million 8 -astell 8 -tottle 8 -twittered 8 -menashri 8 -bods 8 -ex-seals 8 -cihan 8 -wiltord 8 -hot-desking 8 -56-yard 8 -ksi 8 -gocam 8 -upcountry 8 -anglea 8 -savopoulos 8 -56g 8 -lilith 8 -campante 8 -flagstick 8 -650ad 8 -angbwa 8 -angelically 8 -side-tracked 8 -figure-of-eight 8 -honeymead 8 -scada 8 -10-block 8 -tip-toe 8 -cundinamarca 8 -shotbolt 8 -amines 8 -holwood 8 -maccosmetics.co.uk 8 -ferrand 8 -guandong 8 -barefeet 8 -rondell 8 -exhalations 8 -graeber 8 -mabunda 8 -el-sabbagh 8 -filkins 8 -gazelle-like 8 -zoo-born 8 -estevanell 8 -alcl 8 -deoxyribonucleic 8 -nobel-winning 8 -haller-jorden 8 -corella 8 -22-12 8 -ex-butler 8 -mansingh 8 -eight-episode 8 -limericks 8 -leao 8 -leam 8 -double-fault 8 -untalented 8 -ginzburg 8 -3trillion-a-day 8 -non-compete 8 -compartmentalised 8 -busyness 8 -martijn 8 -vice-consul 8 -boiles 8 -lailani 8 -meadowgate 8 -esene 8 -tanee 8 -shahla 8 -beautyheaven.com.au 8 -alaita 8 -lmp1 8 -buyagift.com 8 -jommi 8 -chechnyan 8 -ngor 8 -worle 8 -gambacorto 8 -well-recognised 8 -ermenek 8 -sheriffâ 8 -1686 8 -al-amal 8 -alibaba.com 8 -marosan 8 -todorovski 8 -andong 8 -much-ridiculed 8 -chalabi 8 -seatgeek 8 -derose 8 -opthamologist 8 -bafokeng 8 -goldin-meadow 8 -piek 8 -chetana 8 -ooiio 8 -skills-based 8 -purnomo 8 -asteroseismology 8 -overdoes 8 -trethowan 8 -cohn-vargas 8 -nordics 8 -zantac 8 -moonset 8 -seventh-round 8 -siquiera 8 -cabellud 8 -team-related 8 -drees 8 -#family 8 -55-yard 8 -ironhide 8 -dunelm 8 -nagmeh 8 -bigfork 8 -21-member 8 -counter-offer 8 -yeronga 8 -threated 8 -well-populated 8 -salloum 8 -barcelos 8 -1,834 8 -pantelis 8 -kazakova 8 -conflict-stricken 8 -petitclerc 8 -ritholtz 8 -haydar 8 -lanell 8 -blouin 8 -raupp 8 -recollecting 8 -reinvigoration 8 -scotchy 8 -levanon 8 -budde 8 -2313 8 -woolner 8 -137.6 8 -alleno 8 -1578 8 -side-swept 8 -childbirth-related 8 -prosapio 8 -shelli 8 -gsi 8 -saint-german 8 -37.93 8 -evangelii 8 -ariet 8 -matthysen 8 -howsey 8 -shalaby 8 -saleslady 8 -doesnâ 8 -anuruddha 8 -seuva'ai 8 -ralfini 8 -briscome 8 -viriginia 8 -shriya 8 -heighway 8 -sarojini 8 -zimmerly 8 -5:18 8 -5:16 8 -puchala 8 -brewski 8 -27-yard 8 -hambro 8 -maynard-gibson 8 -sign-writer 8 -putnisite 8 -hgr 8 -blud 8 -larison 8 -gumigem 8 -maryna 8 -b&h 8 -taskrabbit 8 -nanospheres 8 -172nd 8 -gustiana 8 -obamneycare 8 -latvala 8 -1395 8 -jung-wook 8 -laberdesque 8 -posta 8 -toughed 8 -zalkans 8 -medolla 8 -zonooz 8 -ailesbury 8 -ps1-10afx 8 -broony 8 -card-skimming 8 -z-40 8 -soo-ji 8 -nicols 8 -mini-tournament 8 -cusper 8 -fussball 8 -jorgie 8 -declutter 8 -francisco-area 8 -yadlin 8 -christijan 8 -roc-a-fella 8 -frilot 8 -touran 8 -zombiu 8 -athene 8 -well-supported 8 -akkoyunlu 8 -close-quarter 8 -thickburger 8 -disney-themed 8 -aairpass 8 -lovehoney.co.uk 8 -senegal-born 8 -hafner 8 -feroce 8 -sauvons 8 -surquillo 8 -fitbits 8 -astute-class 8 -bright-roberts 8 -sheffey 8 -b-teams 8 -dht 8 -servos 8 -fletchers 8 -flowave 8 -owede 8 -keyarika 8 -ulfberht 8 -unmc 8 -trinamool 8 -hotcha 8 -councilmember 8 -siosi 8 -filthier 8 -10/9c 8 -laverty 8 -ostoloza 8 -@gselevator 8 -colvile 8 -hasaba 8 -arunga 8 -audino 8 -mahary 8 -mercedez-benz 8 -20,320 8 -kmov-tv 8 -tune-in 8 -jackasses 8 -hawo 8 -islamic-based 8 -contreras-ramirez 8 -wattanayagorn 8 -heung 8 -colour-blindness 8 -tollett 8 -hauxley 8 -olexandr 8 -osburn 8 -logjams 8 -gayus 8 -purepotions 8 -no-make-up 8 -gtmo 8 -araguaia 8 -portales 8 -400k 8 -solarte 
8 -evil-looking 8 -govinder 8 -ithaa 8 -burghers 8 -pawle 8 -lopez-canteraas 8 -xenu 8 -akinyuwa 8 -polat 8 -organizationally 8 -match-saving 8 -malins 8 -myst 8 -dearn 8 -pythagoras 8 -baugher 8 -danza 8 -kyotokumaru 8 -belstock 8 -entwining 8 -solveiga 8 -transkei 8 -cadran 8 -600ad 8 -gulbenkian 8 -szemalikowski 8 -husien 8 -ricke 8 -dorren 8 -latwann 8 -fistulas 8 -goueta 8 -large-bodied 8 -darkling 8 -thunderhill 8 -moskvy 8 -veit 8 -then-archbishop 8 -5.23 8 -6-series 8 -willborns 8 -accoring 8 -mamat 8 -27-0 8 -magnises 8 -abdolfattah 8 -non-wired 8 -reepham 8 -mutnovsky 8 -outisde 8 -steelmaker 8 -plesiosaur 8 -montanez 8 -de-nuclearization 8 -tempel 8 -méribel 8 -600-year 8 -handmaiden 8 -lungarotti 8 -tromp 8 -artemyeva 8 -thre 8 -scornfully 8 -spangdahlem 8 -1983-84 8 -spiderlabs 8 -longships 8 -dawlah 8 -heavy-hitters 8 -fishtailed 8 -rudzinski 8 -debt-limit 8 -warnie 8 -blekinge 8 -kofler 8 -northcliff 8 -teizer 8 -lohmeyer 8 -4-2-4 8 -bellaghy 8 -nahulu-mahelona 8 -waterfoot 8 -testrad 8 -cooloola 8 -chalcroft 8 -symphysiotomy 8 -muren 8 -munayyer 8 -188cm 8 -frecks 8 -jjimjilbang 8 -˜it 8 -ziemniak 8 -linkenholt 8 -kildea 8 -pfai 8 -pfau 8 -demascus 8 -woolsington 8 -@anthonyweiner 8 -acetylene 8 -socia 8 -13-13 8 -13-12 8 -inspiro 8 -@katyperry 8 -soft-hearted 8 -sticky-fingered 8 -gaits 8 -tinoco 8 -daba 8 -foubert 8 -perriand 8 -littlemore 8 -bruisers 8 -hoesen 8 -benko 8 -thelen 8 -saveur 8 -dut 8 -self-disclosure 8 -gephardt 8 -avermaet 8 -us3 8 -baeza 8 -age-specific 8 -awardees 8 -abercromby 8 -joire 8 -moravek 8 -mugo 8 -davidoff 8 -e-njoint 8 -reseda 8 -catenaccio 8 -paparic 8 -monsoon-like 8 -ferrovial 8 -tragicomic 8 -per-screen 8 -hawwa 8 -sauk 8 -diarmaid 8 -grael 8 -academicians 8 -wethly 8 -celebrity-led 8 -pelamis 8 -223rd 8 -pyper 8 -smith-blackmore 8 -eid-al-adha 8 -penmaenmawr 8 -0.98 8 -0.95 8 -50-bed 8 -telegram.com 8 -yaghmaian 8 -blueford 8 -larusso 8 -over-emotional 8 -waltzer 8 -eisman 8 -voshart 8 -llach 8 -fast-thinking 8 -ruapehu 8 -modern-era 8 -wichman 8 -mid-city 8 -16/5 8 -keychains 8 -svobodova 8 -space-saver 8 -1,608 8 -pictsweet 8 -judyth 8 -reali 8 -herpetological 8 -11.29 8 -coran 8 -@yayatoure 8 -compario 8 -hourei 8 -flans 8 -argy 8 -tilghman 8 -8mins 8 -dreamit 8 -mcjordan 8 -schoeni 8 -oaktree 8 -trull 8 -foreignpolicy.com 8 -skin-colored 8 -2014-2014 8 -berankis 8 -nairobi-bound 8 -pseudo-religious 8 -tbij 8 -milpitas 8 -streeters 8 -slants 8 -433,000 8 -9:16 8 -ncd 8 -ratri 8 -granen 8 -noonu 8 -irredeemable 8 -girly-girl 8 -libertys 8 -browerville 8 -enigmatically 8 -prizewinners 8 -sultanpur 8 -arreaza 8 -half-asleep 8 -snowmaking 8 -court-sanctioned 8 -invercauld 8 -25-years-to-life 8 -drive-up 8 -leer-greenberg 8 -lowthorpe 8 -barnsbury 8 -nude-coloured 8 -iya 8 -hell-raisers 8 -great-granny 8 -barazarte 8 -wargaming 8 -fly-infested 8 -idolization 8 -zeni 8 -walshaw 8 -astrophotographers 8 -80,800 8 -2,086 8 -2,084 8 -overvaluing 8 -solva 8 -in-front 8 -olingo 8 -nalyvaychenko 8 -altamore 8 -deluging 8 -h1b 8 -46-0 8 -bed-sit 8 -krigsman 8 -over-plucking 8 -20-goal 8 -spinella 8 -29-31 8 -taxa 8 -commonweath 8 -khyam 8 -luhby 8 -mirvis 8 -7mate 8 -manspreading 8 -castelldefels 8 -right-angle 8 -pre-u 8 -adn 8 -ground-launched 8 -cranio-facial 8 -desirialr 8 -sakru 8 -bining 8 -modzeleski 8 -scribblers 8 -gordano 8 -disinterment 8 -microfilms 8 -csapo 8 -longlisted 8 -kampfer 8 -maringa 8 -schulthies 8 -citysunderland 8 -iraola 8 -bronington 8 -rotenier 8 -hollstein 8 -9th/12th 8 -organelles 8 -sehdev 8 -big-shot 8 -chappel 8 
-libation 8 -veneneia 8 -so.cl 8 -benyam 8 -ben-yosef 8 -charolette 8 -jsc 8 -billlion 8 -pessoa 8 -test-class 8 -breakaways 8 -non-syrians 8 -srpska 8 -ugarte 8 -40-31 8 -vlogging 8 -snoopi 8 -sansiri 8 -268mph 8 -undercliff 8 -moogoo 8 -jimale 8 -1,362 8 -rumoro 8 -bushwacker 8 -liberec 8 -globalism 8 -24-hours-a-day 8 -julián 8 -sabry 8 -summerhays 8 -storytime 8 -ufluencer 8 -laurendeau 8 -deprioritised 8 -ento 8 -cryptograms 8 -rurua 8 -aerts 8 -carefully-worded 8 -soeren 8 -twitter-owned 8 -rafaqat 8 -tonacatepeque 8 -schratter 8 -tomasetti 8 -hobden 8 -counterattacking 8 -tullio 8 -jannot 8 -350ml 8 -qinhuai 8 -frankea 8 -leversee 8 -pegaso 8 -namgyal 8 -16:9 8 -strawser 8 -arimathea 8 -unsterile 8 -eddard 8 -medei 8 -medef 8 -automobilia 8 -hajee 8 -sub-adults 8 -keynes-based 8 -#cnnafrica 8 -kesel 8 -neem 8 -istick 8 -bowbelle 8 -catala 8 -dgsi 8 -aston-brown 8 -monica-malibu 8 -goalen 8 -1,357 8 -glascow 8 -long-barreled 8 -kaori 8 -1/1/84 8 -1/1/85 8 -oxygenating 8 -mortarboard 8 -great-grandfathers 8 -de-radicalisation 8 -poppa 8 -hairwork 8 -@newyorkcity 8 -22-acre 8 -baney 8 -banez 8 -optimizes 8 -hazley 8 -scholtz 8 -mtg 8 -professionally-planned 8 -borexino 8 -tippling 8 -westshore 8 -collingtree 8 -challice 8 -liezel 8 -kimteng 8 -cyber-criminal 8 -ancs 8 -whipper 8 -athenaeum 8 -200million-a-year 8 -jolliff 8 -alingar 8 -petfinder.com 8 -batya 8 -urdaneta 8 -jerrycan 8 -llah 8 -jahns 8 -budoff 8 -kringe 8 -midamerican 8 -arion1 8 -crash-course 8 -quoc-viet 8 -hlatshwayo 8 -5,460 8 -overthrows 8 -iconographic 8 -babbin 8 -bangana 8 -starrish 8 -wieghorst 8 -2332 8 -araghchi 8 -tiafoe 8 -turkish-syria 8 -1,108 8 -1,103 8 -gucciardi 8 -macroeconomics 8 -jixer 8 -receivable 8 -drinksavvy 8 -diddi 8 -standridge 8 -elinescu 8 -deep-cover 8 -burns-williamson 8 -iraniha 8 -ashol 8 -erdelyi 8 -herjavec 8 -al-deayea 8 -pyka 8 -suvarna 8 -chancha 8 -zerban 8 -silk-satin 8 -re-frame 8 -7:02 8 -fix-all 8 -uotsuri 8 -carreño 8 -bolschwing 8 -498,000 8 -12-episode 8 -smajdor 8 -maroc 8 -doebler 8 -5:32 8 -4:13 8 -rosana 8 -keelen 8 -kumpula 8 -clifers 8 -lower-skilled 8 -hagaoui 8 -kirli 8 -pedasí 8 -sino 8 -hermanson 8 -hauts-de-seine 8 -single-figure 8 -friis 8 -3ft-high 8 -aviv-based 8 -hartston 8 -cov 8 -a4093 8 -low-stress 8 -nclb 8 -nintedanib 8 -parit 8 -7:44 8 -30-39 8 -napoletani 8 -maquel 8 -popy 8 -carbon-monoxide 8 -vanaman 8 -kfyr 8 -esinam 8 -65-million-year-old 8 -patient-doctor 8 -grytviken 8 -xaverian 8 -computer-hacking 8 -snooks 8 -falzone 8 -najdi 8 -anna-maja 8 -9.04 8 -mahkovic 8 -zapfe 8 -8.44 8 -250-a-night 8 -hombori 8 -sleaziest 8 -beefier 8 -artut 8 -grenadian 8 -problem-solver 8 -shipanga 8 -squirrelly 8 -elpadaro 8 -hakakian 8 -highest-performing 8 -simcoach 8 -hangry 8 -manolis 8 -metal-frame 8 -aristo 8 -alfonse 8 -60mg 8 -ebd 8 -eba 8 -el-katateny 8 -window-shopping 8 -nahed 8 -spanks 8 -calviera 8 -1670s 8 -mesmerize 8 -expressively 8 -3,775 8 -sacolick 8 -vote-swap 8 -non-married 8 -cassiobury 8 -wordlessly 8 -yoyogi 8 -catsa 8 -nitmiluk 8 -disasterous 8 -mutamba 8 -baisalov 8 -polydactyl 8 -wikstrom 8 -355million 8 -ableidinger 8 -narmer 8 -103.7 8 -103.6 8 -tultepec 8 -jermey 8 -ollerenshaw 8 -databanks 8 -beltra 8 -five-foot-tall 8 -largier 8 -tessalit 8 -tophets 8 -makuria 8 -herminio 8 -celestron 8 -guvnors 8 -al-najjar 8 -465million 8 -american-themed 8 -valkyries 8 -peace-making 8 -four-minutes 8 -agresti 8 -kahu 8 -cvr 8 -cvn 8 -ferriero 8 -reath 8 -rochon 8 -84.1 8 -elemment 8 -al-shadadi 8 -granett 8 -sekwena 8 -ghozlan 8 -brf 8 -pancrazio 8 
-tolson 8 -lobola 8 -jazzing 8 -93per 8 -edman 8 -over-eat 8 -catlateral 8 -pfaeffikon 8 -saeeda 8 -#pushy 8 -catchall 8 -3:21 8 -mopti 8 -mellifera 8 -schlecht 8 -3.73 8 -unliveable 8 -cancalosi 8 -dito 8 -chumbley 8 -bezuijen 8 -aylsham 8 -cryptococcal 8 -pre-positioning 8 -parrinder 8 -827,000 8 -desiderio 8 -kruszelnicki 8 -bowmans 8 -battuta 8 -10-percent 8 -takai 8 -usurpers 8 -carrycot 8 -polka-dotted 8 -karbus 8 -franco-american 8 -correctable 8 -podcasting 8 -leterrier 8 -readjusts 8 -voix 8 -grade-schooler 8 -caesarean-section 8 -ismailov 8 -sleek-looking 8 -lp640 8 -debie 8 -garre 8 -buccament 8 -no-risk 8 -solar-paneled 8 -nyet 8 -chely 8 -lun-mei 8 -uludere 8 -onuzo 8 -taoism 8 -well-marked 8 -budrus 8 -thanksgivings 8 -adami 8 -malbrouck 8 -gilje 8 -47-nation 8 -blasik 8 -evisceration 8 -williamston 8 -vachon 8 -kalavyrta 8 -false-positives 8 -bittlestone 8 -rajeswari 8 -casaubon 8 -1,000-meter 8 -hyperlinks 8 -kariakoo 8 -atura 8 -sives 8 -fifteen-month-old 8 -kaluza 8 -hulled 8 -al-jibouri 8 -lembata 8 -stay-at-home-mum 8 -maerklin 8 -socialises 8 -crashaw 8 -301,000 8 -bestas 8 -zanardelli 8 -lily-livered 8 -iturra 8 -28-20 8 -multistage 8 -spray-tan 8 -zoot 8 -piffle 8 -rosal 8 -vasella 8 -wanya 8 -donana 8 -366,000 8 -supriatna 8 -diulka 8 -azahara 8 -j10 8 -sydney-bound 8 -druck 8 -chiorniy 8 -operating-system 8 -bettencourt-meyers 8 -125,800 8 -chainsaw-wielding 8 -loping 8 -barnsford 8 -rhubodach 8 -mennes 8 -moncks 8 -childishness 8 -polyamide 8 -240-mile 8 -alltami 8 -orkin 8 -soft-porn 8 -goshi 8 -job-training 8 -appearance-related 8 -mrca 8 -gloomily 8 -tracy-ann 8 -jafarzadeh 8 -calana 8 -soft-sided 8 -castillejos 8 -neiweem 8 -forsne 8 -haleavy 8 -nathusius 8 -prady 8 -hitchman 8 -constantijn 8 -abscessed 8 -fault-free 8 -descriptor 8 -cwrs 8 -joël 8 -bungays 8 -sketchers 8 -live-born 8 -nhmrc 8 -samaha 8 -33.19-carat 8 -premate 8 -cathays 8 -chumming 8 -endymion 8 -mulhern 8 -pagai 8 -semi-public 8 -mkoyan 8 -re-define 8 -handja 8 -coppercreek 8 -littlebig 8 -cnns 8 -zagui 8 -blueburger 8 -slidebar 8 -homburg 8 -ihr 8 -jacie 8 -in-seat 8 -hpl 8 -middlebrooks 8 -wghp 8 -male-to-male 8 -sound-proofing 8 -feature-film 8 -tobacconist 8 -3,419 8 -einsiedel 8 -yummypets 8 -gekoski 8 -creazzo 8 -marín 8 -guit 8 -guin 8 -praslin 8 -cock-fighting 8 -facbook 8 -200-million 8 -ruminants 8 -700-pupil 8 -soundcheck 8 -el-badri 8 -chocolate-coloured 8 -donestsk 8 -30-car 8 -sukhdev 8 -palicios 8 -allays 8 -9:33 8 -sherill 8 -al-naseri 8 -scarle 8 -long-hidden 8 -inner-western 8 -mononucleosis 8 -keyonnah 8 -koepp 8 -montecristo 8 -falim 8 -intimacies 8 -extremophile 8 -well-priced 8 -113.1 8 -skateparks 8 -great-grandmothers 8 -gehani 8 -surreally 8 -alhamdulillah 8 -felli 8 -counter-balance 8 -fha 8 -haughney 8 -now-ousted 8 -fairlight 8 -africa-inspired 8 -monsour 8 -neena 8 -spokeperson 8 -consumer-grade 8 -chaffinch 8 -neah 8 -foul-up 8 -archaelogists 8 -two-timed 8 -fish-eating 8 -14.0 8 -cartell 8 -dhanbad 8 -stretched-out 8 -skivington 8 -marvelon 8 -christini 8 -yonelisa 8 -gironde 8 -brise 8 -meigs 8 -koa 8 -131million 8 -channings 8 -tevfick 8 -asda.com 8 -galactus 8 -cuboid 8 -darnedest 8 -dystopic 8 -amauris 8 -daubhill 8 -verruca 8 -stalinsky 8 -urlik 8 -parapsychology 8 -seafoods 8 -domesticating 8 -eme 8 -self-starter 8 -furlan 8 -mikio 8 -151st 8 -facism 8 -kasthuri 8 -reconstituting 8 -state-supported 8 -china-focused 8 -lovebird 8 -sorosky 8 -a338 8 -gnh 8 -fly-tipper 8 -ex-felons 8 -anuar 8 -meadowbrook 8 -e.k. 
8 -pre-presidential 8 -cartoon-themed 8 -quethelie 8 -comcast-time 8 -frykowski 8 -bunkering 8 -pelter 8 -saltines 8 -curtsies 8 -realizations 8 -miche 8 -sergewa 8 -florrick 8 -aqab 8 -obligate 8 -kindig 8 -waldmon 8 -smartphone-controlled 8 -crunchiness 8 -essomba 8 -emetrece 8 -subbing 8 -modernizes 8 -bi-lingual 8 -stosny 8 -dockrell 8 -alaska-based 8 -predrag 8 -18-under-par 8 -returners 8 -battle-ravaged 8 -kokane 8 -638,000 8 -drug-user 8 -defensively-minded 8 -conformance 8 -www.pgatour.com 8 -kakaotalk 8 -red-white-and-blue 8 -tonkiss 8 -diethylene 8 -rohl 8 -non-students 8 -a417 8 -celebrity-loved 8 -free-styling 8 -davitt 8 -toxify 8 -2trillion 8 -ex-love 8 -lorcaserin 8 -by-gone 8 -chihuly 8 -sierotko 8 -3,271,611 8 -wraf 8 -balanda 8 -jeffels 8 -verifications 8 -tandberg 8 -b-max 8 -privation 8 -biospheres 8 -edta 8 -maynards 8 -tafzi 8 -jasvinder 8 -millport 8 -black-robed 8 -lightly-armed 8 -seong 8 -patient-targeted 8 -asefi 8 -centre-midfield 8 -34kg 8 -birkenshaw 8 -chanchal 8 -entrenches 8 -parrilli 8 -obuzor 8 -400-a-day 8 -1/1/68 8 -four-alarm 8 -blackballing 8 -nhial 8 -529,000 8 -annamarie 8 -1,566 8 -recompensed 8 -reddish-purple 8 -chabb 8 -pevsner 8 -re-enlisting 8 -dog-related 8 -brentnall 8 -stockyards 8 -nerea 8 -edvige 8 -teethmarks 8 -poppets 8 -pre-welfare 8 -rub-down 8 -rippa 8 -bjarni 8 -columbus-america 8 -multi-drug-resistant 8 -esthetic 8 -14.733 8 -wwjd 8 -1,878 8 -sociocultural 8 -forcelli 8 -skimpies 8 -tieniber 8 -committment 8 -craiova 8 -inocencio 8 -fast-twitch 8 -1532 8 -1,162 8 -8-9p 8 -800-meters 8 -anzisha 8 -cooktown 8 -homewear 8 -herald-record 8 -evrard 8 -mohna 8 -slocom 8 -couchsurfing.com 8 -stratified 8 -ochamchire 8 -shoemaking 8 -fanshare 8 -102ft 8 -bardoc 8 -chattooga 8 -bohuslav 8 -44-year-olds 8 -5:51 8 -canejo 8 -ahhhh 8 -gazumping 8 -nuplanit 8 -4:39 8 -mclaren-mercedes 8 -fylingdales 8 -litzenberger 8 -longboards 8 -seung-hi 8 -mcallen-edinburg-mission 8 -lung-bursting 8 -litigations 8 -bertolaso 8 -briggsy 8 -spurns 8 -fegan-earl 8 -jests 8 -poffo 8 -rucho 8 -mozos 8 -vessup 8 -6.32 8 -emans 8 -appleseed 8 -horse-mad 8 -parky 8 -mulchinock 8 -135m 8 -ex-leeds 8 -aramis 8 -indiravati 8 -twats 8 -123rd 8 -21,900 8 -muttley 8 -balas 8 -consumer-focused 8 -balal 8 -culliford 8 -hydrae 8 -wcjb 8 -gpml 8 -pro-police 8 -mccrindle 8 -unmeasured 8 -alaverdian 8 -winzenried 8 -gairns 8 -dubes 8 -wychowanec 8 -reanalyzed 8 -busybodies 8 -exploitations 8 -sabai 8 -butterflied 8 -ammi 8 -pitana 8 -arunachal 8 -news-enterprise 8 -countryâ 8 -eitel 8 -40-foot-wide 8 -o'donnells 8 -baaji 8 -poopertrator 8 -farahi 8 -oyo 8 -citywest 8 -dermalogica 8 -fyffes 8 -meliá 8 -hejaz 8 -redstarts 8 -georgio 8 -volafile 8 -río 8 -portioning 8 -touessrok 8 -thornwood 8 -insurable 8 -8x 8 -givemetap 8 -strobe-light 8 -kumparak 8 -no-warrant 8 -lingen 8 -gutiérrez 8 -lebanese-owned 8 -borns 8 -micronations 8 -behlen 8 -mastros 8 -marisota 8 -69c 8 -internationalize 8 -4,229 8 -4,222 8 -sesriem 8 -fullscream 8 -dunakin 8 -vrindaban 8 -@jdsutter 8 -fetuli 8 -foxhall 8 -princesshay 8 -smally 8 -cathinone 8 -nyanya 8 -hey-day 8 -squamish 8 -naturalize 8 -merseybeats 8 -gholam-hossein 8 -custance 8 -khazakstan 8 -geralt 8 -shorenstein 8 -looseness 8 -ashley-mead 8 -bleeding-heart 8 -nasa.gov 8 -eight-room 8 -smizing 8 -780million 8 -1,206 8 -neuilly 8 -puppyhood 8 -ctj 8 -ex-workers 8 -phoo 8 -thimpu 8 -ex-celebrity 8 -skinade 8 -hootan 8 -darell 8 -single-lens 8 -blathering 8 -sinews 8 -ennobling 8 -okamura 8 -j-32 8 -villavicencio 8 -swaggers 8 -job-based 8 
-notecards 8 -youre 8 -al-majed 8 -passenger-carrying 8 -weybourne 8 -zat 8 -011-52/987 8 -racialized 8 -aw-shucks 8 -lagneau 8 -sambrano 8 -haillie-rose 8 -punam 8 -travelandleisure.com 8 -veselin 8 -clogwyn 8 -contextualize 8 -pulisciano 8 -yanji 8 -sadnesses 8 -ridgeville 8 -reconciliatory 8 -simak 8 -spelunking 8 -self-starters 8 -melluso 8 -kyrilla 8 -ju-jitsu 8 -16-10 8 -sivori 8 -games-related 8 -backplane 8 -self-disciplined 8 -bdus 8 -40kph 8 -usai 8 -bradstreet 8 -parracombe 8 -gbarnga 8 -65,000-a-week 8 -rogen-james 8 -imphal 8 -somone 8 -gloveman 8 -never-married 8 -narangoda 8 -kubik 8 -fast-living 8 -419,000 8 -samaher 8 -westpark 8 -vaea 8 -autocracies 8 -15,000-a-month 8 -gobadi 8 -dia'a 8 -arancini 8 -sauaso 8 -dennis-palmer 8 -shahian 8 -b-class 8 -andresier 8 -de-baathification 8 -damped 8 -highly-strung 8 -coulon 8 -muchnick 8 -jorma 8 -mutya 8 -burguess 8 -ushanov 8 -time-capsule 8 -1410 8 -schoonrad 8 -autauga 8 -issue-oriented 8 -beelzebufo 8 -fazeli 8 -pictuerd 8 -buglioni 8 -tootell 8 -dogwalker 8 -babina 8 -co-organiser 8 -5400 8 -930million 8 -non-specialist 8 -copco 8 -dakotan 8 -low-rate 8 -goal-laden 8 -ballenilla 8 -shot@life 8 -nuku'alofa 8 -anglophone 8 -gig-goers 8 -2.4-liter 8 -meriting 8 -hills-trained 8 -15,000-20 8 -karena 8 -mpdv 8 -buzzfeed/cnn 8 -828,000 8 -mcquery 8 -#crimingwhilewhite 8 -tech-related 8 -lorded 8 -liftport 8 -nashat 8 -signed-in 8 -icms 8 -gemaco 8 -llobregat 8 -schnurr 8 -paxo 8 -nebraskans 8 -corydalis 8 -chugg 8 -after-taste 8 -arrowtown 8 -dys 8 -bisoi 8 -habanos 8 -mycause 8 -sparano 8 -80-mph 8 -wunna 8 -mackauf 8 -pro-communist 8 -keepy-up 8 -ghose 8 -chocolate-milk 8 -@cnnwriters 8 -lierle 8 -sharp-witted 8 -khon-tv 8 -perdy 8 -represses 8 -rathke 8 -agbodjelou 8 -bsnl 8 -renancourt 8 -gladysvale 8 -halpert 8 -56in 8 -genderbent 8 -10-feet 8 -85km 8 -anazodo 8 -niokoa 8 -athill 8 -scu 8 -spafford 8 -elementary-school 8 -r.t. 
8 -denno 8 -duralde 8 -sailrocket 8 -zinah 8 -ruthven 8 -delisting 8 -marriage-equality 8 -teichman 8 -nnaji 8 -tetouan 8 -bauhinia 8 -excising 8 -troikas 8 -reynoldsburg 8 -ronchi 8 -tech-centric 8 -textspeak 8 -danio 8 -krhin 8 -screwed-up 8 -126-121 8 -half-forgotten 8 -mindwave 8 -shulaa 8 -gousul 8 -amarante 8 -boeuf 8 -huoi 8 -chanes 8 -awal 8 -huip 8 -ferugson 8 -claddagh 8 -sanina 8 -kremlin-controlled 8 -amercian 8 -no5 8 -wayde 8 -fasotec 8 -loewenstein 8 -boxoffice.com 8 -congee 8 -enjoin 8 -super-obese 8 -arent 8 -29-acre 8 -75-mile 8 -ellouise 8 -herrell 8 -pepto 8 -combinator 8 -guekedou 8 -charest 8 -carome 8 -remitted 8 -chaderton 8 -safian 8 -telephonic 8 -eyesockets 8 -prelaunch 8 -gatchell 8 -commisioner 8 -scabbed 8 -fire-bombed 8 -minicamp 8 -super-galaxy 8 -namby-pamby 8 -hookway 8 -shiju 8 -rensing 8 -hyper-pigmentation 8 -egot 8 -skinz 8 -plentyoffish 8 -compleat 8 -rougoor 8 -click-bait 8 -kokes 8 -bergreen 8 -chethams 8 -umair 8 -supportively 8 -boosbeck 8 -bobe 8 -przybylski 8 -neo-colonialism 8 -bioactive 8 -ognjen 8 -gigayacht 8 -deregistered 8 -brainier 8 -puxton 8 -dewalt 8 -pawlik 8 -20-car 8 -takeshita-dori 8 -maralinga-tjarutja 8 -gonce 8 -revolucion 8 -juventud 8 -coachmen 8 -alkhanshli 8 -chaubey 8 -mullholland 8 -coriano 8 -riff-raff 8 -once-common 8 -home-baked 8 -waterston 8 -pulverise 8 -guryev 8 -helsingborgs 8 -buy-up 8 -quarterman 8 -microvascular 8 -paulinus 8 -lubeck 8 -rosés 8 -symbology 8 -riani 8 -2:04 8 -topolsky 8 -gluttons 8 -citarum 8 -role-model 8 -allnut 8 -daryle 8 -7.86 8 -blueservo.net 8 -perez-rodas 8 -borloo 8 -peary 8 -buttering 8 -kadugli 8 -antoniades 8 -bokhar 8 -kolleh-mcburrough 8 -spit-roasted 8 -damms 8 -towergate 8 -freakshow 8 -wluk 8 -venerating 8 -27,000-a-year 8 -135,303 8 -then-11-year-old 8 -aal 8 -1,326 8 -holed-up 8 -almanack 8 -tory-supporting 8 -marees 8 -91.8 8 -feraday 8 -foglio 8 -sung-hwan 8 -overhunting 8 -23-13 8 -oderus 8 -ortley 8 -#corybookerstories 8 -nyangatom 8 -1992-1993 8 -vizicities 8 -ursuline 8 -sefl 8 -davlin 8 -@tipsforjesus 8 -bandmember 8 -abigael 8 -hajah 8 -hcl 8 -pappano 8 -cochon 8 -chorwon 8 -1wtc 8 -16ins 8 -lacey-may 8 -ulysse 8 -ex-ireland 8 -laissez 8 -playin 8 -ear-piercing 8 -shinpad 8 -reactionaries 8 -self-promote 8 -lambreghts 8 -social-sharing 8 -ziben 8 -4.86 8 -4.82 8 -linh 8 -fit-up 8 -e-7a 8 -top-scale 8 -codi 8 -gooks 8 -jednel 8 -naghma 8 -r-arkansas 8 -pointillism 8 -mhs 8 -vishambar 8 -konare 8 -great-granddaughters 8 -mahlab 8 -restaino 8 -grifo 8 -meetup.com 8 -lakeport 8 -icecaps 8 -zanoli 8 -pirouetted 8 -011-52/322 8 -wolbeck 8 -calistoga 8 -disses 8 -1,892 8 -1,890 8 -peopleton 8 -sarkatzis 8 -computer-science 8 -azeem 8 -koffmann 8 -almada 8 -almadi 8 -gobsmacking 8 -97.25 8 -rootsy 8 -wayfarer 8 -anti-flu 8 -onepiece 8 -junking 8 -omori 8 -huahine 8 -hucheng 8 -locater 8 -buddon 8 -kwethluk 8 -kositpipat 8 -lazika 8 -race-track 8 -neyra 8 -lickin 8 -mujibur 8 -hv 8 -koler 8 -fazekas 8 -racioppo 8 -corydon 8 -shishani 8 -eyeteq 8 -rokaya 8 -chosing 8 -balzaretti 8 -bardin 8 -petpal 8 -61,600 8 -hypotonia 8 -co-executor 8 -pashuta 8 -markt 8 -redcurrant 8 -kalisha 8 -madinah 8 -qsr 8 -quantas 8 -billik 8 -clozapine 8 -mcdarby 8 -#twittersilence 8 -ocana 8 -pge2 8 -arrochar 8 -super-effective 8 -spain-gibraltar 8 -ice-rich 8 -12-volt 8 -faryd 8 -woollens 8 -hku 8 -altovise 8 -argiris 8 -librairie 8 -sladkus 8 -gil-ad 8 -crocket 8 -cashtomato 8 -sitanggang 8 -hytner 8 -khuzwayo 8 -brightener 8 -entrà 8 -konza 8 -eikrem 8 -petroleos 8 -lacey-jane 8 -9.47 8 -9.42 8 
-'75 8 -proedl 8 -propitious 8 -618,000 8 -xanta 8 -layups 8 -one-night-stands 8 -ostomies 8 -rain-triggered 8 -bullington 8 -fancifully 8 -48-room 8 -silive.com 8 -sábado 8 -591,000 8 -get-fit 8 -in-space 8 -re-paint 8 -franson 8 -limoncello 8 -pensioned 8 -efg 8 -lancashire-born 8 -breakin 8 -marchioli 8 -cses 8 -servillo 8 -canan 8 -polymicrogyria 8 -qaeda-allied 8 -haggadah 8 -mbaya 8 -lesiow 8 -stress-reduction 8 -schaus 8 -toulemonde 8 -britcliffe 8 -skokie 8 -manjura 8 -imprudently 8 -tilley-gyado 8 -dhaif 8 -lewknor 8 -rocheteau 8 -28lb 8 -whiner 8 -chytridiomycosis 8 -abdulah 8 -infrequency 8 -sedang 8 -testosterone-filled 8 -photochrom 8 -once-classified 8 -bampi 8 -prakarn 8 -milaydys 8 -caira 8 -iterative 8 -behn 8 -totting-up 8 -resa 8 -snarf 8 -caloia 8 -ancel 8 -ayutla 8 -1:14 8 -stohrer 8 -conigliaro 8 -chervony 8 -1,264 8 -f-35a 8 -accomodations 8 -clicky-wristed 8 -condensers 8 -kerlon 8 -doremus 8 -hizzoner 8 -moaveni 8 -hunt-hutchings 8 -nightstands 8 -jedran 8 -mediatek 8 -rahel 8 -sculling 8 -jayla 8 -trezise 8 -cruddy 8 -cruceiro 8 -sideswiping 8 -nepcote 8 -zivile 8 -Özgün 8 -47g 8 -dulal 8 -biographic 8 -kafer 8 -anup 8 -rozlin 8 -eggenton 8 -loenne 8 -34-month 8 -mammoliti 8 -hemmerle 8 -lewises 8 -msakni 8 -subjectivity 8 -669,000 8 -self-tanner 8 -ampyra 8 -pakatan 8 -requisition 8 -vyacheslavovna 8 -barcroft 8 -94.7 8 -nans 8 -wuebben 8 -bich 8 -rocy 8 -bammer 8 -nuthin 8 -6ft-high 8 -tyva 8 -zeshan 8 -sivills 8 -cran 8 -farhaan 8 -bilello 8 -kitzinger 8 -frannie 8 -tama-chan 8 -motha 8 -ponant 8 -moonwalker 8 -moonwalked 8 -deaden 8 -mooncheeze 8 -qylatron 8 -tetrachloride 8 -20-match 8 -cactuar 8 -chuv 8 -beigette 8 -baigrie 8 -johammer 8 -almansouri 8 -ingels 8 -aix 8 -policyholder 8 -gayles 8 -550-mile 8 -glass-half-full 8 -cadestin 8 -kimery 8 -matai 8 -sivas 8 -maniacally 8 -eight-seat 8 -oshima 8 -alarmism 8 -recht 8 -therrell 8 -54.7 8 -outred 8 -buquet 8 -overcomplicate 8 -heanor 8 -brayton 8 -hilsea 8 -misdiagnose 8 -27,100 8 -non-swiss 8 -36-second 8 -dowlett 8 -tsouvalas 8 -stamey 8 -par-fives 8 -grigoriy 8 -quintessence 8 -non-faith 8 -forneys 8 -tangmei 8 -prirazlomnaya 8 -abdel-aziz 8 -heartworms 8 -mccook 8 -nastastic 8 -lael 8 -damazo-santos 8 -ahronot 8 -corrett 8 -mergard 8 -wallendas 8 -chargehr 8 -.2003 8 -henot 8 -toyne 8 -1999-2010 8 -comiskey 8 -bonano 8 -voorschoten 8 -heever 8 -yowler 8 -snowedoutatlanta 8 -1257 8 -1254 8 -sudsy 8 -vico 8 -vojtech 8 -decoders 8 -rathie 8 -tagir 8 -handicapped-accessible 8 -anti-street 8 -ivrea 8 -sh-t 8 -gas-guzzler 8 -paharganj 8 -side-door 8 -calvery 8 -news-star 8 -amphipods 8 -phenytoin 8 -hillview 8 -shoe-bomb 8 -holstock 8 -@lewishamilton 8 -adventure-seeking 8 -agins 8 -veera 8 -pet-related 8 -roundhouses 8 -shebaa 8 -gerena 8 -red-coloured 8 -smokejumper 8 -dierenpark 8 -out-of-print 8 -water-bombing 8 -u.s.-rok 8 -sarnoff 8 -sarah-louise 8 -torchia 8 -jowly 8 -edun 8 -avalanched 8 -heart-stoppingly 8 -80.2 8 -natnael 8 -consciousness-raising 8 -tubbahata 8 -ciesielski 8 -silvin 8 -applegarth 8 -garbett 8 -soroka 8 -cranhill 8 -l'aiguille 8 -bedggood 8 -jamie-lynn 8 -3-14 8 -macmaster 8 -newme 8 -bookmarked 8 -hindu-christian 8 -spaciousness 8 -fremainville 8 -snoopsnitch 8 -feldhaus 8 -lowcostholidays.com 8 -hmps 8 -lalo 8 -55-room 8 -rahkine 8 -match-fix 8 -yin-yang 8 -reintegrates 8 -premio 8 -2,016 8 -atyrau 8 -trebeck 8 -tropical-storm-force 8 -yadira 8 -jellybeans 8 -nazi-hunting 8 -darkhotel 8 -crop-top 8 -shoe-horned 8 -ingratiating 8 -lulu-rose 8 -maintenon 8 -malil 8 -malic 8 
-shellharbour 8 -shilu 8 -jabarti 8 -neep 8 -kickstand 8 -fluvax 8 -ynca 8 -aliayah 8 -fekkai 8 -labarre 8 -bowdoin 8 -minley 8 -pila'a 8 -?????? 8 -kopple 8 -ternan 8 -forotv 8 -bole 8 -j.m.w. 8 -11.6-inch 8 -schnook 8 -pentagonal 8 -malbis 8 -krnv 8 -stitcher 8 -kalyussky 8 -ex-irs 8 -boulachanis 8 -derris 8 -post-menopause 8 -ohss 8 -visijax 8 -hahndorf 8 -covalent 8 -enemy-held 8 -abl 8 -kepler-186 8 -ifunny 8 -camayd-freixas 8 -emini 8 -pame 8 -pama 8 -paraguana 8 -myfoxtampabay.com 8 -anti-dogfighting 8 -webb-davidson 8 -coniophis 8 -rayshawn 8 -futs 8 -meirelles 8 -otway 8 -7.64 8 -bonbons 8 -nonuplets 8 -ahlburg 8 -bodos 8 -uhhh 8 -feuerman 8 -202-page 8 -backdown 8 -weight-bearing 8 -blood-lust 8 -llanbrynmair 8 -godwits 8 -experia 8 -li-chin 8 -goal-setting 8 -azizia 8 -graphing 8 -n30 8 -ex-playboy 8 -gooley 8 -astorino 8 -mcvittie 8 -undergrounding 8 -white-striped 8 -cargos 8 -mukhopadhyay 8 -urzi 8 -chidlren 8 -human-dominated 8 -natera-armenta 8 -break-time 8 -geonet 8 -lusseau 8 -bitterbaum 8 -dhoti 8 -eurosurveillance 8 -gebhardt 8 -13.95 8 -beremedy 8 -off-roaders 8 -cop18 8 -naustdal 8 -murium 8 -60-75 8 -proudman 8 -kober 8 -schumerth 8 -barreth 8 -wrex 8 -under-threes 8 -bzdek 8 -cudjoe 8 -wearne 8 -wiercinski 8 -mingolet 8 -kennelling 8 -braies 8 -160-year 8 -least-known 8 -tonino 8 -topliff 8 -hap 8 -khakrezwal 8 -baze 8 -evidence-gathering 8 -marrick 8 -rudnick 8 -drug-cartel 8 -chicksands 8 -bakehouse 8 -vodny 8 -state-building 8 -juniac 8 -shehla 8 -yanick 8 -samassa 8 -hilliar 8 -corini 8 -a43 8 -pattani 8 -1,528 8 -1,526 8 -over-age 8 -luisinho 8 -mjl 8 -dunetz 8 -73.7 8 -pipher 8 -110,100 8 -syllabi 8 -aurore 8 -southmen 8 -i-405 8 -43,100 8 -puzzle-solving 8 -fevrier 8 -chortles 8 -pratica 8 -pratico 8 -tossers 8 -post-combat 8 -edwardian-style 8 -sausage-making 8 -chancellorship 8 -mi9 8 -filitsa 8 -markelov 8 -salads/sandwich 8 -kukla 8 -buet 8 -neagu 8 -unsalvageable 8 -geekdom 8 -duno 8 -shuhada 8 -hahahahahaha 8 -hill-baker 8 -cruise-ship 8 -leafhoppers 8 -flys 8 -vieille 8 -u-cat 8 -angove 8 -almarai 8 -kuhnhausen 8 -shortlisting 8 -hedblom 8 -messanges 8 -sportsnight 8 -mugaritz 8 -ekimov 8 -google-backed 8 -goomeri 8 -over-stepped 8 -shoot-em-up 8 -denizard 8 -lebon 8 -kuper 8 -emblazoning 8 -alexandalexa 8 -andrique 8 -shimshi 8 -baoding 8 -erotically 8 -marinelli 8 -baker-stedham 8 -lipophilic 8 -tailenders 8 -springfields 8 -1314 8 -closed-toe 8 -marinate 8 -finnmark 8 -over-treated 8 -katydids 8 -carltonlima 8 -12-guage 8 -lammer 8 -purple-colored 8 -bellgrove 8 -rock-face 8 -strobing 8 -jagatic 8 -lee-on-the-solent 8 -counterrevolution 8 -9.66 8 -elephantine 8 -sleepify 8 -3,950 8 -ebby 8 -leaven 8 -harmonium 8 -befor 8 -job-protected 8 -over-anxious 8 -toyama 8 -inklings 8 -vnexpress 8 -evouna 8 -tilders 8 -george.com 8 -braskem 8 -chanelled 8 -multi-alarm 8 -scotswoman 8 -speechifying 8 -ehc 8 -eho 8 -anglo-russian 8 -kickass 8 -juresko 8 -yusof 8 -royales 8 -automobili 8 -whitewall 8 -breadmakers 8 -cogburn 8 -reincarnate 8 -mauretani 8 -toastmaster 8 -roshonara 8 -hand-raising 8 -nauman 8 -pinkard 8 -rajala 8 -2686 8 -garmley 8 -deshaun 8 -sahr 8 -arapiles 8 -similar-sounding 8 -irsa 8 -rereleased 8 -winscombe 8 -artistes 8 -university/cbs 8 -tripwires 8 -owen-owned 8 -quantavious 8 -1:34 8 -postgraduates 8 -7,000-foot 8 -1,246 8 -2004-5 8 -citisoles 8 -caretech 8 -20metres 8 -creepy-ass 8 -cpg 8 -kemmons 8 -3.91 8 -anti-vaxxers 8 -reputation.com 8 -duzgan 8 -burkovskiy 8 -ufo-like 8 -hafif 8 -clumsiest 8 -mærsk 8 -ston 8 -u.n.-run 8 
-monographs 8 -walmart.com 8 -dream-come-true 8 -2,710 8 -samim 8 -al-shahristani 8 -martock 8 -poloko 8 -muckraker 8 -buprenorphine 8 -raiu 8 -monaca 8 -fizi 8 -autodrome 8 -tabernacles 8 -delevinge 8 -happy-looking 8 -non-potable 8 -moorley 8 -brantley-rios 8 -goleniowski 8 -debriefs 8 -zverev 8 -gwinett 8 -heimo 8 -#qanda 8 -germinal 8 -scsl 8 -macneills 8 -hursley 8 -70.1 8 -wesbite 8 -hallucinates 8 -titanosaurs 8 -ship-to-ship 8 -nakarawa 8 -arrmanatha 8 -tosarvandan 8 -co-captains 8 -caloundra 8 -flensburg 8 -2-r 8 -pachamama 8 -shahab-3 8 -winkelhock 8 -gerding 8 -caesarians 8 -automatonophobia 8 -husin 8 -caste-based 8 -krystel 8 -centinela 8 -72mins 8 -vacher 8 -hamilton-jewell 8 -barcrawl 8 -anglo-welsh 8 -'28 8 -legally-married 8 -bralet 8 -ulama 8 -ilhwa 8 -pattered 8 -vigipirate 8 -alphas 8 -backrower 8 -criminalist 8 -scent-marking 8 -aggrandizement 8 -95per 8 -cold-blood 8 -vasse 8 -91-year 8 -katznelson 8 -re-confirmed 8 -nardi 8 -steppan 8 -camelia 8 -http://www.civiced.org/ 8 -601-member 8 -sedgewick 8 -gahleitner 8 -messerli 8 -star-formation 8 -saravan 8 -excretes 8 -kuhles 8 -esf 8 -esd 8 -body-cam 8 -pata 8 -varughese 8 -khairi 8 -messageme 8 -roudeline 8 -92.50 8 -scanimation 8 -verbalize 8 -zambellas 8 -microlift 8 -fusarium 8 -semiconscious 8 -fultz 8 -addressable 8 -18-story 8 -pagosa 8 -under-eights 8 -salaun 8 -34,400 8 -adjuncts 8 -giotto 8 -dargusch 8 -painted-on 8 -saracino 8 -anglo-indian 8 -feminized 8 -viau 8 -842,000 8 -sayegh 8 -760m 8 -small-bore 8 -gunilla 8 -lyvia 8 -planethunters.org 8 -cateau 8 -sewall 8 -caussyram 8 -aunor 8 -schroepfer 8 -canaanite 8 -skepta 8 -1970-71 8 -anarkali 8 -gdst 8 -disobliging 8 -mankell 8 -overbreeding 8 -ohamana 8 -buzzi 8 -mónica 8 -bomb-damaged 8 -cockington 8 -turkov 8 -fallouts 8 -pre-chemotherapy 8 -mr8 8 -rexroat 8 -sok 8 -light-welter 8 -rideshare 8 -pre-digestive 8 -democrat-backed 8 -schiada 8 -cross-complaint 8 -camutos 8 -vaquitas 8 -shufu 8 -desha 8 -fangping 8 -time-lapses 8 -danum 8 -swoveland 8 -nastygal 8 -comsonics 8 -cholinesterase 8 -louisburg 8 -overburdening 8 -@mooseygamer 8 -773,000 8 -shamiram 8 -fruit-picking 8 -sober-living 8 -ex-tv 8 -:35 8 -dumitrache 8 -10.39 8 -x17online 8 -pictograms 8 -wingsail 8 -lefkofsky 8 -moorers 8 -over-stayed 8 -speedman 8 -facebooker 8 -facebooked 8 -carabiners 8 -hard-throwing 8 -3-inches 8 -jolivert 8 -eyenaemia 8 -ophiuchus 8 -punctually 8 -deul 8 -5-foot-1 8 -ashante 8 -dannelley 8 -cbsnews 8 -activeclass 8 -lourie 8 -reoccupied 8 -news-democrat 8 -agan 8 -targhee 8 -dwarte 8 -issur 8 -545million 8 -maco 8 -kemba 8 -27,200 8 -saturno 8 -overflown 8 -vicktory 8 -tayeb 8 -pashanin 8 -sueur 8 -auto-injectors 8 -www.yahoo.co.uk/worldcup 8 -topiaries 8 -hat-tip 8 -reinterprets 8 -grubstreet 8 -460-mile 8 -ade651 8 -repopulating 8 -hazrata 8 -weyls 8 -tsarskoye 8 -cristeta 8 -brians 8 -riot-related 8 -gabellini 8 -self-injurious 8 -375m 8 -skumanick 8 -kil 8 -daum 8 -rocket-fueled 8 -impoverish 8 -steckler 8 -g.l. 
8 -vr-a 8 -100-feet 8 -thammarat 8 -shingled 8 -stick-figure 8 -98.9 8 -myanna 8 -wertz 8 -canonically 8 -pre-marriage 8 -yarbro 8 -medanta 8 -jarek 8 -kaiyuan 8 -groats 8 -karbouli 8 -new-season 8 -ziglar 8 -2:44 8 -2:47 8 -2:49 8 -pertman 8 -swiss-french 8 -nike.com 8 -7.48 8 -hopton-on-sea 8 -vedant 8 -bunmi 8 -stenstrom 8 -cepulionis 8 -byob 8 -55-foot 8 -over-16s 8 -labrador-chow 8 -4-1/2 8 -footfalls 8 -annelie 8 -basindwa 8 -eleftheriadis 8 -tri-cities 8 -short-period 8 -lupoi 8 -soviet-trained 8 -ptr 8 -blander 8 -bonell 8 -burlakoti 8 -herdwick 8 -quannengshen 8 -papadopolous 8 -matvei 8 -eight-foot-tall 8 -dumpleton 8 -buruca 8 -najeh 8 -nasolabial 8 -pilkey 8 -handsaw 8 -maternities 8 -demobilized 8 -gassew 8 -petrou 8 -often-overlooked 8 -sitra 8 -deceiver 8 -kraddick 8 -hextable 8 -vae 8 -limited-government 8 -nige 8 -whic 8 -kayli 8 -skene 8 -17,000-strong 8 -geogenetics 8 -ac72 8 -last-gen 8 -willborn 8 -cool-looking 8 -cissoko 8 -mosquito-infested 8 -muneer 8 -mareeba 8 -a68 8 -exil 8 -111m 8 -1,502 8 -unpersuasive 8 -samatha 8 -arslanian 8 -onziema 8 -celente 8 -fourth-seed 8 -capsaicinoids 8 -28-month 8 -kashia 8 -dainton 8 -al-fahim 8 -somoles 8 -ill-trained 8 -croydoc 8 -shud 8 -792,000 8 -slavcheva 8 -shiflet 8 -tijani 8 -al-omar 8 -three-seven-zero 8 -ananskikh 8 -reinwardt 8 -chasity 8 -ayyappan 8 -148th 8 -ambiguously 8 -heidgen 8 -badesha 8 -backwash 8 -1-1/2 8 -lien-fa 8 -drakes 8 -robar 8 -nightscapes 8 -kidron 8 -weinjen 8 -raulie 8 -ballycraigy 8 -pijanowski 8 -mcgonegal 8 -228,288 8 -maanda 8 -birch-bark 8 -injera 8 -1-800-call-fbi 8 -cafeteria-style 8 -ostfeld 8 -ambroeus 8 -us5 8 -mcglashen 8 -west-bound 8 -700bc 8 -timeworn 8 -hougesen 8 -torlopova 8 -achindu 8 -verdick 8 -trindade 8 -rufio 8 -awc 8 -prickle 8 -bardey 8 -glushu 8 -155-pound 8 -eligidagne 8 -mezcal 8 -contentiousness 8 -ex-international 8 -ferdinands 8 -450-room 8 -popolo 8 -probables 8 -pre-ashes 8 -crackstarter 8 -zama 8 -reality-based 8 -invalidity 8 -villagomez-saldan 8 -flamingoes 8 -congresos 8 -devecser 8 -r-wis 8 -milanes 8 -6.59 8 -thunderdome 8 -éclairs 8 -bazza 8 -decedents 8 -degenerating 8 -lodestar 8 -doxylamine 8 -pernas 8 -tory/lib 8 -65mins 8 -12-carat 8 -azimuth 8 -real-looking 8 -lucho 8 -chiarolanza 8 -witchery 8 -chomsky 8 -vijecnica 8 -lenh 8 -#iftheygunnedmedown 8 -ashling 8 -9.86 8 -moluccans 8 -educationalists 8 -coultas 8 -4-mei 8 -isipho 8 -scissor-like 8 -shockproof 8 -dignan 8 -gypin 8 -out-of-shape 8 -1-meter 8 -chuffer 8 -busway 8 -andzelina 8 -2,232 8 -twining 8 -tencate 8 -ejf 8 -award-wining 8 -fetcher 8 -bisphosphonates 8 -cavernomas 8 -nasogastric 8 -pallace 8 -joint-chairman 8 -europe-bound 8 -kandapara 8 -frigide 8 -cuse 8 -50-foot-long 8 -5m-a-year 8 -cult-classic 8 -rissi 8 -penpushers 8 -julleen 8 -tuti 8 -ukrainy 8 -600,000-a-year 8 -gallez 8 -charitybuzz 8 -darder 8 -chin-length 8 -mozo 8 -rear-admiral 8 -sketchwriter 8 -förstemann 8 -bastawi 8 -tuohey 8 -shoygu 8 -sons-in-law 8 -98-93 8 -keltbray 8 -bandito 8 -callendar 8 -matzo 8 -schiatti 8 -bahoui 8 -ockerby 8 -x-trail 8 -cheviot 8 -gunshow 8 -bully-ish 8 -make-ups 8 -speedsters 8 -warkworth 8 -floresta 8 -percin 8 -gamcheon 8 -eckhardts 8 -picardie 8 -oversier 8 -kalathat 8 -nowata 8 -176x 8 -border-crossing 8 -thromboembolism 8 -forero 8 -carpools 8 -al-mamuri 8 -gerghiceanu 8 -chang-jung 8 -nine-night 8 -glossiest 8 -1,751 8 -dornin 8 -al-islami 8 -constitucion 8 -bio-mechanics 8 -111.3 8 -sentience 8 -oversharers 8 -thrombus 8 -cabrillo 8 -beautridge 8 -stick-like 8 -wonka-style 8 -bny 8 
-pearling 8 -chiverton 8 -hominy 8 -immunosuppressive 8 -elucidate 8 -hnlms 8 -human-resources 8 -remotely-piloted 8 -litterkwitter 8 -super-green 8 -botes 8 -marger 8 -higher-education 8 -serendipitously 8 -guardian-reading 8 -6-14 8 -6-18 8 -rine 8 -egd 8 -rose-gold 8 -permira 8 -golkanbhan 8 -renames 8 -seven-floor 8 -bodvarsson 8 -minatomirai 8 -coma-like 8 -al-rabiah 8 -turgal 8 -cged 8 -kintore 8 -crouchy 8 -itsy-bitsy 8 -wiseby 8 -undercount 8 -denholme 8 -anti-coagulant 8 -ecas 8 -nedra 8 -powwow 8 -brushback 8 -alcohol-dependent 8 -dismounts 8 -15-29 8 -15-21 8 -pulled-pork 8 -sanitas 8 -toller 8 -janeth 8 -aslet 8 -malabehar 8 -centacare 8 -bukhara 8 -nephi 8 -khameini 8 -silvstedt 8 -angiosperms 8 -high-walled 8 -feaver 8 -abdinasir 8 -shuffleboard 8 -250billion 8 -3,278 8 -wilson-britten 8 -giannina 8 -gwot 8 -abdelbaky 8 -bourj 8 -ohly 8 -rakib 8 -desperito 8 -gop-dominated 8 -crisan 8 -cranbury 8 -mapisa-nqakula 8 -ogmundsson 8 -muisca 8 -sordal 8 -alhiwidi 8 -crawfords 8 -alexandrov 8 -purplish-red 8 -peach-coloured 8 -la-dwina 8 -zero-day 8 -vogtle 8 -schwarzenbach 8 -lynelle 8 -hyper-aggressive 8 -violence-marred 8 -glitch-prone 8 -shomali 8 -khill 8 -graupera-cassimiro 8 -scrum-halves 8 -30.15 8 -7-foot-long 8 -hairgen 8 -yung-jan 8 -leage 8 -enunciated 8 -polydimethylsiloxane 8 -1216 8 -lesage 8 -unpocket 8 -singtel 8 -three-generation 8 -half-cat 8 -cosplayer 8 -roll-off 8 -yegorova 8 -globally-successful 8 -churchwell 8 -zootaxa 8 -2104 8 -aisar 8 -morganucodon 8 -cocroft 8 -calvaruso 8 -gaddings 8 -beckwiths 8 -militarist 8 -mashups 8 -mccotry 8 -camdenton 8 -mulet 8 -peipert 8 -136.78 8 -shahnawaz 8 -shafiul 8 -laluna 8 -dutch-language 8 -sexual-abuse 8 -giampietro 8 -smf 8 -waterbuck 8 -gilleland 8 -135kg 8 -d'leh 8 -glendalough 8 -paul-julien 8 -roebling 8 -annesley 8 -pd-1 8 -cecco 8 -whelks 8 -aeroboat 8 -shelf-stacker 8 -9/11-related 8 -reemerge 8 -pennypacker 8 -razo 8 -bovines 8 -tyahnybok 8 -anatol 8 -philthy 8 -covacci 8 -kaoma 8 -bluesmart 8 -birr 8 -delap 8 -draftees 8 -#looksgood 8 -moowe 8 -highline179 8 -langenbach 8 -ibom 8 -kebony 8 -mashed-up 8 -anti-armor 8 -oxford-cambridge 8 -lunchrooms 8 -search-and-seizure 8 -faqir 8 -whoscored.com 8 -faqih 8 -broadoak 8 -over-used 8 -dacosta 8 -nemec 8 -judder 8 -ghc 8 -breakdancer 8 -monets 8 -eye-for-an-eye 8 -wythenshaw 8 -quark-gluon 8 -harben 8 -hunke 8 -hunka 8 -illini 8 -erchull 8 -coulthart 8 -al-waer 8 -f# 8 -pen-pals 8 -skipp 8 -spermidine 8 -step-siblings 8 -adelphia 8 -kazarama 8 -sellouts 8 -17lbs 8 -then-nfl 8 -transaero 8 -11-7 8 -pasteurise 8 -pflp 8 -pohanka 8 -geelani 8 -household-name 8 -kauffmann 8 -5inch 8 -bissoe 8 -sousley 8 -anti-same-sex 8 -2,535 8 -pesach 8 -ex-boston 8 -energy-boosting 8 -al-durrah 8 -atf3 8 -stonewash 8 -blitzen 8 -clubhotel 8 -ovaltine 8 -jasons 8 -alonside 8 -paia 8 -pro-china 8 -body-painted 8 -landi 8 -galazia 8 -latz 8 -superpac 8 -crabmeat 8 -trago 8 -medicity 8 -jehad 8 -cent5 8 -lippincott 8 -lefleur 8 -apples-to-apples 8 -holyoak 8 -subreddits 8 -re-elections 8 -nimer 8 -strichen 8 -yevgen 8 -betvictor 8 -re-fuel 8 -pisgat 8 -hillwalkers 8 -lasource 8 -maltreating 8 -vencat 8 -150mm 8 -abq 8 -36e 8 -groeschel 8 -rnl 8 -amama 8 -1,386 8 -1,388 8 -barkel 8 -tight-end 8 -uvf 8 -tenisha 8 -koito 8 -sires 8 -career-making 8 -jaktogo 8 -rajasurirar 8 -ex-offender 8 -witzke 8 -kostenko 8 -highly-successful 8 -lc-32lx85 8 -notowidigo 8 -noriyuki 8 -haythornthwaite 8 -chislett 8 -facedeals 8 -staplers 8 -one-kilogram 8 -futurism 8 -2010-now 8 -three-pack-a-day 8 
-mohtarma 8 -70million-to-one 8 -hmy 8 -five-percenters 8 -bezel-free 8 -grandal 8 -actium 8 -ghasem 8 -laucala 8 -pacus 8 -rieder 8 -biegun 8 -kebe 8 -glomar 8 -shatkin 8 -yodels 8 -lida 8 -sherridan 8 -chailert 8 -ignighter 8 -kalashnikov-wielding 8 -al-abbadi 8 -zaporozhye 8 -tax-funded 8 -8-20 8 -hypochondriacs 8 -3p-a-litre 8 -azmal 8 -clowntown 8 -dybacz 8 -hydrochlorothiazide 8 -mns 8 -spacagna 8 -veruca 8 -shupback 8 -smith-schafer 8 -yeldham 8 -dovecot 8 -dostie 8 -three-door 8 -carob 8 -cip 8 -lynde 8 -re-grouped 8 -half-dead 8 -naryshkin 8 -blaum 8 -shorte 8 -comparethemarket 8 -quittez 8 -vm 8 -prizewinner 8 -faxing 8 -texas-style 8 -kogan.com 8 -newly-announced 8 -rydell 8 -indent 8 -coauthored 8 -abashed 8 -euro-era 8 -gza 8 -mangongo 8 -crenes 8 -570m 8 -woollatt 8 -zendehdel 8 -wrighty 8 -oludeniz 8 -macroscelides 8 -harnam 8 -allum 8 -30minutes 8 -devarajan 8 -smokemart 8 -4,480 8 -5-9 8 -2,600-year-old 8 -373,000 8 -promethazine 8 -hubbard-riley 8 -m249 8 -shaloudi 8 -abendanon 8 -munsinger 8 -vanderwerff 8 -kaliese 8 -livix 8 -ojuederie 8 -ocoa 8 -fuga 8 -re-sized 8 -tooted 8 -ear-shattering 8 -scudding 8 -micro-blogger 8 -boxter 8 -roeper 8 -al-huthaili 8 -bracero 8 -assynt 8 -two-on-one 8 -xli 8 -savard 8 -suttor 8 -cock-ups 8 -bartoletta 8 -divots 8 -qanta 8 -monroe-woodbury 8 -harbert 8 -carretera 8 -antanas 8 -sachsalber 8 -lorigan 8 -keynoter 8 -u-bend 8 -walburn 8 -pelc 8 -sabie 8 -avalor 8 -open-carry 8 -cichlid 8 -!!!!!!!!! 8 -hodara 8 -shahed 8 -shahrukh 8 -florinda 8 -morghab 8 -transall 8 -wittelsbach 8 -tillen 8 -soulard 8 -183rd 8 -720million 8 -argoed 8 -pulistar 8 -chinese-australian 8 -ela 8 -submental 8 -osenat 8 -teddybears 8 -modest-sized 8 -vanderlip 8 -beavill 8 -gayheart 8 -#fergusonunderis 8 -near-zero 8 -then-army 8 -devo-max 8 -sapsan 8 -spritzers 8 -fore-edge 8 -cheesey 8 -21-15 8 -palmere 8 -??! 
8 -gastropods 8 -maarfi 8 -cultivars 8 -misremembered 8 -schifferle 8 -amavisca 8 -neatness 8 -magenn 8 -banna 8 -whdh.com 8 -non-fluoridated 8 -urick 8 -sinai-based 8 -myu 8 -153million 8 -mukoko 8 -iryna 8 -1212 8 -castlebeck 8 -gohar 8 -siprnet 8 -ancop 8 -50-person 8 -faux-leather 8 -kaleme 8 -expedience 8 -birken 8 -back-stage 8 -n'daw 8 -regeneron 8 -krnv-tv 8 -briolini 8 -yebes 8 -maffett 8 -bodytite 8 -wtvg 8 -wcau-tv 8 -murderously 8 -braggart 8 -trakai 8 -rethinks 8 -orbicularis 8 -millatu 8 -kacelnik 8 -600bhp 8 -sexercise 8 -shermaine 8 -266million 8 -first-party 8 -then-lover 8 -brinkema 8 -tieu 8 -varyag 8 -bramson 8 -molla 8 -doxycycline 8 -polytechnics 8 -hayduk 8 -klavina 8 -hanton 8 -deeply-rooted 8 -olthuis 8 -bodhisattva 8 -hyperinflationary 8 -nisoor 8 -right-time 8 -1014 8 -1013 8 -samlesbury 8 -rosnay 8 -weixin 8 -hartcliffe 8 -snidely 8 -jackson-cooke 8 -aslanova 8 -nano-particles 8 -saidam 8 -kneepads 8 -110cm 8 -resnik 8 -24-bed 8 -clouted 8 -beauteous 8 -hertzberg 8 -denormandie 8 -faires 8 -gs4 8 -repast 8 -grommets 8 -steel-and-concrete 8 -cortège 8 -soundboard 8 -49billion 8 -skulason 8 -potosí 8 -american-accented 8 -21-months-old 8 -emmer 8 -spritzes 8 -spritzer 8 -spritzed 8 -poer 8 -gun-friendly 8 -trapero 8 -'63 8 -woitape 8 -crime-related 8 -huidong 8 -zemanova 8 -show-stealing 8 -uyara 8 -alphametrix 8 -declarative 8 -hersfeld 8 -refectory 8 -collbran 8 -pandiani 8 -noujaim 8 -franciszek 8 -11.41 8 -feasters 8 -host-city 8 -stepper 8 -bessel 8 -chabrieres 8 -lottery-funded 8 -iseman 8 -embarrasing 8 -karelian 8 -wisher 8 -sharifi-ha 8 -oregon-born 8 -high-iq 8 -torn-down 8 -mi-bullet 8 -itty-bitty 8 -microsomia 8 -people-power 8 -hanny 8 -amarjeet 8 -whiteflies 8 -reanimated 8 -body-scanning 8 -diangienda 8 -nude-colored 8 -zurcher 8 -72oz 8 -backdoor.breut 8 -earlsdon 8 -majdi 8 -gurukanth 8 -rothfeld 8 -boden.co.uk 8 -egomaniacal 8 -sonnie 8 -bank-rolling 8 -blazingly 8 -photospheres 8 -hopelab 8 -sickbay 8 -mahawar 8 -witchdoctor 8 -1235 8 -hamburglar 8 -as-needed 8 -crassly 8 -osterberg 8 -rouffanche 8 -shope 8 -briggo 8 -conlisk 8 -erlikosaurus 8 -jeroboam 8 -shreeve 8 -dirandro 8 -craigwell 8 -morquio 8 -gruesome-looking 8 -antilock 8 -clinton-gore 8 -beccs 8 -tiedt 8 -shabu 8 -caska 8 -schaffel 8 -acropora 8 -kalogerakos 8 -gachette 8 -yuengling 8 -byy 8 -organovo 8 -mdwise 8 -dongsheng 8 -duisberg 8 -hamipterus 8 -mooncake 8 -kornfield 8 -movie-trailer 8 -kookogey 8 -dancey 8 -37-mile 8 -1.5-metre 8 -now-traditional 8 -shaitan 8 -lisewska 8 -18-member 8 -aquarids 8 -huixian 8 -aol-owned 8 -templarios 8 -lewkowicz 8 -3.83 8 -guillym 8 -congenita 8 -leviathans 8 -youmans 8 -nine-carat 8 -presages 8 -cubelli 8 -then-iraqi 8 -5-acre 8 -beinn 8 -34-17 8 -newsboy 8 -unsual 8 -hebrich 8 -wolmark 8 -matagorda 8 -over-exploitation 8 -coursey 8 -hochsteins 8 -daguerre 8 -al-bilawi 8 -faceb4 8 -al-farouq 8 -working-level 8 -amelle 8 -ulreich 8 -steib 8 -tertre 8 -15th-ranked 8 -al-jolani 8 -full-featured 8 -pharrel 8 -khane 8 -window-cleaning 8 -hantsch 8 -o'pry 8 -taschler 8 -strebe 8 -coat-of-arms 8 -slower-moving 8 -benini 8 -casbah 8 -haslar 8 -consciousnesses 8 -cell-based 8 -employes 8 -rule-breakers 8 -meshal 8 -panchenkova 8 -jessett 8 -candombe 8 -kindoki 8 -pouille 8 -mamadee 8 -aprisdianto 8 -subaquatic 8 -petten 8 -central-west 8 -eventim 8 -rizwana 8 -post-hosni 8 -markman 8 -dieke 8 -agri-tech 8 -rashard 8 -sprouse 8 -communions 8 -chinky-poos 8 -#pandorawishes 8 -congolese-born 8 -engagment 8 -21-storey 8 -technology-related 8 -kex 8 
-samworth 8 -community-service 8 -multimodal 8 -al-kidra 8 -cringe-making 8 -skybus 8 -joshue 8 -ulzheimer 8 -tenosynovitis 8 -suzukii 8 -kampeter 8 -supertramp 8 -pastafarians 8 -gourmets 8 -ferenci 8 -tudou 8 -1,720 8 -sanni 8 -burkitts 8 -sargara 8 -alexandrovna 8 -penrod 8 -lavo 8 -lavy 8 -pastured 8 -1,578 8 -hankie 8 -vivaaerobus 8 -hawkshaw 8 -abu-dhabi 8 -fuel-economy 8 -bejko 8 -blundeston 8 -huangshan 8 -#savethesurprise 8 -deradicalisation 8 -trilingual 8 -tengku 8 -cod-style 8 -re-inserted 8 -two-dose 8 -stumpery 8 -placidly 8 -franco-spanish 8 -abersychan 8 -peedell 8 -stop-and-frisks 8 -#pistorians 8 -sammamish 8 -obloquy 8 -tassler 8 -winningly 8 -a630 8 -innkeepers 8 -stromatolites 8 -snapchat-style 8 -gransden 8 -03000 8 -underdiagnosed 8 -labus 8 -plantagenets 8 -petherton 8 -seperately 8 -ieb 8 -ie6 8 -steigman 8 -elzey 8 -eichenseer 8 -a340-300 8 -filipino-american 8 -zooids 8 -sensecam 8 -moochie 8 -huguerie 8 -nielssen 8 -godfroid 8 -oscillates 8 -galizio 8 -harjani 8 -12/14 8 -capato 8 -catsup 8 -pinguin 8 -parachini 8 -charcot 8 -easygroup 8 -minchew 8 -611,000 8 -115m 8 -postins 8 -non-coding 8 -homogentisic 8 -asgharzadeh 8 -54-nation 8 -lumberjills 8 -eye-contact 8 -uk-made 8 -blacknell 8 -karsenty 8 -eustis 8 -6:48 8 -u.s.-yemeni 8 -stirrers 8 -oldwadge 8 -delapidated 8 -salvio 8 -sunglint 8 -freeborough 8 -diamanté 8 -saenger 8 -13.00 8 -hollon 8 -shyann 8 -stamens 8 -68-31 8 -wizzard 8 -varmus 8 -mini-buses 8 -estebanez 8 -karuna 8 -alicea-antonetti 8 -gay-straight 8 -2,014 8 -2,015 8 -tunnacliffe 8 -rowdiest 8 -most-tweeted 8 -buon 8 -homans 8 -ergin 8 -betavivo 8 -schaeffel 8 -sarayburnu 8 -lack-lustre 8 -jump-yip 8 -zamen 8 -3,555 8 -3,550 8 -haworth-booth 8 -maumoon 8 -rending 8 -abott 8 -ulas 8 -deftones 8 -dacorum 8 -microfibres 8 -gygax 8 -reutten 8 -ashburnham 8 -41,800 8 -447,000 8 -winnowing 8 -baby-themed 8 -ovchinnikov 8 -mcgarr 8 -14-yard 8 -harasimchuk 8 -marie-josée 8 -velasques 8 -cock-eyed 8 -cassara 8 -barach 8 -mikva 8 -ooo 8 -launius 8 -#congratssavannah 8 -ployer 8 -scott-whale 8 -yeliani 8 -dipple-johnstone 8 -737-200 8 -guillermina 8 -farzan 8 -sexta 8 -jiangdu 8 -tianhe-1a 8 -varki 8 -djurdjura 8 -sts-135 8 -gold-rush 8 -busying 8 -raffiki 8 -spychella 8 -raasch 8 -balku 8 -161,653,000 8 -rajchel 8 -timofte 8 -sesil 8 -wartburg 8 -gaal-acticos 8 -paradoxum 8 -7-series 8 -dogz 8 -41-10 8 -ugarkovic 8 -lightning-speed 8 -akama 8 -cattelan 8 -arona 8 -grebe 8 -wzzm13 8 -ogi 8 -ogc 8 -dawgs 8 -party-ready 8 -deadliest-ever 8 -coldiron 8 -„ 8 -fedden 8 -pay-back 8 -aalesund 8 -alkaptonuria 8 -env 8 -fadl 8 -trusler 8 -gassner 8 -vvv 8 -broughall 8 -fastfox 8 -cepelova 8 -hemifacial 8 -shanine 8 -lopping 8 -bébé 8 -poeple 8 -non-sustainable 8 -cozzolino 8 -catoe 8 -penedo 8 -unshorn 8 -tele2 8 -hadassa 8 -raedler 8 -melki 8 -tomnod.com 8 -twindex 8 -vasilaris 8 -1,100-square-foot 8 -63-second 8 -biutiful 8 -swashbucklers 8 -29-minute 8 -anti-shia 8 -biographer-turned-mistress 8 -forcings 8 -heredia 8 -hapton 8 -facebooks 8 -tautai 8 -tautau 8 -unpublicized 8 -medaled 8 -ecuadoreans 8 -khumbanyiwa 8 -novellas 8 -neuroeconomics 8 -al-yamama 8 -schiebe 8 -conspires 8 -re-align 8 -mangove 8 -untrammeled 8 -finistere 8 -vianini 8 -difluoroethane 8 -bregancon 8 -norsk 8 -subtracts 8 -bacs 8 -intoximeter 8 -inflected 8 -field-tested 8 -0207 938 6683 8 -avio 8 -charron 8 -185cm 8 -gergova 8 -carino 8 -+56 8 -cantle 8 -ai-wu 8 -feusahrens 8 -sonograms 8 -1,192 8 -travelwest 8 -irregular-shaped 8 -anghiari 8 -ue 8 -late-round 8 -monkburn 8 -raco 8 
-click-through 8 -320gb 8 -abebe 8 -east-based 8 -flick-knife 8 -kinglsey 8 -ageel 8 -fürth 8 -lebeouf 8 -wrong-footing 8 -uncasville 8 -galaticos 8 -498.8 8 -toyshop 8 -radomir 8 -caveney 8 -swordplay 8 -sans-serif 8 -ginkto 8 -rejlander 8 -herb-1 8 -hilman-payne 8 -mechem 8 -gnarls 8 -saddlery 8 -saddlers 8 -ranadive 8 -hirak 8 -daniah 8 -frotox 8 -rounded-out 8 -18-12 8 -18-17 8 -first-come-first-served 8 -cerini 8 -abdukadir 8 -neutralises 8 -cheddi 8 -truglia 8 -elvstrom 8 -vasopressin 8 -sabbaticals 8 -roussillon 8 -vlahos 8 -kunf 8 -debusk 8 -eisteddfod 8 -dysport 8 -pocari 8 -taklha 8 -flareup 8 -katinka 8 -wih 8 -yalda 8 -kasuri 8 -elyas 8 -jorga 8 -piggybank 8 -chidren 8 -cyclodeo 8 -amuse-bouche 8 -salmaniya 8 -thick-framed 8 -cleghorn 8 -gelderland 8 -newbern 8 -scheppy 8 -piebalgs 8 -energiser 8 -11.24 8 -auchtavan 8 -53rd-minute 8 -malekpour 8 -smwa 8 -ankier 8 -gonave 8 -raskin 8 -ork 8 -iacobelli 8 -masiluleke 8 -anatomedia 8 -84,500 8 -meisl 8 -tv-like 8 -112vzy 8 -non-pregnant 8 -woodle 8 -assadullah 8 -holston 8 -melling-firth 8 -coia 8 -oniangue 8 -upnorthlive 8 -193million 8 -Ógra 8 -2001/02 8 -paquet 8 -lutes 8 -al-araji 8 -molokini 8 -mroz 8 -al-murisi 8 -65per 8 -cinzia 8 -benjelloun 8 -lexapro 8 -482,000 8 -meadowood 8 -kingshurst 8 -rashaun 8 -eight-over-par 8 -haffey 8 -precolonial 8 -brevis 8 -11-car 8 -rosenau 8 -ballintoy 8 -fingal 8 -tamaira 8 -neurofeedback 8 -56st 8 -659,000 8 -lrad 8 -hatsko 8 -tancos 8 -tgm 8 -kramers 8 -abergil 8 -highest-income 8 -kthv 8 -hingson 8 -prescoed 8 -gull-wing 8 -osteonecrosis 8 -rearranges 8 -osman-rani 8 -month.the 8 -quaterback 8 -anti-kiev 8 -bomassa 8 -civetone 8 -aonb 8 -162-year-old 8 -standpoints 8 -kosner 8 -mizune 8 -76-minute 8 -quadrupedal 8 -aerotek 8 -b.o. 8 -bejjani 8 -halsingland 8 -self-images 8 -cesano 8 -volvic 8 -frankstown 8 -then-premier 8 -collery 8 -nafferton 8 -production-ready 8 -kish-donovan 8 -ragaz 8 -popeyes 8 -still-unsolved 8 -swing-state 8 -@susannareid100 8 -hirth 8 -remands 8 -irking 8 -jarmoune 8 -ouwerkerk 8 -yagan 8 -badry 8 -padgate 8 -susyn 8 -googoo 8 -ducheneaux 8 -cader 8 -ripped-up 8 -hate-tracking 8 -stammberger 8 -48-yard 8 -human-animal 8 -rospotrebnadzor 8 -karwacki 8 -2,262 8 -kiyla 8 -panayiotis 8 -dendrochronology 8 -beacuse 8 -vegal 8 -non-business 8 -dacher 8 -sarah-elizabeth 8 -a.e. 
8 -disabuse 8 -chadi 8 -220.8 8 -khayatzadeh 8 -janeiro-based 8 -cryptology 8 -osmanthus 8 -carnarvons 8 -hey-maestre 8 -broke-up 8 -moftah 8 -orsa 8 -al-wahishi 8 -slone 8 -zore 8 -nerkh 8 -aubers 8 -5-page 8 -lix 8 -blatch 8 -a580 8 -waterwall 8 -kcl 8 -kcu 8 -priskin 8 -cyclocable 8 -strongwoman 8 -stansburge 8 -rinjani 8 -gemany 8 -calk 8 -caln 8 -thank-yous 8 -doue 8 -mahindra 8 -prabhakar 8 -livre 8 -korica 8 -engberg 8 -woolterton 8 -623,000 8 -113,019,926 8 -macrosomia 8 -platforming 8 -beblawi 8 -ice-locked 8 -all-embracing 8 -goateed 8 -mominul 8 -iron-hulled 8 -alvo 8 -big-scale 8 -voluntourism 8 -hrossey 8 -dataloft 8 -gearen 8 -astors 8 -dunigan 8 -hashtagging 8 -86mins 8 -one-to-many 8 -ironmongers 8 -kunsthistorisches 8 -84-inch 8 -kudirka 8 -city_my 8 -condensates 8 -three-count 8 -osbaldeston 8 -ibssa 8 -swett 8 -necip 8 -ovik 8 -counterargument 8 -01483 8 -fun-run 8 -#pussyriot 8 -duru 8 -daleo 8 -lichters 8 -saint-exupery 8 -wytheville 8 -ailed 8 -blimline 8 -taste-buds 8 -marigot 8 -noise-canceling 8 -evolutions 8 -brattleby 8 -1,179-mile 8 -institutionalizing 8 -bonnici 8 -0600 8 -judes 8 -hinebaugh 8 -activity-tracking 8 -farihov 8 -heintz 8 -trainline 8 -guyer 8 -cliff-hanger 8 -formalin 8 -t-sneachda 8 -fili-krushel 8 -rutgard 8 -rabotte 8 -cliphit 8 -igm 8 -whole-genome 8 -fenteany 8 -whiteland 8 -animal-protection 8 -d3s 8 -rbs-natwest 8 -hoft 8 -caucusing 8 -bertoni 8 -305.3 8 -phone-free 8 -gialamas 8 -172mph 8 -quandts 8 -bagnato 8 -grandel 8 -skulked 8 -self-rescue 8 -docofossor 8 -valdez-villarreal 8 -pickels 8 -six-foot-one 8 -hyrum 8 -llanas 8 -kibuye 8 -inclusively 8 -off-the 8 -ruckelshaus 8 -hulin 8 -bitcoiniacs 8 -two-by-two 8 -motijheel 8 -kivlin 8 -kashmere 8 -parthum 8 -bolsenbroek 8 -sherlin 8 -a-student 8 -ulrey 8 -drydock 8 -ovale 8 -columbarium 8 -magnifique 8 -scantling 8 -lohengrin 8 -abou-atta 8 -epistles 8 -karrina 8 -disproportionality 8 -fishfingers 8 -kushlefsky 8 -tiegs 8 -talkband 8 -isbar 8 -wadkins 8 -isidoro 8 -capsis 8 -raters 8 -saradhi 8 -al-khaibari 8 -2,030 8 -blanda 8 -93.55 8 -kaioi 8 -jinja 8 -172,200 8 -nations-brokered 8 -loxas 8 -ikegwuonu 8 -raftree 8 -meowed 8 -be-plumed 8 -jadwiga 8 -gillaspy 8 -zetian 8 -german-jewish 8 -lalara 8 -beleive 8 -arab-owned 8 -mohammadreza 8 -coupette 8 -harner 8 -washington-williams 8 -arida 8 -aimar 8 -jhendelyn 8 -emomali 8 -rushyford 8 -ebola-afflicted 8 -adjudications 8 -f.g. 
8 -amh 8 -amb 8 -elabdellaoui 8 -tagou 8 -eckford 8 -maras 8 -corollas 8 -haratsis 8 -dalein 8 -multi-part 8 -tauran 8 -footplate 8 -todorov 8 -mahamud 8 -ft-1 8 -soussi 8 -cesium-134 8 -crillon 8 -emdur 8 -#cnnwomen 8 -hemangiosarcoma 8 -zipcodes 8 -bashford 8 -ojogel 8 -porojan 8 -zandvoort 8 -86.6 8 -wing-mounted 8 -salpetriere 8 -bucknall 8 -ma-9 8 -ma-8 8 -sarcoptes 8 -re-found 8 -salukvadze 8 -jefferts 8 -word-perfect 8 -meneghini 8 -utrera 8 -paintbox 8 -midnite 8 -glucocorticoids 8 -anti-diabetic 8 -kuprewicz 8 -lede 8 -foglesong 8 -ferreted 8 -warhola 8 -yixian 8 -18-foot-long 8 -2005-07 8 -ramel 8 -itax 8 -barroom 8 -94th-minute 8 -1930s-style 8 -fysh 8 -fangshan 8 -most-downloaded 8 -tillig 8 -well-ordered 8 -clear-cutting 8 -shchastya 8 -oxidisation 8 -94p 8 -takeda 8 -taneff 8 -cotela 8 -oplc 8 -play-ey 8 -goossens 8 -120-acre 8 -anti-iranian 8 -rudenstein 8 -counterespionage 8 -baii 8 -pragyan 8 -houphouet-boigny 8 -non-conformity 8 -antipersonnel 8 -now-canceled 8 -8tracks 8 -meterologist 8 -farmborough 8 -230lb 8 -half-vulcan 8 -gaudet 8 -miracle-gro 8 -kleer 8 -acording 8 -long-deceased 8 -rewatch 8 -routis 8 -x-keyscore 8 -cockeyed 8 -weaverling 8 -makovsky 8 -reynaud 8 -ahmedi 8 -bone-jarring 8 -fandango.com 8 -guldur 8 -moreto 8 -fraim 8 -techsense 8 -egg-sized 8 -karlin 8 -wordsley 8 -clean-lined 8 -unretired 8 -hargreave 8 -larges 8 -bodypaint 8 -liquid-crystal 8 -6th-century 8 -ellum 8 -tostes 8 -sheinbein 8 -27-14 8 -250,001 8 -tzatziki 8 -marcolini 8 -mahdee 8 -neaves 8 -mortvedt 8 -46,800 8 -allhiphop.com 8 -segestria 8 -consistencies 8 -multiple-vehicle 8 -kassa 8 -embonpoint 8 -caesarstone 8 -pro-privacy 8 -dowley 8 -bertellotti 8 -basteir 8 -wistfulness 8 -english-speakers 8 -run-flat 8 -hard-bitten 8 -wind-power 8 -shoulberg 8 -biodome 8 -raam 8 -raap 8 -derlei 8 -unexploited 8 -daigh 8 -cantonment 8 -powerfully-built 8 -one-baby 8 -boiler-room 8 -mini-mes 8 -blowy 8 -stratstone 8 -early-20s 8 -dentaku 8 -orvis 8 -pin-hole 8 -mechanicsville 8 -nellore 8 -unpermitted 8 -hba1c 8 -extraverted 8 -marva 8 -lograsso 8 -souther 8 -aerobraking 8 -malleability 8 -westies 8 -d9 8 -df 8 -goodsell 8 -stapenhill 8 -fiering 8 -tourettes 8 -detjen 8 -matonis 8 -strip-tease 8 -murray-sunset 8 -kopp-etchells 8 -tammaso 8 -dubiel 8 -triamcinolone 8 -nces 8 -fully-automatic 8 -mansudae 8 -barbells 8 -kulr 8 -morphine-based 8 -zineb 8 -duquet 8 -a'zhari 8 -perani 8 -15-40 8 -kernick 8 -kalandrani 8 -woh 8 -iorfa 8 -epicentres 8 -gigantomastia 8 -locane 8 -73.95 8 -sardonically 8 -miwa 8 -akerlof 8 -ampullae 8 -flattop 8 -well-turned 8 -kottke 8 -nivaria 8 -micro-loans 8 -gigya 8 -breguet 8 -roselmack 8 -wire-to-wire 8 -79.4 8 -chek 8 -lording 8 -gwei 8 -autothysis128t 8 -incentivises 8 -embryologist 8 -pwtt 8 -estridge 8 -s/n 8 -bertodano 8 -gornell 8 -caminos 8 -converges 8 -signed-up 8 -degress 8 -furkids 8 -dold 8 -1,600-year-old 8 -open-faced 8 -sheikh-hussein 8 -lo-fi 8 -geekery 8 -towe 8 -senhora 8 -double-page 8 -calavan 8 -youthification 8 -tory-controlled 8 -ottumwa 8 -reale 8 -re-order 8 -71-year 8 -graven 8 -antiquorum 8 -artezian 8 --29 8 -12,000-strong 8 -siong 8 -osmonds 8 -shomari 8 -unmemorable 8 -clayborne 8 -us-dakota 8 -basketry 8 -retro-reflective 8 -whoah 8 -aeronautique 8 -nedrow 8 -hamilton-deeley 8 -kick-back 8 -shota 8 -sloganeering 8 -hellstern 8 -banquette 8 -per-gallon 8 -consoler-in-chief 8 -leuellyn 8 -non-alcohol 8 -tek 8 -kennemer 8 -party-themed 8 -scheiber 8 -mountain-climbing 8 -28-stone 8 -didymos 8 -aziri 8 -gamgee 8 -oyala 8 -population-based 
8 -superlicence 8 -burbery 8 -schops 8 -call-back 8 -maltbie 8 -smith-magenis 8 -quynh 8 -hughart 8 -fasher 8 -yanamandra-fisher 8 -felicitas 8 -dead-ends 8 -fidelgoldsh 8 -uber-cool 8 -289,000 8 -fatshion 8 -kroeger 8 -e-tailers 8 -stepashin 8 -slac 8 -byzantium 8 -sycophant 8 -keating-hutchinson 8 -maninder 8 -sheela 8 -lagamma 8 -kiteboarders 8 -5.54 8 -commissar 8 -per-hour 8 -bitc 8 -fathy 8 -belin 8 -conditon 8 -nationalmannschaft 8 -0.035 8 -bovill 8 -56mins 8 -ridpath 8 -tf 8 -gem-encrusted 8 -fast-forwarding 8 -dulé 8 -zacks 8 -galleons 8 -469,000 8 -oddfellows 8 -shatter-proof 8 -starrie 8 -four-four-two 8 -4,050 8 -kreher 8 -candomblé 8 -domestiques 8 -travie 8 -perreaux-forest 8 -zambikes 8 -laroze 8 -björnsson 8 -isere 8 -bomb-hit 8 -fingerboard 8 -helliar 8 -touquet-paris-plage 8 -gingis 8 -hewed 8 -suryana 8 -water-front 8 -al-mansoori 8 -lod 8 -work-place 8 -bülent 8 -holidaysplease 8 -streetlamp 8 -krtv 8 -hispanic-americans 8 -nosal 8 -azithromycin 8 -kac 8 -plimoth 8 -conisbee 8 -kubitschek 8 -souvenaid 8 -skingle 8 -salpigidis 8 -user-created 8 -beleives 8 -obbink 8 -leelee 8 -sanja 8 -dyll 8 -jujuy 8 -klebart 8 -raveena 8 -92-years-old 8 -38-17 8 -educationalist 8 -pencasts 8 -muzhange 8 -leucochloridium 8 -socialsklz 8 -urato 8 -double-door 8 -clairvoyance 8 -hydro-power 8 -burgstaller 8 -boisjoly 8 -bodur 8 -stealthier 8 -romli 8 -4-vesta 8 -visting 8 -trype 8 -melocco 8 -desertec 8 -derrickson 8 -mably 8 -co-create 8 -mixed-ability 8 -epi-pen 8 -triponey 8 -leboucher 8 -finebaum 8 -30b 8 -3r 8 -macmullett 8 -hondros 8 -geezers 8 -brundidge 8 -gimlet 8 -ethane-beta-sultam 8 -ringim 8 -www.nhs.uk 8 -406,000 8 -1,901 8 -cybersmile 8 -ipsen 8 -yacare 8 -günter 8 -howdon 8 -mbulaeni 8 -figure-fixing 8 -makhmur 8 -bromances 8 -pcworld 8 -milisavljevic 8 -ginther 8 -harpsund 8 -gillum 8 -club-style 8 -by-passers 8 -3,465 8 -non-recoverable 8 -dimmers 8 -pether 8 -subpopulations 8 -hb-sia 8 -zied 8 -pleam 8 -bandeirantes 8 -desjuan 8 -ripeness 8 -roycroft 8 -compositum 8 -coffin-siris 8 -a.d 8 -paquette 8 -hinda 8 -claramunt 8 -enchinton 8 -hansmeyer 8 -kaunisto 8 -overvaluation 8 -kessler-sanders 8 --290 8 -tedford 8 -latinobarometro 8 -daynard 8 -ksanfomaliti 8 -sidefooting 8 -jarrett-bryan 8 -techno-glasses 8 -ex-supermodel 8 -00030/0150 8 -docampo 8 -bigger-than-expected 8 -anti-money-laundering 8 -walrond 8 -822,198 8 -picatinny 8 -denizen 8 -rockman 8 -walkinshaw 8 -margulis-ohuma 8 -anti-litter 8 -more-than 8 -cerveny 8 -angara 8 -mcilvenna 8 -kuantan 8 -uel 8 -foot-stomping 8 -lurhmann 8 -decentralizing 8 -hsaio-qua 8 -bambach 8 -robie 8 -tortoni 8 -demystified 8 -savse 8 -cush 8 -ahndorils 8 -dogsledding 8 -pan-am 8 -dunant 8 -stanislavsky 8 -juce 8 -shekhovtsova 8 -baranos 8 -cavey 8 -dsei 8 -carawan 8 -10-foot-deep 8 -florange 8 -lâm 8 -breast-feeds 8 -shoemaker-levy 8 -châtelperronian 8 -biancoshock 8 -flytippers 8 -arpey 8 -overwatch 8 -pro-euthanasia 8 -@nytimes 8 -sestito 8 -wavy.com 8 -luss 8 -chronican 8 -cerveza 8 -re-ordering 8 -nial 8 -vihlen 8 -45-hour 8 -cholevas 8 -lower-speed 8 -asperas 8 -gravitylight 8 -car-ride 8 -2,638 8 -haymond 8 -sharoff 8 -#twittermillion 8 -fagundez 8 -rossich 8 -shantou 8 -prophesies 8 -jll 8 -state-of-emergency 8 -gallazzi 8 -blythburgh 8 -1975-79 8 -shirebrook 8 -andrena 8 -ex-nazis 8 -establishment-minded 8 -bisotel 8 -grp78 8 -66.8 8 -khoder 8 -bunked 8 -sesma 8 -higher-than-usual 8 -pro-islamist 8 -u.s.-japanese 8 -sharp-elbowed 8 -putdown 8 -bunche 8 -fare-dodging 8 -white-dominated 8 -berosh 8 -velocipede 8 -fuerte 8 
-breyette 8 -megaloptera 8 -martyak 8 -co-efficient 8 -institutionalisation 8 -under-perform 8 -lonestar 8 -breast-ironing 8 -klaybor 8 -tarmacs 8 -mispronunciation 8 -37-inch 8 -co-prosecutors 8 -sheetal 8 -nelton 8 -25-0 8 -ranastianis 8 -1654 8 -agnessa 8 -vrc 8 -2020/21 8 -desilva 8 -juslin 8 -whymper 8 -zagaria 8 -mellingsaeter 8 -unintelligibly 8 -mylifeelsewhere 8 -tsakhia 8 -unphiltered 8 -kaetsu 8 -fulke 8 -maydan 8 -salustiano 8 -kleins 8 -publio 8 -schoppe-sullivan 8 -franchise-record 8 -hachigo 8 -weatherwax 8 -v.c. 8 -melor 8 -forno 8 -ceiba 8 -pecorino 8 -ombudsperson 8 -preslee 8 -ram-raided 8 -styleite 8 -museum-goers 8 -kabuye 8 -muffles 8 -abuiso 8 -ratha 8 -8:54 8 -8:51 8 -dappen 8 -shorefront 8 -sixpenny 8 -vespignani 8 -nalut 8 -red-painted 8 -32km/h 8 -compressors 8 -non-london 8 -gouaux 8 -stofile 8 -olgie 8 -noumandiez 8 -chevrolets 8 -land-grab 8 -kilwillie 8 -lengthways 8 -sharm-el-sheikh 8 -erhadt 8 -brehme 8 -degustation 8 -ebts 8 -#highheels 8 -15-person 8 -glenna 8 -hanein 8 -19.19 8 -leifsson 8 -krauthamer 8 -plati 8 -+10 8 -poppaea 8 -preempting 8 -bossie 8 -bio-dome 8 -cullens 8 -cartrail 8 -18-match 8 -brayley 8 -preikestolen 8 -ouatarra 8 -reanimate 8 -qbic 8 -silin 8 -lowgate 8 -collingdale 8 -faygate 8 -polster 8 -sollitt 8 -pajhwok 8 -zaitsev 8 -poundpub 8 -mcalees 8 -poloski 8 -12.33 8 -half-open 8 -martise 8 -mildness 8 -hoansi 8 -unforseen 8 -kizhi 8 -1,486 8 -1,483 8 -square-meter 8 -boyet 8 -rock-hewn 8 -vinichenko 8 -heat-treated 8 -58m 8 -lynott 8 -lexi-rose 8 -magen 8 -short-game 8 -abou-el-ella 8 -boudewijn 8 -matless 8 -ryabkova 8 -100-million 8 -czech-made 8 -val-de-grace 8 -homefree 8 -easy-to-make 8 -yammering 8 -khitab 8 -bartlesville 8 -frixion 8 -wazuma 8 -kaffee 8 -kfdm 8 -utahns 8 -witticisms 8 -fung-wong 8 -hosseinkhani 8 -lacondeguy 8 -syncardia 8 -lilywhites 8 -cal-cruz 8 -boche 8 -casserly 8 -habeeb 8 -genri 8 -angelic-looking 8 -spurway 8 -alkhaled 8 -apsa 8 -metaphoric 8 -hundred-year 8 -3-hour 8 -menger 8 -abdellaoue 8 -pyongang 8 -joanlia 8 -genese 8 -quad-city 8 -lianyungang 8 -boxofficeguru.com 8 -iveagh 8 -367,500 8 -ifixit.com 8 -officeworks 8 -gian-luc 8 -bridezillas 8 -antman 8 -troedson 8 -verrone 8 -whacked-out 8 -26-piece 8 -borge 8 -perenchio 8 -chetty 8 -lifewater 8 -galamaz 8 --43 8 -entrenchment 8 -windows-powered 8 -13mins 8 -chactun 8 -definable 8 -tidmarsh 8 -satires 8 -close-fitting 8 -hawkstone 8 -@boringmilner 8 -david-wilp 8 -islam-zulfiqar 8 -anissimova 8 -colbach 8 -used-game 8 -underwoods 8 -lichin 8 -glatzer 8 -patthar 8 -nørrebro 8 -bandsmen 8 -chà 8 -stamell 8 -unstated 8 -unimaginatively 8 -37mins 8 -dormy 8 -five-floor 8 -nwvaa 8 -distributive 8 -benedick 8 -dierdre 8 -quinoric 8 -schork 8 -320-pound 8 -hastags 8 -campagne 8 -okail 8 -luxi 8 -ixs 8 -walikale 8 -ft.com 8 -innuendo-filled 8 -whiffy 8 -shameela 8 -softshell 8 -reevey 8 -10-foot-tall 8 -filipino-born 8 -3:56 8 -high-court 8 -gorsegner 8 -5.79 8 -low-earning 8 -naeba 8 -snogged 8 -nescafé 8 -worrick 8 -pendergraft 8 -perhentian 8 -pentagrams 8 -winiarcyzk 8 -barchick 8 -apple-samsung 8 -recordsetter.com 8 -mechanicals 8 -dupond-moretti 8 -wadha 8 -chalet-style 8 -lybia 8 -ivaylo 8 -aqidi 8 -knowles-carter 8 -topmost 8 -corringham 8 -tweaker 8 -non-news 8 -well-killing 8 -ukik 8 -chapping 8 -75-100 8 -baseboards 8 -bollerman 8 -sargassum 8 -ring-bearer 8 -outjumps 8 -knole 8 -lace-trimmed 8 -parida 8 -mangapinna 8 -ex-directory 8 -urban-rural 8 -letterheads 8 -worell 8 -smugmug 8 -11-fold 8 -senyera 8 -19,000-a-week 8 -double-barrel 8 
-hali 8 -neons 8 -saute 8 -dincuff 8 -tomohon 8 -censorious 8 -perele 8 -mattylawless 8 -guileless 8 -sidearms 8 -fiddleback 8 -firetrap 8 -short-story 8 -two-and-a-half-minute 8 -lachele 8 -1,781 8 -1,789 8 -meratol 8 -chenpeng 8 -sanda 8 -valtz 8 -gradings 8 -hichame 8 -adham 8 -hayes-danson 8 -beta-blocker 8 -tlali 8 -lapina 8 -packet-switching 8 -benguela 8 -snyders 8 -king-hit 8 -ethers 8 -l.k.bennett 8 -cristano 8 -strategically-important 8 -meusburger 8 -ferriss 8 -roelof 8 -spin-out 8 -high-angle 8 -patlove 8 -dettor 8 -fazliddin 8 -6,560 8 -crop-monitoring 8 -rahsaan 8 -prach 8 -chaldeans 8 -high-achievers 8 -sesto 8 -al-ayoubi 8 -wiltsie 8 -vassiliki 8 -salish 8 -borchert 8 -borchers 8 -1,063 8 -baevsky 8 -surfaid 8 -hagaman-clark 8 -terrestrials 8 -breitmayer 8 -9100 8 -stelmakh 8 -frelinghuysen 8 -barlerin 8 -hanwei 8 -lashline 8 -solemn-faced 8 -vh-1 8 -microwavable 8 -75.4 8 -well-schooled 8 -connemara 8 -flytrap 8 -201.3 8 -nazan 8 -camese 8 -autobot 8 -cankle 8 -quoll 8 -salivated 8 -armaan 8 -valkenburg 8 -malarone 8 -reposts 8 -tjon 8 -post-benghazi 8 -u12 8 -low-opportunity 8 -atack 8 -maale 8 -wego 8 -karger 8 -barcelona-born 8 -4,370 8 -combadges 8 -kotnik 8 -leading-man 8 -once-divided 8 -4.67 8 -4.62 8 -boxercise 8 -35-10 8 -mystify 8 -kalkaska 8 -tucker-smith 8 -vanderwesthuizen 8 -super-confident 8 -clown-like 8 -salbi 8 -arrecife 8 -runneth 8 -kewane 8 -pbac 8 -baseball-size 8 -armorsource 8 -mfb 8 -modellers 8 -childrearing 8 -anticompetitive 8 -21-stone 8 -gumshoe 8 -mauffrey 8 -solhjell 8 -poundage 8 -branwen 8 -badabing 8 -ithe 8 -anic 8 -hollas 8 -alvaston 8 -hudson-lapore 8 -ostersund 8 -ventral 8 -duchies 8 -5,000-meter 8 -jankowska 8 -ovrebo 8 -miltiadis 8 -well-backed 8 -thiede 8 -blizzardmobile 8 -attewell 8 -camdal 8 -meuli 8 -slow-to-evolve 8 -fresa 8 -haws 8 -mashtal 8 -ottoway 8 -brick-by-brick 8 -buckden 8 -changping 8 -dumbing-down 8 -cycleways 8 -skivenes 8 -boerewors 8 -hamied 8 -agriflu 8 -5-15 8 -baikuni 8 -wardwell 8 -keasey 8 -barnetts 8 -perini 8 -kutai 8 -2,000-degree 8 -mass-scale 8 -florham 8 -cabandie 8 -indego 8 -167,800 8 -sexminster 8 -mischaracterizes 8 -mouse-box 8 -indri 8 -massachusetts-amherst 8 -liplock 8 -varec 8 -terms-of-service 8 -dacres 8 -food-style 8 -cloud-seeding 8 -17-judge 8 -burkini 8 -scotsmen 8 -33-foot 8 -jayvee 8 -kaseman 8 -pellow 8 -paddle-boarder 8 -similes 8 -kneeler 8 -hair-stylist 8 -minn 8 -silbernagel 8 -knuckle-duster 8 -microfluidics 8 -piraino 8 -1/12 8 -livening 8 -mdundo 8 -waste-to-energy 8 -ask.com 8 -kokhanok 8 -memorializes 8 -colins 8 -jaki 8 -gloomiest 8 -proffesor 8 -eido 8 -millender 8 -life-bearing 8 -gurdwaras 8 -snapchat-like 8 -genuity 8 -alridge 8 -northey 8 -rivalland 8 -tibble 8 -crystal-embellished 8 -moscatel 8 -hajji 8 -vivos 8 -limón 8 -re-appoint 8 -ubiribo 8 -jamuna 8 -yaros 8 -bactrack 8 -tokophobia 8 -zweden 8 -finlow 8 -1,400-student 8 -smithwick 8 -bank-based 8 -rushie 8 -rasbridge 8 -hollywoodland 8 -kiddieland 8 -amarah 8 -shpilenok 8 -berish 8 -recognitions 8 -nectarine 8 -heinlein 8 -4970 8 -cadete 8 -anti-peace 8 -poverty-ridden 8 -hia 8 -minou 8 -lululeika 8 -incentivizing 8 -yume-hotaru 8 -cdo 8 -upper-tier 8 -tavizon 8 -sugarhouse 8 -stepover 8 -boddington 8 -resistence 8 -95ft 8 -billionairess 8 -pitlochry 8 -bdt 8 -ascencia 8 -otton 8 -much-photographed 8 -leibovitch 8 -1,824 8 -thundersley 8 -5,432 8 -moorside 8 -hardenne 8 -shot-by-shot 8 -setara 8 -seldom-seen 8 -ksdk.com 8 -biffy 8 -plumped-up 8 -appearance-altering 8 -1544 8 -1541 8 -foxsports.com 8 
-punicalagin 8 -angiograms 8 -led-lit 8 -7,500-a-month 8 -thickbroom 8 -split-up 8 -disqualifier 8 -timy 8 -didio 8 -norlanders 8 -contogouris 8 -bandicoot 8 -#predatorinstinct 8 -cardless 8 -skogen 8 -shinwary 8 -consaul 8 -kennedy-style 8 -devolves 8 -zakwan 8 -pagán 8 -ahwahnee 8 -pissed-off 8 -micro-finance 8 -brevent 8 -wanderwalle 8 -in-custody 8 -primly 8 -airness 8 -layer-by-layer 8 -rdf 8 -nirwan 8 -lungworm 8 -jadaoun 8 -hanash 8 -bursac 8 -musicares 8 -weare 8 -tramontin 8 -cotoneaster 8 -83.2 8 -androscoggin 8 -pernille 8 -omarr 8 -gedu 8 -raeth 8 -petpaint 8 -gliksten 8 -uncrossed 8 -volcom 8 -afrezza 8 -federally-recognized 8 -santita 8 -7:14 8 -7:17 8 -beechmont 8 -illemassene 8 -tevzadze 8 -ruffini 8 -mensline 8 -hainsey 8 -wci 8 -toots 8 -parndon 8 -shilin 8 -pollen.com 8 -seventh-tier 8 -oder 8 -bellshill 8 -moria 8 -web-site 8 -shamefaced 8 -colinton 8 -partaken 8 -evelynn 8 -dodsworth 8 -jozianne 8 -recombined 8 -after-action 8 -126lbs 8 -wind-assisted 8 -perenyi 8 -portmagee 8 -5,100-a-night 8 -albina 8 -game-winner 8 -seheriya 8 -56-44 8 -filatova 8 -passport-holders 8 -30-foot-deep 8 -frascotti 8 -1,500-acre 8 -kosovar 8 -open-records 8 -30-fold 8 -gamburtsev 8 -bustice 8 -write-downs 8 -dereon 8 -zhuzhou 8 -periwinkle 8 -pinboards 8 -wolfish 8 -peterbrough 8 -dorcus 8 -conason 8 -rezler 8 -pratts 8 -13,260 8 -tosi 8 -baojun 8 -8billion-a-year 8 -bullis 8 -maged 8 -reliastar 8 -bierzo 8 -bioweapons 8 -paychex 8 --62 8 --63 8 -sartain-clarke 8 -pennsauken 8 -aksai 8 -87.2 8 -newly-unearthed 8 -maghen 8 -saal 8 -flow-rate 8 -1.5-inches 8 -furbys 8 -mini-city 8 -hatful 8 -pelagicus 8 -martirosyan 8 -britner 8 -benylin 8 -masta 8 -marichalar 8 -siskovic 8 -zhaozhong 8 -china-made 8 -serrao 8 -news-times 8 -haskells 8 -kingsnake 8 -heliostats 8 -fabrica 8 -prineg 8 -temur 8 -cucina 8 -kwa-zulu 8 -makdessi 8 -farmersonly.com 8 -hackel 8 -kili 8 -aqueous 8 -laupahoehoe 8 -haydon-jones 8 -11,380 8 -non-academic 8 -cchf 8 -schruers 8 -sanameen 8 -kafle 8 -queensland-based 8 -zzz 8 -quinzhee 8 -pre-arrange 8 -ex-corrie 8 -corddry 8 -obeisance 8 -208th 8 -26-10 8 -al-lakiss 8 -isave 8 -recolonize 8 -11a 8 -theos 8 -sandwhich 8 -five-nation 8 -less-is-more 8 -natika 8 -full-spectrum 8 -debrosse 8 -ka-ching 8 -jail-issued 8 -gegolick 8 -sangare 8 -bilbray 8 -schoenefeld 8 -realtree 8 -kazahkstan 8 -raggi 8 -curlier 8 -37-acre 8 -minesweeping 8 -bioenergy 8 -sdcc 8 -mitsukoshi 8 -mccay 8 -redesdale 8 -hamm-niebruegge 8 -hand-powered 8 -teeny-tiny 8 -bristolians 8 -fluffer 8 -troglodyte 8 -granadilla 8 -gfs 8 -cours 8 -nigga 8 -burklow 8 -ccb 8 -paraic 8 -goitre 8 -celebrity-endorsed 8 -vieites 8 -cagen 8 -drawcards 8 -vanous 8 -blue-helmeted 8 -tschirschky 8 -appin 8 -enthusiasms 8 -clean-sheet 8 -zajic 8 -mireya 8 -barend 8 -rahmoun 8 -quntar 8 -carboniferous 8 -svenssons 8 -4-methylimidazole 8 -madinda 8 -freedom-of-speech 8 -jakubec 8 -r.e. 
8 -remender 8 -senft 8 -rental-car 8 -free-throw 8 -nullifies 8 -lake-front 8 -noem 8 -hougoumont 8 -mangareva 8 -caverswall 8 -lci 8 -sojourns 8 -pisculichi 8 -moorestown 8 -superboat 8 -daar 8 -lunday 8 -home-away-from-home 8 -aceves 8 -change-of-command 8 -casuals 8 -self-talk 8 -152,450 8 -accreditations 8 -claverie 8 -barathi 8 -once-in-a-century 8 -vandross 8 -wifely 8 -tarelkin 8 -purdey 8 -namie-machi 8 -widgery 8 -countersnipers 8 -ex-nurse 8 -backcombing 8 -juacelo 8 -beauford 8 -ura 8 -al-malik 8 -yuezi 8 -rapsi 8 -85,500 8 -unfired 8 -dap-kings 8 -red-tagged 8 -tikaram 8 -shakiba 8 -lockridge 8 -chasmosaurus 8 -59,300 8 -pseudomyxoma 8 -unexcavated 8 -am-dram 8 -smoothers 8 -gardnerville 8 -bainesy 8 -malinki 8 -dad-of-five 8 -pysden 8 -coquitlam 8 -wiat.com 8 -tega 8 -1,500-word 8 -edden 8 -busiello 8 -exfoliated 8 -ribotype 8 -diffa 8 -krolow 8 -hyun-soo 8 -breathers 8 -millenniums 8 -6.63 8 -whcih 8 -taesongsan 8 -yalong 8 -ausveg 8 -1,945 8 -fitness-related 8 -18ft-long 8 -silver-grey 8 -#sochi2014 8 -630m 8 -cicconetti 8 -disodium 8 -etch-a-sketch 8 -lumi 8 -swardt 8 -al-saeed 8 -mcillroy 8 -moraru 8 -loverin 8 -kongsberg 8 -webasto 8 -islamia 8 -romanowski 8 -ker-lindsay 8 -pool-stage 8 -wptz 8 -duncan-smith 8 -garino 8 -panettas 8 -9:08 8 -9:01 8 -bransfield 8 -pixy 8 -caterhams 8 -schertler 8 -most-powerful 8 -mcmenamy 8 -s.paulo 8 -bransgore 8 -stovepipe 8 -gumble 8 -fowzia 8 -caudrelier 8 -delk 8 -kartashov 8 -léon 8 -daglan 8 -mg/l 8 -pitsuwan 8 -privvy 8 -westerville 8 -eight-day-old 8 -owais 8 -summan 8 -haifeng 8 -ex-germany 8 -315million 8 -zuko 8 -zuks 8 -2000-2008 8 -2000-2005 8 -760million 8 -warnakulasuriya 8 -75km 8 -interferometer 8 -2,095 8 -schoolmistress 8 -dalbesio 8 -summerleaze 8 -va.-based 8 -lineal 8 -lower-back 8 -folktale 8 -malaria-infected 8 -mcrobb 8 -morriss 8 -zemmer 8 -eco-luxury 8 -sigmundur 8 -spivack 8 -b8 8 -john-lewis 8 -head-shaking 8 -average-size 8 -sweatbands 8 -sydling 8 -injudicious 8 -d'asti 8 -nerses 8 -wide-plank 8 -hachelbich 8 -57p 8 -papps 8 -sukhon 8 -atka 8 -voicebox 8 -lanolin 8 -fatah-hamas 8 -coghurst 8 -gaffigan 8 -glucomen 8 -tarawa 8 -a361 8 -avalere 8 -tonite 8 -phonic 8 -hand-lettered 8 -toomsboro 8 -llegal 8 -geebee 8 -tightly-knit 8 -naso-gastric 8 -s-money 8 -picacho 8 -slusher 8 -hsdd 8 -noden 8 -thigpen 8 -bergmeier 8 -3.253 8 -mashtags 8 -text-book 8 -co-present 8 -doxastakis 8 -christodoulopoulos 8 -mhuto 8 -ragheb 8 -2,131 8 -ponoplayer 8 -redeems 8 -maser 8 -bourassa 8 -wolfgango 8 -jazayeri 8 -readingmate 8 -combat-equipped 8 -olton 8 -laurs 8 -laury 8 -precipices 8 -butylated 8 -tver 8 -zhukovsky 8 -al-maeena 8 -paulaner 8 -namechecked 8 -uterqüe 8 -winders 8 -500mb 8 -ochres 8 -bayston 8 -countesses 8 -sellard 8 -nesterchuk 8 -swaby 8 -supai 8 -55-pound 8 -baruchel 8 -sportsluxe 8 -armature 8 -rasik 8 -abdoul 8 -jakaria 8 -ventas 8 -woodrum 8 -struggler 8 -183mph 8 -campazzo 8 -d-mich. 
8 -canungra 8 -abelisaurids 8 -ill-thought-through 8 -609,000 8 -merseyside-based 8 -best-connected 8 -gprs 8 -madhusudan 8 -mercers 8 -transat 8 -anti-thaksin 8 -khnp 8 -deer-vehicle 8 -too-close-for-comfort 8 -tap-ins 8 -897million 8 -well-polished 8 -federalisation 8 -msi 8 -oba 8 -meqdad 8 -zimei 8 -unfaltering 8 -misreads 8 -anti-animal 8 -gastronomes 8 -emirates-based 8 -pro-hezbollah 8 -#brasil 8 -drambuie 8 -predator-prey 8 -clamshells 8 -farhoodi 8 -clephane 8 -15-litre 8 -choco-pies 8 -muktinath 8 -430m 8 -rizespor 8 -pattering 8 -50,500 8 -proto-state 8 -bio-energy 8 -aveeno 8 -zocca 8 -40percent 8 -antitank 8 -12th-seeded 8 -blue-state 8 -everard 8 -spuc 8 -erdhardt 8 -tappero 8 -600c 8 -tularosa 8 -team-bonding 8 -abbou 8 -1566 8 -seagrove 8 -hitchon 8 -sureshkumar 8 -flirtexting 8 -boskovski 8 -jigsaw-online 8 -inuka 8 -seine-saint-denis 8 -height-adjustable 8 -euskirchen 8 -30-litre 8 -al-tamimi 8 -rosing 8 -björkliden 8 -plattsmouth 8 -ominous-sounding 8 -lamari 8 -grazielli 8 -peerindex 8 -jumaane 8 -medard 8 -garric 8 -garrin 8 -garris 8 -golt 8 -5:29 8 -aylmer 8 -tuskers 8 -meldrum-hanna 8 -xingu 8 -magik 8 -nyiro 8 -highwoods 8 -voss-wittig 8 -boettcher 8 -safavid 8 -shakkour 8 -lusties 8 -vigilante-style 8 -monteverdi 8 -third-leading 8 -b-minus 8 -conjunctiva 8 -saccone-joly 8 -matania 8 -mossman 8 -crinkled 8 -mganga 8 -dzudovic 8 -ganyard 8 -pantopia 8 -cassis 8 -obameter 8 -eram 8 -aravah 8 -tobar 8 -fortunino 8 -8.57 8 -mujra 8 -20-seat 8 -insight100 8 -nevilles 8 -horse-carriage 8 -budtender 8 -potently 8 -dipak 8 -mact 8 -cash-for-votes 8 -grenoside 8 -yaniseth 8 -kubbar 8 -gambira 8 -strathglass 8 -wondemagegne 8 -laici 8 -haese 8 -blake-powell 8 -167million 8 -huntsworth 8 -wooky 8 -doffs 8 -732,000 8 -shalke 8 -oromo 8 -josemans 8 -sanlucar 8 -defense-related 8 -meunch 8 -sudhof 8 -caldas 8 -cirilo 8 -baggier 8 -tuca 8 -izen 8 -bonsor 8 -shafee 8 -flatoff 8 -http://nbcsandiego.com 8 -fifth-in-line 8 -tandragee 8 -then-unidentified 8 -lobacheva 8 -bike-share 8 -glueing 8 -dasti 8 -calorie-packed 8 -sun-lovers 8 -balga 8 -dürer 8 -joppa 8 -67.1 8 -orations 8 -vcat 8 -3,409 8 -animalia 8 -falcón 8 -toss-ups 8 -newson6.com 8 -surgenor 8 -toq 8 -weragoda 8 -bushwalking 8 -tubemogul 8 -democratic-run 8 -adamses 8 -ludovico 8 -cosier 8 -fashion-led 8 -matchy-matchy 8 -dorie 8 -dealth 8 -hodella 8 -patituce 8 -merzwski 8 -gamification 8 -respicio 8 -tafheen 8 -3:53 8 -3:54 8 -zeuxis 8 -innuendo-laden 8 -gun-making 8 -dreamscience 8 -lezama 8 -short-toed 8 -atrociously 8 -spritzing 8 -chipperton 8 -scf 8 -dog-sitting 8 -collar-length 8 -escalades 8 -romancer 8 -brozin 8 -test-launched 8 -akong 8 -majella 8 -staffordshire-based 8 -campylobacteriosis 8 -13d 8 -neira 8 -benita-lynn 8 -mascio 8 -re-freeze 8 -5.32 8 -5.33 8 -catharina 8 -14.25 8 -tarverdiyeva 8 -re-posts 8 -allama 8 -shrief 8 -sky-scraper 8 -empire-line 8 -gallifrey 8 -aplusk 8 -truenorth 8 -76.1 8 -demopolis 8 -terminators 8 -t.a.p. 
8 -makuake 8 -skirmished 8 -260-year-old 8 -ngata 8 -ripostes 8 -gemological 8 -lambrinos 8 -duvan 8 -emote 8 -95km/h 8 -kasserra 8 -bramber 8 -stranges 8 -shamghadri 8 -sisul 8 -anschluss 8 -chandre 8 -7,969 8 -daudzai 8 -airteam 8 -mission-critical 8 -madingley 8 -lower-strength 8 -separators 8 -macgarva 8 -#jesus 8 -horniblew 8 -1425 8 -91kg 8 -5,180 8 -lathering 8 -maver 8 -said-moorhouse 8 -amun 8 -sukiati 8 -egger 8 -inter-prison 8 -tom_sheen 8 -transel 8 -cica 8 -post-quake 8 -geocoding 8 -wyard 8 -dacy 8 -pressman 8 -tischenko 8 -ectrodactyly 8 -teahouses 8 -bockius 8 -bright-line 8 -frolov 8 -cheeburger 8 -pobiner 8 -laurinda 8 -jabre 8 -trenticosta 8 -harisu 8 -anti-miscegenation 8 -scarpulla 8 -evie-leigh 8 -5555 8 -side-splitting 8 -kay-ann 8 -scout5000 8 -three-to-one 8 -marawa 8 -tuckerman 8 -augean 8 -weeford 8 -ride-hailing 8 -coakey 8 -poudre 8 -bogging 8 -managala 8 -loubani 8 -paoletti 8 -piled-up 8 -puerto-cabello 8 -dc-4 8 -glower 8 -a666 8 -drumbeats 8 -chinelle 8 -mooch 8 -zenonade 8 -p226 8 -coywolf 8 -kierra 8 -manson-perry 8 -lockets 8 -lanceros 8 -re-evaluates 8 -re-screened 8 -akker 8 -blueshield 8 -yasmani 8 -biodesign 8 -twitter-themed 8 -legislations 8 -mountlake 8 -mocorito 8 -pro-islam 8 -polkerris 8 -greenfinches 8 -glaciology 8 -photobooths 8 -primeau 8 -cliserio 8 -jordanaires 8 -@giaallemand 8 -española 8 -one-race 8 -walton-on-the-naze 8 -petruzzi 8 -hekmatis 8 -hunterston 8 -ukpabio 8 -lawyer-client 8 -submissives 8 -abduallah 8 -fundred 8 -tebbut 8 -3d-printable 8 -sdi 8 -trombones 8 -lee-lo 8 -crimmin 8 -aalbersberg 8 -palindrome 8 -slacked 8 -mid-deck 8 -ibeanu 8 -sete 8 -1,611 8 -movado 8 -bhamra 8 -raniero 8 -zsuzsanna 8 -sippola 8 -flange 8 -mburri 8 -pre-leukemia 8 -al-haffa 8 -hyeonseo 8 -belarsky 8 -dc-3s 8 -hyperflex 8 -watercourse 8 -infestans 8 -l'archet 8 -#classy 8 -lm-1 8 -8-14 8 -amriki 8 -2,436 8 -2,435 8 -ebo 8 -entim 8 -9:24 8 -neuroprotective 8 -kroma 8 -overfinch 8 -musicase 8 -zahidi 8 -trpv1 8 -45-metre 8 -dormers 8 -kugor 8 -csas 8 -rosendahl 8 -allsvenskan 8 -drop-waist 8 -gotabhaya 8 -interventionists 8 -frou-frou 7 -sunrays 7 -elizardo 7 -lapua 7 -misseriya 7 -owlfish 7 -ankylosaurs 7 -nonconsecutive 7 -tudorache 7 -50-piece 7 -a.h. 
7 -airservices 7 -storm-affected 7 -abdikarim 7 -rowthorn 7 -indonesia-based 7 -kravchuk 7 -trenear-harvey 7 -leara 7 -bratwursts 7 -ex-stepson 7 -cfia 7 -stone-carved 7 -antagonizes 7 -kulfi 7 -nasa-noaa 7 -food-handling 7 -ozdemir 7 -kno 7 -matrosskaya 7 -imtiyaz 7 -five-foot-two 7 -changemyreputation.com 7 -,33 7 -59g 7 -mythique 7 -kocin 7 -hurly-burly 7 -lumee 7 -weitzel 7 -nabeela 7 -coffee-growing 7 -obadiaru 7 -higbee 7 -tech-obsessed 7 -goodener 7 -d23 7 -ho41 7 -hipster.com 7 -carrier-based 7 -a3211 7 -super-prison 7 -marsili 7 -meihls 7 -2:31 7 -2:37 7 -2:39 7 -kokenyova 7 -plusgrade 7 -mazzy 7 -canadaigua 7 -endarterectomy 7 -liberato 7 -yeeles 7 -mughniyah 7 -gainor 7 -clatsop 7 -norooznews 7 -long-drawn 7 -llovera 7 -three-and-a-half-hour 7 -euroa 7 -o'sheas 7 -skycap 7 -ceaser 7 -salnitskaya 7 -hotchner 7 -afte 7 -faird 7 -fdls 7 -agapanthus 7 -websense 7 -interjection 7 -smokescreens 7 -joyxee 7 -smilianets 7 -schön 7 -donsah 7 -krunchy 7 -desensitizes 7 -nessling 7 -yasamie 7 -seizure-free 7 -1,319 7 -duranbah 7 -determinative 7 -fairyflies 7 -immuno-suppressive 7 -1/50 7 -chadbourn 7 -oddicombe 7 -helicoprion 7 -nonaka 7 -traditionalism 7 -siner 7 -flaggers 7 -3,166 7 -dauahare 7 -sea-water 7 -90-tonne 7 -institutionalise 7 -stanyer 7 -isanbul 7 -friendlychemist 7 -agol 7 -bayramoglu 7 -gondor 7 -kadiyska 7 -milchberg 7 -after-exams 7 -play-date 7 -chygrynskiy 7 -fromagers 7 -94million 7 -tan-colored 7 -tukhtin 7 -hernciar 7 -mayardit 7 -clotheslines 7 -pawprints 7 -vhr 7 -mortons 7 -oficina 7 -ranu 7 -galleguillos 7 -kamionkowski 7 -mckellican 7 -scoles 7 -rear-wheel-drive 7 -tribals 7 -sarim 7 -bachems 7 -brung 7 -blakeview 7 -mundu 7 -dyker 7 -moonman 7 -sargasso 7 -seljuk 7 -1,572 7 -1,576 7 -powercor 7 -saleswomen 7 -hauritz 7 -sundridge 7 -tomomi 7 -muj 7 -511,000 7 -escolar 7 -hathersage 7 -8000x 7 -mors 7 -ferc 7 -26-29 7 -budiawan 7 -sundvollen 7 -rapidly-changing 7 -beghi 7 -coralyn 7 -exasperate 7 -hecken 7 -90-page 7 -mcelholm 7 -status-quo 7 -nowlin 7 -wickramaratna 7 -siva-jothy 7 -hinkson 7 -chouest 7 -carcharodontosaurs 7 -school-gate 7 -digihaven 7 -montori 7 -easy-to-digest 7 -mayhill 7 -sastind 7 -syphon 7 -yarchagumba 7 -ovenbirds 7 -ghashir 7 -short-order 7 -rafid 7 -1505 7 -75-year-olds 7 -lumbersexual 7 -fuxianhuia 7 -dress-down 7 -waverton 7 -bellahouston 7 -crassness 7 -tichborne 7 -deisseroth 7 -thai-themed 7 -pacifici 7 -sno-cat 7 -eshpari 7 -webcasts 7 -hate-motivated 7 -dider 7 -gulsen 7 -nonces 7 -fragias 7 -avera 7 -chimborazo 7 -college-ready 7 -melony 7 -kohnen 7 -21-strong 7 -brockhill 7 -deliverymen 7 -iran-related 7 -first-response 7 -zhavoronkov 7 -pollards 7 -oumkheyr 7 -archi 7 -tomasek 7 -taskbar 7 -carvel 7 -juist 7 -6k 7 -5:46 7 -one-in-100-million 7 -11.06 7 -11.03 7 -ultra-safe 7 -4:06 7 -thiemann 7 -then-24-year-old 7 -ozanne 7 -tossa 7 -boshier 7 -calid7 7 -hecks 7 -,39 7 -defiants 7 -well-looked 7 -schlitte 7 -dovel 7 -dentin 7 -sepia-tinged 7 -__________ 7 -pencoed 7 -silverbridge 7 -uninflated 7 -nii-azu 7 -non-gaming 7 -fotaras 7 -o'shannessy 7 -villares 7 -2081-2100 7 -taavi 7 -hartz 7 -7:54 7 -subducting 7 -gauhar 7 -oocyte 7 -freidel 7 -fornash 7 -assyria 7 -bikeable 7 -odal 7 -2,192 7 -gijsbert 7 -tunecore 7 -bayji 7 -kaytlin 7 -ercc 7 -anti-drinking 7 -8.78 7 -56,000-plus 7 -silvertown 7 -luganda 7 -明朝 7 -siphiwe 7 -audibles 7 -p-8a 7 -derderian 7 -videgaray 7 -340,000-a-year 7 -mathematica 7 -curdle 7 -janusiscus 7 -kicca.com 7 -mediterraneo 7 -johanssen 7 -thokozani 7 -tolchard 7 -youbionic 7 -quarter-length 7 -#uk 
7 -dobell 7 -griffioen 7 -spin-doctor 7 -niass 7 -lenya 7 -lorusso 7 -blogger.com 7 -hartleys 7 -two-months-old 7 -siggs 7 -canda 7 -cando 7 -papplewick 7 -anjool 7 -dmo 7 -sbenaty 7 -eco-hotel 7 -10am-4pm 7 -60,000-strong 7 -co-headlining 7 -rettig 7 -800billion 7 -majhi 7 -ketterman 7 -19-years 7 -khiday 7 -143.5 7 -sydney-siders 7 -brandwatch 7 -talibanization 7 -vrinda 7 -25-pounder 7 -high-kick 7 -alrashid 7 -olawale 7 -lucano 7 -coiling 7 -camarasaurus 7 -thuli 7 -multicolor 7 -freshly-caught 7 -schmallenberg 7 -othe 7 -u.s.-libyan 7 -caven-atack 7 -slezak 7 -sumanjeet 7 -canutt 7 -bedfellow 7 -anthill 7 -kendarius 7 -metdesk 7 -maginot 7 -hybridization 7 -full-resolution 7 -toshihiro 7 -yfrog 7 -leskien 7 -1,236 7 -36-27 7 -fragonard 7 -cross-pollinate 7 -delear 7 -haggui 7 -wallkill 7 -joergen 7 -frameless 7 -euroseries 7 -minnery 7 -18th-placed 7 -silverwood 7 -achiness 7 -securenvoy 7 -dayuse-hotels 7 -@thedukeofyork 7 -carbon-dated 7 -22-1 7 -243million 7 -today/suffolk 7 -sakowicz 7 -co-team 7 -zigang 7 -kombase 7 -poteet 7 -detling 7 -elshiekh 7 -boondocks 7 -eight-letter 7 -transneft 7 -mitzpe 7 -dettorre 7 -tastelessly 7 -delafield 7 -flett 7 -sherlockians 7 -peggielene 7 -bylsma 7 -kiddicare 7 -snuppy 7 -sciri 7 -selfie-stick 7 -bartleson 7 -piatek 7 -doctortown 7 -vaa 7 -vestre 7 -mclendon-covey 7 -fortey 7 -satellite-linked 7 -844-page 7 -nine-yard 7 -cod-liver 7 -lignite 7 -centerjuly 7 -hiv-affected 7 -reticulata 7 -chitimacha 7 -less-skilled 7 -sallies 7 -tuinder 7 -severgnini 7 -ercan 7 -liberal-conservative 7 -car-size 7 -www.facebook.com 7 -mcskimming 7 -child-safe 7 -creepydol 7 -floodwalls 7 -tearfund 7 -englishwomen 7 -brillante 7 -euthanising 7 -squba 7 -coagulation 7 -ditmas 7 -@piersmorgan 7 -timoney 7 -blubbery 7 -dolor 7 -amjit 7 -@investeccricket 7 -wenli 7 -loxy 7 -49-year-olds 7 -burundians 7 -romanes 7 -beheld 7 -co-schemer 7 -on-the-runs 7 -linnie 7 -lingual 7 -dartz 7 -audel 7 -sanjayan 7 -orange-peel 7 -hadjarab 7 -bargets 7 -desperado 7 -1406 7 -gesture-based 7 -beeks 7 -onwuha 7 -refering 7 -a-hole 7 -violence-wracked 7 -matthey 7 -wygle 7 -doom-mongers 7 -htike 7 -york-jfk 7 -cozzens 7 -siejka 7 -estefani 7 -prostrating 7 -adebiyi-abiola 7 -34-car 7 -fruit-flavored 7 -junipers 7 -worzel 7 -500-person 7 -oxiclean 7 -mazafer 7 -mangoush 7 -phife 7 -townsley 7 -jayyusi 7 -culburra 7 -jazzman 7 -gamecube 7 -golvin 7 -conquistador 7 -reserachers 7 -seaon 7 -lecraw 7 -textron 7 -8th-grade 7 -re-appearing 7 -challenor 7 -doolally 7 -re-decorating 7 -201km 7 -maysara 7 -kondratiev 7 -stehlik 7 -wolz 7 -pioz 7 -geriatrician 7 -benford 7 -yuca 7 -mayr-achleitner 7 -uvz 7 -labo 7 -dustcart 7 -senghor 7 -half-cent 7 -vyse 7 -later-life 7 -5-ounce 7 -lootings 7 -brownface 7 -gursky 7 -darbelnet 7 -f14 7 -f15 7 -kvly 7 -37,800 7 -pntx2-6 7 -bucko 7 -serozinc 7 -railcar 7 -tailgater 7 -gelvin 7 -alcapa 7 -vacanti 7 -codpieces 7 -dronabinol 7 -mediabistro 7 -hawijah 7 -38f 7 -209,920 7 -0s 7 -boroday 7 -rathner 7 -phrae 7 -nonpresidential 7 -traynom 7 -bilinski-munion 7 -drop-side 7 -staffcentrix 7 -hotsinpiller 7 -zuying 7 -re-consider 7 -thinh 7 -dairy-farm 7 -douching 7 -deloach 7 -rowghani 7 -kasimpasa 7 -aphroditois 7 -swope 7 -batato 7 -shavata 7 -kichwa 7 -tudgay 7 -nadezda 7 -midsections 7 -rahmeier 7 -aikin 7 -tv-ma 7 -sbl 7 -11-room 7 -thovex 7 -twenty-nine-year-old 7 -jandarma 7 -jianmei 7 -carryall 7 -thunderously 7 -vouchercodespro 7 -pro-woman 7 -cesspools 7 -shirellda 7 -elzie 7 -microspheres 7 -200miles 7 -keyc-tv 7 -straarup 7 -klonoff 7 
-semi-fictional 7 -kruser 7 -200kph 7 -hodogaya 7 -castellotti 7 -multi-material 7 -cuddliest 7 -w.w. 7 -anti-is 7 -brumberger 7 -1,311 7 -shambala 7 -ciotat 7 -kela 7 -okunoin 7 -margarete 7 -bidondi 7 -tolosa 7 -wipe-clean 7 -a&t 7 -mamluk 7 -imperioli 7 -127.9 7 -grzibovska 7 -rickers 7 -spacers 7 -70-acre 7 -divis 7 -urbanski 7 -neo-conservatives 7 -kenly 7 -120,00 7 -cottage-style 7 -preordered 7 -double-deckers 7 -carola 7 -kawamata 7 -aquaplaned 7 -beveled 7 -fang-like 7 -over-complicated 7 -turek 7 -bolyston 7 -zampatti 7 -metail 7 -gasbuddy.com 7 -sinclair-webb 7 -amazones 7 -scudettos 7 -noblet 7 -yordan 7 -al-dawood 7 -powelson 7 -naiya 7 -sweet-tempered 7 -buras 7 -jaravaza 7 -inner-most 7 -perez-maestro 7 -haggard-looking 7 -feliciana 7 -nandrolone 7 -ranitidine 7 -sheeha 7 -phenobarbital 7 -nota 7 -322km 7 -grandmasters 7 -anxious-looking 7 -lowballing 7 -boao 7 -kellermeister 7 -knock-about 7 -bernards 7 -hi-jacked 7 -ohka 7 -unitarians 7 -debartolo 7 -roadys 7 -daps 7 -mohaqiq 7 -overprescribing 7 -opolli 7 -then-6-year-old 7 -badly-injured 7 -leonesa 7 -evocatively 7 -puszta 7 -sensus 7 -winefride 7 -afesip 7 -neoliberals 7 -boys/girls 7 -post-crescent 7 -betrayers 7 -inkoom 7 -canine-friendly 7 -shasha 7 -tri-tip 7 -alchemical 7 -portlethen 7 -soviet-born 7 -#hero 7 -108bn 7 -videophones 7 -emissions-free 7 -69mw 7 -2:19 7 -story-lines 7 -farel 7 -mother-of-14 7 -billie-jean 7 -husker 7 -medieval-themed 7 -mané 7 -pielke 7 -logrolling 7 -still-grieving 7 -kpix-tv 7 -anti-men 7 -roach-eating 7 -aurdal 7 -@dc2forlife 7 -janurary 7 -nisreen 7 -jazaa 7 -jhamarion 7 -abitbol 7 -watarrka 7 -scholaert 7 -duivenvoorden 7 -trieu 7 -way-out-there 7 -kondalilla 7 -bio-recovery 7 -mp-203 7 -encina 7 -entranceway 7 -behçet 7 -al-rashidi 7 -pickiest 7 -march-past 7 -kimbolton 7 -neuro-surgeon 7 -letsie 7 -amaru 7 -amarg 7 -1,337 7 -1,332 7 -biscoe 7 -vacheron 7 -rockette 7 -jaquez 7 -finglands 7 -keirle 7 -aletta 7 -dionysius 7 -re-broadcast 7 -nmachi 7 -sitdown 7 -glastron 7 -warungs 7 -emporiums 7 -sealers 7 -modulating 7 -eurotrash 7 -meixner 7 -datar 7 -makonnen 7 -collards 7 -pre-prom 7 -mymagic 7 -moschella 7 -brankin 7 -bhs.co.uk 7 -92-minute 7 -168th 7 -costantin 7 -morganatic 7 -louwagie 7 -north-bound 7 -4400 7 -hdc 7 -whtm 7 -bar-tending 7 -haulbowline 7 -lean-tos 7 -cosell 7 -szilagyi 7 -hamayun 7 -syndicator 7 -ex-spouse 7 -3-week 7 -pibe 7 -hammerings 7 -big-business 7 -radiometric 7 -shinbones 7 -horwath 7 -jangled 7 -devireddy 7 -macweb 7 -50mbps 7 -lippett 7 -counter-punch 7 -townie 7 -hadrava 7 -crunchwrap 7 -unbox 7 -putzel 7 -khusro 7 -newly-bought 7 -verte 7 -.28 7 -viorica 7 -saabs 7 -68billion 7 -gerner 7 -ellenville 7 -cnr 7 -limantour 7 -mid-terraced 7 -1953-1961 7 -2,055 7 -edensor 7 -whizzy 7 -heward 7 -58-page 7 -1974-1977 7 -mexicanos 7 -stupples 7 -1,887 7 -1,888 7 -gimcrack 7 -gottshall 7 -154mph 7 -principessa 7 -comben 7 -reassembles 7 -hatta 7 -sub-contracting 7 -kertesz 7 -bratchikova 7 -1524 7 -1525 7 -1527 7 -1523 7 -cowan-hall 7 -lingmerth 7 -agusan 7 -gemasolar 7 -depressurized 7 -soldatov 7 -362,000 7 -duty-style 7 -message-in-a-bottle 7 -atram-hasis 7 -urayasu 7 -3,360 7 -juna 7 -mustachio 7 -grid-like 7 -endersby 7 -bat-like 7 -compartmentalise 7 -scratchproof 7 -souce 7 -guttieres 7 -trippler 7 -awacs 7 -shayan 7 -super-tough 7 -tweedie-connor 7 -50-story 7 -parekh 7 -haberdashery 7 -atiba 7 -scex 7 -drug-like 7 -domracheva 7 -eighth-highest 7 -wilstein 7 -seong-chang 7 -sun-synchronous 7 -nikkie 7 -minzu 7 -cannistra 7 -cox-reed 7 
-legitimises 7 -picabo 7 -bayonetta 7 -rui'an 7 -battle-worn 7 -yecheng 7 -altnaharra 7 -d.e. 7 -re-investing 7 -langenstein-zwieberge 7 -sadah 7 -sadam 7 -mesnes 7 -gerontologist 7 -chac 7 -crack-addicted 7 -gardenia 7 -ryann 7 -burled 7 -easements 7 -low-status 7 -@seanabbott77 7 -anti-consumer 7 -jaf 7 -huizar 7 -mersey-cheshire 7 -monsignors 7 -fornicate 7 -x-band 7 -balby 7 -bicorne 7 -sandhya 7 -al-sadd 7 -ischaemia 7 -143m 7 -undershot 7 -1438 7 -kitzerow 7 -2057 7 -205m 7 -11/11/11 7 -xunyi 7 -9.32 7 -testone 7 -dudzisz 7 -erek 7 -squirrel-like 7 -under-explored 7 -dodridge 7 -vaitilingam 7 -schrod 7 -daniilidou 7 -inclusionary 7 -re-arm 7 -tricot 7 -party-affiliated 7 -zepps 7 -pns 7 -pnu 7 -rables 7 -critically-injured 7 -hyper-feminine 7 -matamata 7 -renoirs 7 -smiggles 7 -setpiece 7 -body-boarders 7 -bullet-like 7 -leet 7 -sokotei 7 -kneesy 7 -dribblers 7 -monta 7 -55-strong 7 -ousterhout 7 -tuteja 7 -thefacebook.com 7 -2,287 7 -loto-quebec 7 -mugisha 7 -kalenda 7 -48th-minute 7 -club-versus-country 7 -eiglarsh 7 -austalian 7 -incheon-bound 7 -eew 7 -eed 7 -over-flowing 7 -evangelising 7 -pannick 7 -darksiders 7 -brewmeister 7 -90-acre 7 -3,744 7 -@robmillsymills 7 -asocial 7 -mirinae 7 -abdollahpour 7 -performace 7 -forecastle 7 -gilfillan 7 -dahei 7 -visted 7 -jintilo 7 -truett-hurst 7 -dienst 7 -santokh 7 -meczyk 7 -chaiten 7 -14.800 7 -claycord 7 -thasos 7 -bavisi 7 -mahawa 7 -margaritis 7 -10th-ranked 7 -thanarak 7 -tagus 7 -schreckengost 7 -maggot-infested 7 -white-capped 7 -fuaed 7 -ambinder 7 -lledr 7 -decontee 7 -pesta 7 -chipboard 7 -maxi-dress 7 -jember 7 -somerdale 7 -bromby 7 -sozonov 7 -haenel 7 -maaytah 7 -missold 7 -legon 7 -off-spinners 7 -hierakonpolis 7 -humaya 7 -hakkari 7 -mandamus 7 -corkcicle 7 -capehorn 7 -killefer 7 -shalvis 7 -noach 7 -once-grand 7 -stechelberg 7 -mowforth 7 -smart-looking 7 -aday 7 -orsi 7 -kivi 7 -175-pound 7 -rekia 7 -danitra 7 -3:11 7 -40d 7 -brynjar 7 -loehr 7 -cyprien 7 -go-getters 7 -puxley 7 -al-mu 7 -eday 7 -battelle 7 -locally-produced 7 -hsidu 7 -gerolsteiner 7 -ellershaw 7 -l&m 7 -regenocyte 7 -game-style 7 -ex-scientology 7 -al-mutairi 7 -nogwaza 7 -portes 7 -kasungu 7 -mashhour 7 -turnagain 7 -then-cardinal 7 -halotherapy 7 -onrush 7 -lobdell 7 -mississipi 7 -summerwalk 7 -narcotics-related 7 -straitjackets 7 -legeno 7 -gillott 7 -glaize 7 -bone-rattling 7 -juntas 7 -105.8 7 -105.7 7 -106m 7 -1068 7 -bishko 7 -biocoal 7 -17bn 7 -volx 7 -gabet 7 -banyas 7 -honor-roll 7 -bucholz 7 -munsell 7 -fully-featured 7 -postulates 7 -3,960 7 -koehn 7 -wolof 7 -loynes 7 -macklowe 7 -debriefers 7 -wooden-framed 7 -cobia 7 -ruiz-gallardon 7 -fenetre 7 -bit-by-bit 7 -housebreaker 7 -kitengela 7 -mid-priced 7 -mypads 7 -chipembele 7 -hittites 7 -addra 7 -herrero 7 -hilarion 7 -suitsy 7 -330billion 7 -adare 7 -salmon-colored 7 -24,999 7 -kishkiyya 7 -fpf 7 -astrocytes 7 -broere 7 -kilworth 7 -avnet 7 -half-shaved 7 -poba 7 -omozusi 7 -wpo 7 -seikan 7 -bug-eating 7 -alteza 7 -ex-wimbledon 7 -1995-1999 7 -1465 7 -novichonok 7 -trussardi 7 -farside 7 -anthocyanin 7 -farmbloomington 7 -sheesh 7 -nowhereelese.fr 7 -wrtv-tv 7 -mso-fareast-theme-font 7 -micro-managed 7 -warrick-deaves 7 -biscovey 7 -ruffels 7 -197ft 7 -ayala-arizmendi 7 -instacart 7 -klutzy 7 -psyllid 7 -cross-community 7 -périgord 7 -prithviraj 7 -flummox 7 -kanji 7 -kishna 7 -wachowskis 7 -xtandi 7 -seeyourimpact.org 7 -middleclass 7 -cougill 7 -trances 7 -antonina 7 -antonine 7 -radhi 7 -skittering 7 -gruebel 7 -paveena 7 -navidad-hernandez 7 -alibhai 7 -borelli 7 -biters 7 
-wond 7 -oancea 7 -www.traceycox.com 7 -lado 7 -5,000-10 7 -unrealised 7 -dolled-up 7 -second-fiddle 7 -time-wise 7 -vinyly 7 -london2london 7 -toyon 7 -fistler 7 -alderly 7 -gangwish 7 -sussing 7 -kvbc 7 -shahlai 7 -al-dahab 7 -1240 7 -1249 7 -five-episode 7 -w10 7 -railfan 7 -litzelman 7 -knish 7 -125mm 7 -akkar 7 -moulder 7 -pantomine 7 -25-match 7 -cup-a-soup 7 -adalia 7 -inanity 7 -sulej 7 -olsztyn 7 -21-bedroom 7 -handpicks 7 -over-charged 7 -ryota 7 -angiosarcoma 7 -akashvani 7 -kushino 7 -toquero 7 -richardson-walsh 7 -5,350 7 -chhatbar 7 -grutter 7 -dcpp 7 -eidsdottir 7 -#cozygirl 7 -skydance 7 -finkbonner 7 -shebby 7 -amagasaki 7 -electric-blue 7 -casecam 7 -stamback 7 -burray 7 -kepr 7 -integrationist 7 -pearlly 7 -nitta 7 -papler 7 -airportr 7 -intaglio 7 -aboutrika 7 -yanofsky 7 -petro-dollars 7 -cephalonia 7 -nonprimary 7 -sultzbach 7 -onic 7 -haniff 7 -kosan 7 -nonomura 7 -phenotype 7 -schattman 7 -ex-king 7 -self-definition 7 -budgeon 7 -slip-resistant 7 -sparklemuffin 7 -lochinver 7 -buggins 7 -high-tax 7 -sequedin 7 -matterson 7 -catterns 7 -vogelherd 7 -blerkom 7 -kerastase 7 -balika 7 -wfoil 7 -sleepphones 7 -multitasker 7 -rutberg 7 -preoccupy 7 -37,600 7 -bisegni 7 -kievs 7 -nikolaou 7 -alamshar 7 -fiveash 7 -peronne 7 -asraam 7 -26-stone 7 -declination 7 -monesi 7 -soro 7 -brandau 7 -sub-categories 7 -pacemaker-like 7 -nine-vehicle 7 -r-wis. 7 -al-haram 7 -colleville 7 -36,928 7 -extended-range 7 -akerboom 7 -friskies 7 -laigast 7 -warm-water 7 -#blackbrunch 7 -miday 7 -muslims4uk 7 -23-7 7 -114.8 7 -114.5 7 -régates 7 -14-4 7 -holmy 7 -home-educated 7 -hetti 7 -sheppeard 7 -p85 7 -middle-management 7 -tulun 7 -forristal 7 -maehara 7 -ex-finance 7 -m'bohli 7 -u.n.-affiliated 7 -krathong 7 -asian-inspired 7 -brattahild 7 -deitert 7 -aae 7 -icqc 7 -elderberry 7 -batkin 7 -tonti 7 -buttevant 7 -40-million 7 -alajuela 7 -9.80 7 -arpkd 7 -jagex 7 -haasbroek 7 -paglen 7 -eukaryotic 7 -hachi 7 -uch 7 -forstemann 7 -strahovski 7 -aeosana 7 -israeli-turkish 7 -tarnya 7 -magista 7 -super-sub 7 -lookup 7 -#brianwilliamsmisremembers 7 -heijmans 7 -shadowmancer 7 -jogjakarta 7 -7.90 7 -weigner 7 -alberg 7 -tishchenko 7 -alcohol-detection 7 -initialed 7 -over-mighty 7 -beldon 7 -recently-formed 7 -raziel 7 -victimâ 7 -quipus 7 -non-mainstream 7 -strobl 7 -mcneese 7 -near-invisible 7 -132nd 7 -dunwoodie 7 -moranda 7 -cpus 7 -mtwapa 7 -jawid 7 -sulome 7 -out-manoeuvred 7 -3-8 7 -tee-total 7 -lurker 7 -mondavi 7 -osteomyelitis 7 -reneges 7 -astrofest 7 -gold-diggers 7 -vecellio 7 -cyber-attacker 7 -zhangjiakou 7 -krauze 7 -uihlein 7 -eltringham 7 -re-made 7 -honnor 7 -schwall 7 -jenkinses 7 -westhampton 7 -20-foot-high 7 -game-tying 7 -embryolisse 7 -ball-by-ball 7 -campuswide 7 -iset 7 -landholders 7 -silverstone-based 7 -cavezzo 7 -share-based 7 -10-month-long 7 -@freyanewman 7 -gipp 7 -australian-wide 7 -truckin 7 -hbc 7 -augusten 7 -#equalitycalling 7 -1,011.5 7 -stick-shift 7 -2008-present 7 -pilz 7 -224,000 7 -takatz 7 -safak 7 -gulfnews.com 7 -hoglets 7 -heglund 7 -red-letter 7 -delboy 7 -ratby 7 -jaurez 7 -black-and-white-striped 7 -baby-pink 7 -mateos 7 -demello 7 -novena 7 -butt-kicking 7 -urness 7 -residents-only 7 -morley-clough 7 -qasemi 7 -ginuwine 7 -42mins 7 -wellborn 7 -turkish-occupied 7 -lichtenberg 7 -freethinkers 7 -abdelrahim 7 -clk 7 -biddersingh 7 -clp 7 -beddis 7 -shimabukuro 7 -cyn 7 -lllt 7 -maimi 7 -briefskate 7 -yun-fat 7 -sexagenarian 7 -izhevsk 7 -self-diagnosed 7 -lovetere 7 -aboalkassem 7 -vermote 7 -push-pull 7 -xintong 7 -mehat 7 -chengpeng 7 -n.e. 
7 -monessen 7 -ngugi 7 -stickley 7 -burrs 7 -kotsay 7 -recidivist 7 -pulverizing 7 -fenrir 7 -pinfield 7 -1,764 7 -potty-trained 7 -lazzareschi 7 -commiserates 7 -kozelle 7 -tabley 7 -neknominations 7 -hand-hewn 7 -gazlay 7 -atilay 7 -akhmedov 7 -wando 7 -spillnot 7 -glangwili 7 -red-clad 7 -nine-iron 7 -mcgahen 7 -17-story 7 -julo 7 -cvitanovich 7 -cutts-mckay 7 -spekterkov 7 -gubera 7 -11,660 7 -v-10 7 -wolcottville 7 -feigin 7 -10mp 7 -20.13 7 -mcclead 7 -mermin 7 -atlasjet 7 -rfet 7 -rush-masuret 7 -teledyne 7 -tornado-damaged 7 -nirav 7 -retrofits 7 -tanina 7 -air-born 7 -chaviano 7 -hesam 7 -paraphilia 7 -career-focused 7 -chilblains 7 -giorgianni 7 -siim 7 -baofeng 7 -extravagent 7 -catch-weight 7 -dassow 7 -dually 7 -matatu 7 -schweiz 7 -bogof 7 -abalsamo 7 -tereshchenko 7 -6.07 7 -halwa 7 -beller 7 -payatas 7 -web-slinging 7 -al-faranci 7 -buchter 7 -1327 7 -masher 7 -freidan 7 -tilehurst 7 -rathmill 7 -wet-wipes 7 -whispery 7 -kibati 7 -naizmand 7 -rigley 7 -counter-charges 7 -qatari-backed 7 -foresta 7 -9.53 7 -teleferico 7 -home-crowd 7 -githinji 7 -farsi-language 7 -rotationplasty 7 -mycia 7 -long-gestating 7 -201million 7 -dotingly 7 -#pout 7 -6,500-strong 7 -wyrosdick 7 -khalique 7 -prinsjesdag 7 -bridalplasty 7 -perplexity 7 -revista 7 -listecki 7 -tyskie 7 -bauerlein 7 -wacom 7 -ephedra 7 -laganas 7 -1,500,000 7 -connyland 7 -11.42 7 -second-serve 7 -lustgarten 7 -teleprinter 7 -dabbous 7 -barci 7 -ultra-hip 7 -fizzio 7 -scamander 7 -i-522 7 -chinpo 7 -sssi 7 -ssss 7 -turmus 7 -saivet 7 -digitaltrends.com 7 -befurt 7 -metabolising 7 -lamkowski 7 -38-years-old 7 -housebites.com 7 -uitto 7 -taizhou 7 -poising 7 -bonfim 7 -computex 7 -high-tops 7 -agency-wide 7 -najjair 7 -nehi 7 -60-year-olds 7 -forca 7 -yonah 7 -portela 7 -punakha 7 -birman 7 -four-player 7 -100gb 7 -usages 7 -3-carat 7 -billerica 7 -non-acceptance 7 -late-30s 7 -emtala 7 -overanalyze 7 -geraci 7 -auris 7 -rept 7 -jasonville 7 -reactively 7 -cgear 7 -22,841 7 -1:27 7 -cee-lo 7 -matys 7 -elspas 7 -pre-treatment 7 -44-second 7 -yermolayev 7 -gschwandtkopf 7 -giana 7 -giani 7 -salaryman 7 -high-chair 7 -stumpings 7 -nokia.com 7 -bed-head 7 -25mins 7 -aopa 7 -eyjafjallajokul 7 -season-finale 7 -román 7 -alveolar 7 -porchway 7 -hospital-bound 7 -cntv 7 -lifegem 7 -538,000 7 -polyclinic 7 -templeman 7 -jabberwocky 7 -poorly-worded 7 -kiama 7 -renny 7 -sterry 7 -indie-pop 7 -carter-edwards 7 -stegoceras 7 -cruise-control 7 -twenty-three-year-old 7 -achill 7 -vyne 7 -rahn 7 -mccrary 7 -zaytseva 7 -43-minute 7 -crash-avoidance 7 -millot 7 -norren 7 -bridgeview 7 -winkelvoss 7 -pangerapan 7 -staphefekt 7 -ex-london 7 -lamber 7 -kreayshawn 7 -maghraby 7 -bohitile 7 -lekhno 7 -fiskvatn 7 -tostadas 7 -ekwa 7 -anjos 7 -al-sakhar 7 -gestations 7 -16-23 7 -eastbrook 7 -trefanny 7 -fresch 7 -1049 7 -nersnaes 7 -mahad 7 -gaytime 7 -1,458 7 -1,454 7 -lesseps 7 -gaddaffi 7 -9.13 7 -zitacuaro 7 -sarabia 7 -cbds 7 -derico 7 -costessey 7 -diefenbach 7 -johnsson 7 -zablocki 7 -denber 7 -tips@sheriff.sccgov.org 7 -genetically-engineered 7 -mutilates 7 -enthrall 7 -dridi 7 -mid-contract 7 -jibjab 7 -lote 7 -fondles 7 -tangibility 7 -psychometrics 7 -trinkaus 7 -mmafighting.com 7 -farewelling 7 -140,000-per-week 7 -uccellini 7 -topicality 7 -10,948 7 -strassberg 7 -@google 7 -lifeboatman 7 -third-deadliest 7 -40,000,001 7 -urungus 7 -checkhimout.com 7 -moazzem 7 -natallia 7 -hit-up 7 -kentucky-tennessee 7 -merill 7 -wigan-born 7 -yayi 7 -1442 7 -1,056 7 -jascon 7 -ryzhova 7 -cynwyd 7 -kfa 7 -houari 7 -decade-and-a-half 7 -wartski 7 
-oland 7 -walberg 7 -authorisations 7 -charny 7 -tannous 7 -recke 7 -10km/h 7 -3f 7 -boza 7 -douwe 7 -28,137 7 -9:34 7 -cash-in-transit 7 -mishandle 7 -kirotv 7 -ashelman 7 -cukor 7 -clearblue 7 -greencrest 7 -insa 7 -incongruities 7 -tapp 7 -cunnamulla 7 -pickin 7 -werft 7 -blonds 7 -a-quality 7 -mykel 7 -mykey 7 -krestovnikoff 7 -zatara 7 -porns 7 -hellstrom 7 -chinea 7 -gernreich 7 -incentive-based 7 -coxwell 7 -9,000-strong 7 -pamukkale 7 -ashegoda 7 -moodus 7 -deavean 7 -bodyman 7 -pesticide-free 7 -chidester 7 -ice-axe 7 -natural-color 7 -ecgs 7 -154lb 7 -berglund 7 -all-hands-on-deck 7 -substrains 7 -saxe-coburg-gotha 7 -tokenistic 7 -shiawassee 7 -groeben 7 -nehra 7 -mitey 7 -once-beautiful 7 -tamaya 7 -shoestrings 7 -tolle 7 -astro-turf 7 -curriculum-based 7 -calibrates 7 -bergessio 7 -university-level 7 -must-attend 7 -re-vamped 7 -schmutz 7 -lock8 7 -rilley 7 -tilemsi 7 -b105 7 -vcrs 7 -burdis 7 -190lbs 7 -47-minute 7 -wtatennis.com 7 -re-unite 7 -fathomed 7 -canasta 7 -sagesse 7 -programe 7 -gertner 7 -ill-treat 7 -162mph 7 -housecoat 7 -dorados 7 -klawe 7 -counter-demonstrator 7 -shinta 7 -lebanese-syrian 7 -lizabeth 7 -rainman 7 -maramures 7 -time-served 7 -anencephalic 7 -reverdes 7 -raymonds 7 -then-4-year-old 7 -romulous 7 -200hp 7 -gardisil 7 -luey 7 -cogger 7 -hettinger 7 -mini-drama 7 -argotec 7 -pinotti 7 -kamall 7 -flag-lowering 7 -fuser 7 -nermine 7 -kannon 7 -wheatle 7 -hindrances 7 -mechanistic 7 -gedeon 7 -bracey 7 -scorpios 7 -york-listed 7 -posset 7 -rahamim 7 -amerindian 7 -non-polluting 7 -four-armed 7 -fishpond 7 -bronzie 7 -2005-08 7 -lovably 7 -azuline 7 -salha 7 -pruchniewski 7 -48ft 7 -griggi 7 -trutanich 7 -groupons 7 -wedpics 7 -tattinger 7 -tiesto 7 -looking-glass 7 -vainglorious 7 -eosinophils 7 -fowls 7 -deshler 7 -simco 7 -20.37 7 -20.38 7 -dixville 7 -30:30 7 -onder 7 -bilkent 7 -riper 7 -larger-screened 7 -goper 7 -self-administering 7 -rozsypne 7 -investec.co.uk 7 -22-story 7 -carcraft 7 -abine 7 -skyscraping 7 -dardens 7 -cross-checks 7 -119.5 7 -119.2 7 -ice-sheet 7 -scaredy-cat 7 -herrod 7 -caffeine-based 7 -now-empty 7 -enteropneusts 7 -falaco 7 -biggy 7 -winterize 7 -religous 7 -mcgeown 7 -fusking 7 -rajartnam 7 -vitti 7 -liebana 7 -al-rifai 7 -rennais 7 -ourense 7 -re-arranging 7 -gaddafis 7 -sandfly 7 -kellman 7 -50,100 7 -passfield 7 -clear-blue 7 -warzenski 7 -kanzius 7 -monocerotis 7 -28th-minute 7 -darcheville 7 -polard 7 -kuljic 7 -mckarney 7 -tens-of-thousands 7 -datt 7 -mccail 7 -best-attended 7 -moslem 7 -acy 7 -regifting 7 -second-top 7 -agren 7 -internationally-acclaimed 7 -irradiance 7 -turkey-iraq 7 -peat-free 7 -telethons 7 -6,000,000 7 -fegrouche 7 -mishaw 7 -kiely-cohen 7 -mezague 7 -jasbir 7 -#libspill2 7 -communitarian 7 -34lbs 7 -interregnum 7 -lanie 7 -109.8 7 -great-great-great-great 7 -ruha 7 -fanad 7 -sirt 7 -sublette 7 -blackbox 7 -karisa 7 -2:58 7 -whibberley 7 -givskud 7 -escola 7 -ushaw 7 -mcconnell-reid 7 -7.71 7 -7.78 7 -saint-loup 7 -hense 7 -harok 7 -dencer 7 -cabarete 7 -coimín 7 -gallanagh 7 -boutik 7 -nonhostile 7 -1.8-inch 7 -non-belief 7 -metamucil 7 -hawed 7 -1965-66 7 -lazaney 7 -hargin 7 -copernican 7 -attemped 7 -hatlestad 7 -quis 7 -sifry 7 -ingolia 7 -hadyn 7 -opoku 7 -dickersbach 7 -wilits 7 -rathwell 7 -east-north 7 -jones-reilly 7 -ppaca 7 -choreographs 7 -f42/44 7 -zhongyun 7 -human-level 7 -remanard 7 -arch-enemies 7 -puy 7 -nudibranch 7 -rationalizations 7 -bloodgate 7 -tabasim 7 -@mittromney 7 -29kg 7 -zwak 7 -figiel 7 -señora 7 -step-sons 7 -guest-list 7 -opendns 7 -23-21 7 -nugee 7 -@the 7 
-mughals 7 -late-november 7 -goodfriend 7 -single-core 7 -51,300 7 -77-page 7 -MS 7 -hoos 7 -gernot 7 -vrsaljko 7 -pre-polling 7 -6.2-mile 7 -neyland 7 -writegirl 7 -gunslingers 7 -one-size 7 -lashmanova 7 -70-bed 7 -twinsburg 7 -antipaxos 7 -delage 7 -zildjian 7 -finlan 7 -petrels 7 -brautigam 7 -jalal-abad 7 -ifversen 7 -unrolled 7 -1,512 7 -powercut 7 -salut 7 -galetti 7 -lamanivong 7 -rosemount 7 -chanterelle 7 -tenderstem 7 -howrah 7 -16-seater 7 -outspokenly 7 -3hr 7 -theale 7 -trousdale 7 -anti-nypd 7 -isakova 7 -ling-cohan 7 -over-breeding 7 -ella-rose 7 -clutter-free 7 -spellacy 7 -niyondiko 7 -giggler 7 -backpacked 7 -brigitta 7 -karachay 7 -lamere 7 -uchebo 7 -better-informed 7 -beauvais-tille 7 -44,800 7 -99,950 7 -carve-up 7 -uvas 7 -hi-visibility 7 -hurdlers 7 -bobrova 7 -reconviction 7 -tenneco 7 -mcabee 7 -15-year-long 7 -moreton-on-lugg 7 -mesothelin 7 -ashtag 7 -n.k. 7 -burpo 7 -by-catch 7 -16bn 7 -physician-patient 7 -head-to 7 -state-media 7 -mudher 7 -px22 7 -powergel 7 -oe 7 -strick 7 -taupin 7 -mass-murder 7 -dollman 7 -aronica 7 -guleria 7 -cayetano 7 -whelp 7 -drive-time 7 -elphaba 7 -microsite 7 -114mph 7 -mermell 7 -sough 7 -criner 7 -6,066 7 -univ. 7 -nde 7 -6-16 7 -gas-mask 7 -ekhe 7 -righteously 7 -11-yard 7 -2495 7 -markert 7 -pomelo 7 -smollet 7 -callin 7 -3,00 7 -jmml 7 -coyel 7 -aquaponics 7 -chao-lin 7 -technic 7 -gutheridge 7 -trevanion 7 -aspatuck 7 -bellamkonda 7 -betas 7 -diakides 7 -popinjay 7 -sigi 7 -127-year-old 7 -payment-by-results 7 -al-rashed 7 -kennadi 7 -@taylorswift13 7 -jannice 7 -leather-lined 7 -131.5 7 -6.64 7 -wvib 7 -tirley 7 -scarfing 7 -11-11 7 -kruzliak 7 -blotter 7 -culloty 7 -400-unit 7 -transbay 7 -jey 7 -sontay 7 -tribewanted 7 -toons 7 -sandora 7 -didcock 7 -normals 7 -blighters 7 -kasubi 7 -mbera 7 -11pro 7 -graining 7 -silveta 7 -shaniqua 7 -emotionalism 7 -screen-free 7 -v-for-victory 7 -pujiula 7 -rahlves 7 -miep 7 -stonehedge 7 -glenfeldt 7 -peirsol 7 -casadesus 7 -milind 7 -11-piece 7 -rafaela 7 -80-20 7 -407.7 7 -pick-axes 7 -abraao 7 -alpough 7 -bygrave 7 -warchest 7 -flasik 7 -dutch-registered 7 -sanchez-navarro 7 -intifadas 7 -kage 7 -self-generating 7 -repack 7 -kookaburras 7 -obf 7 -obc 7 -winter-weary 7 -cold-turkey 7 -defined-benefit 7 -ethnological 7 -coagulate 7 -taily 7 -all-plastic 7 -multi-vitamins 7 -classiche 7 -tokhai 7 -teguramori 7 -molted 7 -fern-grass 7 -crossinvest 7 -310 642 2317 7 -casabu 7 -uncomfortable-looking 7 -mawston 7 -zhezkazgan 7 -broms 7 -vandercar 7 -invigilator 7 -tyrwhitt-drake 7 -wugofski 7 -#homeland 7 -schwitters 7 -moutoussamy-ashe 7 -counterprotest 7 -abzug 7 -goldencents 7 -2,795 7 -sledgehammer-wielding 7 -felumlee 7 -eastpoint 7 -jelcova 7 -pricespy 7 -sold-off 7 -upvotes 7 -motiva 7 -keynoush 7 -udoamaka 7 -three-row 7 -death-eligible 7 -go-getting 7 -five-leaf 7 -antimony 7 -psychobabble 7 -khusnutdinova 7 -ascenta 7 -fleurent 7 -7.8-acre 7 -20pc 7 -conformists 7 -patson 7 -nine-acre 7 -swep 7 -2mg 7 -pronovias 7 -al-nasr 7 -co-offender 7 -guested 7 -late-victorian 7 -arithmetical 7 -twp 7 -cked 7 -destructively 7 -still-unknown 7 -still-smoking 7 -fungible 7 -novomoskovsk 7 -provosts 7 -1100ad 7 -syn 7 -windowpanes 7 -isan 7 -kirm 7 -ddh 7 -steam-driven 7 -apprenticing 7 -greenbaum 7 -patters 7 -tranquilising 7 -cherno 7 -mycerinus 7 -ifbb 7 -hedonists 7 -h-fabp 7 -shambaugh 7 -metellus 7 -tibu 7 -portland-area 7 -adjoa 7 -personal-best 7 -sharston 7 -mirta 7 -mamuka 7 -decoupling 7 -oiympic 7 -pohlad 7 -tymoshchuk 7 -desaparecidos 7 -fillary 7 -shirlee 7 -caribbeans 7 
-opulently 7 -singhania 7 -talisha 7 -rka 7 -l'heureux 7 -creepyworld 7 -world-championship 7 -rocketship 7 -nyla 7 -ellenbogen 7 -ashton-in-makerfield 7 -renderos 7 -zernike 7 -cerium 7 -mullaitivu 7 -two-line 7 -emeghara 7 -edinger 7 -bronca 7 -geometrically 7 -menja 7 -kumba 7 -8003 7 -sudekum 7 -contributer 7 -www.dictionary.com 7 -serialising 7 -brain-scanning 7 -alphorn 7 -luminance 7 -shawnda 7 -castiglioni 7 -manningtree 7 -jannelle 7 -al-shehri 7 -81m 7 -ex-wales 7 -turramurra 7 -nanotyrannus 7 -amaretti 7 -collier-woods 7 -1,074 7 -1,072 7 -district-level 7 -medshare 7 -arizona-utah 7 -nerdish 7 -danceworks 7 -post-partisan 7 -over-promised 7 -11.75 7 -khabib 7 -whalebone 7 -willicombe 7 -disqus 7 -hankers 7 -malheur 7 -lykov 7 -pickaway 7 -penybont 7 -zaniboni 7 -previtera 7 -katharyne 7 -denice 7 -beniwal 7 -tars 7 -dholakia 7 -456,000 7 -d'un 7 -boucault 7 -clc 7 -mydlarz 7 -etd 7 -pastoralist 7 -heat-wave 7 -pauw 7 -1979-80 7 -gurpegui 7 -riggs-long 7 -chubu 7 -rijal 7 -99.98 7 -tretherras 7 -content-sharing 7 -suncare 7 -un-do 7 -mig-27 7 -nubul 7 -gender-reassignment 7 -belvita 7 -d800 7 -balladur 7 -factoids 7 -474,500 7 -@rihanna 7 -subo 7 -quake-battered 7 -brunskill 7 -gohde 7 -merveldt-guevara 7 -part-funding 7 -8-acre 7 -youi 7 -mascarelli 7 -120p 7 -vamvakias 7 -leygreen 7 -mitsch 7 -orfanides 7 -arnt 7 -interconnections 7 -952-foot 7 -dalmations 7 -hotschedules 7 -48mins 7 -remon 7 -shoehorning 7 -fq-x 7 -bezo 7 -castagnoli 7 -machias 7 -intermarché 7 -abtahi 7 -seymours 7 -29-storey 7 -goo.gl 7 -chen-oster 7 -stelladot.co.uk 7 -splendida 7 -comite 7 -li-fraumeni 7 -superhero-themed 7 -california-arizona 7 -chitlin 7 -cossa 7 -gapyeong 7 -stuut 7 -zontae 7 -37bn 7 -raxit 7 -gruchy 7 -decarnin 7 -lilikoi 7 -valspar 7 -http://video.foxnews.com 7 -dayslong 7 -rimell 7 -pocan 7 -slu 7 -capwell 7 -eammon 7 -teennick 7 -unenlightened 7 -khanty-mansiysk 7 -brisard 7 -kazumi 7 -weisser 7 -550lbs 7 -gumeracha 7 -myrtleford 7 -papyrologist 7 -burdwan 7 -santella 7 -bar-code 7 -kizelewicz 7 -keung 7 -coyness 7 -hyo 7 -a127 7 -poor-taste 7 -oystermouth 7 -febraury 7 -furniture-making 7 -rendez-vous 7 -knitchen 7 -4.3-magnitude 7 -dogwalk 7 -owusu-koranteng 7 -willmot 7 -xxxxxxxx 7 -anthemus 7 -10.07 7 -helos 7 -nontechnical 7 -gumtree.com 7 -cuttino 7 -lanfranchi 7 -incorruptible 7 -ichthyosaurus 7 -21,148 7 -restormel 7 -straightest 7 -sherburn-in-elmet 7 -ahlem 7 -pterobranchs 7 -10,649 7 -ambrus 7 -baden-wurttemberg 7 -ghadamis 7 -richert-slagle 7 -boonton 7 -chelyshev 7 -sciarra 7 -simes 7 -20.10 7 -misc. 
7 -celebrity-inspired 7 -romanova 7 -colborn 7 -ex-tsa 7 -inanities 7 -anti-nsa 7 -turch 7 -iketubosin 7 -scorey 7 -101.9 7 -ghanians 7 -pre-pack 7 -brandee 7 -medbourne 7 -villaneuva 7 -proofpoint 7 -bytelair 7 -499,000 7 -river-bus 7 -neurodevelopment 7 -bornholmer 7 -musculus 7 -gamsjager 7 -muezzin 7 -gehrt 7 -sekkaki 7 -wilx 7 -milsom-mcquillan 7 -robertus 7 -kirshbaum 7 -feras 7 -snagge 7 -strassen 7 -conoy 7 -mozilo 7 -mogull 7 -p40 7 -cammarata 7 -fitwit 7 -leatherbarrow 7 -7-ton 7 -cipr 7 -longe 7 -shappell 7 -sulieman 7 -high-yielding 7 -snurridge 7 -u.s.-designated 7 -fengqin 7 -more-ish 7 -sutent 7 -grabble 7 -batmans 7 -sohostel 7 -atay 7 -tenuis 7 -rattin 7 -baybay 7 -3rs 7 -wakira 7 -lkab 7 -tainsh 7 -thracians 7 -debilitate 7 -celebrity-backed 7 -snackers 7 -crofter 7 -gaiduk 7 -santuario 7 -3,864 7 -unrecognizably 7 -weiden 7 -hand-up 7 -#herewego 7 -lask 7 -lasn 7 -allicia 7 -tachtsidis 7 -7per 7 -7.53 7 -contaminations 7 -southold 7 -12-foot-tall 7 -africa-focused 7 -horsegate 7 -candlemas 7 -bardell 7 -spackman 7 -helmet-wearing 7 -erleigh 7 -tigerman 7 -cretinous 7 -tophams 7 -short-eared 7 -odéon 7 -wire-topped 7 -borrini 7 -livetv 7 -arrupe 7 -chittum 7 -yarnfield 7 -speech-to-text 7 -co-piloted 7 -12-litre 7 -vanboskirk 7 -papayas 7 -goldup 7 -spiess 7 -mannell 7 -hanoi-based 7 -prapto 7 -c++ 7 -ods 7 -promptness 7 -a605 7 -psr 7 -pss 7 -karapetyan 7 -hithi 7 -jawaan 7 -twenty-year 7 -mcdo 7 -hemolytic-uremic 7 -eizenstat 7 -fennessy-sharp 7 -jawa 7 -closs 7 -short-man 7 -seabolt 7 -non-recent 7 -frakes 7 -labra 7 -carol-ann 7 -re-negotiated 7 -blaina 7 -double-up 7 -getcha 7 -unruliness 7 -tarpan 7 -toxicants 7 -dokuchaev 7 -kasatka 7 -dobrolet 7 -callear 7 -fealgood 7 -abstracting 7 -kopacz 7 -dawick 7 -withlacoochee 7 -city-dwelling 7 -softhearted 7 -12,000-a-week 7 -trigged 7 -quiney 7 -gakunga 7 -bedevils 7 -off-the-radar 7 -rakitskiy 7 -blumel 7 -idehen 7 -okla 7 -#whyileft 7 -shoulder-held 7 -olyvia 7 -110f 7 -anti-coagulants 7 -morepork 7 --222 7 -tythegston 7 -r10 7 -jalapa 7 -lillywhite 7 -fisman 7 -wmal 7 -sudan-born 7 -neriza 7 -lunch-hour 7 -churchills 7 -lybridos 7 -sober-minded 7 -mudflat 7 -plotnik 7 -puccetti 7 -jbwere 7 -redback 7 -dance-based 7 -ha-ha 7 -dnrs 7 -pomodore 7 -greasly 7 -7.4-magnitude 7 -muharraq 7 -2,048 7 -right-on 7 -collectspace 7 -stabyhoun 7 -gugino 7 -eutef 7 -deonarine 7 -scribblenauts 7 -@billclinton 7 -jananto 7 -no-swimming 7 -sollman 7 -chesnut 7 -newtown-sandy 7 -aziziya 7 -cashwell 7 -giulliani 7 -diagnosticus 7 -citycrystal 7 -whampoa 7 -spirikaitis 7 -kumars 7 -smokeout 7 -burne 7 -castletown 7 -139.9 7 -liveliness 7 -delegitimization 7 -freegans 7 -camelicious 7 -3,501 7 -endgames 7 -catabolysis 7 -once-in-a-career 7 -secular-minded 7 -45.00 7 -valiela 7 -tepees 7 -oitnb 7 -nubuck 7 -d-pan 7 -d-pad 7 -ramondetta 7 -ashbrook 7 -rejoneador 7 -lidster 7 -web-users 7 -titties 7 -cauchi 7 -war-making 7 -miramare 7 -ucatt 7 -barragem 7 -skeats 7 -magrane 7 -vaidas 7 -horvatova 7 -fast-emerging 7 -catch-and-release 7 -woolridge 7 -sheeler 7 -albenga 7 -non-guests 7 -khurana 7 -rennell 7 -literalist 7 -hafizah 7 -niulang 7 -farra 7 -guram 7 -crumbley 7 -makuei 7 -figaro2000 7 -prepositioned 7 -vacuity 7 -tagalongs 7 -dodgems 7 -xiapex 7 -hoefflin 7 -re-connect 7 -wicharn 7 -hahnenberg 7 -kannada 7 -1211 7 -bowgen 7 -stay-behind 7 -lee-on-solent 7 -lavash 7 -kezi 7 -otherwordly 7 -vancleave 7 -jayalal 7 -dhan 7 -115.4 7 -115.7 7 -9.97 7 -myong-chol 7 -biofilms 7 -yonghegong 7 -suryadi 7 -keryn 7 -virkler 7 -model-maker 7 
-59-yard 7 -jiujiang 7 -brashly 7 -5,092 7 -change/cancellation 7 -vocalising 7 -2,225 7 -tugger 7 -plews-smith 7 -bargh 7 -atlanta-bound 7 -easdales 7 -stordal 7 -bedourie 7 -essman 7 -callander 7 -comed 7 -vul 7 -natina 7 -fundly.com 7 -2,000-capacity 7 -cyberman 7 -wildfire-fighting 7 -laloup 7 -lahaye 7 -ir3535 7 -kushnarev 7 -botches 7 -sinani 7 -ceire 7 -21-25 7 -night-life 7 -unrepentantly 7 -66s 7 -9-mile 7 -estey 7 -salwan 7 -bunkersville 7 -clinicenta 7 -laura-louise 7 -goldwin 7 -adewale 7 -caching 7 -melfi 7 -bahamian-flagged 7 -gastro-pub 7 -magrino 7 -1994-96 7 -kinzie 7 -endotracheal 7 -www.easyjet.com 7 -mabkhout 7 -leykind 7 -5ft10 7 -nincompoop 7 -gophone 7 -neurodegeneration 7 -lower-priority 7 -zhulitskaya 7 -enkarta 7 -olvido 7 -takemoto 7 -tiebreakers 7 -euripides 7 -dapple 7 -blipp 7 -keratitis 7 -mx5 7 -sharqiya 7 -langerhan 7 -bandmaster 7 -backroads 7 -veres 7 -molewa 7 -double-speak 7 -duggy 7 -altstadt 7 -heren 7 -sharobeem 7 -fabricor 7 -tayleur 7 -zarah 7 -takashimaya 7 -gang-banger 7 -legras 7 -tul 7 -patriotically 7 -kliment 7 -capell 7 -blystone 7 -longship 7 -waen 7 -transceivers 7 -roters 7 -353lb 7 -polty 7 -megathrust 7 -slovakians 7 -bunyard 7 -cloudmade 7 -newly-freed 7 -militarise 7 -pole-axed 7 -multivehicle 7 -aodhan 7 -nato-member 7 -fourfiveseconds 7 -kaido 7 -pondicherry 7 -mikeska 7 -gerardus 7 -arru 7 -turqoise 7 -taimour 7 -kileigh 7 -hematite 7 -qaraqe 7 -lászló 7 -stolyarova 7 -1003 7 -1002 7 -reductionist 7 -bisham 7 -1,418 7 -1,416 7 -paulin 7 -tornado-prone 7 -rm8 7 -souvlaki 7 -to'ak 7 -wolfdogs 7 -wawrzak 7 -tumanda 7 -qci 7 -anti-convulsant 7 -yarraville 7 -sauflon 7 -koinonia 7 -piccolino 7 -totteham 7 -rocketman 7 -close-controlled 7 -koblenzer 7 -mid-upper 7 -agen 7 -brodnicki 7 -torsella 7 -punch-line 7 -joo-ho 7 -256-bit 7 -leonida 7 -paruk 7 -maspero 7 -lecrae 7 -vasilica 7 -islamovic 7 -kusuma 7 -stoernell 7 -turbine-powered 7 -saidnaya 7 -77.2 7 -overgate 7 -weather-proof 7 -minimalists 7 -film/dvd 7 -quicklime 7 -suan 7 -tried-and-trusted 7 -winfall 7 -chere 7 -three-bath 7 -10,923-square-foot 7 -holland-belgium 7 -chude 7 -swimsuit-clad 7 -off-beach 7 -water-carrying 7 -bascules 7 -400cc 7 -cheapside 7 -quicks 7 -three-seat 7 -rosenlund 7 -sharington 7 -★ 7 -d'sa 7 -clearview 7 -udia 7 -tavira 7 -image-makers 7 -dbg 7 -mengu 7 -db7 7 -coomer 7 -pdms 7 -retarding 7 -peritonei 7 -o'nora 7 -treacey 7 -gottardo 7 -slavers 7 -diversifies 7 -baogen 7 -higher-skilled 7 -wynetta 7 -boluda 7 -tinyes 7 -mountjoy 7 -fasttrain 7 -hugii 7 -pinegar 7 -franceso 7 -gulbahar 7 -milvio 7 -lesperance 7 -enjoining 7 -122f 7 -federal-state 7 -k-love 7 -alexandrovich 7 -clay-based 7 -d'alessio 7 -vajayjay 7 -blasco 7 -joeridge87 7 -skyvu 7 -20gb 7 -community-supported 7 -urda 7 -mid-round 7 -211s 7 -21-century 7 -martindale-vale 7 -matwyshyn 7 -one-millionth 7 -2000/01 7 -kiplings 7 -saranac 7 -mulde 7 -set-list 7 -lurigancho 7 -humani 7 -silverbird 7 -idefix 7 -324.4 7 -majidi 7 --12.5 7 -sjt 7 -vangioni 7 -lungi 7 -startled-looking 7 -cartledge 7 -70-second 7 -sneinton 7 -arundell 7 -bernatche 7 -ottlyk 7 -echegaray 7 -ghodke 7 -fast-improving 7 -folke 7 -gedminas 7 -forelock 7 -vincristine 7 -sedena 7 -aiviq 7 -caffeinall 7 -luay 7 -maybeck 7 -mantaro 7 -zsi 7 -inch-by-inch 7 -oxynorm 7 -zenga 7 -vinatieri 7 -symm 7 -s550 7 -godse 7 -567,000 7 -schleifer 7 -lawyerly 7 -envion 7 -kwasnik 7 -ingall 7 -messrs. 
7 -windridge 7 -junjun 7 -versaball 7 -phelim 7 -irakliotis 7 -frederich 7 -gang-rapes 7 -aktuelle 7 -less-crowded 7 -medium.com 7 -cowpats 7 -zeefuik 7 -clemishire 7 -handwrote 7 -della-porta 7 -2001-11 7 -low-current 7 -fomites 7 -super-sleek 7 -meinrad 7 -wilson-smith 7 -aerodromes 7 -subnormal 7 -arronategui 7 -tracheas 7 -rydze 7 -2,985 7 -gir 7 -parchments 7 -double-layer 7 -bioethical 7 -b92 7 -manspread 7 -sholay 7 -ivoirian 7 -superdog 7 -almer 7 -mealing 7 -anglo-italian 7 -land-grabbing 7 -aliyana 7 -sarn 7 -aesa 7 -silkair 7 -natalias 7 -sart 7 -hategan 7 -drawbridges 7 -14-months 7 -coke-bottle 7 -wheelchair-friendly 7 -siouxsie 7 -1996-1999 7 -russo-japanese 7 -charro 7 -hypochondroplasia 7 -gnanasara 7 -bare-headed 7 -@moreandagain 7 -highbeam 7 -widney 7 -humpbacked 7 -burzynski 7 -mahendran 7 -rosoboronexport 7 -state-style 7 -wiersema 7 -cedarwood 7 -highest-valued 7 -barcene 7 -2011man 7 -khankhel 7 -civelli 7 -ranko 7 -ghandour 7 -aliaa 7 -18mins 7 -broyard 7 -oke 7 -johansens 7 -j/p 7 -in-town 7 -yumbo 7 -garowe 7 -labiofam 7 -araiby 7 -konrath 7 -baoji 7 -1706 7 -petranca 7 -ikirma 7 -dilligaf 7 -canete 7 -kutsy 7 -ex-state 7 -viento 7 -noiseworks 7 -xixi 7 -uia 7 -uim 7 -33in 7 -plain-dealer 7 -barchester 7 -elner 7 -muma 7 -orkut 7 -sanday 7 -hirshon 7 -4-digit 7 -harvard-yale 7 -rougie 7 -holterman 7 -guell 7 -joblonkay 7 -3,290 7 -michiganders 7 -gilsenan 7 -robitaille 7 -#engaged 7 -mail.com 7 -w7656 7 -dougill 7 -4800 7 -kahreem 7 -fedexforum 7 -bird-brained 7 -fdr/east 7 -self-learning 7 -jasser 7 -nakhi 7 -kiss-ins 7 -westerveld 7 -streetmuseum 7 -trichtillomania 7 -vermiglio 7 -37p 7 -kozic 7 -knockdowns 7 -opensignal 7 -cabby 7 -re-conviction 7 -araras 7 -ismaeel 7 -miscontrolled 7 -ultranationalists 7 -aw-mohamed 7 -27-foot 7 -guest-starring 7 -killiney 7 -segmental 7 -wolfenstein 7 -sculptresse 7 -yasuko 7 -contemporaneously 7 -t-130 7 -bvudzijena 7 -rain-slickened 7 -8,130 7 -phr 7 -forded 7 -forder 7 -tokidoki 7 -moonesamy 7 -ayi 7 -idv 7 -idg 7 -@andreysmygov 7 -menczyk 7 -knuckey 7 -pavlovsky 7 -death-trap 7 -35,000-per-week 7 -plaskova 7 -caiuajara 7 -dalmahoy 7 -market-share 7 -duchene 7 -baverstock 7 -cialella 7 -guidoni 7 -gamera 7 -near-shore 7 -tapei 7 -ex-f1 7 -15.75 7 -muzny 7 -viognier 7 -neurochemical 7 -sabotages 7 -raiswell 7 -pyrosomes 7 -yelich-o'connor 7 -4.13 7 -a15 7 -senneval 7 -interfish 7 -strategos 7 -dallas-ft 7 -20,600 7 -clippy 7 -pro-brussels 7 -sheffields 7 -lianna 7 -shiregreen 7 -molena 7 -kakureya 7 -21-game 7 -keylogger 7 -146.5 7 -146.9 7 -badji 7 -square-miles 7 -saldhana 7 -jitterbug 7 -212th 7 -triskaidekaphobia 7 -azmoun 7 -steeliness 7 -soundman 7 -o'hehir 7 -walzak 7 -linmei 7 -fruit-seller 7 -bengies 7 -non-moving 7 -seawright 7 -dora-22 7 -neur 7 -albertz 7 -abdulatif 7 -nooristani 7 -mid-court 7 -coonabarabran 7 -telemonitoring 7 -moeed 7 -58mm 7 -equines 7 -carstarphen 7 -atelea 7 -3,520 7 -haarlemmermeer 7 -blow-drys 7 -zivkovic 7 -brigs 7 -tellier 7 -retro-chic 7 -penida 7 -24-bit 7 -cuthill 7 -kattil 7 -koogle 7 -polaco 7 -maaloul 7 -thomas-larkin 7 -399.4 7 -denatured 7 -hisb-ul-islam 7 -aro 7 -atrt 7 -bride-prices 7 -two-runway 7 -nadikdik 7 -léa 7 -422,000 7 -kapheim 7 -lapresi 7 -tiaa-cref 7 -oldroyd 7 -back-pack 7 -124.9 7 -azumah 7 -mcswane 7 -jianwan 7 -raras 7 -food-aid 7 -buschow 7 -1,801 7 -vologda 7 -27s 7 -slum-dwellers 7 -kompong 7 -riparian 7 -lyakhovsky 7 -cheese-filled 7 -unsustainability 7 -danske 7 -14-piece 7 -quad-play 7 -schilder 7 -teitiota 7 -qf-16 7 -moroyoqui-yocupicio 7 
-hard-scrabble 7 -teleworking 7 -winchmore 7 -frankiee 7 -caylor 7 -creal 7 -colani 7 -roggo 7 -bernthal 7 -salcura 7 -trichinosis 7 -aibos 7 -dustbag 7 -matlow 7 -miam 7 -sullen-looking 7 -azpilcueta 7 -touch-enabled 7 -preparers 7 -orkneys 7 -woodling 7 -single-year 7 -estanislao 7 -micheaux 7 -pfg 7 -gurudwara 7 -@repweiner 7 -apprehends 7 -t46 7 -altonaga 7 -cappelen 7 -perfectly-executed 7 -avegant 7 -17/10 7 -mapmyride 7 -lumpia 7 -wgntv 7 -2,205 7 -bantry 7 -arm-wrestle 7 -329.99 7 -guasti 7 -hand-washed 7 -32red 7 -disparages 7 -1602 7 -helgeson 7 -vanchester 7 -taffs 7 -bullet-pierced 7 -vws 7 -kekau 7 -barbir 7 -cicek 7 -galex 7 -motorcross 7 -boloni 7 -sanocki 7 -turleigh 7 -mecum 7 -macrobert 7 -lexical 7 -movenpick 7 -friedhelm 7 -chemical-soaked 7 -startin 7 -cannibalise 7 -redmen 7 -over-activity 7 -irgun 7 -fence-mending 7 -totaliser 7 -bambari 7 -cawte 7 -dunkers 7 -kayleigh-anne 7 -100mw 7 -lipp 7 -uncomplaining 7 -samieri 7 -grandads 7 -drools 7 -ex-commons 7 -democratic-majority 7 -mapletoft 7 -ifone 7 -ampatuans 7 -dakosaurus-maximus 7 -84-page 7 -al-khawahir 7 -3.5-litre 7 -queanbeyan 7 -162.50 7 -jamell 7 -apolipoprotein 7 -muntadher 7 -jeneece 7 -440m 7 -dressier 7 -generes 7 -pikin 7 -six-meter 7 -ferrous 7 -flunks 7 -eyring 7 -mso-bidi-font-family 7 -showplace 7 -hailer 7 -sattiewhite 7 -senna-prost 7 -carseat 7 -ss80 7 -drop-top 7 -pied-à-terre 7 -self-mocking 7 -dedivanovic 7 -tlalmanalco 7 -namche 7 -holly-mae 7 -tifo 7 -ceremonials 7 -medecin 7 -terrusa 7 -palapa 7 -izzana 7 -cantillon 7 -rabi 7 -casey-lee 7 -francoli 7 -care-related 7 -anaya-carlis 7 -un-dead 7 -back-tracked 7 -hegewisch 7 -jcbs 7 -36-18-33 7 -tishina 7 -flic 7 -48-week 7 -schmidt-burbach 7 -snowmageddon 7 -nasatir 7 -megamind 7 -charland 7 -nonlife-threatening 7 -12-ton 7 -dilga 7 -9999 7 -previously-released 7 -western-looking 7 -1,439 7 -weligama 7 -hughleys 7 -aliant 7 -then-10-year-old 7 -7034 7 -2,609.31 7 -rayong 7 -khater 7 -pop-out 7 -iranian-british 7 -110bn 7 -pomorski 7 -entwine 7 -vrain 7 -2000-04 7 -time-shifted 7 -montufar 7 -launderer 7 -gomstyn 7 -pizam 7 -geosocial 7 -11-count 7 -bakersville 7 -'71 7 -whp 7 -khushboo 7 -gethings 7 -grindstone 7 -codetalkers 7 -shearings 7 -calorie-dense 7 -rangali 7 -pluto-sized 7 -entreprenuer 7 -storting 7 -elliana 7 -d-ill 7 -malacia 7 -nabj 7 -ec155 7 -madrid-born 7 -once-celebrated 7 -nassari 7 -parsekian 7 -lamah 7 -regales 7 -proliferator 7 -900lb 7 -huckins 7 -kgl 7 -micrognathia 7 -yaylamis 7 -penticton 7 -eagleburger 7 -kerli 7 -hider 7 -11.36 7 -pocheon 7 -camello 7 -melanson 7 -truncus 7 -noreena 7 -shaa 7 -abasbahi-gotti 7 -nonviable 7 -zingano 7 -kayelisa 7 -garone 7 -aspen-pitkin 7 -part-closure 7 -makowska 7 -inside-left 7 -back-flips 7 -korogocho 7 -hatzofe 7 -danish-based 7 -wikitude 7 -butthead 7 -haggarty 7 -mossalam 7 -muffed 7 -african-inspired 7 -stalkerish 7 -akhada 7 -gobowen 7 -sarepta 7 -tassoni 7 -yeow 7 -hudec 7 -tankchair 7 -l'iris 7 -al-raimi 7 -henhouse 7 -coutts-trotter 7 -tutsi-led 7 -22,800 7 -forti 7 -watercredit 7 -169million 7 -4-10 7 -http://www.easports.com/uk/fifa/ultimate-team 7 -workroom 7 -phytochemicals 7 -saxe 7 -nbc.com 7 -klingeman 7 -kittanning 7 -italiana 7 -punk-inspired 7 -donaldsons 7 -showa 7 -@mailsport 7 -androgyne 7 -watnall 7 -co-responsibility 7 -commonly-held 7 -reak 7 -white-nose 7 -snell-rood 7 -heping 7 -game-clinching 7 -takeyh 7 -greenfield-sanders 7 -275mph 7 -january-june 7 -crus 7 -jimenezes 7 -kocoras 7 -probabilistic 7 -o2m 7 -asmo 7 -bernfeld 7 -hudgell 7 -temüjin 7 
-casiday 7 -decleor 7 -segule 7 -kaper 7 -shc 7 -88ft 7 -1,000-yard 7 -facilites 7 -spalton 7 -railfuture 7 -plumtree 7 -polii 7 -post-arrest 7 -collabro 7 -samye 7 -3.96 7 -urbanek 7 -particulary 7 -cawthorn 7 -cyganiak 7 -raqqawi 7 -coarseness 7 -designee 7 -smorgon 7 -simcha 7 -ala'a 7 -preibus 7 -oxidising 7 -kissana 7 -free-spirit 7 -war-damaged 7 -nutracheck 7 -1:2 7 -breus 7 -breul 7 -beltagy 7 -colliver 7 -newly-dyed 7 -semi-regular 7 -10.48 7 -linas 7 -carriage-shaped 7 -andrelle 7 -peerzada 7 -hassocks 7 -boxill 7 -sverre 7 -bulava 7 -71billion 7 -poseur 7 -769,000 7 -inarguably 7 -libelling 7 -goethals 7 -torday 7 -tollemache 7 -oleksiak 7 -puea 7 -panek 7 -trackways 7 -minuten 7 -rosenberger 7 -raubenheimer 7 -gyuto 7 -fanzines 7 -20.55 7 -recklinghausen 7 -protegees 7 -fly-drive 7 -lomalito 7 -non-battle 7 -kolken 7 -amortization 7 -aerofex 7 -frewin 7 -rapiro 7 -andel-schipper 7 -dubensky 7 -sarnia 7 -dragut 7 -tuomioja 7 -schirach 7 -wtvd-tv 7 -samurais 7 -corps-iraq 7 -braum 7 -open-house 7 -fric 7 -nucleation 7 -lillith 7 -ahlswede 7 -schellenberg 7 -76cm 7 -harmonising 7 -eight-woman 7 -shechter 7 -veremu 7 -ornithological 7 -webstagram 7 -doofus 7 -schafernaker 7 -seychellois 7 -ex-florida 7 -compactness 7 -chumley-roberts 7 -barrecore 7 -chiri 7 -fassino 7 -1,647 7 -do-right 7 -budovsky 7 -sigtuna 7 -six-week-long 7 -long-departed 7 -tremarle 7 -ater 7 -arrhythmogenic 7 -liverpol 7 -30-foot-high 7 -snaphack 7 -recalcitrance 7 -vekaric 7 -400,000-a-year 7 -1,753 7 -quire 7 -attali 7 -lobe-finned 7 -ipub 7 -46km/h 7 -sleepyhead 7 -tixtla 7 -sólheimasandur 7 -winchfield 7 -uke 7 -shellacked 7 -side-channel 7 -re-surface 7 -high-ball 7 -pre-tox 7 -uridge 7 -komachi 7 -nasca 7 -blassie 7 -acsm 7 -7.13 7 -96.2 7 -catchments 7 -mazibuko 7 -decoratively 7 -lovvorn 7 -cyber-haven 7 -garoppolo 7 -gregg-ball 7 -omysha 7 -fortney 7 -footballl-wide 7 -nkoana-mashabane 7 -havins 7 -kwing 7 -secretan 7 -n81 7 -public-safety 7 -bellicosity 7 -tiffanie 7 -edgeton 7 -permatan 7 -prana 7 -inter-squad 7 -852,000 7 -clarke-murphy 7 -benlysta 7 -odets 7 -cerna 7 -35a 7 -broomhill 7 -corowa 7 -maistriaux 7 -grumet 7 -bewilder 7 -raeford 7 -ablutions 7 -cross-kick 7 -agong 7 -lgbts 7 -crittercams 7 -wcmh 7 -data-based 7 -mateship 7 -plesser 7 -gonorrhoeae 7 -thorsby 7 -iisc 7 -blackalicious 7 -susselbeck 7 -scrapyards 7 -morphogens 7 -plaschkes 7 -,5 7 -owusu-abeyie 7 -quam 7 -cloos 7 -hakata 7 -non-incumbent 7 -mofokeng 7 -table-tennis 7 -arabo 7 -araba 7 -19-months 7 -1964-65 7 -hossa 7 -sharab 7 -connellan 7 -nikias 7 -pisagua 7 -rumsby 7 -hoai 7 -stratolaunch 7 -cribb 7 -thoreson 7 -chicopee 7 -corio 7 -guisewite 7 -osokogu 7 -316million 7 -34ft 7 -ukrainian-held 7 -rahmanian 7 -kalak 7 -kalat 7 -murphy-west 7 -hardgrave 7 -babwah 7 -xijun 7 -wishlade 7 -tselios 7 -wickison 7 -habersetzer 7 -4.33 7 -mysterio 7 -99million 7 -rinzler 7 -sisha 7 -erma 7 -trifari 7 -dickey-wicker 7 -sandelli 7 -arjuna 7 -mav 7 -family-values 7 -plotner 7 -elounda 7 -winegarten 7 -cellulite-busting 7 -earthquake-resistant 7 -loirp 7 -spruik 7 -6:38 7 -fact-finder 7 -1700km 7 -ex-senate 7 -gospodarski 7 -cearnel 7 -baddy 7 -energy-giving 7 -gretton 7 -para-military 7 -indooroopilly 7 -sculli 7 -m'nong 7 -seven-day-a-week 7 -achey 7 -craver 7 -tingled 7 -warrawong 7 -regrading 7 -orabi 7 -aviatrix 7 -2,009 7 -jocky 7 -gromark 7 -elfridges 7 -??!! 
7 -lactivists 7 -baxters 7 -penllergare 7 -takanakuy 7 -westhampnett 7 -cadwalader 7 -xkr-s 7 -valdes-dapena 7 -zefang 7 -monadnock 7 -glenolden 7 -aylin 7 -calorically 7 -5-foot-7-inch 7 -voula 7 -12,404 7 -45.44 7 -zwicharowski 7 -biograph 7 -twitter-style 7 -malama 7 -bahcesehir 7 -no-carb 7 -ten-game 7 -al-anbar 7 -tank-automotive 7 -konkus 7 -lawn-care 7 -e.g 7 -questionned 7 -barjenbruch 7 -kärcher 7 -porchon-lynch 7 -setauket 7 -ovulate 7 -heaven-sent 7 -chattisgarh 7 -seekonk 7 -sobon 7 -fuze 7 -press-register 7 -kurata 7 -erandy 7 -hydrologists 7 -klasfeld 7 -105-day 7 -somalian-born 7 -getu 7 -kozlowsky 7 -hsmr 7 -perdanakusuma 7 -lokoli 7 -kcet 7 -clark-lynn 7 -sidhum 7 -drye 7 -chivilcoy 7 -warner-smith 7 -cavalieri 7 -jean-armel 7 -duyvil 7 -habala 7 -16,250 7 -inadvertantly 7 -best/biggest 7 -bunggal 7 -geiss 7 -brazil-mexico 7 -ogando 7 -2metres 7 -himax 7 -ksdk-tv 7 -guixin 7 -440lbs 7 -1,203 7 -remap 7 -alenia 7 -crego 7 -456ft 7 -marren 7 -maxwell-nelson 7 -double-majoring 7 -i-tele 7 -baton-charged 7 -agw 7 -annwen 7 -shannley 7 -azocar 7 -kariega 7 -lengthiest 7 -pdx 7 -mission-driven 7 -reinterview 7 -bruziene 7 -odg 7 -east-to-west 7 -evermoor 7 -2,268 7 -sidder 7 -gbp1 7 -ipswich-based 7 -mp4-29 7 -mp4-26 7 -lerici 7 -mixtapes 7 -smarteyeglass 7 -sawyan 7 -sejm 7 -1629 7 -72-600 7 -masango 7 -eventers 7 -penydarren 7 -222nd 7 -oost 7 -nad-e-ali 7 -mudick 7 -hot-pants 7 -drawstrings 7 -ravensbrueck 7 -1980x1200 7 -1tbsp 7 -duddingston 7 -walshe 7 -pitts-taylor 7 -despatcher 7 -folman 7 -maaren 7 -birthwright 7 -andruska 7 -2613 7 -healthexpress 7 -tcpalm.com 7 -hypnocise 7 -nienaber 7 -dongfang 7 -queneau 7 -hideaki 7 -8:07 7 -over-reached 7 -zambales 7 -emenyonu 7 -zapotoczny 7 -thapliyal 7 -77,220 7 -tomić 7 -pro-india 7 -shew 7 -stakey 7 -bolz-weber 7 -sauschuck 7 -hjort 7 -dimario 7 -mu'adh 7 -cathodes 7 -evertomb 7 -gusai 7 -inupiaq 7 -landskrona 7 -documentarians 7 -sagrans 7 -umbellifers 7 -taverham 7 -reves 7 -17-match 7 -craft-kerney 7 -acceptably 7 -19.45 7 -thant 7 -applecart 7 -mary-le-bow 7 -yohanna 7 -mcsheffrey 7 -non-natives 7 -eyewash 7 -frier 7 -nyalandu 7 -afshan 7 -hatim 7 -goodhall 7 -disadvantageous 7 -saturns 7 -hydrophobins 7 -briella 7 -denollet 7 -mountain-side 7 -i-131 7 -acklam 7 -dealmaking 7 -morny 7 -supranano 7 -crispier 7 -matabeleland 7 -440lb 7 -tarceva 7 -stepanian 7 -no-where 7 -jenssen 7 --------- 7 -burgate 7 -wolferts 7 -zasavica 7 -postlethwaite 7 -aciro 7 -luxleaks 7 -adayane 7 -nanjing-based 7 -datalink 7 -al-uqla 7 -shoham 7 -schnapper 7 -nazaki 7 -tinklenberg 7 -berekmeri 7 -142million 7 -adjustable-rate 7 -garas 7 -hellenthal 7 -11-second 7 -kelcher 7 -day-job 7 -zouch 7 -mallott 7 -sackful 7 -qiong 7 -tolton 7 -shouldnt 7 -citrussy 7 -in-resort 7 -bank-level 7 -druckerman 7 -neuve 7 -ferdiand 7 -hhmi 7 -wno 7 -chilecito 7 -machelle 7 -home-spun 7 -near-hysterical 7 -pendine 7 -barinas 7 -26-room 7 -then-7-year-old 7 -73per 7 -limited-time 7 -hovater 7 -ku-ring-gai 7 -4,760 7 -ahed 7 -gulf-based 7 -appals 7 -ecock 7 -driehaus 7 -ungraceful 7 -leutza 7 -zas 7 -0-16 7 -bowl-record 7 -junuzovic 7 -castlebrae 7 -young-guk 7 -matapan 7 -møller 7 -stekel 7 -pellissippi 7 -tightlipped 7 -pearl-studded 7 -millionairematch.com 7 -g.p. 
7 -saintes 7 -tryp 7 -34cm 7 -grun 7 -krasovsky 7 -square-jawed 7 -accesorised 7 -pasi 7 -doos 7 -dook 7 -jaggar 7 -wising 7 -bispo 7 -al-ahrar 7 -averianov 7 -konsza 7 -panaro 7 -arkhangelsk 7 -lans 7 -7-litre 7 -ambra 7 -meininger 7 -matrie 7 -6,270 7 -330-mile 7 -keema 7 -328,835 7 -child-proofing 7 -hamme 7 -bodice-rippers 7 -huvelle 7 -assasination 7 -hosseinzadeh 7 -icsid 7 -muzher 7 -truax 7 -nerdiest 7 -patels 7 -devasting 7 -strontium-90 7 -kessie 7 -zero-zero 7 -dasso 7 -bensham 7 -two-putt 7 -w/o 7 -mitchim 7 -mean-spiritedness 7 -nowshahr 7 -remmers 7 -plainsong 7 -shoua 7 -skeeters 7 -prostates 7 -nafzinger 7 -incentivizes 7 -chivalric 7 -aristides 7 -snuffling 7 -tablet-operated 7 -9-day 7 -shags 7 -phyo 7 -mergui 7 -diagramming 7 -property-developer 7 -130cm 7 -saslow 7 -chargeboard 7 -preventatives 7 -housel 7 -phoneutria 7 -minkus 7 -matchball 7 -amazeballs 7 -jurinka 7 -self-assembled 7 -vasiliteanu 7 -dorne 7 -colqhuhoun 7 -steirn 7 -2,326 7 -njewadda 7 -tcas 7 -kiah 7 -takanobu 7 -craftsman-style 7 -kanbia 7 -ah-64 7 -1,100,000 7 -16-seat 7 -spirometer 7 -girlies 7 -trinians 7 -polke 7 -attrap 7 -dressen 7 -crunchie 7 -caucusus 7 -webbys 7 -viewfinders 7 -revengeance 7 -re-activated 7 -sheknows.com 7 -tomscha 7 -35-54 7 -35-50 7 -aldersley 7 -dinoflagellates 7 -mipim 7 -high-style 7 -allergy-friendly 7 -videomaker 7 -dourada 7 -refson 7 -emington 7 -bi-planes 7 -kitra 7 -600kg 7 -kingstonian 7 -liquipel 7 -ouramazingplanet 7 -arvs 7 -loblaw 7 -whac-a-mole 7 -moralism 7 -janzow 7 -evenly-matched 7 -eyestalks 7 -5.42 7 -hiroyo 7 -soukya 7 -crowd-source 7 -after-life 7 -alliot-marie 7 -zappalorto 7 -oxidiser 7 -putignano 7 -dangor 7 -talinn 7 -kents 7 -heiler 7 -14,990 7 -odjidja-ofoe 7 -www.sunshine.co.uk 7 -kirlyam 7 -falvo 7 -zachs 7 -cbsa 7 -non-selection 7 -vulpes 7 -degroot 7 -steeplejack 7 -dominici 7 -boroondara 7 -shimanami 7 -dorset-born 7 -now-customary 7 -kuan-yin 7 -superstructures 7 -140cm 7 -gms 7 -al-qubati 7 -western-leaning 7 -tarrats 7 -7,492 7 -possiblity 7 -zheijiang 7 -fashion-related 7 -duisburg-essen 7 -creekstone 7 -all-smiles 7 -community-owned 7 -farmsteads 7 -elissandro 7 -suely 7 -kongresshaus 7 -espree 7 -majordomo 7 -coulee 7 -annihilates 7 -announcment 7 -choosier 7 -stucky 7 -zsombor 7 -sapong 7 -chawan 7 -freep 7 -freem 7 -vajira 7 -wydad 7 -godsmark 7 -laksa 7 -vittoriosa 7 -saudiwoman 7 -nerma 7 -jtac 7 -intermezzo 7 -doukoure 7 -sea-going 7 -vallodolid 7 -avalanche-journal 7 -prefabs 7 -atcha 7 -oldland 7 -eco-label 7 -ranga 7 -sandy-haired 7 -kbmt 7 -usero 7 -high-mileage 7 -export-oriented 7 -garhi-khuda 7 -oakenshaw 7 -g.g. 
7 -overtopping 7 -retrevo 7 -isoblox 7 -touch-screens 7 -sub-60 7 -enigmas 7 -decolonization 7 -xiaoling 7 -al-farsi 7 -o-shot 7 -plastination 7 -1747 7 -1,773 7 -green-brown 7 -tibisay 7 -huaman 7 -leaf-chronicle 7 -bisek 7 -weaponising 7 -doorley 7 -senone 7 -expedites 7 -umf 7 -ezenagu 7 -drovers 7 -hammarstedt 7 -manhattanite 7 -yero 7 -mid-mounted 7 -56billion 7 -kozelek 7 -135-mile 7 -ribak 7 -hocker 7 -yagé 7 -demises 7 -stinsford 7 -action-oriented 7 -practicably 7 -u-visas 7 -race-derived 7 -adorjan 7 -velautham 7 -cirelli 7 -bearnaise 7 -woobenson 7 -glass-covered 7 -keyon 7 -4,960 7 -newly-painted 7 -apovian 7 -cornettos 7 -fosdick 7 -soprintendenza 7 -satpreet 7 -critton 7 -name-recognition 7 -omnee 7 -caunter 7 -cesp 7 -top-10s 7 -w-2s 7 -lower-than-normal 7 -53-6 7 -charlottenburg 7 -kohlin 7 -leg-breaker 7 -marigny 7 -123,745 7 -sun-loving 7 -hellholes 7 -bacanovic 7 -guest-house 7 -asset-rich 7 -mabasa 7 -lens-shaped 7 -superhot 7 -iska 7 -sonae 7 -imbierowicz 7 -now-outlawed 7 -sparagna 7 -tegretol 7 -1,666 7 -melbournians 7 -dalyell 7 -horserace 7 -low-strength 7 -sriharan 7 -oaklander 7 -quoit 7 -rafli 7 -storr 7 -31-hour 7 -inter-play 7 -31,300 7 -elone 7 -ourimbah 7 -paraisopolis 7 -gangbangers 7 -1/6 7 -starikov 7 -abdulqawi 7 -theus 7 -lynxes 7 -kitman 7 -bestrode 7 -floreana 7 -romeoville 7 -mendips 7 -burneside 7 -zero-rated 7 -neg 7 -parapark 7 -rattu 7 -reconciler 7 -champoux 7 -asian-looking 7 -gribkov 7 -mcr 7 -moheng 7 -lindeman 7 -courtemanche 7 -martez 7 -marter 7 -roehrl 7 -saiyma 7 -azubuike 7 -degc 7 -savolainen 7 -flumazenil 7 -asri 7 -wozzilroy 7 -best-organized 7 -hunza 7 -ractliffe 7 -855,000 7 -decimates 7 -turkmani 7 -anglada-escude 7 -koi-314c 7 -hda 7 -masrur 7 -lebroning 7 -2pts 7 -4,709 7 -2,026 7 -2,023 7 -blanes 7 -utmc 7 -silentium 7 -jackass-style 7 -lw 7 -fancy-free 7 -co-producers 7 -hartmanns 7 -paywall 7 -tofurky 7 -lehighton 7 -buttonholes 7 -colver 7 -office-approved 7 -benjawatananun 7 -two-hander 7 -pinelands 7 -17mins 7 -nullabor 7 -ex-gratia 7 -locksley 7 -arnulfo 7 -wsp 7 -agonistic 7 -hawk-like 7 -judaica 7 -yenesis 7 -well-acquainted 7 -châteaux 7 -rockiest 7 -21/20 7 -moonbeam 7 -diderot 7 -38,387 7 -rainone 7 -hussaina 7 -barno 7 -demonizes 7 -all-vegan 7 -decreasingly 7 -seremban 7 -milliman 7 -leiserowitz 7 -shuriken 7 -mcilhagga 7 -shankill 7 -aldar 7 -rvc 7 -baronne 7 -holecheck 7 -uglies 7 -nrao 7 -jorvik 7 -raelian 7 -mi-2s 7 -accelera 7 -800ad 7 -league-table 7 -86p 7 -ruge 7 -canham 7 -animasia 7 -26-inch 7 -wind-tunnel 7 -crop-protection 7 -marcinelle 7 -stanback 7 -canelas 7 -hiero 7 -nixes 7 -puntus 7 -18-person 7 -mandibular 7 -ottaviano 7 -xiaochuan 7 -janjua 7 -odaise 7 -gahaya 7 -spatulae 7 -chupacabras 7 -5.98 7 -5,620 7 -mukhlas 7 -vaccinia 7 -conservative-controlled 7 -weihai 7 -haracourt 7 -sikandar 7 -81.1 7 -terrazza 7 -shihabi 7 -niebla 7 -lidgey 7 -leeb 7 -leen 7 -fusilli 7 -goddammit 7 -kinkiest 7 -behaviorally 7 -takehiko 7 -2009-13 7 -cryin 7 -evidentially 7 -bottled-up 7 -umrah 7 -d'aquilla 7 -greaves-lord 7 -biddable 7 -beatboxer 7 -ball-player 7 -chilford 7 -seebock 7 -throw-up 7 -zellen 7 -16-piece 7 -cranham 7 -vandenbroucke 7 -stuever 7 -shirko 7 -oranyendu 7 -magidson 7 -7900 7 -cat-head 7 -aroch 7 -degersdorff 7 -kessock 7 -broadmarsh 7 -staredown 7 -bosanac 7 -sydnee 7 -482-foot 7 -taitz 7 -wolmar 7 -63.1 7 -itunu 7 -turn-key 7 -last-surviving 7 -post-mao 7 -pastie 7 -wretchedness 7 -gharnavati 7 -phatic 7 -ononiwu 7 -vsc 7 -digga 7 -mbabu 7 -illulissat 7 -coex 7 -banc 7 -milovanovic 7 -5.76 
7 -centraal 7 -tiredgate 7 -tyrwhitt 7 -borsa 7 -2-3million 7 -truncate 7 -arhinia 7 -freeganism 7 -bilboa 7 -vitaioli 7 -alyse 7 -treankler 7 -mambu 7 -kerm 7 -kera 7 -airblue 7 -slaked 7 -chantae 7 -dognapping 7 -945,000 7 -lito 7 -slapdown 7 -2012-14 7 -downtonisms 7 -tubmanburg 7 -blankenhorn 7 -rhumble 7 -greenback 7 -pashkevich 7 -8:22 7 -8:24 7 -palframan 7 -angula 7 -vetheuil 7 -wejebe 7 -jasmyne 7 -yayoi 7 -chauzy 7 -celje 7 -giant-killings 7 -mirwaiz 7 -szczepanik 7 -lukovic 7 -ap-norc 7 -ameganvi 7 -16-pound 7 -natca 7 -sahal 7 -saham 7 -chaohu 7 -brucculeri 7 -justen 7 -arwel 7 -misogny 7 -wook-pyo 7 -rezidor 7 -pillcam 7 -curuguaty 7 -bazrcar 7 -cnn-opinion 7 -etrack 7 -may-traenor 7 -gabilondo 7 -kappes 7 -myfoxphilly 7 -isolator 7 -fuzz-free 7 -tiji 7 -48.50 7 -zschape 7 -self-starvation 7 -okun-wiese 7 -well-delivered 7 -linnane 7 -yokozuna 7 -proteomics 7 -ashed 7 -maccow 7 -chingaiz 7 -quintano 7 -yulianto 7 -6,928 7 -kaminski-morrow 7 -brodman 7 -caproni 7 -ellenita 7 -bouzaglos 7 -whitefly 7 -nourizadeh 7 -basmanny 7 -john-joseph 7 -royalty-free 7 -solomonese 7 -50miles 7 -yahiye 7 -photoshopsurgeon 7 -yeats-brown 7 -fifield 7 -coypu 7 -fruitiness 7 -rco 7 -air-strike 7 -pow-wow 7 -802,000 7 -holsworth 7 -mid-major 7 -gentility 7 -22-strong 7 -steelwork 7 -lobianco 7 -aviator-style 7 -miró 7 -imoji 7 -d-los 7 -crusinberry 7 -zhangzhou 7 -as&e 7 -bruneau 7 -tere 7 -stell 7 -svengali-like 7 -rosebank 7 -o'lakes 7 -kusi 7 -czarist 7 -maspeth 7 -saruhan 7 -norina 7 -gedmark 7 -hrach 7 -buddy-cop 7 -burano 7 -zhizhi 7 -non-relatives 7 -dayman 7 -pongy 7 -glanvill 7 -druggy 7 -lodwidge 7 -radom 7 -utian 7 -bare-footed 7 -hackerspaces 7 -pittakionophobia 7 -druglords 7 -ikebukuro 7 -seith 7 -508th 7 -benoît 7 -@netflix 7 -ellmer 7 -flashmobs 7 -escriba 7 -minigames 7 -scrugg 7 -guralnick 7 -moreishness 7 -miyakoji 7 -press-ganged 7 -clubine 7 -pay-as-you-throw 7 -anti-cholinergic 7 -jopson 7 -concrete-filled 7 -dhruv 7 -media-saturated 7 -worroll 7 -hematopoietic 7 -107mph 7 -twice-monthly 7 -mk17 7 -signed-off 7 -pre-exam 7 -ddr 7 -dde 7 -typecasting 7 -triplelift 7 -phillis 7 -wronski 7 -spooners 7 -menil 7 -bonforte 7 -third-base 7 -violative 7 -ajali 7 -dunaden 7 -balashankar 7 -displeases 7 -46-inch 7 -barbarellas 7 -superchef 7 -rufous 7 -c100 7 -outgross 7 -sajeel 7 -emos 7 -super-groomed 7 -viagra-like 7 -sushmita 7 -pink-red 7 -schürrle 7 -twin-opposed 7 -kletzy 7 -fervid 7 -telugu 7 -sky-scraping 7 -lolaycia 7 -#ghostplane 7 -tiverios 7 -braybrook 7 -fish-and-chips 7 -indo-asian 7 -cedc 7 -ankerwycke 7 -peahi 7 -kickaround 7 -11000 7 -bernieres 7 -kanj 7 -lovemore 7 -okoro 7 -rq36 7 -two-finger 7 -miedzianowski-sinclair 7 -sapelo 7 -@papajohns 7 -scaccetti 7 -boleslawiec 7 -al-soufi 7 -23-hours 7 -back-bench 7 -stn 7 -century-maker 7 -silivri 7 -slepnev 7 -phenylalanine 7 -h.i.v. 
7 -cowper 7 -berhan 7 -breidis 7 -mackenzy 7 -7,392 7 -famulak 7 -pulse-pounding 7 -boqueria 7 -roder 7 -characterless 7 -drooled 7 -lessya 7 -al-jaberi 7 -osayomi 7 -asymco 7 -pop-country 7 -oil-related 7 -aranha 7 -burrillville 7 -16f 7 -snellesk 7 -re-position 7 -seefried 7 -tipitina 7 -ethnic-minority 7 -podgie 7 -reheman 7 -jilting 7 -target-rich 7 -amberleigh 7 -mediatakeout 7 -fayaz 7 -gianturco 7 -fipps 7 -hohokam 7 -vinters 7 -raffling 7 -6-hour 7 -dhelsing 7 -harfield 7 -liddiment 7 -overcompensation 7 -l115a3 7 -britnell 7 -treeby 7 -674,000 7 -kamppi 7 -termaine 7 -wadia 7 -deilar 7 -.120 7 -by-the-minute 7 -glass-blowing 7 -negging 7 -asiaair 7 -miasma 7 -misko 7 -gce 7 -gcm 7 -playtech 7 -menge 7 -fitness-to-work 7 -knobloch 7 -jamijarvi 7 -@femail 7 -benson-green 7 -petreikis 7 -anti-competition 7 -whitefoot 7 -lendas 7 -zermeno 7 -vinluan 7 -delousing 7 -speiser 7 -cakebread 7 -enfranchised 7 -witchetty 7 -2,500-a-month 7 -gabbert 7 -refund.me 7 -261/2004 7 -madhere 7 -murshid 7 -pellissier 7 -hala'ufia 7 -biogenfutures 7 -palatka 7 -sea-facing 7 -josephat 7 -blakenhurst 7 -zagorski 7 -kxas-tv 7 -latapy 7 -average-priced 7 -realas 7 -chiswell 7 -aguri 7 -jack-o-lantern 7 -widdup 7 -malari 7 -higher-earning 7 -location-tracking 7 -snail-eating 7 -heagney 7 -cagoules 7 -antonius 7 -107,932,603.20 7 -phoebus 7 -1,790 7 -pealed 7 -omotayo 7 -o'ween 7 -win-win-win 7 -mabhena 7 -10.68 7 -post-divorce 7 -sweatband 7 -guertin 7 -zalabia 7 -monowitz 7 -torsade 7 -caramels 7 -summerland 7 -transcuba 7 -laviolette 7 -cortaca 7 -kariya 7 -staphylococcal 7 -underley 7 -save-the-date 7 -2cvs 7 -crowdstrike 7 -bovid 7 -grandbabies 7 -juhni 7 -unsaleable 7 -gervase 7 -embryotomy 7 -anti-dumping 7 -slim-cut 7 -nerines 7 -publicly-run 7 -eckerman 7 -otta 7 -sailani 7 -flight-testing 7 -obwalden 7 -priuses 7 -mcelhaney 7 -46-page 7 -fishbone 7 -kalhammer 7 -4ft-deep 7 -mid-1500s 7 -18bn 7 -great-granddad 7 -strawberry-flavoured 7 -31g 7 -kastoria 7 -trevolta 7 -atv-2 7 -lyburd 7 -mulwa 7 -#betrue 7 -myrtle/willoughby 7 -eurogeddon 7 -a684 7 -50-a-week 7 -hotheaded 7 -carriles 7 -post-referendum 7 -ageros 7 -11-11-11 7 -lewelling 7 -chirino 7 -milsee 7 -hallmarked 7 -guenon 7 -mcmeikan 7 -pickstock 7 -isim 7 -songo 7 -songa 7 -vinovia 7 -bramlette 7 -26,200 7 -anti-drink 7 -desjoyeaux 7 -fatemah 7 -danay 7 -worsham 7 -papoose 7 -lexisnexis 7 -corer 7 -interspecies 7 -lazin 7 -swon 7 -gun-carrying 7 -godby 7 -dx110 7 -campin 7 -guor 7 -weht 7 -60-room 7 -stape 7 -baby-related 7 -alldredge 7 -freshly-cut 7 -aighton 7 -unabridged 7 -viatafa 7 -mcdavitt 7 -popat 7 -36-yard 7 -jinghua 7 -jabalia 7 -hacktivism 7 -bibiana 7 -less-than-two-week 7 -bikaner 7 -planet-like 7 -2013-2013 7 -590-foot 7 -skynrg 7 -potter-dixon 7 -cosmopolitans 7 -dulong 7 -240ft 7 -meignan 7 -promescent 7 -6,125 7 -m.r. 
7 -wgc-ca 7 -elastomer 7 -haviv 7 -non-doms 7 -taliban-aligned 7 -60-degree 7 -sovereign-citizen 7 -taunus 7 -shut-outs 7 -closed-doors 7 -sweat-soaked 7 -noisemakers 7 -snoozer 7 -doucet 7 -sciarrino 7 -fassel 7 -orientale 7 -bushfield 7 -36,000-a-year 7 -yak-130 7 -dubbeldam 7 -novosvitlivka 7 -edwardson 7 -yarlington 7 -ghostnet 7 -mawusimensah 7 -reneta 7 -gilhooley 7 -benedetta 7 -upperville 7 -maziarka 7 -anses 7 -shamilia 7 -0805 7 -ghurabaa 7 -gweneth 7 -tylor 7 -gps-equipped 7 -townhead 7 -nazi-sponsored 7 -animal-tested 7 -a-listed 7 -depo 7 -singhs 7 -lilydale 7 -non-experts 7 -adelboden 7 -erciyesspor 7 -oubre 7 -cerný 7 -bleated 7 -ahi 7 -417th 7 -charnel 7 -macuja 7 -pitanguy 7 -nesodden 7 -subcategory 7 -borgholm 7 -seeked 7 -ponvert 7 -ichthyologist 7 -toddlewood 7 -refuseniks 7 -kinray 7 -sesquicentennial 7 -tripcase 7 -broadribb 7 -minor-fareast 7 -indiscernible 7 -quilmes 7 -hsia 7 -4,140 7 -4,144 7 -swallowtails 7 -zonday 7 -windigo 7 -mcmullin 7 -pretentiousness 7 -doulting 7 -nonconfrontational 7 -knifings 7 -13-megapixel 7 -33mins 7 -siegrune 7 -5-bedroom 7 -khawli 7 -ex-addict 7 -9-enders 7 -eu-based 7 -karti 7 -barjot 7 -20,200 7 -macungie 7 -hadiza 7 -sital 7 -non-traffic 7 -four-day-long 7 -whigs 7 -wellerstein 7 -alfredson 7 -out-of-sessions 7 -92.8 7 -eygpt 7 -serials 7 -home-coming 7 -five-megapixel 7 -shemagh 7 -aleksic 7 -frappicino 7 -green-tregaro 7 -woodglue 7 -piddling 7 -ww9000 7 -zingerle 7 -satanazes 7 -flounces 7 -flounced 7 -roy-laroche 7 -somafotorm 7 -sichani 7 -107.1 7 -107.3 7 -warthen 7 -senf 7 -stessel 7 -fariza 7 -foreward 7 -47.28 7 -patapsco 7 -vocaloid 7 -08454 7 -still-unfolding 7 -crisa 7 -crish 7 -rotonda 7 -rockerfeller 7 -bross 7 -freewheel 7 -calahan 7 -krazy-8 7 -sub-divided 7 -krivickaite 7 -particularities 7 -lyketsos 7 -6-inches 7 -dutch/german 7 -darvall 7 -haeundae 7 -padlocking 7 -ex-naval 7 -patissier 7 -26-0 7 -guest-star 7 -bramcote 7 -andreou 7 -8:44 7 -rastani 7 -hash-tagged 7 -goreel 7 -omfori 7 -plutonium-based 7 -mpe 7 -mpl 7 -newyork-presbyterian 7 -murty 7 -brightwater 7 -617,000 7 -green-skinned 7 -honeyeater 7 -liquefies 7 -.57 7 -combover 7 -ridin 7 -gring 7 -enforcement-only 7 -monegasques 7 -mahanoy 7 -ex-guards 7 -swastika-like 7 -g5s 7 -44-years-old 7 -ruoppolo 7 -bep 7 -campbell-savours 7 -pimpernel 7 -30-bed 7 -reviv 7 -lileikis 7 -@jasoncollins34 7 -wamp 7 -1556 7 -ios8 7 -finger-print 7 -rhodes-hughes 7 -bassanos 7 -madonnina 7 -worsbrough 7 -koyen 7 -l641441 7 -palmisanos 7 -rads 7 -sutro 7 -pleasantry 7 -47mins 7 -gabrial 7 -scheuplein 7 -galatoire 7 -aini 7 -miata 7 -140-an-hour 7 -bookbook 7 -k6 7 -k5 7 -unstintingly 7 -reapportionment 7 -jyothi 7 -dayawan 7 -patchin 7 -articular 7 -revel-reade 7 -1085 7 -self-censoring 7 -vipul 7 -1,491 7 -9.33 7 -non-tobacco 7 -debt-reduction 7 -haemorrage 7 -radnicki 7 -redbull.com 7 -post-competition 7 -m14 7 -pettorino 7 -told-ya-so 7 -intermittency 7 -=-rrb- 7 -union-patriotic 7 -antônio 7 -geey 7 -bargallo 7 -brylcreem 7 -blood-related 7 -6.98 7 -techo 7 -myrfors 7 -moltz 7 -goofiness 7 -973,000 7 -7:08 7 -#sorrynotsorry 7 -botstein 7 -h1n2 7 -wedlake 7 -islamic-american 7 -kyrgystan 7 -lavado 7 -766,000 7 -#cnnireport 7 -delois 7 -abdali 7 -ejaria 7 -deadens 7 -four-sided 7 -Øygard 7 -ahar 7 -hispanic/latino 7 -u.f.o. 
7 -bocconi 7 -comesa 7 -suroosh 7 -thornlie 7 -cariaso 7 -bendixsen 7 -akune 7 -godforsaken 7 -fundraises 7 -twcs 7 -maduekwe 7 -xenos 7 -elva 7 -5.2-inch 7 -mamatov 7 -masaru 7 -rayara 7 -viggle 7 -teed-up 7 -weinerman 7 -trec 7 -treu 7 -hyper-inflation 7 -antekeier 7 -horse-head 7 -postma 7 -merfolk 7 -angeleri 7 -weatherstone 7 -kasaona 7 -decio 7 -nomen 7 -23-yard 7 -intermix 7 -q41 7 -tepania 7 -jonesing 7 -2,505 7 -watchkit 7 -palix 7 -sandpoint 7 -rusks 7 --51 7 -sahily 7 -raudenbush 7 -karkos 7 -noerr 7 -dinges 7 -hamis 7 -2,760 7 -2,765 7 -emab 7 -high-alert 7 -homayoonpoor 7 -titlist 7 -walgrave 7 -mitic 7 -ear-biting 7 -ousley 7 -sabz 7 -clarisse 7 -viard 7 -mobos 7 -penumbra 7 -stutts 7 -schottel 7 -passably 7 -rubber-band 7 -koh-i-noor 7 -availabilities 7 -march/april 7 -parantha 7 -ruzzamenti 7 -accordion-like 7 -leahovcenco 7 -railwayman 7 -steenburgen 7 -gathungu 7 -cranebank 7 -non-transgender 7 -kalo 7 -120-metre 7 -smiley-faced 7 -sharypov 7 -market-friendly 7 -tubas 7 -contagiously 7 -knee-replacement 7 -cash-dispensing 7 -floraunce 7 -krivoshapkin 7 -playpark 7 -moyamba 7 -still-unidentified 7 -aboulhosn 7 -2,367 7 -2,365 7 -2,368 7 -tisher 7 -hima 7 -iosco 7 -ferentino 7 -nyro 7 -mestizo 7 -u.s.-friendly 7 -bespeaks 7 -wyevale 7 -profaci 7 -rankling 7 -double-tapping 7 -steg 7 -33,400 7 -web-tv 7 -binaschi 7 -houssine 7 -collusive 7 -mylands 7 -tullamarine 7 -bellyful 7 -litigators 7 -non-cooperation 7 -hannie 7 -elsalameen 7 -recoleta 7 -bourns 7 -wrongness 7 -goateesaver 7 -strip-searching 7 -hussy 7 -booze-free 7 -marienplatz 7 -frogameni 7 -hair-brained 7 -skiatook 7 -finalwomen 7 -w.g. 7 -lobley 7 -undergird 7 -wsj.com 7 -kakum 7 -flouncing 7 -small-talk 7 -al-juindy 7 -bi-turbo 7 -eep 7 -idylls 7 -heatstick 7 -adde 7 -#australianlife 7 -moofushi 7 -generalissimo 7 -re-uptake 7 -3,990 7 -feguer 7 -cordials 7 -cattan 7 -marsan 7 -vajazzling 7 -mesel 7 -bartimaeus 7 -mousiou 7 -discolour 7 -gikonyo 7 -57-page 7 -hochhauser 7 -skalic 7 -waitin 7 -adulatory 7 -dissociating 7 -spokeless 7 -chinese-built 7 -roehrkasse 7 -daishi 7 -b10 7 -b19 7 -gamma-rays 7 -ramjet 7 -ever-impressive 7 -molai 7 -officinalis 7 -five-percent 7 -nations-african 7 -50-somethings 7 -chiquis 7 -mourne 7 -radiation-contaminated 7 -instantcheckmate.com 7 -dancin 7 -reibnitz 7 -ghazal 7 -onalaska 7 -madrona 7 -alexeyeva 7 -22in 7 -clarkstown 7 -burntisland 7 -ambon 7 -drewery 7 -davidovich 7 -public-address 7 -romilda 7 -venture-backed 7 -vigar 7 -onyedinma 7 -rebel-territory 7 -sakazakii 7 -intelligibly 7 -plateroti 7 -colossa 7 -threatexchange 7 -roobarb 7 -pro-zelaya 7 -nikolaev 7 -radiographs 7 -michishita 7 -hatted 7 -khazri 7 -ikeoluwa 7 -kilobytes 7 -taweez 7 -bodyboarders 7 -guitar-shaped 7 -178g 7 -mansar 7 -faghani 7 -dwb 7 -chapatti 7 -malkani 7 -el-gamaty 7 -honey-coloured 7 -ungallant 7 -nickey 7 -larouche 7 -ramstetter 7 -chadwick-edgar 7 -rusts 7 -muen 7 -markit/cips 7 -daksa 7 -saint-martin 7 -mogale 7 -tristam 7 -malski 7 -putulowski 7 -souici 7 -compensable 7 -elanor 7 -surespot 7 -sarbjit 7 -:\ 7 -bucentaure 7 -2000-year-old 7 -hofuf 7 -appreciations 7 -petersburg-based 7 -mairia 7 -shukatsu 7 -convertibility 7 -federale 7 -117-year-old 7 -mkoko 7 -byrn 7 -student-loan 7 -nailene 7 -semi-mythical 7 -leroi 7 -15,000-plus 7 -carndonagh 7 -tma-13m 7 -johson 7 -kimberli 7 -sukacita 7 -sherrell 7 -power-play 7 -kinnick 7 -137-mile 7 -brutalities 7 -stevensville 7 -yarian 7 -free-flow 7 -dowton 7 -1,959 7 -5,542 7 -million-year 7 -kostis 7 -hoathly 7 -mechtler 7 -35mins 7 
-anti-surveillance 7 -peopleâ 7 -stickball 7 -sugarhill 7 -liasing 7 -quick-pick 7 -f-86 7 -kabonero 7 -bunkbed 7 -2003-2008 7 -16-3 7 -16-6 7 -cyber-bully 7 -worricker 7 -bhagwan 7 -sisaket 7 -aegis-class 7 -chelsfield 7 -polglase 7 -slaver 7 -torres-manteufel 7 -overbilled 7 -legitimization 7 -anori 7 -.460 7 -symbolist 7 -tajul 7 -weft 7 -baffert 7 -hajsafi 7 -kavukcu 7 -five-team 7 -herengracht 7 -self-empowerment 7 -second-long 7 -gambling-related 7 -bogren 7 -puzzlebox 7 -hullavington 7 -saboora 7 -qihui 7 -jiverly 7 -bludgers 7 -whitlum 7 -mdma-assisted 7 -thenew 7 -kallaste 7 -deschamp 7 -#blackout 7 -vincula 7 -organdonation.nhs.uk 7 -kabin 7 -hotlist 7 -bendita 7 -salac 7 -salaz 7 -salay 7 -katty 7 -galland 7 -al-masjid 7 -propps 7 -mgf 7 -enviro 7 -anandakrishnan 7 -72p 7 -tutorcare 7 -airy-fairy 7 -jabril 7 -poplin 7 -15-stone 7 -velas 7 -shoveler 7 -one-twentieth 7 -premal 7 -stretz 7 -mips 7 -sevres 7 -nurturer 7 -benicassim 7 -håkansson 7 -campbell-hughes 7 -beer-making 7 -slepe 7 -katsumi 7 -papillae 7 -95th-minute 7 -nonprofessional 7 -alcázar 7 -bulengo 7 -bugner 7 -bullocks 7 -ex-nypd 7 -coldhearted 7 -gacon 7 -ralphs 7 -jingu 7 -retrenched 7 -sammlung 7 -trevele 7 -cutecircuit 7 -jelassi 7 -total-body 7 -highest-security 7 -knightsec 7 -hapi 7 -cable-knit 7 -fox6now 7 -once-peaceful 7 -balcomb 7 -cihak 7 -bardach 7 -bardack 7 -karaiskakis 7 -arntz 7 -painell 7 -gushungo 7 -kamien 7 -samarst 7 -de-puffing 7 -mosahebi 7 -tool-maker 7 -curbed.com 7 -214.135 7 -cayle 7 -standard-essential 7 -ecospace 7 -furled 7 -ladybower 7 -blantons 7 -lindu 7 -bahir 7 -semma 7 -800ml 7 -scandal-tainted 7 -autogyros 7 -ciabattini 7 -kotaku.com 7 -cellulaze 7 -molder 7 -uriguen 7 -alexion 7 -rainsy 7 -pavier 7 -sideboob 7 -freeflying 7 -rosarito 7 -quancard 7 -keever 7 -hinsdale 7 -475ft 7 -gadeir 7 -robinson-baker 7 -malverne 7 -fionn 7 -now-extinct 7 -mazeika 7 -queasiness 7 -bradatan 7 -ampoule 7 -gohlar 7 -commission-based 7 -matanzima 7 -f/t 7 -silesian 7 -millington-day 7 -posch 7 -valeron 7 -tooba 7 -usair 7 -barnton 7 -48,500 7 -gympanzee 7 -move.this 7 -acsinte 7 -data-hungry 7 -#justkidding 7 -odst 7 -befuddlement 7 -footpad 7 -pettite 7 -sabryna 7 -rogoyska 7 -hynard 7 -arbeia 7 -wheaty 7 -mingmei 7 -1,342 7 -tadese 7 -musclebound 7 -3,154 7 -most-improved 7 -irwins 7 -50-ton 7 -salmons 7 -blatner 7 -scozzoli 7 -piromya 7 -shobanjo 7 -oommen 7 -ngoh 7 -jandal 7 -usni 7 -quasi-governmental 7 -ewold 7 -tomasso 7 -yodaville 7 -ishin-den-shin 7 -truthiness 7 -6:26 7 -verney 7 -whatcha 7 -quartiano 7 -deep-towed 7 -anti-growth 7 -mnookin 7 -rockcastle 7 -zaqout 7 -cinematographic 7 -krem-tv 7 -swokowski 7 -druckenmiller 7 -non-surgically 7 -jdate 7 -samangan 7 -laclede 7 -crossin 7 -eight-tenths 7 -hasibullah 7 -nagymaros 7 -rosenkilde 7 -winnington 7 -finagle 7 -processers 7 -bifold 7 -conk 7 -telegdy 7 -windcatchers 7 -line-of-duty 7 -trusteer 7 -esteruelas 7 -wigfield 7 -8,167 7 -sycophancy 7 -afghanistan/pakistan 7 -44-40 7 -mcclenaghan 7 -narellan 7 -saaed 7 -anat 7 -greenish-blue 7 -tma-05m 7 -gerondis 7 -asci 7 -manvelyan 7 -make-up-free 7 -gusky 7 -blaik 7 -nikky 7 -korff 7 -renclawowicz 7 -bgi 7 -1,833 7 -1,836 7 -srour 7 -radio-canada 7 -muhanned 7 -praxis 7 -,700 7 -56-mile 7 -tord 7 -gay-themed 7 -clinger 7 -braelynn 7 -150,00 7 -topgear.com 7 -gavrilova 7 -rynecki 7 -dogme 7 -inkland 7 -penoplasty 7 -a'ntaar 7 -yu-mi 7 -julen 7 -blow-by 7 -37.99 7 -assented 7 -148,656,000 7 -breton-style 7 -1.012 7 -adokiye 7 -3,330 7 -3,338 7 -star-nosed 7 -mahey 7 -mellinger 7 
-chinese-led 7 -ballotelli 7 -kersiene 7 -zieser 7 -youngjin 7 -olanzapine 7 -nearly-naked 7 -naah 7 -larung 7 -solarmax 7 -sequent 7 -gimli 7 -hodsdon 7 -thodoris 7 -man-on-man 7 -malayalam 7 -faezeh 7 -tripadvisor-style 7 -organophosphate 7 -twiter 7 -qua 7 -meece 7 -opinion-formers 7 -28l 7 -boggia 7 -mozgov 7 -thoroton 7 -cross-body 7 -b&t 7 -curzen 7 -30-15 7 -hollyford 7 -villedieu-les-poeles 7 -autobiographythe 7 -1392 7 -carducci 7 -stubblefield 7 -meylor 7 -stockpot 7 --52 7 -spiderfab 7 -nicoli 7 -christle 7 -leti 7 -daymer 7 -unitedmanchester 7 -czocha 7 -acehnese 7 -mirz 7 -washoku 7 -horsefair 7 -view-based 7 -chain-of-command 7 -resendez 7 -322million 7 -arrundale 7 -dolstad 7 -brushwork 7 -waskada 7 -emam 7 -apperley 7 -taubmann 7 -spammed 7 -cristi 7 -miyashita 7 -trubridge 7 -leanda 7 -riney 7 -campstove 7 -derens 7 -lamestream 7 -ppargamma 7 -165lbs 7 -rhiwbina 7 -doretti 7 -10gb 7 -osmington 7 -coquettishly 7 -melbar 7 -porbeagles 7 -adedjumo-dani 7 -ellis-fraser 7 -dhn 7 -sensitisation 7 -cicpc 7 -bouglione 7 -philles 7 -gabino 7 -tauseef 7 -@thebritishcop 7 -borba 7 -zanno 7 -orthopets 7 -phone-records 7 -toño 7 -jaggermeryx 7 -bolnick 7 -anti-rabies 7 -kutaro 7 -meopham 7 -babyish 7 -27lb 7 -soukaina 7 -sarson 7 -450lbs 7 -mahara 7 -dourlen 7 -bowdler 7 -video 7 -benj 7 -untrimmed 7 -midvale 7 -close-minded 7 -67-0 7 -jabel 7 -reil 7 -584,000 7 -jenya 7 -yoshio 7 -donnan 7 -lihong 7 -aydintasbas 7 -showing-off 7 -tehsil 7 -manufactory 7 -everth 7 -bournmouth 7 -mileskiewicz 7 -perfitt 7 -springboards 7 -paleoamericans 7 -halanaerobium 7 -arrrested 7 -uziel 7 -neumaier 7 -emily-kate 7 -spg 7 -waggle 7 -abiquiu 7 -alkyl 7 -stanworth 7 -12345678 7 -paint-by-numbers 7 -foulke 7 -emospark 7 -coolatta 7 -hollister-jones 7 -52billion 7 -vanderberg 7 -masoom 7 -aeons 7 -rodic 7 -104,500 7 -mastain 7 -37-and-a-half 7 -alayne 7 -archive.org 7 -nihil 7 -southern-based 7 -cuzzy 7 -jodlowiec 7 -nicolet 7 -sahlen 7 -chante 7 -hirose 7 -sanjid 7 -moisander 7 -14.55 7 -wauconda 7 -winegrowers 7 -poyntz 7 -elahian 7 -heien 7 -uppercuts 7 -single-mindedly 7 -kilee 7 -red-tinged 7 -1690s 7 -niemeijer 7 -51,500 7 -mizuguchi 7 -juan-luis 7 -elliptic 7 -agaric 7 -bloom.fm 7 -cidre 7 -harjit 7 -irbesartan 7 -oceanscape 7 -chenggang 7 -clickstick 7 -nobiiru 7 -foisting 7 -jg 7 -badest 7 -4min 7 -kilmurry 7 -105.1 7 -baalbeh 7 -www.b-eat.co.uk 7 -eastlack 7 -actioned 7 -gg2 7 -trans-vaginal 7 -unshuffled 7 -flesh-baring 7 -springle 7 -398,000 7 -israeli-arab 7 -tendercrisp 7 -allclear 7 -gilli 7 -scunnered 7 -fug 7 -moh 7 -waiz 7 -maccalube 7 -g-tummo 7 -paxson 7 -ear-shaped 7 -microlens 7 -mukhadram 7 -womick 7 -ouandja 7 -backpedalling 7 -grimus 7 -donators 7 -cebic 7 -al-absi 7 -leafe 7 -luquet 7 -gulcin 7 -poorly-paid 7 -crow-smith 7 -goneril 7 -creggan 7 -ridglea 7 -virgalla 7 -secteur 7 -aschiana 7 -snugglers 7 -second-gun 7 -hinesville 7 -faa-staffed 7 -lapdogs 7 -sanghrajka 7 -pavitt 7 -sommerlath 7 -rough-and-ready 7 -10ft-high 7 -sorcinelli 7 -clod 7 -lordan 7 -vannucci 7 -vassal 7 -mengshan 7 -kadyn 7 -18-ft 7 -trainload 7 -ceceila 7 -barrettes 7 -not-so-super 7 -shindler 7 -wassily 7 -lingnan 7 -insulators 7 -hightops 7 -donath 7 -harrisdale 7 -myfoxhouston 7 -bunnychow 7 -sauter 7 -moonriver 7 -lemberg 7 -furtivo 7 -taymullah 7 -tomarchio 7 -mariem 7 -mallacoota 7 -f40 7 -two-bit 7 -tanton 7 -face-tracking 7 -lukimya 7 -g-slate 7 -11mm 7 -milners 7 -662-563-6230 7 -bacchanalia 7 -chin-up 7 -howdens 7 -icelolly 7 -reddits 7 -akli 7 -delatorre 7 -mickeys 7 -pageboys 7 -monohon 7 
-urie 7 -21:9 7 -dolison 7 -jedrzejczyk 7 -apparels 7 -lyssa 7 -half-down 7 -hacipasa 7 -edeson 7 -asscher 7 -siddons 7 -diametre 7 -food-producing 7 -post-speech 7 -otaki 7 -kimerer 7 -cyller 7 -0.90 7 -arnotts 7 -bulks 7 -oetker 7 -14-meter 7 -79th-minute 7 -prooth 7 -32-30 7 -emma-jane 7 -265th 7 -watersport 7 -193-member 7 -strangely-shaped 7 -delagarza 7 -buttoning 7 -dorje 7 -nosediving 7 -46lbs 7 -maiani 7 -horseflesh 7 -lirangwe 7 -463,846 7 -openwork 7 -lamplugh 7 -typhoon-devastated 7 -stri 7 -140-pound 7 -shadhat 7 -weidhaas 7 -sukhvender 7 -kliger 7 -state-held 7 -croissant-donut 7 -3,430 7 -3,439 7 -cozzarelli 7 -ejide 7 -cooksley 7 -el-kadomi 7 -sezer 7 -druchen 7 -cryptozoologists 7 -wedc 7 -scalper 7 -6,370 7 -changing-room 7 -funkiest 7 -fracktivist 7 -huka 7 -2,409 7 -tregardock 7 -worldofgood.com 7 -brugnon 7 -matterley 7 -18,900 7 -door-buster 7 -alsvin 7 -veb 7 -gphc 7 -akaydin 7 -papakonstantinou 7 -deworm 7 -h.r.h. 7 -gaeltacht 7 -48per 7 -yerima 7 -gastronomical 7 -zoomed-out 7 -h.m.s. 7 -kerri-ann 7 -vintage-look 7 -stalk-like 7 -@klm 7 -colmans 7 -criticality 7 -child-centred 7 -lougnot 7 -zhirov 7 -louanne 7 -espenson 7 -re-feeding 7 -lie-down 7 -rd-180 7 -waveforms 7 -fedexed 7 -viriviri 7 -chouhaib 7 -2000-2011 7 -2000-2010 7 -dignite 7 -lemington 7 -non-reflective 7 -frode 7 -purdham 7 -broken-up 7 -statesman-like 7 -westwego 7 -forbath 7 -springtown 7 -burba 7 -harel 7 -flechettes 7 -chiriqui 7 -nathuram 7 -at uefa.com 7 -pot-hole 7 -radinn 7 -track-only 7 -filmthe 7 -mutoko 7 -travail 7 -teyana 7 -balderton 7 -misà 7 -crozer-chester 7 -leisa 7 -dynastie 7 -clooneys 7 -darkfetishnet.com 7 -sutton-on-sea 7 -davitashvili 7 -gold-digging 7 -dabaan 7 -germinating 7 -servicers 7 -dog-mad 7 -aeromedical 7 -mansourov 7 -norlane 7 -adv 7 -cayne 7 -megatonnes 7 -matthaeus 7 -thefa.com 7 -l-g 7 -70099 7 -magnetotail 7 -654,000 7 -briggate 7 -14/16 7 -1560s 7 -unhate 7 -lola-grace 7 -hendarso 7 -mid-interview 7 -luckcock 7 -skudder 7 -ceàgo 7 -lukosiute 7 -buck-passing 7 -kameg 7 -serero 7 -henty 7 -globulettes 7 -ebsen 7 -boded 7 -lunar-like 7 -mother-figure 7 -overdevelopment 7 -erkin 7 -saya 7 -takoma 7 -nanoseconds 7 -havanna 7 -crisler 7 -berjon 7 -matajudios 7 -cauvery 7 -40-30 7 -oak-studded 7 -under-occupancy 7 -asree 7 -femtosecond 7 -46lb 7 -cuddihy 7 -hollington 7 -spread-betting 7 -junee 7 -armwear 7 -gorji 7 -pennyweights 7 -somnath 7 -nuckin 7 -chigirinsky 7 -bisevac 7 -whiteouts 7 -fosu 7 -ellastone 7 -maner 7 -maned 7 -olr 7 -peppermints 7 -kandiah 7 -jacobean-style 7 -monocles 7 -eighty-seven 7 -bugajewski 7 -two-track 7 -colour-blocked 7 -medek 7 -15-car 7 -rentoul 7 -titfers 7 -bamidele 7 -18-meter 7 -nondiscriminatory 7 -super-power 7 -845million 7 -lolls 7 -kindergarten-age 7 -giammetti 7 -edelbijev 7 -watson-smith 7 -figments 7 -ceylan 7 -zoheb 7 -habbal 7 -95.8 7 -95.2 7 -plmr 7 -ketk 7 -hayatabad 7 -grandnephew 7 -gensitskiy 7 -12,618 7 -oupa 7 -kirkpinar 7 -iniestra 7 -ntd 7 -rateb 7 -franque 7 -greysia 7 -r&m 7 -araneus 7 -laforge 7 -outten 7 -15024 7 -mtu 7 -greenhoff 7 -digital-media 7 -d'une 7 -rationalizes 7 -31bn 7 -khurasani 7 -lip-gloss 7 -dormund 7 -pharaon 7 -analytically 7 -bluebeards 7 -scallan 7 -roll-necks 7 -lashbrook 7 -krugerrands 7 -busked 7 -#christmas 7 -irland 7 -delneri 7 -asao 7 -sterilisations 7 -ogoniland 7 -jahnz 7 -moistened 7 -amebic 7 -fog-shrouded 7 -lorcen 7 -financee 7 -quacked 7 -denuclearisation 7 -incapsula 7 -vaster 7 -strottman 7 -fleed 7 -triblive 7 -serb-led 7 -technophobic 7 -ringler 7 -grigoriadis 7 
-sunsmart 7 -naiad 7 -witheld 7 -truther 7 -siderov 7 -ducos 7 -undercoat 7 -raguindin 7 -reducers 7 -orrison 7 -cardio-vascular 7 -country-club 7 -kastelruther 7 -diddo 7 -karaffa 7 -swaddles 7 -afterbirth 7 -candacraig 7 -eugenides 7 -ohga 7 -gibbous 7 -massonneau 7 -louiseville-duke 7 -three-paragraph 7 -baden-württemberg 7 -restelica 7 -melanocytic 7 -cattier 7 -retro-looking 7 -villehuchet 7 -triable 7 -beibut 7 -smithsonian.com 7 -ndaa 7 -ndas 7 -little-studied 7 -http://www.civiced.org/index.php?page=stds 7 -dopplers 7 -kraiss 7 -cfcuk 7 -2m-a-year 7 -bodfan 7 -mehterlam 7 -5:34 7 -mckelvy 7 -petroecuador 7 -two-orbit 7 -andar 7 -sthlm 7 -quantel 7 -regionals 7 -tahlil 7 -mizuuchi 7 -2:3 7 -upul 7 -ekchian 7 -mob-like 7 -crowle 7 -iñarritu 7 -1,00 7 -al-ula 7 -hartebeest 7 -44-point 7 -climatology 7 -shearwaters 7 -arma 7 -melvoin-berg 7 -antaki 7 -still-existing 7 -acclimatizing 7 -ryugin 7 -eligenys 7 -snooky 7 -2087 7 -9.01 7 -ill-named 7 -timket 7 -mushaima 7 -munatones 7 -8.42 7 -chiweenie 7 -sub-continental 7 -cipro 7 -460ft 7 -rheubottom 7 -22-degree 7 -11.97 7 -aboo 7 -each-other 7 -judge-only 7 -stevenette 7 -6,950 7 -alvernia 7 -kinasiewicz 7 -rolon 7 -#superstarfinger 7 -big-eared 7 -trai 7 -langtang 7 -eby 7 -otosclerosis 7 -e-junkie 7 -thrombectomy 7 -super-fertile 7 -antipasti 7 -hiv-resistant 7 -iraq-based 7 -recoupment 7 -addaction 7 -canet 7 -pizzle 7 -quagmires 7 -tootling 7 -democrat-herald 7 -shivam 7 -hoeing 7 -revering 7 -gehrman 7 -moleskine 7 -muddles 7 -layin 7 -damola 7 -necole 7 -raithwaite 7 -salado 7 -penningroth 7 -socking 7 -ex-sex 7 -dezso 7 -ojagbemi 7 -xultun 7 -flightview 7 -sheilagh 7 -torgya 7 -spycraft 7 -graphologist 7 -okapis 7 -hajdu 7 -hamidullah 7 -meknes 7 -wrinkly-faced 7 -13,350 7 -objet 7 -collomp 7 -1,223 7 -110.5 7 -29mins 7 -entrekin 7 -18-1 7 -phau 7 -birdsnap 7 -84.8 7 -docuseries 7 -quicksand-like 7 -sequestering 7 -brp 7 -drug-abuse 7 -bellhops 7 -systèmes 7 -prehensile 7 -fuel-guzzling 7 -omotola 7 -rahaf 7 -babad 7 -peschong 7 -macon-moore 7 -roussell 7 -addressees 7 -connect-the-dots 7 -beautifully-designed 7 -kiii 7 -wifi-enabled 7 -soother 7 -4-acre 7 -43p 7 -3:24 7 -digerati 7 -106.7 7 -lofar 7 -cotterstock 7 -bacaltos 7 -vengthlang 7 -irresistable 7 -sentimentally 7 -@clarencehouse 7 -back-rowers 7 -heart-valve 7 -withrow 7 -wedgewood 7 -47-0 7 -sekonda 7 -willke 7 -rawi 7 -muddiest 7 -isrrael 7 -26,100 7 -betterfly 7 -maiffret 7 -loverde 7 -w.c. 7 -olszok 7 -oxygen-poor 7 -unhealthiness 7 -nark 7 -narc 7 -cantile 7 -helliesen 7 -hedman 7 -lieut. 
7 -ashby-hammond 7 -elazabawy 7 -e-patients 7 -anti-gambling 7 -andreoff 7 -minister-in-waiting 7 -middlebrow 7 -bogdanova 7 -taxus 7 -british-accented 7 -tuğçe 7 -rebecchi 7 -garra 7 -probs 7 -limiters 7 -trook 7 -villiers-sur-marne 7 -149mph 7 -speakmans 7 -2,350,000 7 -hak-bong 7 -antibody-drug 7 -220-ft 7 -katlehong 7 -833,000 7 -erasmo 7 -esgut 7 -winikka 7 -preveau 7 -miessan 7 -steel-and-glass 7 -fynley 7 -oshane 7 -oshana 7 -skillicorn 7 -post-campaign 7 -fifers 7 -cyprus-based 7 -34-10 7 -39billion 7 -resister 7 -139million 7 -karner 7 -dungey 7 -poussin 7 -allmusic 7 -yare 7 -yari 7 -josebachvili 7 -senka 7 -magicjack 7 -@itv 7 -anthrax-laced 7 -econo 7 -citygames 7 -charkh 7 -pelura 7 -agribusinesses 7 -copan 7 -low-set 7 -french-swiss 7 -guapo 7 -health-based 7 -candian 7 -sarkari 7 -movius 7 -kanin 7 -traumatizes 7 -stamas 7 -937,500 7 -max-style 7 -tocohua 7 -zaltrap 7 -azahari 7 -assignation 7 -man-about-town 7 -tyryshkin 7 -druce 7 -mansel 7 -creditworthy 7 -anti-houthi 7 -settlement-building 7 -bickles 7 -okubo 7 -harnisch 7 -maldivians 7 -somodio 7 -kyrillos 7 -al-hosni 7 -25-a-night 7 -naliah 7 -safetynet 7 -della-giacoma 7 -birdieing 7 -tea-growing 7 -stringbean 7 -chukkas 7 -gorditos 7 -ne'eman 7 -rouged 7 -castlemilk 7 -mujawar 7 -lambskin 7 -granzyme 7 -1295 7 -phillpotts 7 -breymaier 7 -urbani 7 -39per 7 -fuel-price 7 -sorters 7 -besi 7 -rocester 7 -leman 7 -segrera 7 -airpano 7 -times-leader 7 -non-factor 7 -jenaveve 7 -bilchik 7 -articulable 7 -situate 7 -severalls 7 -elia-belle 7 -fawziya 7 -bezerra 7 -rainclouds 7 -giannetti 7 -gfci 7 -32,800 7 -rigby-style 7 -1,993 7 -oedekoven 7 -zisopoulos 7 -aud$ 7 -seremaia 7 -nthabiseng 7 -unleased 7 -sidey 7 -sider 7 -cnne 7 -hutin-blay 7 -ozubko 7 -25-inch 7 -sagamore 7 -freixa 7 -iha 7 -weapon-related 7 -raccosta 7 -ghanim 7 -suwanmon 7 -zzzz 7 -jalel 7 -head-tracking 7 -binner 7 -diedhiou 7 -formatting 7 -neighborhood-based 7 -atr72 7 -manyisa 7 -arav 7 -araj 7 -w.va 7 -limonene 7 -trayton 7 -malignancies 7 -maenner 7 -super-aged 7 -kema 7 -hypoactive 7 -maranon 7 -young-doo 7 -derinkuyu 7 -pagoda-style 7 -6,586,000 7 -imprisonable 7 -rubberbands 7 -three-and-a-half-inch 7 -lohmeier 7 -them.the 7 -magistracy 7 -volumizing 7 -yongqing 7 -ragui 7 -al-australi 7 -zuloaga 7 -fuehring 7 -third-busiest 7 -bailiwick 7 -poreporena 7 -bacteroidetes 7 -497,000 7 -l555 7 -feda 7 -gee-whiz 7 -berarducci 7 -mcdonah 7 -yosra 7 -635million 7 -göranson 7 -taikonaut 7 -saudi-backed 7 -veiga 7 -scherrs 7 -biomolecular 7 -boccaccio 7 -turda 7 -swaisgood 7 -merrigan 7 -bhutto-zardari 7 -kune 7 -lasley 7 -122.4 7 -122.9 7 -mattes 7 -antosik 7 -xyloto 7 -ficus 7 -british-designed 7 -milovanović 7 -amanzi 7 -hudnut 7 -jealousy-inducing 7 -yashonandan 7 -utep 7 -univac 7 -wherefore 7 -skymark 7 -gallacinao 7 -ansol 7 -konduga 7 -famiy 7 -caracalla 7 -15-megapixel 7 -comedy.tv 7 -oligodendrocyte 7 -speedtest.net 7 -pereverzeva 7 -earthships 7 -laeticia 7 -chandrashekhar 7 -29-18 7 -baggers 7 -25i 7 -pall-ex 7 -team-talks 7 -darko-frempong 7 -gocek 7 -resource-hungry 7 -malach 7 -abdolali 7 -personation 7 -sango 7 -houssaye 7 -akabusi 7 -lense 7 -barnwood 7 -schrodinger 7 -becony 7 -abdela 7 -ammer 7 -53.50 7 -fortuño 7 -zaretskys 7 -unchr 7 -sija 7 -facist 7 -streetpilot 7 -majoda 7 -4,000,000 7 -better-armed 7 -remizov 7 -drag-and-drop 7 -kong-registered 7 -a337 7 -tholut 7 -marinis 7 -fupi 7 -sobey 7 -claritin 7 -sappleton 7 -gorrin 7 -8-minute 7 -tayshana 7 -vanguards 7 -4,120 7 -fardy 7 -atreya 7 -zuckoff 7 -bioengineers 7 -0645 7 -byes 7 
-gottwald 7 -hightail 7 -long-suspected 7 -tryna 7 -ganjavian 7 -fifth-straight 7 -soneva 7 -gopro3 7 -power-grab 7 -139-day 7 -cremes 7 -carbonaro 7 -redesignated 7 -peyo 7 -heddy 7 -skifjell 7 -abstruse 7 -burkart 7 -ramli 7 -8-speed 7 -faizulin 7 -pzt 7 -rohn 7 -mfaa 7 -half-a-minute 7 -sbnation 7 -lievre 7 -damanjit 7 -diagne 7 -timera 7 -lystrosaurus 7 -termoli 7 -tightropes 7 -agno 7 -kersee 7 -kilbourne-smith 7 -biobee 7 -557million 7 -dursun 7 -sealfit 7 -riggio 7 -93billion 7 -aghajanian 7 -daily-deals 7 -guilan 7 -teach-ins 7 -woosh 7 -proshop 7 -retreads 7 -pennyworth 7 -leatherland 7 -balakot 7 -readmit 7 -booska 7 -hek 7 -kuwar 7 -trentonian 7 -trencher-fed 7 -well-trimmed 7 -makdad 7 -dijokota 7 -kosuth-phillips 7 -meier-on-rothschild 7 -parrillo 7 -pranna 7 -pilchards 7 -overbite 7 -choses 7 -frigging 7 -yellowlees 7 -1,561 7 -multi-step 7 -lths 7 -banca 7 -terrett 7 -forename 7 -man-hating 7 -20m-rated 7 -lionti 7 -late-game 7 -fire-bombing 7 -countback 7 -poppett 7 -exwick 7 -dadeville 7 -maini 7 -karlskrona 7 -israel-lebanon 7 -velofeet 7 -deadshot 7 -muhannad 7 -yl 7 -moaners 7 -berlitz 7 -hipa 7 -makhubela 7 -cantal 7 -lucy-anne 7 -gimpel 7 -1537 7 -didymus 7 -1,161 7 -1,168 7 -reactable 7 -klima 7 -chindits 7 -constantinescu 7 -batcombe 7 -sartorialist 7 -hb56 7 -jubilo 7 -raihan 7 -kaluga 7 -19,341 7 -19,340 7 -post-office 7 -phebus 7 -israel-hezbollah 7 -meah 7 -capilla 7 -leikanger 7 -pin-stripe 7 -labatt 7 -nevzat 7 -dettman 7 -ephesos 7 -ex-smoker 7 -ghauri 7 -availed 7 -tandel 7 -lehan 7 -classically-trained 7 -kember 7 -gappy 7 -limpid 7 -duckworth-lewis 7 -zaka 7 -4:34 7 -khandaker 7 -empty-headed 7 -scooper 7 -osses 7 -magli 7 -foxhill 7 -tree-living 7 -standard-sized 7 -furkan 7 -child-molestation 7 -d'luxe 7 -sopel 7 -roadrunners 7 -hefele 7 -hardwoods 7 -games-themed 7 -lapitsky 7 -gang-like 7 -heinemann 7 -weterings 7 -narrow-bodied 7 -6.38 7 -shoot-on-sight 7 -greason 7 -emane 7 -fleshes 7 -gambol 7 -1356 7 -kirker 7 -kronmiller 7 -kinyarwanda 7 -resprayed 7 -wds 7 -wdw 7 -kitchenettes 7 -qriocity 7 -yupaha 7 -encierro 7 -500-square 7 -q-tips 7 -bevelled 7 -joint-bottom 7 -orb-shaped 7 -stigell 7 -haricot 7 -pedo 7 -amsalem 7 -carry-all 7 -galuvao 7 -on-song 7 -brubaker 7 -poncing 7 -pob 7 -poc 7 -chilapa 7 -kungfu 7 -abiy 7 -neukölln 7 -stringency 7 -compiler 7 -2,293 7 -daddydada 7 -military-issued 7 -▲ 7 -opposition-run 7 -ellard 7 -vaird 7 -edp 7 -christian-muslim 7 -faceplant 7 -northug 7 -storrington 7 -julaikah 7 -15-carat 7 -afp/file 7 -schweddy 7 -henstock 7 -3,755 7 -a259 7 -dulces 7 -lamarni 7 -calorie-rich 7 -unis 7 -penguin-cam 7 -long-rumoured 7 -seagrim 7 -fibonacci 7 -stephanorhinus 7 -left-center 7 -bow-and-arrow 7 -padmashini 7 -korea-watchers 7 -4,224 7 -beibi 7 -jaragua 7 -magicbands 7 -8732 7 -joachim-eckert 7 -pilosof 7 -ladbrookes 7 -jobb 7 -4-cylinder 7 -valvano 7 -cue-card 7 -moreirense 7 -koryak 7 -hydrocolloid 7 -sun/part 7 -kibumba 7 -hangmen 7 -5:08 7 -40km/h 7 -1740-1812 7 -1,204 7 -qalandiya 7 -germ-killing 7 -ctd 7 -transmittance 7 -waren 7 -bertulano 7 -canters 7 -ossietzky 7 -#gutted 7 -tanganga 7 -crowd-fund 7 -sts-7 7 -3,030 7 -boyeson 7 -dunnam 7 -4:44 7 -ussocom 7 -pilipchuk 7 -top-grade 7 -domiri 7 -neavin 7 -hvar 7 -boulder-strewn 7 -zummar 7 -brentry 7 -scart 7 -dalles 7 -winickoff 7 -animal-welfare 7 -munck 7 -1,953 7 -hillington 7 -christkind 7 -lukaszewski 7 -chalcot 7 -grandfather-of-eight 7 -zau 7 -cornberg 7 -rogowska 7 -pre-qualify 7 -matauaina 7 -out-done 7 -mcflurries 7 -25th-anniversary 7 -yos 7 -basayev 
7 -catan-keeler 7 -landstra 7 -zelman 7 -soarigami 7 -barrenjoey 7 -lunula 7 -brittanie 7 -massroots 7 -basalts 7 -abdulbaset 7 -anti-asian 7 -mamen 7 -ramidus 7 -16-13 7 -hexogen 7 -11-over-par 7 -non-holiday 7 -dueted 7 -beare 7 -three-sentence 7 -trowels 7 -ynysboeth 7 -palazzani 7 -condy 7 -barager 7 -standaard 7 -crct 7 -lashof 7 -reimann 7 -title-winner 7 -sapungiu 7 -80-inch 7 -korean-language 7 -makerere 7 -enthral 7 -piselli 7 -110km 7 -sun-blocking 7 -right-field 7 -rahwan 7 -putney-wilcox 7 -birchbox 7 -gramado 7 -fiancees 7 -wuning 7 -interest-bearing 7 -calcioscommesse 7 -marondera 7 -13-match 7 -chameleon-like 7 -hussainkhil 7 -pastorally 7 -pasta-maker 7 -podkopaev 7 -deonee 7 -sheers 7 -kucinski 7 -butterly 7 -shaiming 7 -tatianna 7 -soltvedt 7 -habemus 7 -trolleyed 7 -keret 7 -water-gen 7 -kent-born 7 -three-panel 7 -igelko 7 -overspends 7 -kiswahili 7 -mucopolysaccharide 7 -almi 7 -shadoxhurst 7 -119-108 7 -couty 7 -colcord 7 -conary 7 -3m-a-year 7 -pre-winter 7 -icmp 7 -flameless 7 -paxi 7 -krepon 7 -sweezey 7 -coxeter 7 -batger 7 -petulantly 7 -flooding-related 7 -guffawing 7 -reliquaries 7 -woog 7 -displair 7 -super-sizing 7 -check-points 7 -jarjanaz 7 -anglicized 7 -achmad 7 -uwc 7 -soumillon 7 -pascolini 7 -portended 7 -lipoma 7 -paczkowski 7 -sobolik 7 -wired.co.uk 7 -drug-makers 7 -re-analyzed 7 -dustier 7 -totalitarians 7 -luminex 7 -chenlair 7 -senckenberg 7 -reseachers 7 -naaladl2 7 -www.royalcollection.org.uk 7 -291,000 7 -graig 7 -kapustka 7 -feodorovna 7 -4,985 7 -westwell 7 -ultra-federalist 7 -sojitra 7 -ritesh 7 -government-organized 7 -run-of-the 7 -lerer 7 -cubillas 7 -jerryson 7 -heavily-edited 7 -manzur 7 -russia-based 7 -fornos 7 -dorneywood 7 -early-to-mid 7 -grebmeier 7 -zuzana 7 -jeong-min 7 -pmoi 7 -ultra-dense 7 -scm 7 -speakeasies 7 -pawdicures 7 -smaller-sized 7 -preshow 7 -hickel 7 -nextworth 7 -gayner 7 -denni 7 -luetkemeyer 7 -lettre 7 -yonni 7 -2294 7 -unionpay 7 -iju 7 -torch-lit 7 -copulate 7 -colecovision 7 -latonia 7 -tacklekeown@mailonline.co.uk 7 -strimming 7 -5ft5in 7 -hosken 7 -annabella 7 -eytan 7 -kinect-like 7 -self-declaration 7 -work-around 7 -ktxl-tv 7 -domestics 7 -adenine 7 -tolerances 7 -lagoa 7 -al-sakher 7 -ship-borne 7 -cosi 7 -kiloelectron 7 -44-day 7 -wing-suit 7 -meegan 7 -two-sport 7 -esserman 7 -still-under-construction 7 -vidriales 7 -falko 7 -heswall 7 -biagi 7 -wheeliker 7 -high-net-worth 7 -ronquillo-ovalle 7 -torda 7 -gneil 7 -de-funding 7 -zuhal 7 -bendet 7 -under-sevens 7 -maiya 7 -hemed 7 -handbridge 7 -turfe 7 -sosf 7 -forthe 7 -korths 7 -yepmou 7 -kalmykov 7 -propeller-powered 7 -slimes 7 -houvenaghel 7 -altwegg 7 -88.6 7 -12mins 7 -@arsenal 7 -pulpy 7 -stereo-a 7 -fourneyron 7 -password-protect 7 -homeschoolers 7 -blanka 7 -cholobargia 7 -utcs 7 -rossomando 7 -lensky 7 -dorkiness 7 -crow-era 7 -karikari 7 -cortlandt 7 -hallyu 7 -byungpoongon 7 -orel 7 -pungency 7 -wickerman 7 -agosto 7 -overhears 7 -savanovic 7 -lockinge 7 -#leahstrong 7 -cfls 7 -699,000 7 -taurids 7 -donkor 7 -twenty-four-year-old 7 -rowinski 7 -nai 7 -km2 7 -rajasthani 7 -africa2moon 7 -aimti 7 -wosniak 7 -salento 7 -re-analysis 7 -under-5 7 -coalminer 7 -swansea-based 7 -bourges 7 -nahrath 7 -antalina 7 -florence-firestone 7 -llerena 7 -raviglione 7 -1990-1993 7 -cafs 7 -favaloro 7 -actblue 7 -400-point 7 -reduced-calorie 7 -48,876 7 -ramblin 7 -rodriguez-chavez 7 -lahcen 7 -bath-tub 7 -ferntree 7 -pre-check 7 -harleysville 7 -beitenu 7 -makibox 7 -dolliver 7 -recursive 7 -al-awami 7 -pinillos 7 -cristante 7 -ciccarello 7 
-american-trained 7
-aurobindo 7
-tswalu 7
-battreal 7
-reull 7
[... several thousand further deleted lines elided: the removed file is a plain-text token-frequency vocabulary with one "token count" pair per line, sorted by descending count; this hunk spans the remainder of the count-7 entries (ending "-50cc 7") and the beginning of the count-6 entries (starting "-shandling 6") ...]
-mosaic-tiled 6
-yoshikawa 6
-ladan
6 -deptula 6 -rayban 6 -g-eyes 6 -aes 6 -gorodetsky 6 -aei 6 -macconnel 6 -one-hand 6 -icub 6 -coriams 6 -wear-ability 6 -mazumdar 6 -lifeform 6 -seraphim 6 -@queen_uk 6 -22mins 6 -mobile-enabled 6 -me-time 6 -chanakarn 6 -mortally-wounded 6 -shirehampton 6 -12-part 6 -snuffin 6 -melowese 6 -alfi 6 -d-notice 6 -2:36 6 -reinterviewed 6 -leflore 6 -f-6049 6 -kundor 6 -sandos 6 -unforthcoming 6 -josafat 6 -kepler-20e 6 -birchard 6 -40-foot-deep 6 -inadvertant 6 -lgbt-friendly 6 -montañez 6 -fellow-american 6 -scherlach 6 -biometrically 6 -lysterfield 6 -beneifts 6 -muntafiq 6 -kulcsar 6 -al-hamwi 6 -hydrus 6 -lazzaras 6 -sumanahalli 6 -wennekes 6 -kinch 6 -boudchar 6 -happold 6 -menzies-gow 6 -post-1992 6 -ong-bak 6 -great-grandsons 6 -aggravations 6 -6600 6 -ringbearer 6 -tortoise-shell 6 -similan 6 -televisual 6 -balkiz 6 -balkin 6 -onyeahialam 6 -mirkin 6 -golmakani 6 -frelick 6 -elliotts 6 -alhija 6 -kawuri 6 -watterberg 6 -mattisyn 6 -1,314 6 -1,315 6 -1,317 6 -pro-surfing 6 -116890 6 -bergere 6 -vitrolles 6 -feber 6 -35,406 6 -micro-surgery 6 -russian-speakers 6 -hikmet 6 -acheived 6 -counter-strike 6 -boursin 6 -snowsuits 6 -vudu 6 -popular/electoral 6 -city-run 6 -sotak 6 -fire-eater 6 -all-fruit 6 -korean-chinese 6 -2004-2014 6 -paperboys 6 -lower-half 6 -terri-lynne 6 -apoa5 6 -monan 6 -makkelie 6 -monas 6 -babangida 6 -poppy-free 6 -banterbury 6 -roslindale 6 -136cm 6 -cashon 6 -kencia 6 -wild-child 6 -truisms 6 -afra 6 -flatterer 6 -lehndorff 6 -dla2222-0946 6 -gun-and-bomb 6 -crassus 6 -al-ouja 6 -rasky 6 -competizione 6 -dallimore 6 -shanty-town 6 -wimslow 6 -merridale 6 -hirschler 6 -music-making 6 -creekmur 6 -kurdy 6 -blind-spot 6 -cornflower-blue 6 -gambaro 6 -sacca 6 -tetrault 6 -swiffer 6 -crossed-out 6 -kranish 6 -lowlight 6 -amin-smith 6 -seatrepid 6 -kalua 6 -alkiviades 6 -superfalcon 6 -potocki 6 -berthelet 6 -goldspring 6 -handmaid 6 -purtell 6 -flat-screens 6 -autocraft 6 -al-gadhafi 6 -bigbee 6 -smith-crowe 6 -absolutists 6 -lightyears 6 -boothby 6 -wayfarers 6 -millis 6 -abisko 6 -arimed 6 -quickies 6 -behaviourial 6 -plain-looking 6 -yazji 6 -nutbag 6 -bottlenosed 6 -eyelet 6 -callsign 6 -five-meter 6 -.06 6 -galavanting 6 -bijeljina 6 -niebuhr 6 -birthstone 6 -spoilage 6 -hawaiinewsnow 6 -milanich 6 -locked-down 6 -biocompatible 6 -breanne 6 -cult-style 6 -paleoindian 6 -vespas 6 -mudrov 6 -senthooran 6 -megalolz 6 -priest-hunters 6 -jacobe 6 -delaurentis 6 -teleco 6 -sub-headline 6 -smudgeguard 6 -workrooms 6 -hiromichi 6 -volumised 6 -74.9 6 -prawit 6 -stantonbury 6 -n.a. 
6 -unimportance 6 -seaway 6 -black-headed 6 -quaynor 6 -long-tail 6 -aspatria 6 -150c 6 -sutley 6 -trapence 6 -master-class 6 -abdel-azeem 6 -some-one 6 -g4tv 6 -flauntr 6 -studio-based 6 -deffo 6 -muqtedar 6 -44.52 6 -nakorn 6 -ghoneim 6 -toddlerhood 6 -macchu 6 -1,592 6 -calligraphic 6 -tavarua 6 -mirsky 6 -government-contracted 6 -stiffy 6 -19-goal 6 -vesterbro 6 -23/20 6 -golshifteh 6 -homogenization 6 -grovers 6 -kobiashvili 6 -rabbitte 6 -z-dollars 6 -betzaida 6 -double-hundred 6 -hxmm01 6 -well-practiced 6 -guinea-pig 6 -firstenergy 6 -non-irritating 6 -scca 6 -mckinty 6 -quake-stricken 6 -taxol 6 -besar 6 -prosectors 6 -lankester 6 -bealby 6 -scavo 6 -dhanteras 6 -impugning 6 -high-jumper 6 -pea-green 6 -110-acre 6 -oodnadatta 6 -alfafa 6 -hoshiyar 6 -zurer 6 -sportradar 6 -1.5-million 6 -grimsel 6 -,37 6 -dokoupil 6 -shimbashi 6 -ingoe 6 -ossetra 6 -50-point 6 -six-letter 6 -wallpapering 6 -light-footed 6 -tylicki 6 -kourtessiss 6 -snøhetta 6 -reboard 6 -kenebrew 6 -1362 6 -family-maintained 6 -pistelak 6 -sensitizing 6 -battlefronts 6 -nayim 6 -melborne 6 -kinte 6 -abugida 6 -odai 6 -2,196 6 -shuozhou 6 -colturi 6 -52nd-minute 6 -blondish 6 -agilodocodon 6 -crinions 6 -voeller 6 -pazin 6 -1,050,000 6 -9.18 6 -9.12 6 -exotic-looking 6 -levitates 6 -8.76 6 -cookisto 6 -microlending 6 -kalinina 6 -mioko 6 -five-iron 6 -estamos 6 -devanadera 6 -category-a 6 -senlac 6 -cannito 6 -grillings 6 -onformative 6 -race-relations 6 -hamirpur 6 -hucul 6 -tartlet 6 -campbell-moore 6 -davis-correia 6 -caykur 6 -5,012 6 -25.80 6 -fook 6 -saltsburg 6 -multi-brand 6 -quervain 6 -bushcraft 6 -film-inspired 6 -mso-ascii-theme-font 6 -powerpac 6 -barreno 6 -rolly 6 -kazunori 6 -nuna 6 -hoerling 6 -afghan-australian 6 -boruch 6 -pantelligent 6 -bbpa 6 -dmk 6 -3,760 6 -fomina 6 -kipnis 6 -no-spin 6 -lsts 6 -cottars 6 -fatle 6 -jesu 6 -alteplase 6 -foued 6 -bull-fighting 6 -nestora 6 -jeddou 6 -unmaintained 6 -worker-owned 6 -gerwyn 6 -citronelle 6 -obayashi 6 -updates/upgrades 6 -beaney 6 -21mph 6 -vainglory 6 -non-mission 6 -rudds 6 -decimals 6 -150-a-night 6 -andalex 6 -full-fare 6 -shavit 6 -sama 6 -gitesh 6 -bestbuy.com 6 -adultfriendfinder 6 -rawstrone 6 -nickles 6 -tarkhan 6 -dormandy 6 -morayef 6 -womanâ 6 -lineswoman 6 -police-escorted 6 -kailen 6 -oesophago-gastric 6 -strimmers 6 -vapourise 6 -short-cropped 6 -pawlicki 6 -one-take 6 -kinggett 6 -aniruddha 6 -rezayee 6 -mpigi 6 -1,233 6 -essex/hertfordshire 6 -boudia 6 -miles/s 6 -ivano 6 -cwp 6 -langendorff 6 -re-registration 6 -dainus 6 -255-pound 6 -fetishisation 6 -elizabethans 6 -camidryl 6 -chapron 6 -half-blue 6 -hammurabi 6 -22.93 6 -22.95 6 -weeee 6 -l'archevêché 6 -59,500 6 -durlston 6 -pikse 6 -68-page 6 -mpossible 6 -caecilian 6 -well-insulated 6 -great-grandkids 6 -isbn 6 -japanese-trained 6 -mushens 6 -waggin 6 -tcnl 6 -v-reg 6 -bitzer 6 -droniak 6 -tochka 6 -smathers 6 -3:31 6 -pordenone 6 -jew-hatred 6 -multhaup 6 -ogas 6 -ogan 6 -433.70 6 -landreth 6 -thanksgivukkah 6 -x-mas 6 -50-44 6 -tookie 6 -ofri 6 -movietickets.com 6 -subdues 6 -explainers 6 -clear-air 6 -paunchy 6 -dysfunctionality 6 -eblaster 6 -kiwarkis 6 -sollit 6 -materialist 6 -170-pound 6 -apothecanna 6 -berest 6 -l'estaque 6 -sekope 6 -kidasha 6 -corieltauvi 6 -fraternité 6 -rigo 6 -al-islah 6 -qm 6 -landra 6 -title-winners 6 -hulkenburg 6 -jackmans 6 -muradjan 6 -ground-shaking 6 -newton-conover 6 -tinay 6 -boumzar 6 -wishah 6 -moonrakers 6 -proca 6 -lesbo 6 -hypno 6 -broglie 6 -buiding 6 -amgad 6 -strompolos 6 -eight-tonne 6 -aquamarines 6 -shelbie 6 -evandro 6 
-faircompanies.com 6 -saturnalia 6 -stadius-horn 6 -uksa 6 -diffuso 6 -nondirective 6 -issei 6 -step-parents 6 -rasied 6 -zhuara 6 -superheavyweight 6 -season 6 -5,790 6 -sbragia 6 -copywriters 6 -myfoxdc.com 6 -six-year-long 6 -rabies-infected 6 -pontianak 6 -eurl 6 -test-drove 6 -5,400,000 6 -fuel-air 6 -372million 6 -feministing.com 6 -a-train 6 -anekke 6 -cuche 6 -vibha 6 -saundry 6 -1403 6 -140c 6 -oreste 6 -cherubim 6 -fossilise 6 -boyfriend-lawyer 6 -frend 6 -mcvicar 6 -sub-division 6 -noia 6 -tannoys 6 -artai 6 -ekane 6 -state-inspired 6 -ostling 6 -2547-id8 6 -vascularized 6 -olimpic 6 -matos-davis 6 -cartodb 6 -millecamps 6 -cristofer 6 -amerigo 6 -davinderjit 6 -junipero 6 -hawkmoth 6 -151,000-ton 6 -pechyonkin 6 -szwadjer 6 -katabi 6 -#putyourbatsout 6 -dalarna 6 -bemrose 6 -digression 6 -well-filled 6 -moriera 6 -cranach 6 -kardono 6 -restructurings 6 -mogawane 6 -wiseguys 6 -huihui 6 -anti-rocket 6 -get-well-soon 6 -lavrentyev 6 -39-26 6 -worthies 6 -tarana 6 -onewave 6 -wola 6 -ibrutinib 6 -horn-shaped 6 -hugin 6 -shalva 6 -changewave 6 -dionysos 6 -328m 6 -outré 6 -d'amboise 6 -eco-village 6 -49.41 6 -docteur 6 -31.75 6 -candy-coated 6 -anteroom 6 -family-to-be 6 -b'nai 6 -anti-indian 6 -bernabéu 6 -anley 6 -soldered 6 -faciitis 6 -mountnorris 6 -tumulus 6 -sex-starved 6 -uttley 6 -tabber 6 -sandokan 6 -sachenbacher-stehle 6 -multi-way 6 -34-inch 6 -predefined 6 -ostracising 6 -gyantse 6 -lowne 6 -sorbets 6 -fits.me 6 -akie 6 -ex-factor 6 -sharepoint 6 -struck-off 6 -loo-cille 6 -bouchat 6 -thaer 6 -manole 6 -tvshack 6 -soulfulness 6 -hundred-thousand 6 -tolima 6 -menacing-looking 6 -orley 6 -krivan 6 -eco-awareness 6 -sixth-year 6 -magong 6 -beltoise 6 -westerlies 6 -wanjia 6 -maiken 6 -vectored 6 -@number10gov 6 -phase-eight 6 -hs250h 6 -reuinted 6 -personell 6 -probative 6 -czugaj 6 -kongbai 6 -scioto 6 -nathanael 6 -dancy-power 6 -near-darkness 6 -kvue.com 6 -todayâ 6 -silversides 6 -benchers 6 -yaney 6 -counter-radicalisation 6 -560ft 6 -prodromakis 6 -beauregarde 6 -kalachev 6 -bonora 6 -labat 6 -13,520 6 -strongheart 6 -nostell 6 -a69 6 -packers-seahawks 6 -ophelie 6 -romaric 6 -nart 6 -aoraki 6 -meterological 6 -disha 6 -neenah 6 -mandrell 6 -sakurako 6 -darbenzio 6 -yudin 6 -d-conn. 
6 -molecomb 6 -limites 6 -potato-like 6 -holmesburg 6 -1,508 6 -glancey 6 -de-activated 6 -paunescu 6 -braca2 6 -arkleston 6 -unconscionably 6 -telescreens 6 -gulshan 6 -20kgs 6 -sldn 6 -keld 6 -huni 6 -2,456 6 -eighth-century 6 -20-some 6 -demolli 6 -dzerzhinsk 6 -manand 6 -chapaevsk 6 -45millon 6 -generative 6 -paredon 6 -copp 6 -hawkeyes 6 -nlf 6 -sivola 6 -gunkel 6 -kenting 6 -couplet 6 -shebitku 6 -haresh 6 -al-kene 6 -grannis 6 -merrymaking 6 -kambem 6 -xenomorphs 6 -unstressed 6 -yichang 6 -barbershopera 6 -verbalizing 6 -bagiada 6 -18-night 6 -ngako 6 -mymusic 6 -youseff 6 -24.82 6 -nayeri 6 -borocz 6 -34,250 6 -republican-appointed 6 -chenghua 6 -non-music 6 -talibans 6 -doubleheader 6 -llyr 6 -90,718 6 -kundert 6 -barrantes 6 -brackley-based 6 -nihonryori 6 -half-minute 6 -blepharitis 6 -yueqing 6 -dessler 6 -tu-204 6 -mada 6 -second-stage 6 -most-likely 6 -539,000 6 -tenners 6 -fertel 6 -56-minute 6 -gavanis 6 -goujon 6 -music-buying 6 -rannou 6 -sheet-metal 6 -e-retailers 6 -false-positive 6 -buttershaw 6 -anacostia-bolling 6 -clappers 6 -yussef 6 -gigayachts 6 -xdr-tb 6 -oliphant-hope 6 -kpaingba 6 -semi-sheer 6 -mid-to 6 -brudov 6 -shrapnel-packed 6 -fazah 6 -leath 6 -pig-like 6 -killled 6 -chihi 6 -al-nabi 6 -countenanced 6 -swathing 6 -non-transparent 6 -pekár 6 -krcr 6 -mariage 6 -counter-fraud 6 -concessionaire 6 -daramola 6 -coronor 6 -test-optional 6 -luhan 6 -retransmission 6 -1,292 6 -checksfield 6 -dash-8 6 -woolliss 6 -fitschen 6 -marketeer 6 -helicam 6 -pre-requisites 6 -njoroge 6 -druker 6 -playdom 6 -bernucci 6 -yansel 6 -lamonsoff 6 -blackham 6 -14,805 6 -narco-subs 6 -gandhara 6 -pbs.org 6 -tarase 6 -strictly-controlled 6 -a329 6 -nom-de-guerre 6 -re-nationalise 6 -sheriff-coroner 6 -mandleson 6 -hotard 6 -rodgriguez 6 -ual 6 -52-mile 6 -38kkk 6 -bjorkstam 6 -cattron 6 -annenbergs 6 -yomitan 6 -binali 6 -prime-boost 6 -1,107 6 -wallmeyer 6 -4,130 6 -tsunami-crippled 6 -edifício 6 -nodal 6 -namaqua 6 -wdef 6 -masques 6 -father-of-the-bride 6 -jobing.com 6 -tech-themed 6 -stonefaced 6 -barron-edgley 6 -up-down 6 -jts 6 -choccy 6 -epoch-making 6 -usana 6 -moggach 6 -re-fill 6 -lemans 6 -4-dinitrophenol 6 -sudano 6 -bronaugh 6 -wildmon 6 -vijayann 6 -chancres 6 -preplanning 6 -baby-friendly 6 -1,339 6 -lelung 6 -fondaps 6 -24mbps 6 -150,000-square-foot 6 -durably 6 -pyo 6 -kavouni 6 -disberger 6 -kaesmacher 6 -mezzoiuso 6 -ruegen 6 -beshers 6 -times2 6 -21.00 6 -parvaz 6 -tackiness 6 -8seconds 6 -velika 6 -cost-control 6 -adware 6 -armyansk 6 -taxmen 6 -surmountable 6 -jumilah 6 -puffery 6 -clitoridectomy 6 -shahidul 6 -fermín 6 -fifth-most 6 -pop-pop 6 -mtongana 6 -#nypd 6 -happy-clappy 6 -karolev 6 -defrancis 6 -fariña 6 -selys 6 -rodenstock 6 -denmead 6 -12m-rated 6 -booralie 6 -ryuji 6 -enemy-occupied 6 -anti-biotics 6 -kahramanmaras 6 -captain-in-waiting 6 -anti-chelsea 6 -forseth 6 -jobsmatch 6 -togo-flagged 6 -two-million-year-old 6 -ginnaga 6 -keye 6 -moldering 6 -ganzhou 6 -edzna 6 -halit 6 -antidate 6 -empedocle 6 -4.96 6 -gun-maker 6 -rysbrack 6 -dawdon 6 -scaparrotti 6 -weeders 6 -bell-ringer 6 -absecon 6 -hemlocks 6 -149th 6 -francois-marie 6 -self-organise 6 -ever-stylish 6 -bifurcated 6 -stock-car 6 -hesperonychus 6 -under-75s 6 -mwr 6 -mwa 6 -zytaze 6 -poinciana 6 -mohebi 6 -brako 6 -uzaroshvili 6 -behrouz 6 -intimidator 6 -ieft 6 -bodysurfer 6 -kelekian 6 -griga 6 -internalisation 6 -phurba 6 -mid-14th 6 -marcelina 6 -nationalizes 6 -dzaria 6 -nonpunitive 6 -temujin 6 -munchie 6 -sunoto 6 -440-foot 6 -gusta 6 -polykretis 6 -mcgreen 6 -al-shaab 6 
-aerating 6 -kolmar 6 -x-wings 6 -99,913 6 -saison 6 -stickered 6 -two-pilot 6 -paddle-like 6 -teacher-pupil 6 -derrice 6 -tejero 6 -newsgroups 6 -phone-calls 6 -mouelhi 6 -contextualizing 6 -horswill 6 -herbarium 6 -bio-weapon 6 -ciroc 6 -gym-honed 6 -mud-soaked 6 -lawsky 6 -computer-related 6 -goldenballs 6 -boobed 6 -al-nasser 6 -1528 6 -striking-off 6 -1,154 6 -anti-labor 6 -trestman 6 -cratchit 6 -zavier 6 -augustyn 6 -womey 6 -urgel 6 -competiveness 6 -laywers 6 -theftie 6 -engvall 6 -smx-ocean 6 -silah 6 -4:47 6 -ariha 6 -1981-1989 6 -rutles 6 -ashly 6 -kathreen 6 -vavrinyukat 6 -jackpotjoy 6 -zenrobotics 6 -isaih 6 -beddau 6 -rlif 6 -klecandova 6 -slaveholders 6 -foregrounds 6 -shameem 6 -clairsville 6 -oohed 6 -ardour 6 -4,478 6 -grende 6 -incongruent 6 -segodnya 6 -tipoffs 6 -nufer 6 -18,870 6 -skin-whitening 6 -illma 6 -hershesons 6 -seattle-born 6 -bardhe 6 -cedena 6 -unfavorables 6 -phagura 6 -archerfield 6 -thurmont 6 -haviland 6 -tombstoner 6 -schnitts 6 -jaggers 6 -non-prisoners 6 -pre-shot 6 -youth-based 6 -school-day 6 -babyshambles 6 -tupolev-154 6 -pro-ahmadinejad 6 -graslie 6 -rousell 6 -car-jackings 6 -#shootthepolice 6 -feser 6 -siki 6 -pederast 6 -siemian 6 -abasteceme 6 -diffusely 6 -nerve-wrecking 6 -@joan_rivers 6 -chouette 6 -puscariu 6 -dominicanas 6 -ryans 6 -6.28 6 -h.l 6 -croot 6 -polihale 6 -anounced 6 -head-dresses 6 -lchf 6 -nechad 6 -non-islamists 6 -pageot 6 -vasilaros 6 -bellydancer 6 -49,893 6 -powa 6 -drunkeness 6 -freema 6 -500,0000 6 -leap-frogged 6 -bagger 6 -horsdean 6 -cordner 6 -arvier 6 -morou 6 -dumba 6 -mirabile 6 -j.h. 6 -44.24 6 -9.37 6 -doyon 6 -summerscales 6 -8.14 6 -8.13 6 -sleepier 6 -were-rabbit 6 -0.96 6 -non-australian 6 -eboo 6 -revitalisation 6 -amli 6 -barbourville 6 -local10.com 6 -self-medication 6 -thought-through 6 -hersden 6 -jeetan 6 -fauvel 6 -dowagiac 6 -cyclogenesis 6 -sundeen 6 -wallis-bennett 6 -atheroma 6 -unsterilized 6 -fusses 6 -izhak 6 -2,280 6 -2,285 6 -2,289 6 -abysses 6 -pemuteran 6 -brashears 6 -forestiere 6 -sexson 6 -isafjordur 6 -asian-based 6 -16-ft 6 -hunstville 6 -friends-of-friends 6 -branyan 6 -godfreys 6 -gadlin 6 -wingett 6 -farihi 6 -anti-marriage 6 -phythians 6 -calvino 6 -firestation 6 -hudler 6 -stress-test 6 -beta-catenin 6 -smurfit 6 -fitzwater 6 -juneberries 6 -c-45 6 -kronotsky 6 -68g 6 -r.e.a.d. 
6 -mcdiving 6 -downland 6 -memphis-arkansas 6 -tyshawn 6 -okpo 6 -closely-knit 6 -translogic 6 -blinkah 6 -kosmos-1220 6 -8700 6 -kabatensis 6 -layland 6 -unprecendented 6 -baldwins 6 -borsodi 6 -kjolhede 6 -awrey 6 -waddon 6 -50,900 6 -isumi 6 -binyah 6 -quasi-official 6 -pre-debate 6 -sanmiguel 6 -non-graduates 6 -471,192 6 -suger 6 -clausewitz 6 -oliveria 6 -fosita 6 -robot-maker 6 -erechtheion 6 -futtock 6 -barasky 6 -1,210 6 -1,213 6 -al-khilafa 6 -makeba 6 -shiress 6 -steampunks 6 -2night 6 -whitington 6 -lushest 6 -portbou 6 -kael 6 -7,517 6 -peritoneum 6 -bathyscaphe 6 -52,650 6 -tsn 6 -bwi 6 -re-manufacturing 6 -carrender 6 -punch-out 6 -mukerji 6 -vietjetair 6 -incahuasi 6 -hans-dieter 6 -varallo-specken 6 -ba'ponga 6 -crudes 6 -cruder 6 -doree 6 -horridus 6 -marmor 6 -mahendraparvata 6 -annussek 6 -anmuth 6 -high-reward 6 -shafting 6 -ojen 6 -spodek 6 -flame-red 6 -curnook 6 -mashadur 6 -koroush 6 -eharmony.co.uk 6 -wdrb.com 6 -pamphleteer 6 -capitulations 6 -western-born 6 -pollocks 6 -pro-establishment 6 -oxelson 6 -monobrow 6 -time-based 6 -hyung-sung 6 -knopfel 6 -dirge 6 -akobo 6 -treehugger 6 -huangdi 6 -taizidang 6 -vanderwork 6 -lodgepole 6 -two-and-a-half-hours 6 -frizz-free 6 -cross-trained 6 -chatwin 6 -spear-throwers 6 -butyrate 6 -jayashi 6 -skateway 6 -hoermanseder 6 -blipfoto 6 -brooklyn-raised 6 -baleful 6 -américas 6 -goguen 6 -niosh 6 -pre-inaugural 6 -rieh 6 -in-sync 6 -stemguard 6 -105.9 6 -sweetbreads 6 -price-war 6 -18,365 6 -1069 6 -1065 6 -hairlines 6 -gradulenko 6 -fantasy-themed 6 -banyam 6 -said.he 6 -kolars 6 -smoke-exposed 6 -a-dd 6 -matings 6 -qe3 6 -garath 6 -catharina-amalia 6 -reicher 6 -chain-like 6 -king-emperor 6 -falahee 6 -gaydos 6 -coolalinga 6 -self-reflective 6 -professionalization 6 -open.richard 6 -hyperpartisanship 6 -collectivization 6 -yolan 6 -pontiacs 6 -kuga 6 -bubblicious 6 -first-of-a-kind 6 -hallo 6 -asahikawa 6 -molja 6 -jinshanling 6 -boeings 6 -unusal 6 -konjuh 6 -post-verdict 6 -merlis 6 -reason.tv 6 -se-yul 6 -rauisuchid 6 -ratnoff 6 -stanfa 6 -peronard 6 -jaruzelska 6 -85s 6 -green-jobs 6 -peltomaa 6 -kassamali 6 -puhl 6 -1463 6 -grade-level 6 -kalidas 6 -1,031 6 -1,037 6 -1,036 6 -gilleo 6 -pre-washed 6 -has-beens 6 -albayati 6 -shaine 6 -ps853 6 -yousuke 6 -basilicata 6 -19996 6 -noki 6 -frends 6 -lep 6 -lirey 6 -angelucci 6 -ashby-de-la-zouch 6 -warmisham 6 -turquoises 6 -cold-war 6 -estimable 6 -self-executing 6 -doelen 6 -chanthalavong 6 -23.00 6 -wgal 6 -ishbel 6 -most-respected 6 -morning-show 6 -cherdchai 6 -retrains 6 -ahwaz 6 -one-bathroom 6 -mateel 6 -short-to-medium 6 -leatham 6 -laines 6 -ericha 6 -pavol 6 -hoskison 6 -whip-like 6 -year-after-year 6 -epc 6 -bivvy 6 -namara 6 -mindstorms 6 -1,150,000 6 -koops 6 -machuret 6 -customer-facing 6 -pre-defined 6 -patellar 6 -vlasko 6 -castelo 6 -burchmore 6 -valere 6 -speen 6 -heuberger 6 -waqas 6 -sub-post 6 -korean-made 6 -3268 6 -dorada 6 -tweedle 6 -sapunaru 6 -uyanwah 6 -milefield 6 -sheika 6 -etage 6 -vinyls 6 -i-league 6 -hendler 6 -1999-2003 6 -heimler 6 -rear-impact 6 -shenzhen-based 6 -wingback 6 -stormforce 6 -allder 6 -cute-looking 6 -troitsky 6 -20-bedroom 6 -drpic 6 -bienvenidos 6 -insuperable 6 -well-disposed 6 -code-cracking 6 -12,360 6 -tight-rope 6 -w14 6 -w1k 6 -lamilla 6 -non-humans 6 -boydston 6 -metaspriggina 6 -oxidants 6 -asani 6 -gnip 6 -10,000-word 6 -bonassar 6 -taser-related 6 -amirahmadi 6 -2.375 6 -prestonpans 6 -blaspheme 6 -galal 6 -fellow-spaniard 6 -washburne 6 -cent2 6 -nazi-propaganda 6 -jalbert 6 -aubrianne 6 -chemung 6 
-shanthakumaran 6 -kode 6 -long-accepted 6 -sawer 6 -jaime-leigh 6 -taverne 6 -lobster-red 6 -loper 6 -newstrom 6 -wryneck 6 -yanca 6 -b.a.s.e. 6 -northville 6 -numismatics 6 -moneygram 6 -jafry 6 -blacket 6 -friese 6 -138.9 6 -golla 6 -drawing-room 6 -hugine 6 -timebombs 6 -shahrekord 6 -malicious/wanton 6 -litening 6 -7,359 6 -padasas 6 -chewed-up 6 -weebubbie 6 -51bn 6 -boetcher 6 -ribberink 6 -kjeldergaard 6 -under-statement 6 -danyel 6 -all-defensive 6 -gufa 6 -shontelle 6 -change.gov 6 -detesting 6 -tasoff 6 -taybarns 6 -2,473 6 -mchinji 6 -dad-to-be 6 -cossairt 6 -etitle 6 -brené 6 -muddier 6 -limburger 6 -book-keeping 6 -delevaux 6 -bombrini 6 -oubina 6 -ngbangu 6 -ibbs 6 -scagell 6 -phangura 6 -tempus 6 -khaim 6 -n,a-depea 6 -vibrance 6 -15metres 6 -super-storms 6 -uhmbt 6 -spoorwegen 6 -wader 6 -torey 6 -kaycee 6 -stoneyholme 6 -stress-buster 6 -somoza 6 -postholes 6 -zachys 6 -76per 6 -methodism 6 -naraha 6 -seven-branched 6 -page-harvey 6 -jacobsson 6 -munteau 6 -whip-smart 6 -pundir 6 -instabraid 6 -reinsert 6 -6.3-inch 6 -service.the 6 -vilca 6 -12,100 6 -datejust 6 -larkana 6 -blakley 6 -pro-hunger 6 -prcic 6 -1292 6 -unhackable 6 -starbright 6 -chami 6 -kentigern 6 -afrojack 6 -liskula 6 -mosko 6 -moska 6 -lemaster 6 -over-payments 6 -palmiers 6 -hibachi 6 -plesea 6 -petrosky 6 -semi-automated 6 -lynwen 6 -yeechoo 6 -holms 6 -costică 6 -10-way 6 -barkman 6 -cross-device 6 -baabaas 6 -lrc 6 -skoosh 6 -diaz-sosa 6 -sashina 6 -letšeng 6 -scoots 6 -alcohol-fulled 6 -10-by-10-foot 6 -kinkier 6 -santaella 6 -horkan 6 -soesilo 6 -lamerat 6 -qnexa 6 -pooing 6 -iborra 6 -maizes 6 -vrede 6 -cavor 6 -festina 6 -jung-su 6 -237million 6 -lumas 6 -semones 6 -bilotta 6 -oriental-style 6 -micro-yachtsman 6 -english-hating 6 -israeli-annexed 6 -l-plate 6 -zero-star 6 -cheeked 6 -429.25 6 -ikramm 6 -ladarious 6 -movie-theater 6 -china-watchers 6 -rodale 6 -gloeckler 6 -windemere 6 -kwqc 6 -mandrills 6 -daytrip 6 -meiosis 6 -sussi 6 -back-channels 6 -dead-pan 6 -barragry 6 -kiekow 6 -masslive 6 -gottschlich 6 -sobbi 6 -oncotype 6 -ucd 6 -threadsmiths 6 -eichorn 6 -peaceniks 6 -suniel 6 -phlebas 6 -65876 6 -fochriw 6 -childre 6 -six-months-pregnant 6 -anthawn 6 -rothelowman 6 -civil-liberties 6 -albero 6 -indyref 6 -nacua 6 -oldmeadow 6 -pieles 6 -scoffings 6 -faraque 6 -tarida 6 -back-track 6 -scott-moncrieff 6 -1136x640 6 -out-take 6 -370-acre 6 -chedwyn 6 -kmbc-tv 6 -antianxiety 6 -hiebert 6 -khap 6 -bull-run 6 -vác 6 -nowling 6 -ilda 6 -heyring 6 -hamayoon 6 -raucher 6 -670-page 6 -paetz 6 -nongaming 6 -craviotto 6 -nalani 6 -only-child 6 -lurvey 6 -lemalu 6 -overdrinking 6 -backheeling 6 -miscellany 6 -newspeak 6 -q-waves 6 -mareto 6 -azahar 6 -3-g 6 -3-9 6 -kedem 6 -kanavape 6 -89.68 6 -mazzotta 6 -rahulan 6 -unshockable 6 -c/d 6 -aynsley-green 6 -dekofsky 6 -2013-2030 6 -lipset 6 -kuppers 6 -3,129 6 -3,120 6 -surayev 6 -never-released 6 -pappardelle 6 -antov 6 -aeron 6 -lossau 6 -rochdale-born 6 -gogarth 6 -circumvents 6 -bassmaster 6 -beveren 6 -300-horsepower 6 -herschelle 6 -siamo 6 -255.8 6 -356million 6 -democrat-friendly 6 -mohiddin 6 -maquettes 6 -detweiler 6 -taslaq 6 -half-lives 6 -ozmint 6 -medistat 6 -hyeon 6 -withern 6 -biomed 6 -30,300 6 -51-pass 6 -jhonattan 6 -hassle.com 6 -under-7s 6 -niton 6 -crimeline 6 -krakatoa 6 -piccone 6 -lacquers 6 -marxism-leninism-mao 6 -bogden 6 -kpcb 6 -kakata 6 -montagnier 6 -cristopher 6 -cerone 6 -iaass 6 -marteyn 6 -servet 6 -kipsiro 6 -kalil 6 -shifman 6 -safah 6 -safai 6 -campell 6 -cat-loving 6 -camelids 6 -kalbarri 6 -eternit 6 
-scrumbag 6 -tuffy 6 -inoki 6 -kashkarova 6 -#nycwhat 6 -fimoral 6 -one-sheet 6 -alametifarika 6 -nse 6 -1,533 6 -1,539 6 -liepiøö 6 -payrise 6 -jaures 6 -medical-device 6 -backover 6 --2.9 6 -ye-bin 6 -six-stroke 6 -85-percent 6 -tumino 6 -baraniuk 6 -confiscatory 6 -shu-chen 6 -home-birth 6 -smokeys 6 -erosive 6 -valentijn 6 -darnah 6 -goni 6 -curiousity 6 -brucker-cohen 6 -re-appeal 6 -saale 6 -anagraciela 6 -biffin 6 -swingy 6 -winarsky 6 -lotterywest 6 -vtol 6 -leetaru 6 -echostar 6 -panschow 6 -235mph 6 -sustainably-sourced 6 -extruded 6 -receieved 6 -teske 6 -schep 6 -qaradhi 6 -yongala 6 -hengchun 6 -162826 6 -vouches 6 -august/september 6 -clopidogrel 6 -emotion-charged 6 -cupholder 6 -tirri 6 -ghadar 6 -20-minutes 6 -1-100 6 -pink-ball 6 -580million 6 -aquanauts 6 -baitadi 6 -cortona 6 -zagallo 6 -nine-times 6 -geall 6 -nerazzuri 6 -burro 6 -non-aryans 6 -sarsour 6 -much-watched 6 -238m 6 -xz494 6 -zoulika 6 -surenos 6 -social-economic 6 -mso-ansi-language 6 -hellesdon 6 -demaree 6 -transnistrian 6 -tyjuan 6 -edgehd 6 -austerely 6 -katsavos 6 -miram 6 -osten 6 -p-1 6 -deverell 6 -mcbrain 6 -42,550 6 -40519 6 -galicians 6 -pypt 6 -aleysha 6 -disease-related 6 -souad 6 -weightier 6 -r-pa. 6 -26cm 6 -prize-fighting 6 -f.h. 6 -sutcliffe-keenan 6 -tahun 6 -southern-fried 6 -brithday 6 -java-based 6 -sebel 6 -bored-looking 6 -kitchen-table 6 -gabri 6 -apete 6 -quance 6 -satruday 6 -puttonyos 6 -nirad 6 -bisoli 6 -i-275 6 -chavin 6 -roomoon 6 -tuukka 6 -nashoba 6 -demaray 6 -mulyadi 6 -zilker 6 -malham 6 -14,183 6 -tarak 6 -2010man 6 -tshiri 6 -122-page 6 -ncib 6 -ncic 6 -zouaiou 6 -quad-band 6 -overtreatment 6 -buchtel 6 -four-nil 6 -godefroit 6 -level-one 6 -176lbs 6 -cmag 6 -55891 6 -gwisai 6 -ladywell 6 -actuly 6 -gharials 6 -al-chalabi 6 -alphanumeric 6 -shatanawi 6 -yotun 6 -oakenfold 6 -zelkowitz 6 -parmoor 6 -mini-computer 6 -konchinsky 6 -lehmacher 6 -seaweeds 6 -cayler 6 -well-flighted 6 -5in-long 6 -discontinuous 6 -avibus 6 -nikolov 6 -bhatkal 6 -tintern 6 -8.38 6 -whapshare 6 -amne 6 -thurles 6 -whiffed 6 -forty-year 6 -sumanda 6 -libres 6 -denitra 6 -saaristo 6 -coolman 6 -four-fight 6 -solferino 6 -16-foot-high 6 -pinna 6 -illinoisans 6 -battens 6 -2011-15 6 -colmes 6 -650bhp 6 -marcinkova 6 -shkolnik 6 -bjorklund 6 -9spitch 6 -#louisville 6 -44,000-a-year 6 -martinovic 6 -j-10 6 -three-horned 6 -carsick 6 -hyperextension 6 -stickman 6 -evena 6 -wearing?caller 6 -izmash 6 -elshamy 6 -brentside 6 -7:59 6 -maktabah 6 -dallakoti 6 -de-icers 6 -29-second 6 -penketh 6 -gueguen 6 -501-day 6 -3,720 6 -aids2014 6 -thullbery 6 -sanaei 6 -steinfort 6 -broon 6 -rausings 6 -kayapo 6 -optum 6 -cyclotron 6 -daveed 6 -dahane 6 -krizan 6 -koeltl 6 -benchich 6 -zankoul 6 -siculus 6 -raubal 6 -t-64 6 -unnava 6 -metabank 6 -@pat_healy 6 -mixmag 6 -saic 6 -boatlift 6 -kuszak 6 -one-hit-wonder 6 -105mw 6 -xultzn 6 -azizollah 6 -watermarking 6 -siswi 6 -kemane 6 -antonellis 6 -dhc-3 6 -steenberg 6 -46min 6 -germantown-penn 6 -darriel 6 -axolotls 6 -powledge 6 -beir 6 -beis 6 -husseine 6 -marchington 6 -ejective 6 -jesselyn 6 -gider 6 -kempenaar 6 -fruit-pickers 6 -tatoo 6 -1:26 6 -vanua 6 -rajkovic 6 -1,278 6 -1,272 6 -samera 6 -jianyin 6 -unkept 6 -vongerichten 6 -philippino 6 -yemen-born 6 -upwardly-mobile 6 -stenseth 6 -villian 6 -penderyn 6 -pekarek 6 -kennedy-thomas 6 -footpads 6 -3,060 6 -imagineers 6 -hybrid-electric 6 -over-commit 6 -rambasek 6 -2-bedroom 6 -268th 6 -cross-fit 6 -nintendoland 6 -loupe 6 -bereket 6 -chamorro-premuzic 6 -linalool 6 -pennycook 6 -hifa 6 -vallinas 6 
-kamogawa 6 -tigolo 6 -annouced 6 -tesser 6 -lura 6 -niner 6 -progam 6 -idiakez 6 -quartermaine 6 -mashall 6 -dil-doh 6 -wide-brim 6 -hauswirth 6 -rebelution 6 -ji-hoon 6 -zbc 6 -whitsand 6 -41-yard 6 -sanilac 6 -carrozzini 6 -synth-pop 6 -rahs 6 -rahu 6 -schara 6 -lake-side 6 -suntrap 6 -58817 6 -goulbourne 6 -quiana 6 -bank-robbing 6 -laththam 6 -john-roger 6 -tukwini 6 -doña 6 -pallino 6 -heeling 6 -kasanin 6 -106km/h 6 -dunwel 6 -gownder 6 -hodgman 6 -mabeliever 6 -lamacq 6 -readington 6 -a7734 6 -family-controlled 6 -nadeshot 6 -perrons 6 -worldstarhiphop.com 6 -zainal 6 -gobind 6 -andthe 6 -goather 6 -vonk 6 -lapicida 6 -money-saver 6 -nordstrom.com 6 -halmich 6 -pirro 6 -okeke 6 -70-1 6 -tinpot 6 -gailani 6 -kmtr 6 -16ft-long 6 -priester 6 -monkwearmouth 6 -spaeth 6 -sunnybank 6 -mederos 6 -mesny 6 -lva 6 -near-silent 6 -drane-burdick 6 -barazani 6 -wierzbicka 6 -jiamei 6 -sietske 6 -now-derelict 6 -sussurro 6 -amjed 6 -spiff 6 -honesty-humility 6 -alzayani 6 -totman 6 -loth 6 -e.surv 6 -flipflops 6 -medicare-approved 6 -issak 6 -roquebrune 6 -mollard 6 -appealingly 6 -ohchr 6 -unmis 6 -chimichanga 6 -best-actor 6 -etude 6 -chelle 6 -borongan 6 -lulas 6 -self-centeredness 6 -mbvoumin 6 -ondigital 6 -36th-minute 6 -lillien 6 -raeside 6 -biogeography 6 -831 6 -83m 6 -guiting 6 -water-intensive 6 -zonked 6 -sa'ilele 6 -pizza-eating 6 -spore-forming 6 -herrion 6 -gropp 6 -vivente 6 -jerilyn 6 -asuad 6 -olano 6 -charni 6 -singley 6 -proliferates 6 -41,200 6 -streamco 6 -china.com.cn 6 -marie-antoinette 6 -v/h/s 6 -hairatan 6 -pilska 6 -d-md 6 -börse 6 -sweatx 6 -soto-class 6 -lundin 6 -eskenazi 6 -suretha 6 -mieczkowski 6 -gayl 6 -traide 6 -female-owned 6 -orsola 6 -190.5 6 -zarkava 6 -678,000 6 -38-7 6 -rielly 6 -800-metre 6 -americanum 6 -bemilo 6 -melquiesha 6 -vardinoyannis 6 -anirudh 6 -zhdanova 6 -minqin 6 -erf 6 -ern 6 -zakroczymski 6 -proact 6 -chiadika 6 -newark-liberty 6 -mège-mouriès 6 -kanchoo 6 -superflare 6 -alcover 6 -washington-dulles 6 -bouclé 6 -highview 6 -heatons 6 -nontherapeutic 6 -troutman 6 -o'er 6 -anti-quarks 6 -fondaco 6 -shaabi 6 -hasabah 6 -freelances 6 -human-generated 6 -soughton 6 -demobilisation 6 -embolisation 6 -ddr3 6 -york-seoul 6 -paluzzi 6 -49.00 6 --94 6 -water-carved 6 -blaenymaes 6 -forced-labor 6 -urilift 6 -marilu 6 -priewpan 6 -demory 6 -wonderlick 6 -olympiastadion 6 -high-magnification 6 -27307 6 -sub-types 6 -bergdhal 6 -chautard 6 -pittaway 6 -newsbusters 6 -1264 6 -masutha 6 -villified 6 -Ølby 6 -akua 6 -akut 6 -clegane 6 -bio-weapons 6 -ffls 6 -500-700 6 -trevilla 6 -azran 6 -110-metre 6 -citty 6 -chicago-to-amsterdam 6 -sawhney 6 -willaston 6 -hyper-sensitivity 6 -midgette 6 -belarusians 6 -moran-allen 6 -rb10 6 -craybas 6 -samycia 6 -houstonians 6 -mcmillon 6 -sjogreen 6 -110-foot 6 -sixways 6 -reiljan-dillon 6 -geral 6 -hingorani 6 -skåne 6 -half-point 6 -ktab 6 -over-paid 6 -state/we 6 -creditably 6 -moroxydine 6 -licancabur 6 -yenisei 6 -snt 6 -crimestopper 6 -4,685 6 -pohontu 6 -diva-ish 6 -unley 6 -175lb 6 -porto-vecchio 6 -westfjords 6 -1993-1995 6 -baylen 6 -badama 6 -badami 6 -salernitana 6 -rotax 6 -staithes 6 -aikau 6 -re-certified 6 -managements 6 -al-faiz 6 -kamale 6 -nitrosamines 6 -aour 6 -metzker 6 -lewitsky 6 -black-furred 6 -heerema 6 -vandiver 6 -stephensons 6 -l13 6 -ogren 6 -tumlinson 6 -prabhupada 6 -decoupled 6 -baumgardner 6 -c-x17 6 -tayana 6 -ubiquitously 6 -mabille 6 -142.1 6 -leappad2 6 -10.28 6 -torremocha 6 -jdeida 6 -kuebler 6 -wfc3 6 -salked 6 -conservatorium 6 -lollypop 6 -prugova 6 -ayoreos 6 -septi 6 
-sugarbabe 6 -terracycle 6 -akinremi 6 -safe-houses 6 -tonbul 6 -novelty-seeking 6 -zyablikova 6 -firkin 6 -nonjihadist 6 -barbarini 6 -wade-brown 6 -drug-ravaged 6 -video-rental 6 -fiddian-green 6 -in-kyung 6 -pref 6 -9,622 6 -tee-time 6 -linoleic 6 -over-fifties 6 -10ten 6 -89,770 6 -481,098 6 -slathers 6 -500,000-per-year 6 -guman 6 -handcross 6 -canova 6 -robothespians 6 -casemate 6 -semester-long 6 -brisenia 6 -myaeung 6 -mint-condition 6 -olzak 6 -feuchtwang 6 -fgh 6 -ozin 6 -huayi 6 -ic3 6 -sucharita 6 -fridging 6 -sub-second 6 -sigge 6 -wheelwrights 6 -broecker 6 -anshun 6 -tejinder 6 -bidart 6 -neophytes 6 -suprising 6 -eylandt 6 -vinent 6 -surles 6 -hadayati 6 -receptivity 6 -privitisation 6 -quakertown 6 -non-immigrant 6 -petryszyn 6 -sursok 6 -claudon 6 -2-week 6 -chittering 6 -anesi 6 -aradhana 6 -feiner 6 -asdago 6 -chubbiest 6 -soccer-loving 6 -askale 6 -dacalanio 6 -aklan 6 -sarod 6 -saros 6 -wearable-tech 6 -switch-over 6 -jouni 6 -94per 6 -offspinner 6 -lpp 6 -hahne 6 -five-diamond 6 -rubirosa 6 -sub-fossilised 6 -sgr-1 6 -post-2012 6 -liveleaks 6 -two-for 6 -meindertsma 6 -polska 6 -dats 6 -2,548 6 -2,540 6 -positivism 6 -terps 6 -cnn/time 6 -multi-annual 6 -zilna 6 -brundrett 6 -heliostat 6 -g.b.f. 6 -doraiswamy 6 -mosler 6 -senedd 6 -bananaman 6 -grievers 6 -esgaio 6 -conducing 6 -mjelde 6 -#meninist 6 -knutton 6 -oestrogen-like 6 -terblanche 6 -mecklenburg-vorpommern 6 -4,000-7 6 -slitty 6 -durgos 6 -109.7 6 -109.6 6 -senova 6 -bairnsdale 6 -johnsburg 6 -300million-a-year 6 -d-n.j. 6 -quattroporte 6 -pro-rights 6 -second-skin 6 -understory 6 -brazi 6 -sensitiser 6 -townview 6 -jean-bouin 6 -genographic 6 -42,995 6 -hawaiian-style 6 -crew-neck 6 -wdav 6 -three-race 6 -harran 6 -coquette 6 -er2015 6 -altocumulus 6 -data-intensive 6 -toe-sucking 6 -52245 6 -n2o 6 -ameida 6 -schoomaker 6 -dark-grey 6 -jardine-brown 6 -yiwei 6 -ready-prepared 6 -crepp 6 -slant-eyed 6 -fashion-inspired 6 -debt-to-income 6 -fanney 6 -punishingly 6 -aromatase 6 -dagupan 6 -metasearch 6 -soltaniyeh 6 -f18s 6 -gsk/niaid 6 -torneo 6 -giulietti 6 -pul 6 -puz 6 -cosslett 6 -dulu 6 -politicker 6 -hernandez-brown 6 -vaulters 6 -nickel-plated 6 -zakouma 6 -laprade 6 -chiranjeevi 6 -webdriver 6 -schwanz 6 -louisiana-lafayette 6 -70-somethings 6 -2,165 6 -beuzelin 6 -inflexion 6 -24oz 6 -hinves 6 -shaban 6 -evissa 6 -geekier 6 -banyala 6 -berven 6 -minibuilders 6 -lenda 6 -tellez-gagliano 6 -tv-friendly 6 -helena-west 6 -catchline 6 -oblong-shaped 6 -fabriah.com 6 -bimonthly 6 -fyretv 6 -sluis 6 -map-making 6 -anstis 6 -under-5s 6 -mso-hansi-font-family 6 -strongarm 6 -bigalow 6 -tobola 6 -sfax 6 -78-page 6 -near-the-knuckle 6 -dadawa 6 -quartz.com 6 -hathitrust 6 -facta 6 -unsa 6 -guard-interior 6 -tryin 6 -1088 6 -horse-and-buggy 6 -obesity-linked 6 -recognisance 6 -inbal 6 -zolani 6 -398million 6 -v.j. 6 -fille 6 -houtong 6 -mitlin 6 -right-to-left 6 -bruty 6 -tylney 6 -sooooooo 6 -soheir 6 -pre-trip 6 -anybots 6 -a52 6 -synecdoche 6 -d'amours 6 -morphologically 6 -1,515 6 -heinousness 6 -muncy 6 -mahala 6 -skyscape 6 -ould-abdallah 6 -killens 6 -mootoo 6 -khata 6 -nordlys 6 -mkt 6 -mk7 6 -walecka 6 -3he 6 -isreali 6 -fakarova 6 -nordschleife 6 -preece-kelly 6 -m.h. 
6 -netherfield 6 -night.the 6 -crash-lands 6 -delayno 6 -ellie-beth 6 -behrle 6 -epke 6 -yansoro 6 -chairmanships 6 -abaseya 6 -swines 6 -arzuaga 6 -hernán 6 -kratos 6 -moallim 6 -shobhna 6 -mini-jobs 6 -toilet-shaped 6 -rockie 6 -intersperses 6 -cleavage-boosting 6 -abaetetuba 6 -#freegaza 6 -haramis 6 -6,790 6 -half-ironman 6 -watemberg 6 -gelareh 6 -goldmann 6 -#wecanlandonacometbutwecant 6 -cactuses 6 -1,500-a-month 6 -circe 6 -stainrod 6 -mewes 6 -state-of-art 6 -rain-free 6 -skyliner 6 -vilely 6 -dichio 6 -naja 6 -overlapper 6 -hadaway 6 -smartthings 6 -adrenalin-fuelled 6 -hs2aa 6 -xhibitionist 6 -adventurousness 6 -bolutito 6 -re-igniting 6 -co-developer 6 -akinesia 6 -mahlo 6 -webinars 6 -salon-style 6 -vividness 6 -ndn 6 -gear-box 6 -promptings 6 -38,300 6 -cossington 6 -burnoski 6 -sharpeners 6 -12/08/2012 6 -koruna 6 -topmouth 6 -chistyokov 6 -lycerius 6 -yingst 6 -breneisha 6 -sustersic 6 -burtenshaw 6 -throttle-control 6 -vetra 6 -atifa 6 -laemmle 6 -intourist 6 -scim 6 -desmoplastic 6 -dissolutions 6 -lashun 6 -torito 6 -zubin 6 -zdenka 6 -jogye 6 -neurocam 6 -paluku 6 -paraorchestra 6 -israeli-style 6 -super-long 6 -mumbo-jumbo 6 -pre-lunch 6 -degeer 6 -qf904 6 -ka'ohe 6 -gamze 6 -guanghua 6 -robotcar 6 -submillimeter 6 -aesculapian 6 -stickup 6 -nikesh 6 -scar-faced 6 -quietens 6 -almax 6 -buker 6 -hans-christian 6 -six-discipline 6 -laferlita 6 -falbo 6 -sumo-1 6 -heart-break 6 -6.69 6 -wdtv 6 -may-october 6 -champs-elysées 6 -blood-doping 6 -al-niran 6 -reveries 6 -1302 6 -1303 6 -bizot 6 -4.2-metre 6 -bulcke 6 -gatineau 6 -yannakoudakis 6 -recently-announced 6 -sportspersons 6 -schloesser 6 -alpine-style 6 -echinoderms 6 -ausbrook 6 -survivorman 6 -hetton-le-hole 6 -sexters 6 -greensides 6 -spsqa 6 -ucil 6 -lemp 6 -mizoulina 6 -alarm-monitoring 6 -9.76 6 -9.72 6 -lenford 6 -pimbongkod 6 -malabsorption 6 -hydrometeorological 6 -cross-stitch 6 -hyung-jin 6 -underqualified 6 -iribaren 6 -drina 6 -if/when 6 -drini 6 -˜we 6 -lakenham 6 -beautymeter 6 -dogpile 6 -boho-chic 6 -frankenberg 6 -kormos 6 -quick-moving 6 -blood-and-guts 6 -brunstrom 6 -hanzelin 6 -selfie-sticks 6 -pjh 6 -kivel 6 -calera 6 -kaganda 6 -miskeen 6 -tamsen 6 -hospitalising 6 -sexperts 6 -terma 6 -slickly-edited 6 -2.5million-a-year 6 -obj 6 -gastroscopy 6 -640million 6 -lumper 6 -personalising 6 -biscoff 6 -al-mustafa 6 -kassar 6 -kassai 6 -stenigot 6 -lubitsch 6 -kissh 6 -h3d-50 6 -mispronounces 6 -borré 6 -mcwade 6 -nahle 6 -shyamol 6 -belgian-style 6 -25million-a-year 6 --0.6 6 -zacharia 6 -13,450 6 -baby-sitters 6 -galit 6 -laiblova 6 -funtleyder 6 -canary-yellow 6 -sixth-biggest 6 -stridgeon 6 -garoupe 6 -sommarström 6 -ex-blackpool 6 -high-blood 6 -palta 6 -pac-10 6 -bretforton 6 -lucidly 6 -2003/4 6 -pantaloons 6 -6,280 6 -marzio 6 -light-gathering 6 -hambo 6 -309th 6 -caldbeck 6 -passi 6 -bandula 6 -outrageousness 6 -caliban 6 -five-for 6 -celevac 6 -berrio 6 -sobota 6 -pre-fascist 6 -adulteress 6 -ballogie 6 -syphoning 6 -350th 6 -onamia 6 -38,000-a-year 6 -hunsaker 6 -secor 6 -rabodirect 6 -airaisa 6 -takemori 6 -al-qarawi 6 -swibinski 6 -ardbeg 6 -mabhunu 6 -mukono 6 -gold-wrapped 6 -amatullah 6 -d-alaska 6 -drogon 6 -quicksands 6 -tusken 6 -swee 6 -vergura 6 -tatma 6 -cheerlead 6 -grab-bag 6 -coffee-coloured 6 -cromdale 6 -disapply 6 -frumkin 6 -betsson 6 -naiden 6 -jagiela 6 -venkys 6 -kimmage 6 -hair-tie 6 -@sirjvenables 6 -delapoer 6 -afrah 6 -anti-organized 6 -pro-moussavi 6 -llanrwst 6 -doram 6 -ex-racehorse 6 -circadia 6 -reo-coker 6 -unhatched 6 -boomsound 6 -sloman 6 -cohadon 6 
-firewire 6 -biolite 6 -vicari 6 -austalia 6 -yellow-spotted 6 -momani 6 -al-mohammed 6 -non-koreans 6 -essington 6 -uplinks 6 -bloodsucker 6 -tiba 6 -neurostimulator 6 -zdt 6 -kräutli 6 -steel-capped 6 -ganganagar 6 -jenman 6 -amangalla 6 -mbanenande 6 -mega-project 6 -gambarin 6 -oath-taking 6 -maekawa 6 -umaine 6 -pelsall 6 -moodiest 6 -kiejkuty 6 -sm-3 6 -drystone 6 -azaris 6 -sadiya 6 -rappahannock 6 -sawrey 6 -flyspeck 6 -frangos 6 -semiletov 6 -real-money 6 -rat-bite 6 -paleosol 6 -tranquillityite 6 -ndsu 6 -kornze 6 -volograd 6 -lucansky 6 -lidoline 6 -skyroll 6 -1022 6 -1028 6 -1,471 6 -1,473 6 -1,472 6 -1,474 6 -80,000-strong 6 -hand-making 6 -box-sized 6 -term-limits 6 -surgeon-in-chief 6 -bumbliness 6 -dystocia 6 -www.liverpoolfc.com 6 -misnomers 6 -Éric 6 -al-badani 6 -driver-davies 6 -mob-style 6 -breaktime 6 -squeem 6 -danwei.org 6 -westwynd 6 -jhona 6 -bergantinos 6 -llenroc 6 -male-centric 6 -worldy 6 -97.1 6 -urubamba 6 -redler 6 -lilliputians 6 -485lb 6 -winckler 6 -lorz 6 -eliazrov 6 -upi.com 6 -arogya 6 -zlatea 6 -sayas 6 -whitetips 6 -canach 6 -catizone 6 -iorys 6 -'35 6 -try-scorers 6 -bushe 6 -500,000-square-foot 6 -camas 6 -silopi 6 -geminoid 6 -nymphas 6 -whupping 6 -foula 6 -blackish 6 -18-inch-wide 6 -muscaria 6 -cataldo 6 -1,073 6 -chotinaram 6 -ergen 6 -nooo 6 -wilkesboro 6 -innovates 6 -brisas 6 -over-thinking 6 -long-vacant 6 -general-designate 6 -ex-france 6 -anudanit 6 -bobbio 6 -gt86 6 -then-business 6 -beirut-born 6 -suffragan 6 -boscone 6 -#bluelivesmatter 6 -ghanouchi 6 -eiriol 6 -ben-ghiat 6 -soto-barraza 6 -maisons-laffitte 6 -5,260 6 -200-mph 6 -wgem 6 -tard 6 -taru 6 -charity-run 6 -countdowns 6 -pirrie 6 -yeehaw 6 -al-shalan 6 -testwuide 6 -finanza 6 -kysa 6 -cll 6 -munasar 6 -beechworth 6 -yakubov 6 -niabi 6 -rehberger 6 -seaux 6 -kotal 6 -etx 6 -guinn 6 -pauk 6 -pre-integrated 6 -augustenborg 6 -rockhouse 6 -zakoscielny 6 -mlb2k13 6 -post-code 6 -kragh 6 -girlanda 6 -re-graded 6 -mi-5 6 -sevaré 6 -hawala 6 -multidrug-resistant 6 -ultra-long 6 -papandronicou 6 -cuttin 6 -tramaine 6 -chotu 6 -fouth 6 -harrowden 6 -fürstenberg 6 -gitta 6 -30.00 6 -robe-like 6 -c-type 6 -cukraszda 6 -jellyroll 6 -matadeen 6 -pennery 6 -pennysylvania 6 -rumniak 6 -1208 6 -shinnick 6 -marystell 6 -schoolgate 6 -burbull 6 -tipis 6 -moben 6 -lutzes 6 -interminably 6 -gner 6 -jauncey 6 -self-organized 6 -bezy 6 -chaudhuri 6 -us24 6 -sammonds 6 -esurance 6 -osti 6 -permissiveness 6 -ugg-a-wugg 6 -7.3-magnitude 6 -hierapolis 6 -pistol-wielding 6 -rahina 6 -bosun 6 -dubh 6 -six-string 6 -liseberg 6 -angeles-bound 6 -kuriakose 6 -vaenuku 6 -data-stealing 6 -long-reported 6 -blickling 6 -998cc 6 -spuyten-duyvil 6 -twitter.com/the_topspin 6 -longboarding 6 -funseekers 6 -swartz-garcia 6 -cepheids 6 -ithug 6 -rublyovka 6 -undiagnosable 6 -lonafarnib 6 -innis 6 -wrong-sized 6 -neuropsychopharmacology 6 -cledford 6 -prior-palmer 6 -kronospan 6 -myob 6 -portugalophis 6 -authorial 6 -feriozzi 6 -bedie 6 -instant-message 6 -nelson-king 6 -1102 6 -generalizing 6 -asabe 6 -felches 6 -quick-time 6 -438,000 6 -467,000 6 -repetti 6 -matloff 6 -re-bar 6 -hard-bound 6 -southyork 6 -metabolizing 6 -slow-burn 6 -engavest 6 -greyed 6 -miltants 6 -24-15 6 -fiancées 6 -birwood 6 -free-runners 6 -vam.ac.uk 6 -lotti 6 -diamond-rich 6 -theda 6 -kallos 6 -sainvil 6 -queniborough 6 -hoisin 6 -harpaz 6 -telescoping 6 -#royalbaby 6 -21-time 6 -0.2-inch 6 -shubenacadie 6 -skyman 6 -zemmamouche 6 -care.can 6 -madzilla 6 -demar 6 -nunchuks 6 -volyn 6 -tourrettes-sur-loup 6 -r15 6 -bushmans 6 -bliar 6 
-pyronin 6 -kempenaers 6 -self-mutilating 6 -kanun 6 -cash-filled 6 -montana-based 6 -c-retriever 6 -toran 6 -ctrl.alt.shift 6 -creches 6 -canonsburg 6 -topley 6 -caplen 6 -xxxxxxxxxl 6 -crosshair 6 -governator 6 -megunticook 6 -non-marital 6 -tax-avoiding 6 -krush 6 -addar 6 -20.16 6 -obfuscated 6 -noppadon 6 -sirena 6 -re/max 6 -solutionism 6 -14-21 6 -valdobbiadene 6 -50kw 6 -drabek-chritten 6 -guarapuava 6 -54823 6 -impractically 6 -101.6 6 -internees 6 -mcghee-brown 6 -masaharu 6 -downwash 6 -faz 6 -fah 6 -gafes 6 -l-series 6 -wiedwald 6 -gallaxhar 6 -mackynzie 6 -post-herpetic 6 -hakwons 6 -5,275 6 -six-berth 6 -4,180-passenger 6 -kittle 6 -mastung 6 -menzah 6 -self-combust 6 -deprecating 6 -pointy-headed 6 -hoefsloot 6 -pixelate 6 -wilko 6 -pro-chavez 6 -moyoy 6 -zouk 6 -readymade 6 -tunes-style 6 -white-knuckled 6 -hungate 6 -bool 6 -a.l.o. 6 -gulmarg 6 -starhawk 6 -saimir 6 -rizzo-acevedo 6 -elliff 6 -hirschorn 6 -allaf 6 -chargé 6 -pronghorn 6 -dava 6 -4,560 6 -shillington 6 -woodlesford 6 -behavour 6 -kahlood 6 -aisam-ul-haq 6 -surabaya-to-singapore 6 -amaani 6 -rotberg 6 -atal 6 -wastepaper 6 -handwrite 6 -stuckart 6 -cengher 6 -pulvinar 6 -diffractive 6 -104.7 6 -postdoc 6 -172r 6 -beaties 6 -172m 6 -randeep 6 -1729 6 -grahovo 6 -gombi 6 -dot-111 6 -merovingian 6 -sree 6 -fact-checks 6 -intimation 6 -3,865 6 -3,860 6 -957,000 6 -dsc-qx10 6 -lajpat 6 -plot-line 6 -tomiko 6 -chadeisson 6 -hpakant 6 -szymanska 6 -paetongtarn 6 -non-affiliated 6 -bhanot 6 -compeition 6 -jiefang 6 -laso 6 -medermit 6 -freshfield 6 -davises 6 -al-rimi 6 -pancreatoblastoma 6 -tri-bar 6 -cushier 6 -himbrechts 6 -capitolina 6 -hertzel 6 -near-miracle 6 -improbabilities 6 -ahwatukee 6 -quanique 6 -qualitest 6 -trademe 6 -paxos 6 -158billion 6 -gypped 6 -jamelyn 6 -ratuva 6 -helgesson 6 -trivialization 6 -batzel 6 -selfie-loving 6 -khalily 6 -firies 6 -todaschev 6 -delma 6 -maiolica 6 -voiceprint 6 -commanday 6 -drumset 6 -basendowah 6 -bharadia 6 -1800km 6 -tuataras 6 -dinkytown 6 -pavs 6 -fullscreen 6 -jetsuite 6 -mastiff-type 6 -aurland 6 -mid-1700s 6 -dendle 6 -unconquerable 6 -book-lined 6 -giroptic 6 -distaff 6 -authentec 6 -cossies 6 -businessinsider.com 6 -crausby 6 -goral 6 -a606 6 -eye-fi 6 -frackowiak 6 -0.58 6 -roastery 6 -stukus 6 -iconeme 6 -mikulec 6 -asexually 6 -scraton 6 -pietrangelo 6 -turnbulls 6 -small-batch 6 -multi-taskers 6 -saygin 6 -kiro-fm 6 -digusting 6 -isaq 6 -seys 6 -mausolea 6 -hamnell 6 -bolides 6 -reinfected 6 -reoccupation 6 -hovantseva 6 -slave-trading 6 -fifteen-minute 6 -94-degree 6 -rumbo 6 -157.9 6 -unsheathed 6 -andrette 6 -hummell 6 -school-yard 6 -lungaro 6 -cognoptix 6 -belfast-based 6 -non-shared 6 -french-controlled 6 -37-minute 6 -cothill 6 -mattino 6 -fuel-tank 6 -nagaoka 6 -moita 6 -lakehal 6 -sucumbios 6 -sukumar 6 -xipamide 6 -kpakio 6 -wasfi 6 -neurostimulation 6 -pldb 6 -keci 6 -open-era 6 -katchadourian 6 -anti-technology 6 -wftx 6 -abdalsalam 6 -parasitoid 6 -lich 6 -a76 6 -lionhearted 6 -bokolmayo 6 -20-hectare 6 -nivison 6 -1105 6 -fast-finishing 6 -al-auwewy 6 -r12 6 -wmas 6 -pizzaexpress 6 -marshale 6 -mmx 6 -mmu 6 -mmi 6 -mmc 6 -pyrrhic 6 -146million 6 -internists 6 -31in 6 -grovesnor 6 -vankulick 6 -montignac 6 -tulepo 6 -udong 6 -waist-to-height 6 -chipewyan 6 -seagrim-trinder 6 -ollman 6 -longcroft 6 -chm 6 -chahuites 6 -coving 6 -wolwedans 6 -aquiline 6 -tumaco 6 -maartje 6 -portues 6 -kihei 6 -kyleigh 6 -bridesburg 6 -vaudin 6 -30-km 6 -scarlett-marie 6 -guediora 6 -sbarro 6 -2020-21 6 -farm-fresh 6 -supercavitating 6 -joint-biggest 6 
-chazz 6 -orana 6 -kankhwende 6 -odgers 6 -y-chromosomes 6 -2,041 6 -robba 6 -rubieh 6 -wahed 6 -hadham 6 -ta-nehisi 6 -under-inflating 6 -bilyik 6 -encumber 6 -clucky 6 -pedophilic 6 -muffuletta 6 -avocation 6 -vitas 6 -armenian-american 6 -pegulas 6 -radio/tv 6 -h-4 6 -egg-based 6 -sweatsuits 6 -pollie 6 -x132 6 -gielinor 6 -wairarapa 6 -simeonette 6 -km3net 6 -immunologists 6 -phinny 6 -savoy-trained 6 -b4rn 6 -rkoi 6 -juhi 6 -45.07 6 -talon-like 6 -savannas 6 -biswal 6 -university-led 6 -gittes 6 -gatusso 6 -davyd 6 -scorns 6 -rashidov 6 -tiebreaking 6 -talking-point 6 -anibong 6 -apy 6 -apf 6 -clubgoer 6 -deckhands 6 -sagami 6 -kimchee 6 -dacai 6 -rpp 6 -rpr 6 -multidirectional 6 -colombino 6 -empanelled 6 -akoun 6 -outskirt 6 -ocle 6 -wtae-tv 6 -www.net-a-porter.com 6 -knie 6 -bédat 6 -fifty-year-old 6 -sieh 6 -fluoresce 6 -kazel 6 -25a 6 -khq.com 6 -towaco 6 -farry 6 -unclogging 6 -cephas 6 -mlynarczyk 6 -darwich 6 -genetically-blessed 6 -semi-collapsed 6 -zinnbauer 6 -6.44 6 -airspaces 6 -ismai 6 -bailkal 6 -malaria-related 6 -brownrigg 6 -19mph 6 -bigdog 6 -2015-now 6 -stanchfield 6 -björk 6 -fougasse 6 -dornella 6 -post-military 6 -aventis 6 -shiley 6 -mclachrie 6 -jadarius 6 -odil 6 -175-mile 6 -mohnton 6 -overdramatic 6 -radiocentre 6 -n-y-p-d 6 -80-bed 6 -nederlandse 6 -leos 6 -iron-on 6 -ranjeet 6 -smokable 6 -idents 6 -leemon 6 -63-years-old 6 -teklehaimanot 6 -highest-priority 6 -estatesdirect.com 6 -flexibilities 6 -x904 6 -66.1 6 -helichrysum 6 -opalescent 6 -nest-building 6 -semi-urban 6 -leagrave 6 -schuhbeck 6 -gega 6 -rocío 6 -wolffepack 6 -sucker-punching 6 -greep 6 -naftalis 6 -aeroworks 6 -child-endangerment 6 -phencyclidine 6 -2,227 6 -zmeinyj 6 -longdi 6 -ehya 6 -posessing 6 -gajdusek 6 -skhurina 6 -monkhood 6 -wango 6 -@nswpolice 6 -shhhh 6 -orthosis 6 -tree-line 6 -eku 6 -a8x 6 -bang-up 6 -chiffons 6 -warhammer 6 -ductile 6 -menorrhagia 6 -israel-egypt 6 -gulacsi 6 -stanage 6 -non-engagement 6 -canicon 6 -kohein 6 -iknife 6 -942,000 6 -romboni 6 -cabrel 6 -2,000-a-head 6 -star-packed 6 -kharay 6 -mils 6 -cdna 6 -90-pound 6 -ny/nj 6 -garrone 6 -leggera 6 -mumpreneur 6 -eight-stage 6 -rennae 6 -delthy 6 -michniewicz 6 -pontyates 6 -piso 6 -daylyn 6 -28kg 6 -cquin 6 -1,565 6 -1,563 6 -diringer 6 -conference-goers 6 -schabas 6 -orange-tinted 6 -contalmaison 6 -rebroadcasts 6 -111ft 6 -tehran-bound 6 -toy-maker 6 -280ft 6 -spohn 6 -kamille 6 -bayetti 6 -well-formed 6 -j&k 6 -highly-detailed 6 -kwanten 6 -0.346 6 -millwork 6 -564,000 6 -newchurch 6 -katcharian 6 -over-qualified 6 -superstud 6 -per-cent 6 -sugoi 6 -afronauts 6 -cordina 6 -thymosin 6 -churchgate 6 -rathburn 6 -70pc 6 -venzone 6 -kupers 6 -truesdale 6 -katee 6 -sellards 6 -sling-back 6 -satisfactions 6 -barehanded 6 -cinephiles 6 -iclvr 6 -eco-community 6 -perella 6 -wyborne 6 -high-cbd 6 -quasicrystal 6 -blace 6 -63,800 6 -lowermoor 6 -jousted 6 -swakeleys 6 -1979-87 6 -kirra-belle 6 -19.87 6 -19.82 6 -hibu 6 -regoli 6 -goliad 6 -dilshad 6 -hyung-suk 6 -+65 6 -doireann 6 -+60 6 -ponferradina 6 -amnestic 6 -ushuaïa 6 -konkov 6 -al-mayadeen 6 -infuser 6 -220km 6 -wangyan 6 -coast-hugging 6 -ex-staffer 6 -iwokrama 6 -now-cancelled 6 -namee 6 -radar-like 6 -battle-winning 6 -rouges 6 -hlinko 6 -sayyaff 6 -scurtis 6 -countinho 6 -maruti 6 -groundlessly 6 -haeber 6 -tilyard 6 -8100 6 -well-disciplined 6 -carcini 6 -inculcating 6 -ostrich-like 6 -stimphil 6 -wangechi 6 -whetton 6 -al-loheidan 6 -brantham 6 -flapping-wing 6 -iriana 6 -victoriaville 6 -quiapo 6 -interrogates 6 -hardmen 6 -a-minus 6 
-butterfinger 6 -bandow 6 -wjw-tv 6 -tripod-mounted 6 -nn 6 -stuti 6 -kokotan 6 -jürgens 6 -1007 6 -eikmeier 6 -bekhal 6 -vegara 6 -15,278 6 -loose-leaf 6 -jai'launi 6 -m.b.a. 6 -bouallegue 6 -lovech 6 -sauteraud 6 -1,725,000 6 -festy 6 -offredo 6 -tax-saving 6 -tct 6 -motor-neurone 6 -helensvale 6 -muons 6 -mapother 6 -hupmobile 6 -lampung 6 -grooveshark 6 -vanhall 6 -kozorog 6 -80-feet 6 -e-meter 6 -palitzsch 6 -german-held 6 -heit 6 -lepton 6 -halfy 6 -adath 6 -benns 6 -laevis 6 -e-voting 6 -lopa 6 -parum 6 -2006-10 6 -kotler 6 -480ft 6 -mamafesto 6 -webvan 6 -ortolan 6 -maquis 6 -20,000-worth 6 -aparicio 6 -ex-senior 6 -queiq 6 -6,450 6 -79mph 6 -neroes 6 -jaws-dropping 6 -77.9 6 -wampanoag 6 -2006-2011 6 -wftv-tv 6 -islamist-inspired 6 -borcino 6 -quadrangles 6 -185billion 6 -noortje 6 -keyfer 6 -sigiriya 6 -laxami 6 -hjortur 6 -paddle-out 6 -ligne 6 -presdiential 6 -scops 6 -cashley 6 -olaba 6 -legume 6 -jaelyn 6 -vastu 6 -novemeber 6 -tyning 6 -rubinger 6 -11.51 6 -11.57 6 -5wkt 6 -glams 6 -hursts 6 -bemment 6 -meetme.com 6 -side-line 6 -goettingen 6 -380ft 6 -semnan 6 -plane-mounted 6 -shelfie 6 -merimbula 6 -sonkar 6 -tati 6 -nagol 6 -altug 6 -goldpaint 6 -lovitt 6 -prudishness 6 -cajas 6 -evy 6 -rheams 6 -thickeners 6 -filderman 6 -pawp 6 -gch 6 -feminize 6 -purdin 6 -mershon 6 -maraig 6 -over-produced 6 -gweon 6 -meral 6 -bonidy 6 -abuzayd 6 -bourchier 6 -austero 6 -heraud 6 -49mins 6 -al-samawi 6 -rigid-hulled 6 -hollywood-based 6 -american-ness 6 -hunshandake 6 -roweni 6 -co-runs 6 -0808 800 2222 6 -ocularists 6 -placas 6 -tech-world 6 -she-spots 6 -neckwear 6 -tergat 6 -putrescine 6 -1221 6 -1220 6 -1228 6 -surfthechannel.com 6 -lascaux 6 -solution-oriented 6 -damali 6 -edification 6 -metalhead 6 -makelovenotporn.tv 6 -d-brooklyn 6 -field-of-view 6 -subscription-only 6 -coastalwatch 6 --0 6 -lonrho 6 -125km 6 -irag 6 -herlovson 6 -swinemoor 6 -three-on-one 6 -foundries 6 -concessionaires 6 -01622 6 -over-controlling 6 -rapino 6 -combat-style 6 -kozhara 6 -wintle 6 -iason 6 -rowville 6 -jewelry-designer 6 -ukrainian-speaking 6 -consigli 6 -il-78 6 -faux-documentary 6 -heiselt 6 -dulk 6 -cosmi 6 -queenland 6 -optimises 6 -pheme 6 -revatio 6 -diet-pill 6 -tbl 6 -idilbi 6 -partially-naked 6 -glapton 6 -probating 6 -miramshah 6 -levines 6 -squid-fishing-boats 6 -zazzz 6 -wenders 6 -strong-smelling 6 -35-nation 6 -capuchino 6 -peverall 6 -2600bc 6 -dennen 6 -chiagouris 6 -michaelwade 6 -20,000-foot 6 -wide-area 6 -kronosaurus 6 -portocarrero 6 -changez 6 -recombine 6 -portello 6 -hoefler 6 -appletv 6 -gywneth 6 -al-suhail 6 -bozich 6 -neocolonial 6 -jackpot-winning 6 -cast-mates 6 -decadron 6 -owuor 6 -osmani 6 -twinztv 6 -fulkerson 6 -repetitiveness 6 -german-controlled 6 -fiambala 6 -buhach 6 -zaccard 6 -philydrosauras 6 -isentress 6 -rockstars 6 -rockstarz 6 -hermanus 6 -long-denied 6 -anti-brotherhood 6 -diapause 6 -channa 6 -boera 6 -non-school 6 -mayadin 6 -lings 6 -frog-marched 6 -externalize 6 -5.81 6 -5.82 6 -5.89 6 -kunsthaus 6 -trail-blazing 6 -walne 6 -morella 6 -mejorada 6 -jgc 6 -finnegans 6 -#pumpkinfest 6 -resentence 6 -ill-feelings 6 -amsterdammers 6 -frebbles 6 -roughened 6 -dodie 6 -1,300-acre 6 -jydesmon 6 -189.99 6 -26,955 6 -maskey 6 -thompstone 6 -1941-1945 6 -isobar 6 -anti-ukrainian 6 -70000 6 --91 6 -donhe 6 -n.y.c. 
6 -cogently 6 -unite-here 6 -amhurst 6 -impasses 6 -targowski 6 -kersjes 6 -gie 6 -gib 6 -livestreamed 6 -sourness 6 -healthline 6 -qfes 6 -houpapa 6 -mcclare 6 -lipoproteins 6 -slipstreaming 6 -mahoney-smith 6 -crosscheck 6 -winmill 6 -northington 6 -omokudu 6 -rhum 6 -crueler 6 -al-hamdani 6 -bresma 6 -saltonstall 6 -matei 6 -uncomplimentary 6 -generationally 6 -linnes 6 -radenovic 6 -sirikoi 6 -six-team 6 -renly 6 -haev 6 -ruoff 6 -randel 6 -saint-hilaire 6 -ltc 6 -cadgwith 6 -collodictyon 6 -tulou 6 -kenshill 6 -kouchak 6 -dust-off 6 -petite-malle 6 -cinq 6 -25/26 6 -ceptor 6 -jobling 6 -14.500 6 -29.0 6 -2.8-inch 6 -floral-inspired 6 -cascia 6 -2,509 6 -sheelagh 6 -gallery-goers 6 -ifeoma 6 -theen 6 -uzoenyi 6 -kakkar 6 -abidogun 6 -yotaphone 6 -szanto 6 -durrow 6 -caol 6 -jilib 6 -2,068 6 -leeched 6 -1,739 6 -vastly-inflated 6 -european-backed 6 -oxon. 6 -ebitda 6 -horam 6 -ljova 6 -horay 6 -co-anchoring 6 -supergiants 6 -3,842 6 -697,000 6 -ecpa 6 -jovon 6 -kyari 6 -cancer-suffering 6 -terraforming 6 -uig 6 -uis 6 -tortuguero 6 -by-passing 6 -reuel 6 -keala 6 -hengdian 6 -160.3 6 -jennat 6 -chenille 6 -depersonalisation 6 -shantaram 6 -saint-lô 6 -50.0 6 -highbaugh 6 -dermaeraze 6 -adjoua 6 -8:58 6 -ktnv-tv 6 -chidham 6 -jennalyn 6 -teven 6 -38k 6 -heckendorf 6 -3,295 6 -simplyhealth 6 -anti-knife 6 -neimann 6 -contempts 6 -furrer 6 -kat-7 6 -marenoff 6 -dummying 6 -praha 6 -elliott-smith 6 -grimsman 6 -essaghaier 6 -nunneries 6 -brianwlee1 6 -caffé 6 -ardon 6 -ceja 6 -samajis 6 -self-pleasuring 6 -rabbit-themed 6 -romagnoli 6 -adzes 6 -bajram 6 -fulminating 6 -128km 6 -taliban-run 6 -aviana 6 -wtaj 6 -hepatocytes 6 -jaidyn 6 -a628 6 -22-13 6 -stanleys 6 -22-18 6 -sinop 6 -tanji 6 -somber-looking 6 -roboscreens 6 -sieger 6 -whatchu 6 -virendra 6 -vernet 6 -xukang 6 -punic 6 -soderlund 6 -muifa 6 -binstead 6 -254th 6 -cordiello 6 -ecoisland 6 -milson 6 -cornellfetch 6 -biochemists 6 -three-weeks-old 6 -manresa 6 -68-foot 6 -rozs 6 -farsetti 6 -white-clad 6 -stupas 6 -junazaj 6 -rodriguez-jeff 6 -bug-free 6 -verrückt 6 -inroad 6 -49-46 6 -49-48 6 -merwedeplein 6 -pogge 6 -carnitine 6 -15.76 6 -al-bouti 6 -brelfies 6 -20-35 6 -gilby 6 -hantman 6 -g.e. 
6 -hernon 6 -superwomen 6 -35,000-ton 6 -domaines 6 -shakerchi 6 -saggital 6 -saiki 6 -a1m 6 -double-edge 6 -a17 6 -as-salaam 6 -novatek 6 -ricken 6 -phenolics 6 -pawscars 6 -90-95 6 -cianni 6 -aravind 6 -hydroxyanisole 6 -howrey 6 -150/1 6 -30m-rated 6 -maizhokunggar 6 -krymzen 6 -nalanda 6 -murex 6 -facewaver 6 -charitynavigator.com 6 -nineham 6 -torrx 6 -almshouse 6 -6:51 6 -6:57 6 -petropavlovsk-kamchatsky 6 -near-bankruptcy 6 -pottawattamie 6 -turfgrass 6 -brearey 6 -paramethoxyamphetamine 6 -sulphur-crested 6 -metamodernist 6 -freezer-wave 6 -tordrillo 6 -sophisticates 6 -wbstv 6 -minirdis 6 -cnnfc 6 -mcmartin 6 -quiero 6 -obeng 6 -interferometry 6 -makarenko 6 -carmi 6 -self-limited 6 -immortalizing 6 -meleri 6 -prixs 6 -left-eye 6 -re-label 6 -mail_gpoll 6 -harakat-ul-mujahedeen 6 -rhymefest 6 -bismuth 6 -algibhah 6 -wakar 6 -record-eagle 6 -badibanga 6 -florie 6 -hamdaniya 6 -petrini 6 -axe-like 6 -eastburns 6 -hervías 6 -karabekir 6 -vargas-perez 6 -2fwww 6 -norwell 6 -magdeline 6 -two-count 6 -tradeshow 6 -tablada 6 -verbrugghe 6 -bennis 6 -plecity 6 -700ad 6 -lankler 6 -krøyer 6 -d-68 6 -leendertz 6 -raffie 6 -out-sprinted 6 -rotundus 6 -trevon 6 -foggia 6 -australia-born 6 -web-surfing 6 -arl 6 -805/646 6 -pizzorusso 6 -garba 6 -60-64 6 -trolle 6 -11-400 6 -orientalist 6 -stupefaction 6 -shetisha 6 -motsepe 6 -asokummar 6 -wevill 6 -roullier 6 -alvarez-icaza 6 -sahaba 6 -sila 6 -windpower 6 -sturge 6 -guadango 6 -brontosaurus 6 -chitral 6 -greenhow 6 -durfield 6 -inseam 6 -smith-start 6 -scotlandsdna 6 -victorian-themed 6 -appleby-socket 6 -varathabawan 6 -ornella 6 -43rd-minute 6 -arch-villain 6 -cryolift 6 -szczypiorski 6 -pseudonymously 6 -potty-training 6 -anthracotheres 6 -cheverlaine 6 -#bbc 6 -aromatherapeutic 6 -underdone 6 -triviality 6 -1m-plus 6 -nuzman 6 -perchance 6 -1991-96 6 -64-man 6 -ogles 6 -local10 6 -co-executors 6 -french-speakers 6 -angiovac 6 -degolyer 6 -jio 6 -jit 6 -jip 6 -ceiriog 6 -rajni 6 -personalty 6 -nawarkhele 6 -dameron 6 -moynier 6 -porto-novo 6 -777f 6 -self-effacement 6 -kilmuir 6 -nastily 6 -pygmys 6 -brisbanites 6 -ucmd 6 -rodnina 6 -mauricienne 6 -re-occurrence 6 -albertus 6 -bread-based 6 -ferryr 6 -zoulova 6 -denouncements 6 -rifat 6 -diganosed 6 -sub-class 6 -promiscuously 6 -saysell 6 -velissariou 6 -home-schools 6 -xxxxxxxxx 6 -nu-tek 6 -internet-linked 6 -rollespilsfabrikken 6 -brabata 6 -bordallo 6 -long-terms 6 -homesafe 6 -agbi 6 -tenafly 6 -67,609 6 -avoriaz 6 -natufian 6 -jepkosgei 6 -baret 6 -armazones 6 -fitness-freak 6 -buckycubes 6 -women.com 6 -9pictured 6 -atasha 6 -emy 6 -pousoulidis 6 -timelord 6 -makawao 6 -160c 6 -wible 6 -yifei 6 -lubkivsky 6 -laminator 6 -andrise 6 -26th-minute 6 -laid-out 6 -alang 6 -unsouvenirs 6 -ctfs 6 -lithophone 6 -507,000 6 -big-bottomed 6 -nonvenomous 6 -burpham 6 -baldwin-felts 6 -buday 6 -hartono 6 -yankovich 6 -mixed-income 6 -rzeszut 6 -piete 6 -jenkins-pietrzak 6 -building-block 6 -game-management 6 -30-city 6 -kammerer 6 -450-mile 6 -yijian 6 -yonge 6 -gagandeep 6 -catelyn 6 -war-zones 6 -saint-surin 6 -berchem 6 -chepeha 6 -dyrstad 6 -moulted 6 -ellenora 6 -4900 6 -wfsb-tv 6 -767,000 6 -foss-greenway 6 -balaz 6 -yemane 6 -murru 6 -harteau 6 -then-mistress 6 -non-participation 6 -breton-striped 6 -yaleni 6 -shigal 6 -lobzin 6 -shift.ms 6 -well-staffed 6 -comegna 6 -rosaliac 6 -alkadamani 6 -highly-specialised 6 -haemorrhoid 6 -rogowski 6 -khiri 6 -32mins 6 -xcitra 6 -boz 6 -phasers 6 -flunky 6 -my9nj 6 -dicowden 6 -saher 6 -forward-half 6 -two-team 6 -19.65 6 -ailton 6 -s-word 6 
-http://www.nbcdfw.com/templates/nbc_partner_player?cmsid= 6 -hypochlorite 6 -al-nida 6 -fish-like 6 -paddypower 6 -eowyn 6 -agronomist 6 -ubik 6 -colline 6 -cieszlak 6 -b777 6 -euphorically 6 -metservice 6 -wathkes 6 -hall-long 6 -basque-only 6 -prizeo 6 -sadowski 6 -wn114 6 -320ft 6 -goretorium 6 -mustards 6 -front-office 6 -al-shallal 6 -fowlty 6 -49-game 6 -1,300-square-meter 6 -64lbs 6 -vallaury 6 -almost-certain 6 -elesban 6 -sbeity 6 -medbox 6 -riml 6 -adamopoulou 6 -diageo/hotline 6 -al-akri 6 -quick-draw 6 -intensions 6 -1,433 6 -helperby 6 -dequattro 6 -beechboro 6 -gentlemint 6 -non-greek 6 -purkiss 6 -cheal 6 -evgenii 6 -goop-branded 6 -750bhp 6 -algodones 6 -18-22 6 -barona 6 -tarra 6 -3hrs 6 -tmorej 6 -dhaulagiri 6 -anti-noise 6 -seppenfield 6 -cadaver-sniffing 6 -kepler-37 6 -kepler-32 6 -redlap 6 -pizap 6 -30-98 6 -mediatech 6 -hagenuk 6 -krummrich 6 -dzongs 6 -brazlian 6 -amiee 6 -nhanes 6 -15-15 6 -122-mm 6 -nayda 6 -whf 6 -blackthorne 6 -whu 6 -whs 6 -all-time-low 6 -aksakov 6 -streader 6 -8,709 6 -countermanded 6 -niedermeier 6 -self-study 6 -ecomumy 6 -hawkley 6 -55-plus 6 -thinkpad 6 -car-mounted 6 -thefix.com 6 -catasaurus 6 -cylance 6 -adamyan 6 -hexagon-shaped 6 -stratou 6 -tasik 6 -tasic 6 -natural-gas 6 -emb-500 6 -narleski 6 -ogola 6 -ursus 6 -siarnicki 6 -state-financed 6 -aristov 6 -sulphite 6 -sunshimmer 6 -sino-british 6 -gulenists 6 -better-for-you 6 -11.37 6 -14,000-foot 6 -tempel-tuttle 6 -526,000 6 -300,000-ton 6 -mogil 6 -twis 6 -d'amario 6 -venzon 6 -edrich 6 -costwolds 6 -then-israeli 6 -hudsons 6 -kar-wai 6 -heavily-accented 6 -aerostar 6 -hilty 6 -raisch 6 -khirbat 6 -hipbones 6 -genens 6 -she-hulk 6 -nogali 6 -door-knock 6 -dilsukhnagar 6 -zipped-up 6 -fun-packed 6 -stefanatos 6 -gianpaolo 6 -quechuan 6 -sianturi 6 -coccolithophores 6 -hunt-foster 6 -vocalized 6 -bomp 6 -grimaud 6 -desley 6 -caiping 6 -match-by-match 6 -hocus-pocus 6 -marijo 6 -bartesch 6 -anti-christmas 6 -616,529 6 -whities 6 -4-17 6 -sport-specific 6 -crash-related 6 -superieure 6 -chef/owner 6 -austal 6 -grapo 6 -gutfield 6 -nereida 6 -kanagasingham 6 -divic 6 -gerty 6 -pereria 6 -photo/alexander 6 -westerleigh 6 -peak-season 6 -jeyakumar 6 -castiglione 6 -superfine 6 -rhoys 6 -space-x 6 -gurian 6 -15,208 6 -tcx1638 6 -stansall 6 -iwelumo 6 -27-day 6 -karu 6 -bergisch 6 -ribouem 6 -vicitm 6 -rakhmon 6 -gunters 6 -debentures 6 -mikelsons 6 -biocsl 6 -ezme 6 -arteisha 6 -tropiquaria 6 -behance 6 -castro.dispatcher 6 -wapo 6 -137-house 6 -westerdam 6 -banshees 6 -clemenceau 6 -2,309 6 -xazziel 6 -stopped-and-frisked 6 -muttbombed 6 -makaryus 6 -crotone 6 -ramón 6 -amatokwu 6 -cccs 6 -cccc 6 -nampo 6 -stod 6 -stoa 6 -lycian 6 -nabire 6 -myke 6 -myki 6 -pulping 6 -art-lover 6 -dercum 6 -al-khalidi 6 -zur 6 -kyung-wha 6 -concert_mark 6 -maysaa 6 -burgan 6 -gm-free 6 -mikac 6 -19-week 6 -74.95 6 -cctlds 6 -fairtest 6 -greenewald 6 -diarrohea 6 -natural-gas-powered 6 -heptagon 6 -chacho 6 -artz 6 -canyoneering 6 -treehugger.com 6 -a'zhiah 6 -21,400 6 -gilfellan 6 -waiting-time 6 -5.4-acre 6 -ephgrave 6 -nortenos 6 -needful 6 -muruga 6 -10.43 6 -hammarskjold 6 -salovey 6 -#notabugsplat 6 -adbi 6 -laight 6 -war-town 6 -voltigeur 6 -mlas 6 -morrisania 6 -10,607 6 -chondritic 6 -short-hair 6 -fedida 6 -kinlochleven 6 -oxynitride 6 -subcategories 6 -areal 6 -lipolysis 6 -serpent-handling 6 -madal 6 -grandmama 6 -ex-madam 6 -15ozs 6 -dere 6 -fritkot 6 -eglet 6 -al-a 6 -deery 6 -miasik 6 -39358 6 -idachaba 6 -benalla 6 -chest-bumping 6 -faileigh 6 -gos 6 -put-off 6 -hissom 6 -turow 6 
-1.2-meter 6 -motoglo 6 -ilyasova 6 -wholley 6 -mazek 6 -dwarka 6 -freshly-squeezed 6 -brandin 6 -barfing 6 -enviropig 6 -pellecchia 6 -terezinha 6 -zeca 6 -cardioverter-defibrillator 6 -frid 6 -chaek 6 -corp.-built 6 -khandker 6 -anticlockwise 6 -hausmalar 6 -religious-oriented 6 -ajuri 6 -homocon 6 -72-inch 6 -mini-fridge 6 -niver 6 -hallissey 6 -bunter 6 -waggled 6 -seiko 6 -kucova 6 -drugstore.com 6 -greasepaint 6 -khurrassani 6 -knightbridge 6 -enloe 6 -chuansha 6 -a590 6 -krio 6 -haighton 6 -tech-free 6 -tabacco 6 -goyder 6 -sedov 6 -ranil 6 -njeri 6 -night-lighting 6 -tip-line 6 -kaolack 6 -andropov 6 -cent4 6 -gomulka 6 -drug-fighting 6 -biniaz 6 -kompas.com 6 -monley 6 -comeans 6 -marsoc 6 -rajiha 6 -ndung 6 -koorana 6 -jamayne 6 -zadar 6 -bornu 6 -birchwood-pocono 6 -endresen 6 -keany 6 -zagged 6 -96.1 6 -sunnydale 6 -zipps 6 -avakian 6 -corwins 6 -1,000-a-year 6 -pedipower 6 -bigger-than-life 6 -isotretinoin 6 -ridenoure 6 -turnipschool 6 -brosque 6 -cytosport 6 -yothu 6 -megaburgerpizza 6 -2,000-seat 6 -koletas 6 -production-line 6 -bondz 6 -bentonite 6 -butta 6 -lysander 6 -renegotiates 6 -14-second 6 -299,950 6 -osez 6 -cerrudo 6 -mathern 6 -vielmann 6 -continetti 6 -neuvecelle 6 -cadamuro 6 -bronnie 6 -periostin 6 -jackknife 6 -over-stimulated 6 -thich 6 -lulli 6 -yedigaryan 6 -87.4 6 -46.88 6 -1000,000 6 -narraser 6 -schnepf 6 -now-a-days 6 -ringly 6 -pro-militant 6 -o'melveny 6 -jubilee-themed 6 -lukindo 6 -2tbsp 6 -800metres 6 -omotoso 6 -earthjustice 6 -480-page 6 -vanhan 6 -syria-bound 6 -nonito 6 -paschenko 6 -houran 6 -murphy-johnson 6 -lafuente 6 -isms 6 -45-percent 6 -ifb 6 -8-meter-long 6 -radiation-emitting 6 -métiers 6 -shigatse 6 -afrocubism 6 -nationalgeographic.com 6 -nagley 6 -rudiments 6 -48-16 6 -ghazanfar 6 -hjs 6 -closet49 6 -results-driven 6 -space-flight 6 -unversity 6 -unhuman 6 -redwing 6 -bellarmino 6 -skol 6 -tarabin 6 -semi-pornographic 6 -capretto 6 -frog-like 6 -welp 6 -destanie 6 -zifa 6 -husa 6 -wwsb 6 -coupled-up 6 -20-14 6 -ruggedcom 6 -lichtenfeld 6 -parahippocampal 6 -cloudland 6 -reinstituting 6 -queen-to-be 6 -bensons 6 -cofounded 6 -imke 6 -super-smooth 6 -spangly 6 -illah 6 -alborno 6 -@nikefootball 6 -louisiana-mississippi 6 -sulmasy 6 -vellios 6 -evo-stick 6 -super-elite 6 -re-considered 6 -noor-eldeen 6 -dry-ice 6 -emsstrom 6 -vlahakis 6 -embarrassed-looking 6 -15-plus 6 -300.5 6 -insultingly 6 -weathervane 6 -cucchi 6 -denzle 6 -one-in-two 6 -360heros 6 -engelmayer 6 -annerley 6 -trepidatious 6 -demiroglu 6 -dependably 6 -prorsus 6 -smaller-than-expected 6 -tribune-herald 6 -abuse-related 6 -jachnik 6 -pupo 6 -dullards 6 -self-justification 6 -honks 6 -mata-alvarez 6 -gojowczyk 6 -galiulina 6 -85.50 6 -albarus-lindo 6 -medicalization 6 -tex. 
6 -8,030 6 -triple-layer 6 -busaidi 6 -franich 6 -158.9 6 -pro-jewish 6 -sholto 6 -specially-formulated 6 -rutch 6 -transformable 6 -flower-bedecked 6 -yellott 6 -petrosyan 6 -dikka 6 -argali 6 -150,000-plus 6 -duckweed 6 -winkel 6 -btecs 6 -2,006 6 -black-only 6 -abramovitch 6 -martitegi 6 -cross-bencher 6 -danielsen 6 -duquette 6 -gianato 6 -wahie 6 -hutsby 6 -mohanned 6 -colur 6 -look-up 6 -agri-food 6 -0843 6 -mandingo 6 -gannons 6 -5,000-a-month 6 -23-week 6 -fewell 6 -fish-based 6 -sakkaf 6 -melchisedek 6 -totale 6 -alatan 6 -bombogenesis 6 -khandelwal 6 -ward-off 6 -christies.com 6 -heinously 6 -kyp 6 -tiwai 6 -goodwine 6 -well-integrated 6 -swat-style 6 -parmajit 6 -dog-sized 6 -pan-islamic 6 -khote 6 -kovida 6 -halaris 6 -legarde 6 -seckinger 6 -adeyinka 6 -7,000-an-hour 6 -8million-a-year 6 -leblancs 6 -ineffectually 6 -alv 6 -marketa 6 -3.5-ounce 6 -privacygrade.org 6 -gladrags 6 -duva 6 -rtd 6 -mumpower 6 -cs-1 6 -l355 6 -al-rastan 6 -zeezee 6 -godshall 6 -29,800 6 -teahupo'o 6 -phyto 6 -jinhai 6 -least-expensive 6 -leffingwell 6 -katriina 6 -zafir 6 -ruffians 6 -motuzas 6 -gampy 6 -meshulam 6 -kilobits 6 -rear-projection 6 -sather 6 -reoch 6 -farrakh 6 -olare 6 -phenotypic 6 -scottish-themed 6 -maswadeh 6 -qadisiya 6 -lebwohl 6 -3,330,000 6 -rock-ribbed 6 -aggregations 6 -cervinka 6 -bloxworth 6 -holocaust-denying 6 -ordish 6 -drmacich 6 -800-calorie 6 -selfoss 6 -43cm 6 -ratpac 6 -farmworker 6 -sheilla 6 -razvi 6 -saitova 6 -enonchong 6 -suttle 6 -satawake 6 -fgf20 6 -tynedale 6 -party-run 6 -ayyash 6 -fellenbaums 6 -boletini 6 -cohesively 6 -joseph-albert 6 -immuno-suppressant 6 -kirilov 6 -nocentini 6 -anandamide 6 -galiley 6 -ante-room 6 -garcia-ixtacua 6 -lamagna 6 -tiahnybida 6 -qathani 6 -isauro 6 -girl-power 6 -recker 6 -burban 6 -pizzagate 6 -australia-new 6 -opinion-writing 6 -dhea 6 -trash-filled 6 -gundrums 6 -malook 6 -89.95 6 -mojtabavi 6 -kaweah 6 -lemahieu 6 -blued 6 -garelli 6 -sagittal 6 -baby-changing 6 -candle-cutting 6 -minchinbrook 6 -bartoshuk 6 -pdr 6 -pdb 6 -steam-engine 6 -sarchet 6 -topfer 6 -bronzini 6 -fully-restored 6 -digitally-combined 6 -tambourines 6 -isis-occupied 6 -dismuke-blakely 6 -aashtar 6 -2,265 6 -ex-great 6 -ngin 6 -post-operating 6 -script-writing 6 -2010the 6 -sidenetting 6 -lvcva 6 -e-tayyiba 6 -j.r.r 6 -consi 6 -boemmels 6 -up-close-and-personal 6 -162m 6 -cossio 6 -ommid 6 -knapkes 6 -embroidering 6 -64km/h 6 -queric 6 -kelong 6 -giel 6 -4.6-liter 6 -2015mamatobe 6 -govic 6 -thorong 6 -out-buildings 6 -meghann 6 -shurrle 6 -hydro-fracking 6 -54-year-olds 6 -maller 6 -presleys 6 -mid-fight 6 -macsween 6 -cbsla.com 6 -jail-time 6 -bordaberry 6 -el-megarif 6 -interrelate 6 -herchcovitch 6 -abdel-kader 6 -issue-advocacy 6 -garand 6 -137.9 6 -wigginton 6 -mikhaimar 6 -apraxia 6 -nicolis 6 -southampton-born 6 -masutani 6 -boxwork 6 -pre-medicine 6 -jaksic 6 -djinnit 6 -pneumatically 6 -2610 6 -snetterton 6 -millikan 6 -petcetera 6 -macivor 6 -mchawala 6 -strong-headed 6 -clonmel 6 -break-away 6 -resiliance 6 -video-call 6 -d-delaware 6 -anti-americans 6 -lamposts 6 -600-20 6 -desktop-class 6 -waste-disposing 6 -joswiak 6 -esla 6 -statist 6 -galatic 6 -female-centric 6 -schiear 6 -matysniak 6 -yayin 6 -deschacht 6 -ankh 6 -four-days 6 -grijo 6 -mso-hansi-theme-font 6 -cashbox 6 -poisoners 6 -babygirl 6 -asid 6 -double-hulled 6 -interplast 6 -phrygian 6 -blaga 6 -patronato 6 -ba1 6 -meenagh 6 -baw 6 -250-plus 6 -wheatgerm 6 -shujaya 6 -bagneres-de-luchon 6 -jalousier 6 -canne 6 -turbochargers 6 -tessy 6 -anthracothere 6 -e-village 6 
-karuri 6 -uac 6 -jazzlyn 6 -thanx 6 -kabonge 6 -smallmouth 6 -advertizing 6 -chelsey-lee 6 -cricket-mad 6 -over-complicate 6 -pentameter 6 -stewart-stone 6 -geode 6 -sitting-down 6 -1,189 6 -bio-tech 6 -7-elevens 6 -pérez-mohedano 6 -doxie 6 -gassman 6 -hub-and-spoke 6 -ndefo 6 -oelwein 6 -bielat 6 -nipnominate 6 -dftd 6 -frankenlouie 6 -havenhand 6 -a300-600st 6 -teacher-in-space 6 -12.08 6 -bintang 6 -kartee 6 -pinas 6 -family-free 6 -60.2 6 -haidrani 6 -musoma 6 -humph 6 -clean-shaved 6 -kregear 6 -highbank 6 -55752 6 -cheplak 6 -okech 6 -thorkildsen 6 -telecharge 6 -170km 6 -170kg 6 -200-foot-long 6 -ghan 6 -ormus 6 -out-of-home 6 -hominoids 6 -believably 6 -okrent 6 -beachheads 6 -swopes 6 -froths 6 -alley-oop 6 -audiologists 6 -running-back 6 -interring 6 -heavy-weight 6 -14-goal 6 -vuzix 6 -tenth-grader 6 -kabardino-balkaria 6 -renier 6 -infrabel 6 -joycelynn 6 -gein 6 -macarons 6 -50-date 6 -blox 6 -emoshape 6 -biohybrid 6 -mento 6 -kump 6 -norng 6 -price-wise 6 -olesia 6 -substrain 6 -neiers 6 -#imstickingwithtony 6 -filippino 6 -pro-coptic 6 -ground-to-ground 6 -turmail 6 -minesh 6 -cetaphil 6 -makhmudov 6 -shawan 6 -devonshires 6 -bikramjeet 6 -good-enough 6 -ming-wei 6 -wne 6 -apelian 6 -mohawked 6 -debenedetti 6 -gang-kuk 6 -javis 6 -alailima 6 -dossing 6 -seap 6 -enfranchise 6 -adefemi 6 -shapwick 6 -six-length 6 -elife 6 -32-26 6 -32-22 6 -disappearnce 6 -cordite 6 -urziceni 6 -burtis 6 -recirculation 6 -nuked 6 -shadrake 6 -ampitheatre 6 -over/under 6 -weihagen 6 -cupful 6 -zercher 6 -sverker 6 -nunciature 6 -over-hunted 6 -l'alpe 6 -aleen 6 -aguayo 6 -bentegodi 6 -polayes 6 -heart-breaker 6 -portrayer 6 -sweetly-struck 6 -armengol 6 -strangelets 6 -xinfeng 6 -gallery-style 6 -golbert 6 -hidemyass.com 6 -5,208 6 -u.p. 6 -abola 6 -careshare 6 -guirado 6 -cutta 6 -devita 6 -washbasin 6 -rock-ola 6 -diekema 6 -linton-on-ouse 6 -ex-resident 6 -rosenbluth 6 -udel 6 -#bloodycyclists 6 -2fnews 6 -147-year-old 6 -half-completed 6 -anti-tumour 6 -pasa 6 -uyghur-han 6 -nomar 6 -bungert 6 -memoire 6 -facerig 6 -signa 6 -cobilita 6 -fivethirtyeight.com 6 -dfi 6 -megatoad 6 -svetlik-mccarthy 6 -kraal 6 -cryoballoon 6 -democratic-farmer-labor 6 -al-afghani 6 -bayous 6 -ill-matched 6 -super-centre 6 -lee-han 6 -manouevres 6 -matina 6 -31,200 6 -@christinaedkins 6 -suv-sized 6 -deregulatory 6 -16-episode 6 -picclick 6 -208.5 6 -dextromethorphan 6 -vibgyor 6 -mauretania 6 -bellecote 6 -mid-speech 6 -lebovitz 6 -zande 6 -outlander 6 -flash-freezing 6 -toutatis 6 -gold-coated 6 -torrin 6 -creteil 6 -pately 6 -#ripch 6 -18-bit 6 -venessa 6 -end-point 6 -saft 6 -grannan 6 -ziadi 6 -remould 6 -lazonby 6 -roux-en-y 6 -dewfeed 6 -para-badminton 6 -3,587 6 -karlsruher 6 -derrick-frost 6 -scarabs 6 -wholesomeness 6 -subway-related 6 -ex-para 6 -bellino 6 -palotta 6 -mezz 6 -clockmaker 6 -over-stretching 6 -damar 6 -death-porn 6 -199th 6 -independently-owned 6 -badly-beaten 6 -’94 6 -dressipi 6 -cfdt 6 -ktiv 6 -inists 6 -ghiberti 6 -strug 6 -life-supporting 6 -jedem 6 -vrbo.com 6 -sagir 6 -qidian 6 -ex-reds 6 -benshoof 6 -northern-based 6 -smize 6 -sva 6 -barkerend 6 -2,328 6 -46,600 6 -text-only 6 -khloey 6 -lide 6 -mcelroen 6 -self-sustainability 6 -girlier 6 -crossmichael 6 -mattituck 6 -wvlt 6 -climatically 6 -ruebens 6 -specially-convened 6 -tiefenthal 6 -illinoisan 6 -non-interest 6 -memeber 6 -tornabuoni 6 -elyette 6 -15-kilometer 6 -ndakasi 6 -redpolls 6 -hutias 6 -hoyah 6 -nonrenewable 6 -belhaven 6 -eicosapentaenoic 6 -nginx 6 -aridion 6 -amrouche 6 -instantness 6 -non-slaveholding 6 
-chmagh 6 -jackee 6 -mayorship 6 -semi-dressed 6 -caersws 6 -guarente 6 -earthquake-triggered 6 -viscounts 6 -bazil 6 -griffons 6 -takis 6 -u.s.-listed 6 -nwcn 6 -dockland 6 -heichels 6 -porsoi 6 -outa 6 -centini 6 -misconducted 6 -0.002 6 -money-management 6 -haitang 6 -kinescopes 6 -daguin 6 -byefelipe 6 -mud-spattered 6 -22nd-minute 6 -pa-34 6 -extraordinaires 6 -45lbs 6 -ramapough 6 -luark 6 -macool 6 -54per 6 -tampa-based 6 -re-writes 6 -mcinnerny 6 -factly 6 -cbs6 6 -240bn 6 -cbsc 6 -joliot-curie 6 -praetorian 6 -licentiousness 6 -avrillier 6 -tweedale 6 -chorleywood 6 -pristinely 6 -rowby-john 6 -haplogroup 6 -pettingill 6 -saadon 6 -waterfronts 6 -gml 6 -16-years 6 -pinnebog 6 -friesen-remple 6 -heidenberger 6 -tortorella 6 -ivoirien 6 -brandow 6 -oleoylsarcosine 6 -kauderer 6 -perfomed 6 -aronfeld 6 -adris 6 -adrie 6 -@bwilliams 6 -helmich 6 -kronfield 6 -bigon 6 -préval 6 -blabber 6 -10-an-hour 6 -fgr4 6 -surl 6 -moneylenders 6 -zozulica 6 -over-the-phone 6 -brodifacoum 6 -then-lawyer 6 -viger 6 -decelerates 6 -alambritis 6 -haar 6 -bpsfw 6 -fynes 6 -polnikova 6 -osteomalacia 6 -lhr 6 -negroid 6 -humidifiers 6 -backmarker 6 -rys-sikora 6 -anatomies 6 -kondrat 6 -spear-headed 6 -cocodrie 6 -bayero 6 -peahens 6 -medwatch 6 -yorichika 6 -kanat 6 -owalabi 6 -burke-dunsmore 6 -short-tailed 6 -podles 6 -eight-bedrooms 6 -tacs 6 -gadap 6 -chee-hwa 6 -harrara 6 -meetme 6 -2fbit 6 -shirtmaker 6 -harston 6 -germanium 6 -torlonia 6 -marlows 6 -tyber 6 -rozier 6 -sakey 6 -mingalar 6 -bayman 6 -theives 6 -quipu 6 -dsd 6 -igloo-like 6 -rabil 6 -morphex 6 -526-acre 6 -mega-bout 6 -bolek 6 -cancer-risk 6 -taepodong 6 -leerdam 6 -hayvenhurst 6 -retinyl 6 -1.287 6 -photo-essay 6 -carderock 6 -laye 6 -montserrado 6 -sunduki 6 -explosion-like 6 -cannonballing 6 -donancricchia 6 -muaz 6 -musudans 6 -schwabe 6 -bucuti 6 -ratchaprasong 6 -kuramathi 6 -daryan 6 -lumsdon 6 -athole 6 -pizazz 6 -re-shore 6 -townhall 6 -horkovy 6 -florida-13 6 -on-the-fly 6 -meningitis-related 6 -grave-digger 6 -kopke 6 -tialir 6 -dfn 6 -once-happy 6 -nunam 6 -thelocal.de 6 -substrate-independent 6 -innovent 6 -melt-down 6 -bruise-like 6 -crimping 6 -re-normalise 6 -meieran 6 -j-o-b-s 6 -salusbury 6 -4845 6 -almeira 6 -america-based 6 -flaying 6 -orthotist 6 -claspers 6 -hacohen 6 -hadal 6 -ursu 6 -paris-dakar 6 -wicha 6 -enbrel 6 -bledniak 6 -akasia 6 -flachau 6 -@hollywills 6 -beggar-my-neighbor 6 -goldbeck 6 -anticonvulsant 6 -doco 6 -philemotte 6 -fauvist 6 -delicto 6 -yoseyln 6 -metyrapone 6 -urraca 6 -guitar-driven 6 -sondergaard 6 -guffey 6 -chinese-flagged 6 -esquoia 6 -werris 6 -morris-karp 6 -maywand 6 -reinterment 6 -rtanj 6 -41-second 6 -blue-clad 6 -lanessa 6 -mabass 6 -zohair 6 -zohaib 6 -fahz 6 -al-maitah 6 -buffalo-niagara 6 -golf-course 6 -kingsmore 6 -balding-trained 6 -1,663 6 -smackheads 6 -bucketing 6 -akii-bua 6 -tissue-engineered 6 -nini 6 -caravaggios 6 -ecoatm 6 -gaopo 6 -shorewood 6 -31-14 6 -31-10 6 -lowville 6 -ductus 6 -remebering 6 -1/f 6 -groomzilla 6 -three-layer 6 -zida 6 -boatshed 6 -qubaisi 6 -#tmt 6 -jungle-like 6 -keymoji 6 -most-trafficked 6 -nimbleness 6 -isidor-mendoza 6 -skritter 6 -messchaert 6 -a-b 6 -a-g 6 -a-4 6 -two-parter 6 -bitar 6 -day-one 6 -mahgreb 6 -war/missing 6 -prewpan 6 -banta 6 -uralvagonzavod 6 -neutralization 6 -villagey 6 -wisc. 
6 -gastropubs 6 -falat 6 -aneke 6 -jebsen 6 -blondell 6 -farthman 6 -divs 6 -cremonese 6 -greenhowe 6 -tvone 6 -viganella 6 -jalaludin 6 -aretz 6 -luxo 6 -roehrs 6 -hashemipour 6 -suenos 6 -wilker 6 -koliatsos 6 -epsn 6 -kehrer 6 -gafoor 6 -carrs 6 -supercontinents 6 -burkta 6 -afternooon 6 -jucker 6 -re-let 6 -multisyllabic 6 -15/2 6 -doll-sized 6 -95cm 6 -halloween-inspired 6 -malem 6 -melham 6 -sozzi 6 -34-years 6 -sensitization 6 -starbucksdrakehands 6 -ristau 6 -serravalle 6 -reupholstered 6 -9ct 6 -oo.com.au 6 -appolonia 6 -even-handedly 6 -2,024 6 -2,020 6 -shinkaruk 6 -silksworth 6 -teece 6 -buji 6 -18-months-ago 6 -maturities 6 -gangidine 6 -near-disaster 6 -jinky 6 -thwacking 6 -westgreen 6 -chargesheet 6 -laurentian 6 -damascas 6 -håkan 6 -schraer 6 -tejon 6 -dakhlallah 6 -891,600 6 -transporation 6 -quassia 6 -sickle-shaped 6 -49f 6 -sluyter 6 -www.ritzcarlton.com 6 -waltman 6 -saull 6 -nucleated 6 -lovaas 6 -gulches 6 -sealyhams 6 -vache 6 -huayna 6 -westford 6 -karstan 6 -eastchester 6 -fixed-blade 6 -purchasable 6 -unpublicised 6 -lonta 6 -under-63kg 6 -nice-guy 6 -#roofbreakup 6 -trzaskowski 6 -keniston 6 -larkings 6 -suryavathi 6 -stennett 6 -noncontroversial 6 -hermetic 6 -bird-watcher 6 -sreenevasan 6 -flavanol 6 -kalashnikova 6 -well-accepted 6 -stodden 6 -lower-budget 6 -freshly-painted 6 -badly-wounded 6 -anisyah 6 -flower-laden 6 -abdein 6 -fatkini 6 -senesh 6 -albertsen 6 -andalusians 6 -austro-hungarians 6 -18inches 6 -wyn-jones 6 -ataollah 6 -meuse-argonne 6 -ex-south 6 -phai-nguern 6 -tonye 6 -ddemiri 6 -sodong 6 -whas-tv 6 -bouillon 6 -besos 6 -tarryn 6 -crown-shaped 6 -palm-size 6 -triple-decker 6 -musically-inclined 6 -hanami 6 -15.533 6 -vaguer 6 -congresbury 6 -acceler8 6 -lumbley 6 -impersonally 6 -keesingia 6 -heatherly 6 -ranchero 6 -@spaghettios 6 -against-all-odds 6 -doubling-down 6 -mcpoison 6 -svenk 6 -perfil 6 -barbarically 6 -oculi 6 -spined 6 -möller 6 -unroadworthy 6 -wnyw 6 -shpetniy 6 -akhisar 6 -al-badr 6 -2,643 6 -roadbed 6 -lokone 6 -blagdon 6 -bum-rushed 6 -43ad 6 -gamblero 6 -venecia 6 -unikia 6 -gamblerz 6 -kozisek 6 -post-columbia 6 -dvice 6 -hohenzollern 6 -153billion 6 -queenscliff 6 -punk-style 6 -megastardom 6 -5,800-square-foot 6 -marquitta 6 -haehre 6 -jms 6 -12ozs 6 -byung-un 6 -continentals 6 -parentline 6 -62509 6 -to/from 6 -creer 6 -matius 6 -przemysl 6 -c.i.a 6 -hour-plus 6 -109,700 6 -mazelike 6 -gasbarri 6 -jagdeo 6 -joll 6 -thilini 6 -tabex 6 -sniveling 6 -hs-crp 6 -13.55 6 -makai 6 -raiatea 6 -hobcraft 6 -pb2 6 -ja'kela 6 -2000bc 6 -37060 6 -rags2riches 6 -oji 6 -kolmykova 6 -dameck 6 -flag-planting 6 -aapp 6 -naledi 6 -mascarene 6 -kassie 6 -kassid 6 -ihara 6 -taite 6 -asola 6 -conales 6 -meika 6 -valenciano 6 -gharmaoui 6 -troedsson 6 -5percent 6 -all-africa 6 -background.com 6 -ratanasirivillai 6 -vsv 6 -blumenfeld 6 -pretreatment 6 -syamsul 6 -salomé 6 -ex-pussycat 6 -1997-2003 6 -schroller 6 -311mph 6 -university-owned 6 -miaoqing 6 -peshwari 6 -miot 6 -nicocigs 6 -14.60 6 -woolfson 6 -420ft 6 -wwny 6 -mella 6 -soundprint 6 -yanadi 6 -three-spined 6 -pre-cast 6 -scent-free 6 -thura 6 -a811 6 -lembah 6 -fourtwozero 6 -mchip 6 -litt 6 -neneng 6 -jayatilleka 6 -8:27 6 -life-spans 6 -ludogerets 6 -lubunga 6 -killean 6 -ant-eating 6 -araneta 6 -waywell 6 -er24 6 -cuddalore 6 -chagin 6 -jyah 6 -mediations 6 -gital 6 -potty-mouth 6 -bismullah 6 -deignan 6 -eqt 6 -skywindpower 6 -cye 6 -diliunas 6 -yartsa 6 -poreya 6 -pense 6 -0/10 6 -bosansko 6 -black-feathered 6 -babtan 6 -sohiel 6 -muthui 6 -tiangaye 6 -earworms 6 
-nahidullah 6 -cudell 6 -herrold 6 -dollinger 6 -coffee-drinking 6 -biella 6 -once-independent 6 -wbez 6 -bracchi 6 -wreghitt 6 -goodhand 6 -flossmoor 6 -serialtek 6 -hr980 6 -metal-rich 6 -goodsouls 6 -coroico 6 -kangarlou 6 -birdly 6 -russia-led 6 -cloudberry 6 -tijs 6 -navvies 6 -misterton 6 -11-day-old 6 -48809 6 -silja 6 -backpack-mounted 6 -hand-tied 6 -scholorship 6 -lanuza 6 -toven 6 -kimmelman 6 -sheiham 6 -bhagavad 6 -laerdal 6 -transfered 6 -12.24 6 -glantaf 6 -aid-funded 6 -film-star 6 -tohono 6 -fluo 6 -flud 6 -brena 6 -extra-lean 6 -drinking-water 6 -tuneless 6 -botlr 6 -intra-team 6 -weapon-wielding 6 -binn 6 -caraveo 6 -coverted 6 -mihalov 6 -manistee 6 -candelight 6 -atiku 6 -jdimytai 6 -imdahl 6 -44,450 6 -bhakti 6 -lower-earning 6 -pumpkinsteins 6 -torian 6 -undergirding 6 -conegliano 6 -alternation 6 -kamper 6 -nazrul 6 -modulators 6 -melt-in-the-mouth 6 -78,109 6 -ppis 6 -80-something 6 -xenomania 6 -38.09 6 -assaad 6 -billingborough 6 -marybelle 6 -housing-related 6 -anti-platelet 6 -gt4 6 -mofarrej 6 -misted 6 -twenge 6 -lotty 6 -ammonds 6 -indian-americans 6 -charitybets 6 -loja 6 -southcliffe 6 -leftback 6 -gvsu 6 -cantellow 6 -amiah 6 -gliebe 6 -wiegert 6 -hochspring 6 -pons 6 -unmistakeably 6 -fatbooth 6 -2,164 6 -surapong 6 -phmsa 6 -mohaned 6 -saracoglu 6 -five-length 6 -biophysicists 6 -silverside 6 -acclimatized 6 -meriva 6 -successfulmatch 6 -baynet 6 -erle 6 -trendafilova 6 -hypomania 6 -nuoro 6 -lamer 6 -amee 6 -amet 6 -submarine-based 6 -miatake 6 -hyperpigmentation 6 -prudhams 6 -palladio 6 -daedone 6 -swaine 6 -it.the 6 -42cm 6 -pansieri 6 -coco-mat 6 -30-week 6 -stephanie.linning@mailonline.co.uk 6 -hinatuan 6 -apra 6 --7.6 6 -averbrook 6 -anti-russia 6 -toddler-sized 6 -venzke 6 -kratzke 6 -166-page 6 -helianthus 6 -sjc 6 -hwnt 6 -frazzles 6 -peshmergas 6 -morbihan 6 -hirtella 6 -jonason 6 -chernyh 6 -emdadur 6 -vedeler 6 -csail 6 -http://www.nbcmiami.com/templates/nbc_partner_player?cmsid= 6 -dslrs 6 -thidar 6 -mk10 6 -mk16 6 -siglo 6 -floor-plan 6 -turnford 6 -nudity-oriented 6 -maxalt 6 -geekiness 6 -alkanes 6 -re-litigate 6 -306m 6 -overtown 6 -rough-housing 6 -4,727.67 6 -golembiowski 6 -knacks 6 -grindingly 6 -maugeri 6 -tartness 6 -neuro-degenerative 6 -selph 6 -vantagepoint 6 -tufa 6 -saland 6 -2,744 6 -khiel 6 -sorokdo 6 -132.8 6 -milford-on-sea 6 -fadime 6 -interferogram 6 -megajoules 6 -mailien 6 -thuer 6 -daushvili 6 -larkin-wallace 6 -longdendale 6 -cefepime 6 -thunborg 6 -markazi 6 -elisabeta 6 -bone-headed 6 -virak 6 -329-count 6 -demark 6 -hard-packed 6 -leigh-ann 6 -muchdi 6 -rouffignac 6 -masinloc 6 -240-yard 6 -nearly-man 6 -superlab 6 -mouaz 6 -al-makhtoum 6 -baqouba 6 -abowath 6 -memorialization 6 -irreducible 6 -quinones-fontanez 6 -gazzaroli 6 -l.b. 
6 -win-less 6 -capacchione 6 -waldarena 6 -perrottet 6 -problematicpranna 6 -gamarra 6 -57mins 6 -wataru 6 -just-completed 6 -clauson 6 -10-plus 6 -hcas 6 -mis-management 6 -#findjenniferhuston 6 -rosandick 6 -british-bound 6 -172-year-old 6 -nwcn.com 6 -manele 6 -su-hyeon 6 -ghostlike 6 -dimenno 6 -medicalized 6 -poverty-level 6 -tuppance 6 -mompi 6 -godsiff 6 -bullheaded 6 -6,650 6 -fatcats 6 -436ft 6 -wats 6 -no-dog 6 -2,340 6 -galgo 6 -vaishnav 6 -gang-infested 6 -chistolini 6 -molhem 6 -mihaylov 6 -eyewall 6 -wenske 6 -azoz 6 -azor 6 -azot 6 -wassell 6 -wutang 6 -d-vt 6 -polet 6 -mid-2004 6 -shrock 6 -tabakow 6 -glaucia 6 -0800 789 321 6 -discontinuity 6 -nevandro 6 -180-acre 6 -d+d 6 -levatich 6 -hartley-brewer 6 -kodsi 6 -refueller 6 -delegate-rich 6 -10-finger 6 -verifinger 6 -togarashi 6 -stadium-sized 6 -54428 6 -portaloos 6 -gelashvili 6 -cichlids 6 -uggie 6 -pfizenmaier 6 -raborn 6 -705.07 6 -disbury 6 -13-7 6 -shults 6 -16v 6 -nusreta 6 -formidable-looking 6 -al-mubarac 6 -al-mubarak 6 -kokoity 6 -waudby 6 -5.67 6 -chulov 6 -pedicabs 6 -self-exiled 6 -pro-feminist 6 -zabara 6 -bubble-gum 6 -socialbakers 6 -achnagart 6 -shidler 6 -0.024 6 -granta 6 -hillsville 6 -bayesian 6 -spironolactone 6 -horsemanning 6 -nunchuck 6 -obamadon 6 -geston 6 -goro 6 -genny 6 -genna 6 -foul-mouth 6 -689,000 6 -elpida 6 -pop!tech 6 -falica 6 -warkawater 6 -ai-jen 6 -qlr 6 -guernsey-based 6 -cinnamoney 6 -jokke 6 -laarhuis 6 -el-fna 6 -stampylonghead 6 -smail 6 -amazin 6 -auluk 6 -am-drams 6 -cushiest 6 -steroid-like 6 -2,920 6 -cripmas 6 -pilip-florea 6 -steyr 6 -heroin-fentanyl 6 -dallal 6 -takhtehchian 6 -hotch-potch 6 -hphpa 6 -begazo 6 -czarina 6 -held-up 6 -mazi 6 -amite 6 -lyna 6 -assembler 6 -nassiri 6 -ynys 6 -panyanouvong 6 -ivian 6 -cauterized 6 -knome 6 -orlich 6 -hypothesizing 6 -wivenhoe 6 -#openingceremony 6 -dkr 6 -kere 6 -ctf-151 6 -axall 6 -bringman 6 -wrabness 6 -tcaciuc 6 -149m 6 -wategos 6 -cristaldo 6 -aires-based 6 -face-mounted 6 -fazlyeva 6 -diene 6 -gulleys 6 -n'djida 6 -adamas 6 -lorry-loads 6 -mouritsen 6 -coraliz 6 -cacdac 6 -zalkind 6 -web-page 6 -fruitizz 6 -flabbergasting 6 -lieshiaj 6 -cerniglia 6 -sedky 6 -ranea 6 -mckairnes 6 -valov 6 -wisson 6 -33/40 6 -trustpolitik 6 -139,500 6 -pass-rushers 6 -bryant-davis 6 -jardiniere 6 -szulc 6 -icey 6 -castro-vega 6 -osunkoya 6 -mortgage-style 6 -timoci 6 -caig 6 -60-a-day 6 -j.j 6 -1,793 6 -1,798 6 -blehr 6 -ballgirl 6 -hlhs 6 -down-the-line 6 -fist-bumps 6 -wildlands 6 -marske 6 -hippo-like 6 -kens-tv 6 -lamido 6 -evolutionist 6 -hospital-themed 6 -nebus 6 -yaritza 6 -grandmother-in-law 6 -ivanca 6 -taquin 6 -tocantins 6 -colour-coordinated 6 -wedding-style 6 -queue-jumping 6 -double-crossing 6 -594,000 6 -myoclonic 6 -27,432 6 -mandhir 6 -then-pakistan 6 -i-29 6 -goranin 6 -effusion 6 -harvey-jay 6 -473,000 6 -nanostructure 6 -aldridges 6 -thewaterwhispers 6 -trans-pennine 6 -john-john 6 -shirvani 6 -nacac 6 -biryulyovo 6 -loriana 6 -bisd 6 -zentani 6 -marie-thérèse 6 -nyali 6 -klyuchevskaya 6 -hibel 6 -handelsbanken 6 -565million 6 -d'orleans 6 -scarpers 6 -milward 6 -auvi-q 6 -species-specific 6 -clealls 6 -ottl 6 -contract-free 6 -pigs-in-blankets 6 -cazarez 6 -individualize 6 -totenham 6 -floater 6 -25,550 6 -prickling 6 -dadachova 6 -150bn 6 -liquid-like 6 -swigert 6 -osas 6 -mialan 6 -mathena 6 -@uber 6 -nonphysical 6 -dinorwic 6 -inclosure 6 -heulitt 6 -tawashi 6 -atv-5 6 -nose-picking 6 -tma-15m 6 -pre-ceremony 6 -ethedge 6 -aneurisms 6 -yamahata 6 -hanun 6 -condemnatory 6 -drinkmate 6 -mojab 6 -ruckledge 6 
-mutuals 6 -24-person 6 -gibby 6 -pigmentosum 6 -colantonio 6 -over-loaded 6 -baldie 6 -1,972 6 -1,978 6 -sabbata 6 -sodomites 6 -przemysław 6 -krishtal 6 --872 6 -owner-operator 6 -mp-e 6 -face-lifts 6 -alphonzo 6 -mutiple 6 -93-90 6 -221b 6 -faza 6 -jamahiriya 6 -in-and-out 6 -fence-sitters 6 -belgian-french 6 -sojourners 6 -1,648 6 -1,643 6 -samiun 6 -samiul 6 -70755 6 -danas 6 -nilu 6 -sarkysian 6 -40-month 6 -growler 6 -3,471 6 -overseal 6 -dweidary 6 -bultemeier 6 -aggressivity 6 -miyazoto 6 -super-tall 6 -matison 6 -milijas 6 -kalen 6 -chimbonda 6 -shevchuk 6 -sonita 6 -surtsey 6 -@rustyrockets 6 -avramopoulos 6 -fridjonsson 6 -citysouthampton 6 -grigorovich 6 -injury-troubled 6 -onkar 6 -universite 6 -mandache 6 -pharmacopoeia 6 -hiner 6 -small-budget 6 -ngh 6 -granat 6 -direst 6 -paddleboards 6 -risk-reward 6 -mawsynram 6 -narender 6 -tackiest 6 -falck 6 -professionally-produced 6 -bonvoyage.co.uk 6 -louis-dreyfuss 6 -dog-meat 6 -capretta 6 -beneman 6 -hustede 6 -germaphobe 6 -league-chasing 6 -wallick 6 -wallich 6 -deam 6 -carth 6 -krenwinkle 6 -hanlong 6 -shallowly 6 -ironworker 6 -radler 6 -crummel 6 -live-in-lover 6 -megahit 6 -loughtman 6 -washougal 6 -times-herald 6 -berwin 6 -higher-energy 6 -obamarama 6 -blackett-ord 6 -bergensten 6 -occultations 6 -edwords 6 -uncelebrated 6 -woude 6 -5,325 6 -al-iraq 6 -taptap 6 -gurrola 6 -quiett 6 -quiets 6 -mattos 6 -4,769 6 -puzey 6 -rattail 6 -fourth-bottom 6 -skywalking 6 -bakharev 6 -well-focused 6 -hafey 6 -aaish 6 -charr 6 -anne-style 6 -stancu 6 -ogmore 6 -south-south 6 -tamesha 6 -al-umda 6 -newaygo 6 -laderika 6 -etendeka 6 -streetcorner 6 -sellotaping 6 -harrisyn 6 -hizmet 6 -snowglobe 6 -buonadonna 6 -guen-hye 6 -ethnographic 6 -gotke 6 -kqds 6 -practicals 6 -advance-purchase 6 -all-boy 6 -super-storm 6 -westhill 6 -hamstreet 6 -borchgrevink 6 -debt/gdp 6 -spacenk.com 6 -trimboli 6 -tayyiba 6 -borghetti 6 -lofotr 6 -470,300 6 -12,000-a-month 6 -avolt 6 -myeong-dong 6 -wikus 6 -ahw 6 -ciprianis 6 -yingyi 6 -srpk1 6 -hazeldon 6 -73840 6 -pakistani-administered 6 -ooraikul 6 -kook-young 6 -lyagushkin 6 -loran-c 6 -francois-michel 6 -pale-looking 6 -openhearted 6 -melchiode 6 -uberpool 6 -barnehurst 6 -dynamax 6 -torchering 6 -tugman 6 -eniac 6 -circuited 6 -australia-china 6 -hamami 6 -koening 6 -latyef 6 -step-grandson 6 -j'ade 6 -breeze-block 6 -kreuzman 6 -ferell 6 -sang-hon 6 -5inches 6 -hadash 6 -givat 6 -newly-expanded 6 -football-obsessed 6 -mutungo 6 -divining 6 -4,146 6 -spiber 6 -r-n.j. 
6 -biscuity 6 -idealize 6 -coumarin 6 -afroze 6 -ball-like 6 -f-7 6 -cyphers 6 -carleen 6 -schachtay 6 -re-touching 6 -breazeal 6 -energy-burning 6 -owlerton 6 -selick 6 -superthin 6 -kartz 6 -haruka 6 -haruko 6 -casually-dressed 6 -5-minute 6 -innabah 6 -kochagov 6 -hadri 6 -ice-shelf 6 -robitussin 6 -much-prized 6 -suren 6 -d'agata 6 -ajmer 6 -poupyrev 6 -promette 6 -jike 6 -night-fighter 6 -@waterstones 6 -caspit 6 -over-stimulation 6 -zigan 6 -arachnoid 6 -jianglang 6 -7.0.6 6 -c&d 6 -woolfall 6 -joliverie 6 -0800 854 440 6 -8-foot-tall 6 -samawi 6 -krowski 6 -gyfun 6 -142,500-a-year 6 -hills-based 6 -highly-valued 6 -oval-ball 6 -efficy 6 -ohn 6 -ohh 6 -ohi 6 -christmasses 6 -o'odham 6 -delsea 6 -fosbury 6 -amarkhil 6 -hotty 6 -bagnolo 6 -bagnold 6 -todorovich 6 -mega-stardom 6 -samuelsen 6 -computer-guided 6 -gbta 6 -chilout 6 -geo-tagging 6 -huvafen 6 -wellses 6 -monasticism 6 -braydion 6 -ruangsak 6 -l'or 6 -107.2 6 -107.7 6 -kehmi 6 -screen-reader 6 -al-dumaini 6 -oakenshield 6 -one-world 6 -kermanshah 6 -michaelmas 6 -jigged 6 -#diamondsandpearls 6 -gritzmaker 6 -dibden 6 -loss-prevention 6 -hajiu 6 -cdfa 6 -re-airs 6 -lsdp 6 -hodge-podge 6 -rabadan 6 -stefans 6 -ajrestan 6 -972 6 -62-foot 6 -whattaburger 6 -salaang 6 -arcattack 6 -359.99 6 -pasir 6 -11-length 6 -olliver 6 -kimilsungia 6 -heritability 6 -zygmunt 6 -898,000 6 -alfuj 6 -livi 6 -cancer-promoting 6 -21-10 6 -mirebalais 6 -candler 6 -pierre-paul 6 -aargau 6 -jaggi 6 -aj-26 6 -grotta 6 -cloncurry 6 -philizot 6 -uyghur-populated 6 -nikhom 6 -wazen 6 -declawed 6 -validator 6 -mpx 6 -rassoullallah 6 -nanolabs 6 -revivalists 6 -akande 6 -filmakers 6 -woradet 6 -tvnewser 6 -31ft 6 -beginner-friendly 6 -bidirectional 6 -swop 6 -milarepa 6 -jamis 6 -doxey 6 -mikaila 6 -party-driven 6 -cayford 6 -174.1 6 -980ft 6 -newyork 6 -zopiclone 6 -centum 6 -sallows 6 -hagger 6 -fussier 6 -pleiss 6 -adebambo 6 -en-nahas 6 -160,873 6 -0044 6 -eight-count 6 -ipad-only 6 -reversibility 6 -harambe 6 -burghead 6 -over-full 6 -party-hearty 6 -protherough 6 -threepence 6 -loggins 6 -sentinel-tribune 6 -4x100metres 6 -petabecquerels 6 -out-of-the 6 -christian-oriented 6 -al-otaibi 6 -anti-french 6 -ex-gurkha 6 -jail-term 6 -26,300 6 -40,000-mile 6 -14kgs 6 -1980-88 6 -moellering 6 -non-jihadist 6 -plaine 6 -disorganisation 6 -prize-fighter 6 -corrib 6 -goetsch 6 -genero 6 -cfsm 6 -phaistos 6 -dymott 6 -shulan 6 -mamberti 6 -ethereally 6 -colak 6 -cityswansea 6 -12.49 6 -12.46 6 -enam 6 -bilion 6 -sub-camps 6 -diamond-mining 6 -nyborg 6 -mcarthur-king 6 -independencia 6 -kx 6 -two-city 6 -40-seater 6 -kxjb 6 -counter-act 6 -re-growing 6 -@mcfc 6 -1,494 6 -nonsensically 6 -linkevicius 6 -kalisz 6 -kalish 6 -zipties 6 -sitiveni 6 -penraat 6 -zaad 6 -m17 6 -yahir 6 -oppezzo 6 -quivira 6 -direct-action 6 -tsvetan 6 -gosafe 6 -hellabrun 6 -sarotin 6 -sharath 6 -scarpering 6 -orser 6 -kwando 6 -enervating 6 -adulteresses 6 -zahair 6 -incake 6 -duero 6 -perton 6 -five-minutes 6 -nano-coating 6 -salesian 6 -rowington 6 -chiminello 6 -re-submitted 6 -kcrw 6 -red-ball 6 -#mh370 6 -candy-coloured 6 -barket 6 -perdana 6 -jumio 6 -6.92 6 -boksic 6 -61.56 6 -molto 6 -hypergrowth 6 -low-achieving 6 -upticks 6 -undefinable 6 -llangynwyd 6 -rhys-meyers 6 -p17a 6 -rubaish 6 -1stopship 6 -copus 6 -triggerfish 6 -weather-affected 6 -44per 6 -gateacre 6 -120lb 6 -hindi-language 6 -alemu 6 -depailler 6 -busra 6 -reforest 6 -baelish 6 -ruchun 6 -votyakov 6 -mendonsa 6 -un-retouched 6 -1000-a-night 6 -reenactor 6 -port-a-loos 6 -ruymbeke 6 -vertue 6 -higher-speed 6 
-carmina 6 -14mins 6 -floatplane 6 -shaken-up 6 -green-tinged 6 -gordeev 6 -15-vehicle 6 -budgerigars 6 -corpina 6 -gautreau 6 -tangen 6 -felipao 6 -568ft 6 -elmslie 6 -hektor 6 -trefilov 6 -bojnourd 6 -sungrazing 6 -bouattia 6 -narky 6 -jasmiyah 6 -1994-2000 6 -falkowski 6 -73,800 6 -mobuto 6 -saddlebaby 6 -stasytyte 6 -bagful 6 -mostagedda 6 -abandonments 6 -49per 6 -knight-percival 6 -reinstituted 6 -sapphire-crystal 6 -telesar 6 -sub-biosphere 6 -marshallton 6 -hi-vision 6 -dalan 6 -45min 6 -jarrel 6 -globe-shaped 6 -death-hunters 6 -dekaney 6 -pipedream 6 -footrests 6 -sitorus 6 -cox-brown 6 -papist 6 -rusko 6 -hrsa 6 --53 6 --58 6 -lockman 6 -gizmopal 6 -pallardo 6 -moysey 6 -2,767 6 -multireligious 6 -125,001 6 -huttleston 6 -solovyev 6 -tamasin 6 -liese 6 -non-indictment 6 -engine-powered 6 -jerman 6 -fit-to-work 6 -makeka 6 -aktarer 6 -terrestrialized 6 -ramos-horta 6 -narigua 6 -hackney-born 6 -boydy 6 -19-24 6 -19-23 6 -zvika 6 -gunwale 6 -malingerer 6 -88-82 6 -cold-snap 6 -dimmable 6 -pedra 6 -montz 6 -manitowish 6 -112.3 6 -112.1 6 -millership 6 -post-disney 6 -seher 6 -cambur 6 -obama-led 6 -neca 6 -non-orthodox 6 -rookeries 6 -kalt 6 -blatchington 6 -56.80 6 -wine-lovers 6 -hoogendoorn 6 -sternfeld 6 -vaginalis 6 -sloughs 6 -yapias 6 -91million 6 -10-a-day 6 -funnymals 6 -otomo 6 -canonbury 6 -couplies 6 -back-date 6 -dragonball 6 -exfoliators 6 -applebees 6 -oostveen 6 -anatsui 6 -123.7 6 -123.4 6 -nifaz-e-shariat 6 -poppy-growing 6 -srf 6 -sry 6 -krishnamachar 6 -290lb 6 -parents/carers 6 -2,369 6 -toot-toot 6 -foulis 6 -paari 6 -pre-ordained 6 -raschio 6 -zubrzycki 6 -17,250 6 -baylie 6 -321km 6 -coracles 6 -kjellsson 6 -3.32 6 -ccif 6 -carratu 6 -9,831.99 6 -edfu 6 -brascom 6 -lovefool 6 -intraocular 6 -m-302 6 -bird-flu 6 -never-never 6 -fisted 6 -rast 6 -arra 6 -arro 6 -altiveros 6 -willmar 6 -metabolises 6 -western-influenced 6 -1229 6 -marathon-running 6 -erisbel 6 -waxwing 6 -perrelli 6 -environment-friendly 6 -roiphe 6 -heat-stricken 6 -snake-infested 6 -usol 6 -curci 6 -gacanja 6 -prayer-like 6 -vodquila 6 -ex-college 6 -major-party 6 -hornberg 6 -gunnels 6 -birkins 6 -no-exception 6 -x-shaped 6 -next-best 6 -minutos 6 -dictat 6 -sombrely 6 -323,000 6 -brewington 6 -kilonzo 6 -m'lord 6 -toonies 6 -pebrel 6 -fifth-fastest 6 -gak 6 -over-achieving 6 -gopin 6 -zemlianichenko 6 -yeahhhh 6 -sherron 6 -up-field 6 -raclette 6 -ledyard 6 -l'ame 6 -laotians 6 -duffins 6 -conahan 6 -0145 6 -bra-wearing 6 -uxbs 6 -kriegsmarine 6 -nanosensor 6 -174cm 6 -mizukami 6 -tauqeer 6 -paume 6 -fluttery 6 -nqinana 6 -daltry 6 -coquard 6 -thermography 6 -mrozowsi 6 -lardons 6 -rendu 6 -kotzebue 6 -pietilae-holmner 6 -freie 6 -pasttime 6 -beanotherlab 6 -vigan 6 -chernomorneftegaz 6 -halida 6 -apatosaurus 6 -zaubek 6 -derby-born 6 -kilometer-long 6 -oceanus 6 -dreamboy 6 -ciff 6 -hooved 6 -yeasty 6 -xuemei 6 -1qn 6 -6.97 6 -malapa 6 -garo 6 -59165 6 -uragan 6 -kolelisvhili 6 -weiboscope 6 -il-khanid 6 -aw12 6 -internalizes 6 -bukamal 6 -chicle 6 -neller 6 -fraenzel 6 -monywa 6 -3840 6 -ombudsmen 6 -agria 6 -3-feet 6 -mcdive 6 -ramnut 6 -mainsheet 6 -icco 6 -deseeded 6 -plain-text 6 -glomser 6 -dozers 6 -weese 6 -14-member 6 -dwr 6 -252-161 6 -tippet 6 -snapkidz 6 -allston-brighton 6 -polcari 6 -symmons 6 -azhdarchids 6 -jakhyrian 6 -triomphant 6 -radigan 6 -wobbleson 6 -balaknama 6 -o.g. 
6 -brima 6 -500-a-month 6 -hermoine 6 -skavlan 6 -sheepscombe 6 -lantern-lit 6 -wenjie 6 -quinnan 6 -scotswood 6 -97455 6 -ex-culture 6 -ultra-wide 6 -eleven-week 6 -whetsel 6 -133-year-old 6 -oenotheque 6 -kicking-off 6 -horspool 6 -curviness 6 -alanbrooke 6 -:] 6 -metiers 6 -then-22-year-old 6 -macaron 6 -tech-giant 6 -re-employ 6 -hargey 6 -mobile-payments 6 -cuper 6 -brigue 6 -57493 6 -long-been 6 -vasconcellos 6 -canaii 6 -rainhill 6 -touch-down 6 -100ft-wide 6 -imageboard 6 -osca 6 -salita 6 -sweyn 6 -muellerova 6 -byrs 6 -foremen 6 -foolhardiness 6 -americain 6 -bogalusa 6 -berleburg 6 -56cm 6 -häggberg 6 -vladimirovna 6 -maclise 6 -forsyte 6 -araria 6 -al-awadi 6 -chepiga 6 -616,000 6 -sajdah 6 -polegato 6 -supertasters 6 -vranjes 6 -mentari 6 -sovetov 6 -anti-glazer 6 -cezus 6 -curfiss 6 -hd-quality 6 -land-banking 6 -1,957 6 -run-and-gun 6 -saddarth 6 -perigueux 6 -chiantla 6 -sarwan 6 -citynorwich 6 -akindona 6 -percolators 6 -pomroy 6 -cyclobenzaprine 6 -@nfl 6 -younge 6 -pannawonica 6 -soner 6 -luli 6 -lule 6 -ladypool 6 -husid-shamir 6 -1,627 6 -nicolites 6 -blood-clot 6 -insecam.com 6 -1.3-mile 6 -outreaches 6 -bukhantsov 6 -fandi 6 -hth 6 -htw 6 -chates 6 -duddles 6 -pre-opening 6 -160-pixel 6 -polehna 6 -shahabuddin 6 -nine-ton 6 -zeichner 6 -burdan 6 -reprocesses 6 -homeostasis 6 -ex-no 6 -redur 6 -girlfirend 6 -huit 6 -#cricketfamily 6 -hoorah 6 -half-made 6 -shallowest 6 -treorchy 6 -bropleh 6 -mini-ice 6 -mosquito-transmitted 6 -goiânia 6 -sohdi 6 -capecitabine 6 -lonato 6 -beygelzimer 6 -decertify 6 -monney 6 -lacerate 6 -body-snatching 6 -49-0 6 -teymour 6 -nørgaard 6 -post-financial 6 -ciccariello-maher 6 -stogie 6 -neftaly 6 -sw11 6 -prouts 6 -@espn 6 -cricked 6 -biden-led 6 -capucines 6 -triston 6 -croesyceiliog 6 -ivillage 6 -ellon 6 -ellor 6 -heraclitus 6 -helpmann 6 -majeczka 6 -frusciante 6 -lezcano 6 -slug-like 6 -6-foot-1-inch 6 -shodeinde 6 -twistex 6 -par-72 6 -clickers 6 -willians 6 -typeof 6 -3.5-mile 6 -one-and-half 6 -waldvogel 6 -tribiano 6 -hell-hole 6 -sonically 6 -reinvestigating 6 -fll 6 -vanrees 6 -wowurdumb 6 -s&c 6 -rashomon 6 -elitest 6 -s-max 6 -37g 6 -mcgraths 6 -istandwithphil.com 6 -skolling 6 -hardigg 6 -keithville 6 -highrises 6 -digsby 6 -350bhp 6 -burdi 6 -unilateralist 6 -ampuan 6 -duck-like 6 -surenas 6 -shoqbox 6 -delaminations 6 -vitko 6 -121million 6 -humanitaria 6 -primeknit 6 -chanterelles 6 -newly-obtained 6 -redegalli 6 -drinkwine 6 -lap-dance 6 -lyz 6 -markdowns 6 -ayapa 6 -sankofa 6 -brigit 6 -83kg 6 -degerolamo 6 -piriformis 6 -ovacion 6 -10th-century 6 -99.9999 6 -74-13 6 -ombui 6 -williamette 6 -ulht 6 -villwock 6 -5,126 6 -low-fi 6 -agrochemicals 6 -super-popular 6 -carnita 6 -medium-rare 6 -biosca 6 -atomised 6 -epoxi 6 -micro-targeting 6 -cuyabeno 6 -tsigris 6 -sartaj 6 -evangulov 6 -t.f. 6 -supertrash 6 -mccary 6 -fankaty 6 -tag-line 6 -5-hta1 6 -tasmia 6 -conservative-held 6 -entsminger 6 -ammal 6 -sedita 6 -adelsons 6 -concrete-like 6 -stilt-walker 6 -jensens 6 -ensnares 6 -mcclellands 6 -poppitt 6 -sprng 6 -camera-friendly 6 -over-compensating 6 -smog-ridden 6 -bi-yearly 6 -franscell 6 -10secs 6 -haggog 6 -sibsey 6 -cnngo.com 6 -derry-londonderry 6 -raingeard 6 -thawil 6 -massarella 6 -attero 6 -olympic-only 6 -a625 6 -eurozone-style 6 -kolobrzeg 6 -unrepaired 6 -palada 6 -profesor 6 -u.n.c.l.e. 
6 -12192 6 -mayer-rokitansky-kuster-hauser 6 -stockebrand 6 -essenburg 6 -usatiy 6 -boiko 6 -gundrum 6 -richen 6 -persiraja 6 -son-tinh 6 -communist-backed 6 -100-mph 6 -tugonon 6 -vanheest 6 -defconomy 6 -elvy 6 -jagdip 6 -virtis 6 -ulfkotte 6 -kapron 6 -1milion 6 -ligand 6 -flattus 6 -half-foot 6 -f138 6 -emploi 6 -13.12 6 -machell 6 -hyperkyphosis 6 -mcraney 6 -waringin 6 -rolt 6 -hydroguard 6 -foraker 6 -yunlin 6 -jerk.com 6 -bonazzoli 6 -secularized 6 -overexertion 6 -wikler 6 -still-potent 6 -wylby 6 -al-haudali 6 -jannic 6 -togiola 6 -doubleone 6 -shepac 6 -mylonas 6 -sele 6 -transfermarkt 6 -four-tonne 6 -bright-pink 6 -attorny 6 -vor 6 -137mph 6 -chudzicki 6 -zohreen 6 -topline 6 -17mph 6 -frullani 6 -happyvegemitekr 6 -cavitation 6 -derosa 6 -refurnished 6 -snow-like 6 -galgos 6 -camaya 6 -andriana 6 -falseness 6 -24-foot-long 6 -sterilisers 6 -160bn 6 -sophie-may 6 -out-earned 6 -gabbing 6 -rearward 6 -injury-causing 6 -al-chaar 6 -pacolli 6 -sporich 6 -undereducated 6 -voyaged 6 -brymore 6 -hortus 6 -five-cap 6 -corey.charlton@mailonline.co.uk 6 -adnkronos 6 -probabtion 6 -bogumila 6 -multi-star 6 -swisdak 6 -multi-person 6 -milk-dependent 6 -bejesus 6 -#goodtimes 6 -million-volt 6 -bodywarmer 6 -15000 6 -80-storey 6 -artifices 6 -tukki 6 -ricardas 6 -ex-patient 6 -mrl 6 -woodworker 6 -proloquo2go 6 -11,500-acre 6 -#hugdontjudge 6 -tbm-700 6 -kavos 6 -anar 6 -just-ended 6 -exhilaratingly 6 -bernier-toth 6 -ceu 6 -now-15-year-old 6 -2,827 6 -tikehau 6 -fish-and-chip 6 -burkey 6 -worline 6 -ronaldo-inspired 6 -hasoloan 6 -qormozi 6 -year-old-boy 6 -crofthouse 6 -viggósdóttir 6 -langerud 6 -pantelic 6 -vorinostat 6 -296,900 6 -different-coloured 6 -light-brown 6 -dorus 6 -bythe 6 -one-liter 6 -xeridat 6 -masachusetts 6 -music-related 6 -sunonna 6 -humilation 6 -zacary 6 -zacara 6 -hitz 6 -bodycam 6 -ubc 6 -sandrock 6 -now-razed 6 -enstone-based 6 -reymond 6 -nasturtiums 6 -mlinar 6 -pascua 6 -waasland-beveren 6 -tigerfish 6 -grimond 6 -1,124 6 -marcak 6 -www.5mag.co 6 -rx450h 6 -birnberg 6 -alfille 6 -avvakumova 6 -madarian 6 -eidum 6 -simorangkir 6 -liquid-fueled 6 -tightheads 6 -lcvp 6 -nemsadze 6 -grandroid 6 -government-administered 6 -zhixiang 6 -arnos 6 -12.67 6 -grovetown 6 -honest-to-god 6 -unfashionista 6 -elyjah 6 -norka 6 -melander 6 -harsono 6 -dupont-columbia 6 -oubai 6 -ghia 6 -lehel 6 -salford-based 6 -near-deserted 6 -re-hydration 6 -haseena 6 -petrograd 6 -enchantingly 6 -multimillions 6 -frahn 6 -5:17 6 -drisana 6 -unpressurized 6 -yigan 6 -spicuzza 6 -rememberance 6 -abbreviating 6 -kristene 6 -wafik 6 -maracay 6 -jiping 6 -bighearted 6 -shinned 6 -28s 6 -danneels 6 -jouret 6 -ilkkaracan 6 -caloosahatchee 6 -azema 6 -hitchcockian 6 -boggie 6 -cauna 6 -jesudason 6 -horseguard 6 -aurelian 6 -russian-trained 6 -shihoko 6 -margaretta 6 -paroy 6 -strather 6 -14-men 6 -xxv 6 -nature-based 6 -30-17 6 -kooistra 6 -highly-trafficked 6 -puenzo 6 -kasems 6 -camacha 6 -116000 6 -balen 6 -baler 6 -lovelle 6 -hrsc 6 -miami-born 6 -camms 6 -eudy 6 -immitation 6 -es-335 6 -paid-off 6 -cangialosi 6 -emoov.co.uk 6 -revengeful 6 -@melissastetten 6 -'64 6 -bazso 6 -33,200 6 -hanock 6 -edesc 6 -re-admission 6 -novakovich 6 -schefft 6 -prefacing 6 -tregony 6 -segsations 6 -rage-type 6 -furnish-john 6 -munafo 6 -swindell 6 -vertegaal 6 -115-pound 6 -78,400 6 -litter-pickers 6 -shonbeh 6 -nptn 6 -thinkmoney 6 -narcomensajes 6 -clean-burning 6 -bekki 6 -oup 6 -harlem-based 6 -now-lost 6 -super-station 6 -11,780 6 -gmfrs 6 -13,418 6 -aliamin 6 -wrixon 6 -sanoah 6 -memomi 6 
-drinking-related 6 -katayama 6 -pro-iraqi 6 -badding 6 -disco-themed 6 -diverter 6 -topp 6 -pearce-higgins 6 -bollocks 6 -weidemann 6 -itv3 6 -barstarzz 6 -downview 6 -503,000 6 -davoult 6 -smtv 6 -n17 6 -vulvas 6 -psychotically 6 -petrol-soaked 6 -jeta 6 -96mm 6 -12-4 6 -latinos/hispanics 6 -ilsan 6 -fifty-fifty 6 -dejon 6 -female-to-female 6 -2,704 6 -2,708 6 -buzziest 6 -mxs-rh 6 -embayment 6 -merseysider 6 -dionisi 6 -ulugbek 6 -gold-lined 6 -dayroom 6 -chantry 6 -full-timers 6 -placemen 6 -gannascoli 6 -hard-copy 6 -chaminda 6 -k'inich 6 -bernardinis 6 -23,600 6 -full-priced 6 -motz 6 -folster 6 -radionuclide 6 -joff 6 -isgur 6 -bensusan 6 -deblaquiere 6 -sabe 6 -filchenov 6 -bena 6 -airida 6 -frizzby 6 -jeovanni 6 -zuercher 6 -megapolis 6 -mono-unsaturated 6 -muzaffargarh 6 -benghazi-related 6 -wfmz-tv 6 -daire 6 -cutaways 6 -pappalardi 6 -language-learning 6 -127-page 6 -police/media 6 -everts 6 -arkanas 6 -hannu 6 -allonby 6 -ths 6 -dunney 6 -dambulla 6 -tchekhanovets 6 -pre-register 6 -griffes 6 -sutras 6 -girlishness 6 -metopic 6 -volcano-like 6 -nylander 6 -tecoma 6 -sun-trap 6 -spi 6 -cobreloa 6 -shopbreaking 6 -fatoumata 6 -www.prodirectsoccer.com 6 -tea-licious 6 -benett 6 -govinden 6 -yachimovich 6 -numismatist 6 -iosac 6 -fontenay-aux-roses 6 -a1a 6 -meth-for-sex 6 -feeroz 6 -folad 6 -alvimedica 6 -copelands 6 -avansino 6 -long-mooted 6 -rikje 6 -32,256 6 -tarbet 6 -samac 6 -biobank 6 -zarco 6 -signficant 6 -kafon 6 -eddi 6 -unintimidating 6 -yabaolu 6 -iraq-iran 6 -112m 6 -adam4adam.com 6 -trende 6 -blomqvist 6 -tymal 6 -self-isolation 6 -chrisette 6 -super-rat 6 -abele 6 -ivancroft 6 -red-tiled 6 -sportiness 6 -benoits 6 -18.40 6 -55-48 6 -under-recording 6 -defensa 6 -rudlin 6 -full-field 6 -sucher 6 -pk12 6 -gonesse 6 -shema 6 -sanya-jeet 6 -sclerotherapy 6 -chaddesley 6 -netherfields 6 -hiccupping 6 -kaserne 6 -bondara 6 -oteng 6 -kwes 6 -helck 6 -14.56 6 -bereny 6 -eldercare 6 -blue-water 6 -1,209 6 -camaron 6 -van-den 6 -admilton 6 -cyber-sex 6 -stainsby 6 -sherzinger 6 -debt-related 6 -adze 6 -androgel 6 -demobilizing 6 -kang-kuk 6 -recommunity 6 -dump-at-sea 6 -60-foot-tall 6 -concious 6 -ecoterra 6 -tyus 6 -sodomising 6 -zarb 6 -coronae 6 -thrs 6 -iwanicka 6 -dawlas 6 -32.65 6 -pratomtang 6 -wish-lists 6 -pentaceratops 6 -shinoona 6 -lamolla 6 -fifth-wicket 6 -baalbec 6 -al-mujahideen 6 -lickfold 6 -veits 6 -bell-newman 6 -solid-fuel 6 -ggw 6 -ggi 6 -agerpres 6 -elsad 6 -ex-worker 6 -kawaguchi 6 -juraci 6 -d'administration 6 -tolfree 6 -soteri 6 -.2004 6 -schoppink 6 -bacta 6 -fuc 6 -pxm 6 -coppernoll 6 -gastro-pubs 6 -wtnh-tv 6 -faymann 6 -sibsons 6 -non-vintage 6 -microhome 6 -soulsville 6 -medium-format 6 -hallworth 6 -thickset 6 -al-wasat 6 -76kg 6 -broad-reaching 6 -mass-marketed 6 -equal-opportunity 6 -30,000-a-month 6 -starwars.com 6 -vanommen 6 -performance-driven 6 -meterologists 6 -climate-sceptic 6 -1885-1889 6 -photo-shop 6 -soft-rock 6 -codina 6 -soobrazitelny 6 -new-builds 6 -gigis 6 -145lbs 6 -re-birth 6 -zawa 6 -gudelj 6 -a514 6 -hdev 6 -9min 6 -ditchello 6 -dieing 6 -suffices 6 -taniya 6 -swimshorts 6 -munshiganj 6 -mitchell-leef 6 -abkhaz 6 -www.zonecoveragefootballshow.com 6 -ethopia 6 -tryline 6 -sarhadi 6 -tiguan 6 -prerecession 6 -dilorenzo 6 -out-paced 6 -sokht 6 -meaninglessness 6 -langemark 6 -airspeeds 6 -sakon 6 -annelise 6 -zinkhans 6 -rosenstock 6 -autoslash 6 -travel24.com 6 -@loveliteuk 6 -i-a 6 -berker 6 -sanai 6 -elizabeth-class 6 -chairman-elect 6 -casteix 6 -sobashima 6 -riggsbee 6 -blatchley 6 -machete-armed 6 -carns 6 -usu 6 
-al-rasheed 6 -belabored 6 -documentary-makers 6 -woljciech 6 -abstraktes 6 -rsmas 6 -tje 6 -5ft2 6 -tomoz 6 -jawahar 6 -37ins 6 -gumbrecht 6 -southcom 6 -pavlovitz 6 -glacier-capped 6 -al-alami 6 -shelvy 6 -maoca 6 -gaede 6 -nazeris 6 -out-selling 6 -starstreak 6 -kob.com 6 -buildanest.com 6 -656-page 6 -non-monetary 6 -cinderella-themed 6 -m.k.j. 6 -lowdham 6 -teneues 6 -hanlon-catlow 6 -beuc 6 -lozach 6 -demonology 6 -mousset 6 -methil 6 -1914-15 6 -damaraland 6 -on-style 6 -plain-coloured 6 -vims 6 -waterkeeper 6 -tedxeuston 6 -agganis 6 -naeroyfjord 6 -handsley 6 -witholding 6 -aguilero 6 -angelov 6 -notalone.gov 6 -krockenberger 6 -privae 6 -skirball 6 -full-grain 6 -hypovolemic 6 -0.94 6 -half-measure 6 -samano 6 -kashin-beck 6 -ingeominas 6 -rabczewska 6 -36-0 6 -euharamiyida 6 -street-naming 6 -pin-pointing 6 -linaker 6 -adnani 6 -91st-minute 6 -dainesha 6 -pulseless 6 -over-elaborate 6 -cawdor 6 -rybus 6 -schaumburg 6 -500sq 6 -maragogi 6 -sarraj 6 -talhelm 6 -ultra-clean 6 -224-foot 6 -mizan 6 -tycho 6 -revina 6 -swintek 6 -loveline 6 -biswa 6 -spoon-feeding 6 -1,606 6 -1,607 6 -airtours 6 -hermansson 6 -machtley 6 -defriend 6 -burkley 6 -realy 6 -maides 6 -claire.carter@mailonline.co.uk 6 -ouseph 6 -m-ch 6 -elterwater 6 -hitesh 6 -3,436 6 -demarais 6 -catch-and-drive 6 -jemmott 6 -lichtenberger 6 -jirachareonkul 6 -gerhardt 6 -linnéa 6 -plodder 6 -in-stadium 6 -beaverhead-deerlodge 6 -onlf 6 -movie-set 6 -lavold 6 -2,405 6 -2,402 6 -gernaat 6 -133.1 6 -garrotte 6 -pizzaruso 6 -dangeours 6 -bailin 6 -5mbps 6 -cowx 6 -father-in 6 -pleached 6 -estudios 6 -yurman 6 -arnesen 6 -pedder 6 -tumblewood 6 -aspers 6 -unimpaired 6 -erhman 6 -wahib 6 -calafate 6 -corrigan-belajonas 6 -debre 6 -alveoli 6 -luyt 6 -zt 6 -tobolsk 6 -delineating 6 -us-south 6 -2,616 6 -dunscombe 6 -3-axis 6 -tea-light 6 -marder 6 -six-stone 6 -caldwell-stone 6 -khudair 6 -lechia 6 -home-making 6 -campfield 6 -mtor 6 -moamer 6 -remainders 6 -moisture-producing 6 -bussmann 6 -judgment-free 6 -talev 6 -gang-bangers 6 -tomtato 6 -sienna-lilly 6 -cassese 6 -sowe 6 -four-stop 6 -lng-ius 6 -monsalve 6 -fnc 6 -courtiour 6 -rabies-like 6 -makh 6 -2000-2012 6 -harmonically 6 -edwardstone 6 -tabulations 6 -mid-devon 6 -scutari 6 -2,085 6 -vineet 6 -simpletons 6 -70-hour 6 -markwayne 6 -vanbrugh 6 -suhrawardy 6 -103mph 6 -sambath 6 -contol 6 -lensbury 6 -brassknocker 6 -kluitenberg 6 -1,475 6 -club-wielding 6 -wp7 6 -newly-made 6 -harl 6 -kirkum 6 -biggish 6 -louise-marie 6 -daouda 6 -35ml 6 -sandy-bottomed 6 -chatuchak 6 -diapering 6 -29-30 6 -trentini 6 -levered 6 -rigamer 6 -spaceballs 6 -grantchester 6 -karkemish 6 -arcebal 6 -kocaeli 6 -pichot 6 -soon-to-open 6 -49,600 6 -motaparthy 6 -38-pound 6 -attock 6 -red-heads 6 -iliffes 6 -hannis 6 -belhanda 6 -kharzei 6 -guldgubbars 6 -theanine 6 -visoth 6 -tweedledum 6 -zocor 6 -whiteson 6 -surf-a-thon 6 -fotoflexer 6 -looners 6 -800kg 6 -a358 6 -lohmann 6 -bumper-sticker 6 -govind 6 -nangang 6 -65,000-per-week 6 -hung-over 6 -kvaratskhelia 6 -shirt-fronting 6 -100cameras 6 -lantapan 6 -howorth 6 -casebook 6 -stanningley 6 -skone-roberts 6 -beardie 6 -jar-jar 6 -asyut 6 -walbank 6 -straight-backed 6 -masucci 6 -hatjani 6 -overstaffed 6 -bodek 6 -2009chelsea 6 -jawas 6 -jsm 6 -faisa 6 -lemoore 6 -bunawan 6 -mouhamud 6 -interjecting 6 -continental/united 6 -cascavel 6 -nihon 6 -cuppy 6 -murf-1 6 -alhacen 6 -14-win 6 -delamar 6 -xyrem 6 -blow-dryer 6 -penrhyndeudraeth 6 -tatenda 6 -skidgel 6 -rapidgate 6 -145ft 6 -bermejo 6 -zinczenko 6 -un-happy 6 -makiadi 6 -cleworth 6 
-post-luis 6 -rouland 6 -brinsford 6 -hemmerde 6 -kambale 6 -9b 6 -niseko 6 -aparcana 6 -liverpoolsep 6 -warblington 6 -fieldsend 6 -3,175 6 -flightcompensation.com 6 -pierogi 6 -letiza 6 -havanese 6 -olg 6 -olo 6 -ols 6 -olx 6 -gyasi 6 -unionised 6 -tullie 6 -muehlhausen 6 -lower-risk 6 -350mg 6 -runnalls 6 -internationalists 6 -bujol 6 -postyourtest.com 6 -melchiot 6 -1.4-inch 6 -cheyer 6 -lenko 6 -adreena 6 -honsaker 6 -hunsbury 6 -wmctv.com 6 -vir 6 -336.4 6 -para-methoxyamphetamine 6 -flyht 6 -facebook-related 6 -cotham 6 -hajjaji 6 -5.7-liter 6 -hoti 6 -tumini 6 -u.n.-protected 6 -gawped 6 -skorupa 6 -roomstanding 6 -mahdaly 6 -iraqi-turkish 6 -a-chill-us 6 -asikainen 6 -saundersfoot 6 -legation 6 -vietnamnet 6 -charente-maritime 6 -re-connected 6 -slevinsky 6 -bell-bottoms 6 -541,250 6 -inter-korea 6 -mummifying 6 -perren 6 -marinkovic 6 -khamanei 6 -apprise 6 -95.4 6 -95.6 6 -kaoru 6 -coravin 6 -jaffay 6 -#nofilter 6 -kashdan 6 -gowens 6 -second-season 6 -j20 6 -mg/ml 6 -shah-klorfine 6 -zanten-hyllner 6 -ylva 6 -herbed 6 -herber 6 -r&c 6 -hazlet 6 -dabit 6 -29,000-a-year 6 -demain 6 -swooshing 6 -fahnestock 6 -percutaneous 6 -real-word 6 -gbomo 6 -rwcl 6 -enticingly 6 -ypacarai 6 -kalaitzaki 6 -51,000-tonne 6 -80014 6 -spoonbill 6 -kniazev 6 -52.86 6 -tiesha 6 -ribi 6 -26-16 6 -26-18 6 -226million 6 -jamea 6 -ls516 6 -mudbusters 6 -auto-brewery 6 -29ins 6 -decompressing 6 -360-foot 6 -104-87 6 -sukiennik 6 -borodulina 6 -just-announced 6 -picaboo 6 -printings 6 -edmead 6 -haggas 6 -simey 6 -35,000-word 6 -kiddey 6 -modjdehi 6 -under-nourished 6 -diphallia 6 -doorbal 6 -1,853 6 -1,851 6 -photo-bomb 6 -talbots 6 -campayo 6 -piercingly 6 -inducting 6 -autons 6 -adofo 6 -pitcavage 6 -bedstead 6 -gaquan 6 -counter-programming 6 -dogon 6 -well-rewarded 6 -sillman 6 -55287 6 -radjenovic 6 -issawi 6 -klick 6 -cadishead 6 -heremaia 6 -205,120 6 -grimster 6 -ec225 6 -crackup 6 -tipi 6 -h&k 6 -persieing 6 -volos 6 -patosi 6 -louigens 6 -kokiri 6 -below-cost 6 -7,020 6 -glaciares 6 -wide-left 6 -over-physical 6 -manta-on-call 6 -4-under 6 -ziemczonek 6 -@michaeldelzotto 6 -arnal 6 -barkas 6 -nyirenda 6 -blansett 6 -1,062 6 -gladieux 6 -cogito 6 -ekgs 6 -ballwin 6 -personality-driven 6 -sèvres 6 -konectbus 6 -sportsmanlike 6 -48275 6 -puked 6 -time-traveller 6 -vespasian 6 -kolsch 6 -primis 6 -maros 6 -close-fought 6 -examinees 6 -ray-garcia 6 -m54 6 -ex-coronation 6 -lashkah 6 -mascott 6 -some1 6 -oced 6 -squinching 6 -32gg 6 -tancrède 6 -sautman 6 -dualit 6 -rugamba 6 -jaheem 6 -manspreaders 6 -koola 6 -start-to-finish 6 -blaydon 6 -wider-ranging 6 -mcanulty 6 -face-pulling 6 -paria 6 -multifoetal 6 -phipson 6 -depakote 6 -hot-footing 6 -turino 6 -paduka 6 -pinpricks 6 -kalaeloa 6 -kfyi 6 -minsheng 6 -skynanny.net 6 -turbidity 6 -cities/we 6 -right-angles 6 -4.8-magnitude 6 -gourami 6 -65-mph 6 -eidm 6 -@uklabour 6 -self-rated 6 -refa'a 6 -9.02 6 -sihamoni 6 -well-hydrated 6 -quryna 6 -manpreet 6 -drigg 6 -kisanak 6 -shadrach 6 -coldlike 6 -sherborn 6 -caistor 6 -210kg 6 -sound-bite 6 -berkely 6 -butterkist 6 -11.90 6 -me.i 6 -fonk 6 -fone 6 -nutbush 6 -5,001 6 -muthan 6 -decharles 6 -nargas 6 -mini-game 6 -mini-vacation 6 -kirkbymoorside 6 -prisum 6 -quarter-marking 6 -condescend 6 -trav 6 -trax 6 -simpsonized 6 -aristi 6 -juxtopia 6 -homola 6 -murjatmodjo 6 -bopa-rai 6 -ebc 6 -el-ashmunein 6 -amarteifio 6 -eastbury 6 -mynor 6 -screen-printed 6 -maupiti 6 -labbadia 6 -ex-treasury 6 -slideshare 6 -1995-97 6 -thoroughgoing 6 -matras 6 -xeljanz 6 -wynnton 6 -www.healthcare.gov 6 -rehovot 6 
-stolze 6 -e-money 6 -hamez 6 -2,723 6 -2,720 6 -hendrikje 6 -ruban 6 -red-roofed 6 -pieslor 6 -sweet-talking 6 -sanu 6 -otok 6 -thickener 6 -violence-ravaged 6 -india-nepal 6 -syringed 6 -1480s 6 -farren-price 6 -tiësto 6 -minoxidil 6 -speddings 6 -mozarteum 6 -woodlouse 6 -ex-spouses 6 -ladies-in-waiting 6 -spellista 6 -anti-coalition 6 -frizzy-haired 6 -johnson-weiner 6 -cargo-carrying 6 -vrouwe 6 -epithelium 6 -36-32 6 -mazzari 6 -18-3 6 -agresta 6 -empathises 6 -non-partner 6 -desert-dwelling 6 -’10 6 -bryie 6 -househusband 6 -tnr 6 -leora 6 -hamour 6 -ex-boxers 6 -freshens 6 -benera 6 -food-allergic 6 -clamming 6 -silvertip 6 -43c 6 -#loveislove 6 -full-frame 6 -3.77 6 -ccm3 6 -jasinda 6 -schreier 6 -pre-occupation 6 -phone-like 6 -730-acre 6 -kempson 6 -cruisin 6 -2021-22 6 -clear/white 6 -donachy 6 -black-draped 6 -metailler 6 -biorobotics 6 -malcontents 6 -schain 6 -end-triassic 6 -schaik 6 -zarihana 6 -squitieri 6 -gop-run 6 -oberpfalz 6 -waitrose.com 6 -wellby 6 -kimora 6 -vittachi 6 -spear-thrower 6 -spoon-shaped 6 -jingzhou 6 -baumler 6 -indu 6 -bigotries 6 -ameritrade 6 -suicide-by-cop 6 -poulan 6 -remploy 6 -pre-menopausal 6 -solinsky 6 -milders 6 -nisour 6 -giorgi-guarnieri 6 -oylear 6 -duckbill 6 -harshani 6 -pidg 6 -gorantla 6 -adamick 6 -rasouli-arsala 6 -adamjee 6 -mixed-martial 6 -zapu 6 -tastiness 6 -onement 6 -875million 6 -work-issued 6 -motty 6 -voluntary-aided 6 -keisling 6 -spigelman 6 -pressings 6 -profit-seeking 6 -codifies 6 -kiprono 6 -garching 6 -zoltar 6 -highly-sophisticated 6 -euthanizes 6 -cianciullo 6 -wainaina 6 -selahattin 6 -refeere 6 -wittke 6 -etawah 6 -109-102 6 -falsifies 6 -non-binary 6 -1,700-acre 6 -berghofer 6 -moley 6 -tegtmeier 6 -fwm 6 -9am-8pm 6 -aimspro 6 -jawzjan 6 -now-2-year-old 6 -allopregnanolone 6 -super-tight 6 -kastrinos 6 -ballot-box 6 -eithad 6 -eusa 6 -huekler 6 -uncharismatic 6 -pinch-hitter 6 -molly-coddled 6 -ewaso 6 -grendel 6 -merigo 6 -r.b. 6 -grenstad 6 -walczuch 6 -south-bound 6 -whatsyourprice.com 6 -cornets 6 -56.9 6 -walkergate 6 -inexpert 6 -suctioned 6 -224sqft 6 -390m 6 -xzibit 6 -f.a.a. 
6 -cibc 6 -dilwar 6 -black-and-gold 6 -kanaski 6 -amchide 6 -18159 6 -half-used 6 -top-seven 6 -meshach 6 -28-26 6 -59120 6 -pit-wall 6 -obama-boehner 6 -brownites 6 -bumptious 6 -gatts 6 -huotilainen 6 -eagnews 6 -nomophobic 6 -picklo 6 -arrow-shaped 6 -gadloch 6 -seann 6 -tweten 6 -imtech 6 -syariah 6 -usbwa 6 -campaigning/counselling 6 -panavia 6 -hemmington 6 -cronauer 6 -Ángela 6 -fantasma 6 -milesi 6 -arsalas 6 -59-8 6 -raspbian 6 -39-12 6 -monsal 6 -arms-control 6 -internet-only 6 -pararoos 6 -carsickness 6 -ishiuyama 6 -vineeta 6 -alumina 6 -ocean-facing 6 -laas 6 -segiet 6 -uup 6 -stephensen 6 -aaugh 6 -fruehwald 6 -supercups 6 -damita 6 -samsungs 6 -31.49 6 -@mtv 6 -pug-lover 6 -mcgibbon 6 -karlstad 6 -changefifa 6 -afghan-americans 6 -psy-ops 6 -becirovic 6 -jasperse 6 -check-list 6 -vyas 6 -f60 6 -martinetti 6 -crime-infested 6 -barrowfield 6 -1291 6 -1296 6 -water-stained 6 -saso 6 -wenona 6 -baldonado 6 -years-to-life 6 -gindlesberger 6 -de-icer 6 -materno 6 -enriquez-ominami 6 -tikhonov 6 -lyonette 6 -barrada 6 -yardwork 6 -work-force 6 -wickers 6 -conero 6 -longparish 6 -right-of-center 6 -wallison 6 -singerman 6 -svanberg 6 -well-like 6 -726,000 6 -kimelberg 6 -neopolitan 6 -19lb 6 -vanoc 6 -yellow-orange 6 -palatinate 6 -life-loving 6 -fishburn 6 -derricos 6 -pichaya 6 -shoe-shining 6 -two-foot-wide 6 -okonsky 6 -retinoid 6 -cross-sectarian 6 -annel 6 -micera 6 -persbrandt 6 -perfick 6 -udalls 6 -vechiola 6 -tatted 6 -sennheiser 6 -determined-looking 6 -velenje 6 -cejka 6 -23-bed 6 -superstrong 6 -goia 6 -emblazon 6 -yuliy 6 -8,150 6 -country-pop 6 -demon-like 6 -heyjo 6 -pickrodt 6 -karpeles 6 -zinca 6 -issf 6 -all-of-the-above 6 -23in 6 -lindmeir 6 -o'bryne 6 -23.98 6 -sexualises 6 -port-of-call 6 -gullo 6 -taffaro 6 -suppertime 6 -benthic 6 -66-foot 6 -asaiante 6 -rustington 6 -zipperman 6 -surowiecki 6 -hartshill 6 -staplehurst 6 -sebago 6 -badrishah 6 -nessel 6 -arar 6 -tejano 6 -nailedit 6 -guiltless 6 -jd.com 6 -2,427 6 -10.90 6 -10.93 6 -short-hand 6 -home-testing 6 -well-scripted 6 -montévrain 6 -8ft-high 6 -jenell 6 -mueller-technik 6 -fingerstyle 6 -five-seater 6 -race/ethnicity 6 -9:39 6 -egwuekwe 6 -nmt 6 -landhi 6 -non-porous 6 -non-ionizing 6 -swakopmund 6 -ex-civil 6 -pradia 6 -drippy 6 -khadi 6 -ravensbrück 6 -then-owner 6 -hml2 6 -nantambu 6 -fozia 6 -theresienwiese 6 -dobkin 6 -ondracek 6 -pontcysyllte 6 -laible 6 -zeidenberg 6 -baxterstorey 6 -non-living 6 -1,348 6 -found-footage 6 -laundry-list 6 -wowforreel 6 -nicollet 6 -haselin 6 -public-housing 6 -nordskog 6 -syambhu 6 -kyw-tv 6 -non-registered 6 -ukiyo 6 -chobham 6 -chvrches 6 -10-times 6 -dontesk 6 -kunle 6 -120-minute 6 -near-collisions 6 -palaeobiologist 6 -gameloft 6 -antonenko 6 -hitchock 6 -56,008,113 6 -myddelton 6 -seniang 6 -eliasberg 6 -melroy 6 -pflag 6 -cutt 6 -xiangcheng 6 -helenio 6 -showoff 6 -31-7 6 -17-man 6 -ciego 6 -karlstrand 6 -adalbert 6 -sikkel 6 -tarhouna 6 -sunblocks 6 -alliant 6 -nowacka 6 -milkomeda 6 -moneybox 6 -29-10 6 -not-yet-released 6 -khandahar 6 -mortier 6 -gocer 6 -casinelli 6 -nominators 6 -suspcious 6 -amalgamating 6 -,25 6 -anti-interventionist 6 -code-sharing 6 -80-meter 6 -gambella 6 -kanyce 6 -bhandara 6 -non-farm 6 -afs 6 -salsbery 6 -conquista 6 -tudan 6 -dordain 6 -cubie 6 -yehia 6 -manship 6 -misspells 6 -bootland 6 -jean-daniel 6 -bootstrap 6 -sophocles 6 -benina 6 -rusbatch 6 -horror-comedy 6 -tennis-ball 6 -geminis 6 -try-out 6 -kukula 6 -ultrapixel 6 -kettleball 6 -fardc 6 -mencia 6 -streetfootballworld 6 -hammerschmidt 6 -myalgia 6 -vifriends 6 
-401ks 6 -turkman 6 -sirius/xm 6 -bodyshell 6 -co-plaintiff 6 -#hasjustinelandedyet 6 -manwood 6 -sandstones 6 -gretsky 6 -step-grandchildren 6 -dustings 6 -top-range 6 -decilitre 6 -game-players 6 -free-play 6 -style-savvy 6 -four-state 6 -aciduria 6 -carolina-born 6 -aleckna 6 -psdb 6 -afzan 6 -wetherington 6 -kokang 6 -kowawisarat 6 -1,306 6 -skiddaw 6 -day-in-day-out 6 -lambden 6 -saint-pierre 6 -then-european 6 -dog-fight 6 -specially-engineered 6 -benhall 6 -dedek 6 -compering 6 -tebunginako 6 -two-cd 6 -khnl-tv 6 -bomgardner 6 -3,114 6 -3,118 6 -6.3-litre 6 -loos-en-gohelle 6 -firdous 6 -jale 6 -brickhill 6 -indepedent 6 -holabird 6 -radiophysique 6 -menudo 6 -fast-bowling 6 -agna 6 -kaltenegger 6 -hippogriff 6 -50min 6 -non-genetically 6 -degradations 6 -jirí 6 -72098 6 -bleiberg 6 -mattingley 6 -mindshare 6 -heybeliada 6 -petrea 6 -platoon-mates 6 -saqwan 6 -report-style 6 -halbig 6 -jurrasic 6 -logina 6 -mancunia 6 -endreson 6 -47.49 6 -bakaraha 6 -four-birdie 6 -light-show 6 -kold 6 -koli 6 -asefa 6 -belyaeva 6 -christofaro 6 -parallelism 6 -nuytco 6 -promethean 6 -denialism 6 -homelink 6 -termeh 6 -bush-gore 6 -swype 6 -wilburys 6 -nyongbyon 6 -croituru 6 -climbié 6 -zolkiwsky 6 -yourspins.com 6 -khwaja 6 -reddi 6 -henderon 6 -parrilla 6 -ziks 6 -vallees 6 -baldivis 6 -bree'anna 6 -egg-white 6 -1548 6 -khaizaran 6 -blumls 6 -blame-game 6 -ellerman 6 -ciaccio 6 -xatar 6 -gymraeg 6 -house-guest 6 -blue-colored 6 -aquatina 6 -igene 6 -pulkovo 6 -michoud 6 -life-coaching 6 -slidin 6 -fish-finder 6 -einkorn 6 -makhasi 6 -shutterbug 6 -didactic 6 -galbraiths 6 -rebkong 6 -.37 6 -chagan 6 -lattara 6 -under-occupying 6 -plate-like 6 -goghs 6 -lenticularis 6 -betleski 6 -felinheli 6 -inhibitory 6 -lye-laced 6 -plasencia 6 -xiangyan 6 -anticorruption 6 -kaemba 6 -peritoneal 6 -1,450-foot 6 -42-foot 6 -houstonian 6 -57-second 6 -69-yard 6 -1,876 6 -71mph 6 -nzooh 6 -thalasso 6 -skorka 6 -y1 6 -y6 6 -obegi 6 -yh 6 -ys 6 -ruabon 6 -re-injure 6 -jodeci 6 -arctic-like 6 -mehdy 6 -daszak 6 -laskoski 6 -40-kilometer 6 -alvis 6 -ephrian 6 -brucey 6 -1531 6 -1986-2005 6 -ifra 6 -nosecone 6 -affectations 6 -house-by-house 6 -bucceroni 6 -guttierrez 6 -golcar 6 -fire-eating 6 -undereye 6 -typhoon-battered 6 -tûranor 6 -attaway 6 -szavay 6 -79ft 6 -margallo 6 -100,000-litre 6 -djeugoue 6 -boardgame 6 -da'aboth 6 -re-assemble 6 -bottrell 6 -malmierca 6 -4,404 6 -français 6 -internews 6 -hukporti 6 -faubus 6 -breville 6 -tander 6 -warbucks 6 -sairanen 6 -hifikepunye 6 -pakaya 6 -5:54 6 -miyar 6 -m79 6 -6.5-acre 6 -osseo 6 -herodyon 6 -grape-growing 6 -santissima 6 -ly-au 6 -223-page 6 -much-disputed 6 -hochstetter 6 -13-track 6 -state-designate 6 -dhurringile 6 -hydrogen-dominated 6 -bardabunga 6 -sodbury 6 -shinnar 6 -milia 6 -bertaux 6 -rumold 6 -#mylapd 6 -dubovitskaya 6 -soronko 6 -chatpong 6 -maryjo 6 -pantomimed 6 -mehdipour 6 -ruchi 6 -street-based 6 -borivali 6 -classism 6 -slanderer 6 -cycler 6 -coast-versus-west 6 -chappelow 6 -homicide-suicide 6 -nevyansk 6 -murder-free 6 -hellosociety 6 -vueltiao 6 -om/one 6 -skagway 6 -sascoc 6 -strand-feeding 6 -wdr 6 -satcher 6 -karstens 6 -grotius 6 -post-savile 6 -belinelli 6 -hakskeen 6 -schurkova 6 -solucar 6 -mylene 6 -ponor 6 -9.29 6 -1/16th 6 -sourvelises 6 -84,451,320 6 -unitedwest 6 -bariyarpur 6 -logsdon 6 -froehling 6 -150ft-wide 6 -love-nest 6 -aldourie 6 -wilayat 6 -waimoku 6 -1/8th 6 -mid-evening 6 -gillanders 6 -tante 6 -rabchenko 6 -stagehand 6 -anti-nbc 6 -chocolate-dipped 6 -ajeet 6 -strike-slip 6 -postgenomic 6 -breathalyse 6 -bham 6 
-spread-out 6 -arrobio 6 -reisz 6 -whiskys 6 -burkhas 6 -london-style 6 -597,000 6 -piechowski 6 -non-thai 6 -usb-style 6 -florence-based 6 -10cc 6 -fresh-air 6 -namadamu 6 -hoteltonight 6 -delpech 6 -rustem 6 -cscc 6 -rabboni 6 -trebon 6 -panamericana 6 -fressange 6 -madron 6 -mutton-chopped 6 -243.6 6 -bureaucratically 6 -magreb 6 -yankel 6 -sleep-deprivation 6 -pentene 6 -kvue-tv 6 -stantz 6 -d-wa 6 -shoebill 6 -sulcus 6 -zakari 6 -tazeem 6 -hudig 6 -downdetector.com 6 -wernersville 6 -hamzat 6 -swisscom 6 -piti 6 -28bn 6 -harmid 6 -1095 6 -feringa 6 -dingos 6 -cat-sized 6 -al-kibsi 6 -set-points 6 -melan 6 -parche 6 -demerara 6 -foscam 6 -part-sedan 6 -masaï 6 -100db 6 -mud-hut 6 -hutley 6 -rubdown 6 -smallz 6 -krankies 6 -physiologists 6 -spiked-heel 6 -koukliati 6 -borror 6 -shapeways.com 6 -1,294 6 -timochenko 6 -supertalent 6 -ishfaq 6 -helwan 6 -@username 6 -carrbridge 6 -passholders 6 -upjohn 6 -vidalin 6 -jabin 6 -antiperspirant 6 -reul 6 -keygene 6 -risalah 6 -manslaughters 6 -graterford 6 -hughes-games 6 -niemczyk 6 -cachexia 6 -36-16 6 -36-17 6 -travel-weary 6 -annibali 6 -phog 6 -henggeler 6 -poddy 6 -persis 6 -mickleson 6 -tourino 6 -colourisation 6 -tatarescu 6 -tessi 6 -tolsma 6 -painesville 6 -egeberg 6 -boding 6 -devilry 6 -edwige 6 -myford 6 -mottinger 6 -mannie 6 -tuges 6 -210-foot 6 -abdiweli 6 -77mph 6 -lapham 6 -1989-93 6 -eventuate 6 -kason 6 -bucklin 6 -prizefighters 6 -c-grade 6 -v-necks 6 -nouman 6 -rj100 6 -dogra 6 -pooladi 6 -neeman 6 -ctip2 6 -seamonster 6 -six-round 6 -rescaldani 6 -coracoes 6 -myers-walls 6 -guardbridge 6 -polemical 6 -grain-finished 6 -1,200-mile 6 -hagelberg 6 -bugueno 6 -porcelains 6 -campaign-related 6 -yetkin 6 -haradh 6 -collegehumor 6 -cornici 6 -efemini 6 -bmx-style 6 -yorkshires 6 -20,900 6 -napf 6 -anti-authority 6 -sedates 6 -flunkeys 6 -leisurecorp 6 -regrows 6 -kimmings 6 -b-17f 6 -99.1 6 -lip-synch 6 -ovulated 6 -mcarthurs 6 -25-foot-long 6 -freekennow.com 6 -3,970 6 -usaa 6 -egorova 6 -uluru-kata 6 -caiazzo 6 -harakat-ul-jihad-islami 6 -buttersoft 6 -olsens 6 -hobnailed 6 -emiworo 6 -qdd 6 -dighton-andrews 6 -lameloise 6 -ramgoolam 6 -shorebird 6 -twitcher 6 -nordlund 6 -resende 6 -girlforward 6 -xigui 6 -canyoneer 6 -goitom 6 -salahaddin 6 -eberhart 6 -halon 6 -haloe 6 -ex-felon 6 -askegard 6 -criselda 6 -top-winning 6 -lyfe 6 -e-government 6 -yegge 6 -yassky 6 -four-month-long 6 -palcaraju 6 -roughhouse 6 -screpante 6 -wsl 6 -annihilator 6 -bodged 6 -bredernitz 6 -misstating 6 -hubig 6 -munchbar 6 -141m 6 -ulrome 6 -moleskin 6 -sorm 6 -dungeoneer 6 -masvidal 6 -thermogenesis 6 -co-organised 6 -493,289 6 -acid-based 6 -marinela 6 -gladd 6 -macrosty 6 -acid-attack 6 -ballston 6 -apan 6 -ciaglia 6 -gato 6 -hyoksin 6 -bromadiolone 6 -120kph 6 -lewis-style 6 -weren 6 -shafrir 6 -resubmitting 6 -dodrill 6 -ed-d68 6 -gyong 6 -baggywrinkle 6 -1-800-red-cross 6 -rankin-bass 6 -cacheris 6 -woodburne 6 -schuessler 6 -bicar 6 -us500 6 -pastora 6 -arpornkaew 6 -uws 6 -conciliator 6 -tokyo-mitsubishi 6 -inelastic 6 -rossius 6 -made.com 6 -fasan 6 -ottolini 6 -theobalds 6 -gay-loving 6 -forkful 6 -super-connected 6 -falber 6 -putron 6 -donio 6 -ornamentals 6 -piggybacked 6 -algemeiner 6 -indie-rock 6 -solemnized 6 -ciobo 6 -agronomy 6 -tolia 6 -thrones-themed 6 -moex 6 -moec 6 -grabsky 6 -lampman 6 -shapeshifting 6 -barnacle-covered 6 -superflat 6 -obscurities 6 -akha 6 -shackels 6 -know-it-alls 6 -dvora 6 -153.1 6 -zlotys 6 -ceiling-high 6 -checkerspot 6 -kneibler 6 -300,000,000 6 -crime-prevention 6 -39g 6 -robenstein 6 -re-check 6 
-taymouth 6 -@australia 6 -odifreddi 6 -trikala 6 -unequipped 6 -kentridge 6 -milou 6 -voldermort 6 -passley-quesada 6 -11th-floor 6 -taishan 6 -lonres 6 -governors-general 6 -matayoshi 6 -xunantunich 6 -zylva 6 -95,000-capacity 6 -cs100 6 -pallette 6 -kobre 6 -scr 6 -catnaps 6 -gheller 6 -forba 6 -late-19th 6 -belloumi 6 -celeron 6 -fire-power 6 -devourer 6 -shrops 6 -yitzchak 6 -low-technology 6 -caister-on-sea 6 -polychrome 6 -digits2widgets 6 -pokorny 6 -rscg 6 -glass-bottom 6 -stannah 6 -free-climbing 6 -zwelling 6 -gummidge 6 -ginobli 6 -arc4 6 -trantershill 6 -skytower 6 -coffea 6 -batheaston 6 -campout 6 -awas 6 -gwalia 6 -zowin 6 -chaikin 6 -shariat 6 -sibericum 6 -225th 6 -wilshe 6 -pickart 6 -confeitaria 6 -ibar 6 -hasenauer 6 -much-traveled 6 -prositution 6 -arnoldussen 6 -fogleman 6 -if/then 6 -moskal 6 -protheroe 6 -psychokinesis 6 -boerum 6 -kishi 6 -deeks 6 -drori 6 -body-checked 6 -waddesdon 6 -sausan 6 -ouside 6 -diaper-changing 6 -yehven 6 -crassphage 6 -talaq 6 -talas 6 -talan 6 -hemes 6 -broadheath 6 -hengyang 6 -dancia 6 -predictaroo 6 -10-seater 6 -abromovich 6 -malom 6 -schlagetter 6 -cology 6 -concocts 6 -manber 6 -gaspin 6 -ub-122 6 -chichewa 6 -proviruses 6 -stefanyszyn 6 -oil-and-gas 6 -150-square-foot 6 -cross-currents 6 -xisco 6 -rabaah 6 -orrville 6 -wyck 6 -solictor 6 -balaclava-style 6 -zygos 6 -umaid 6 -lst 6 -gumbiti-zimuto 6 -918ft 6 -vladas 6 -ripley-aitchison 6 -long-bladed 6 -lacasse 6 -maharajas 6 -store-front 6 -p95 6 -mukaber 6 -57.50 6 -reciprocation 6 -ex-holland 6 -gaag 6 -sea-skimming 6 -xsmg 6 -escalopes 6 -wymore 6 -deputation 6 -pishtacos 6 -dodd-flemming 6 -hiroyuki 6 -kerekes 6 -zosel 6 -olumuyiwa 6 -cerioli 6 -1968-69 6 -reibly 6 -charity-funded 6 -powerlace 6 -cefaa 6 -bennett-jones 6 -mungadze 6 -barrel-vaulted 6 -gas-propelled 6 -23-mile 6 -vinaya 6 -lynn-herbenick 6 -shaftsbury 6 -figgy 6 -motorbiking 6 -lanne 6 -velvin 6 -reppetto 6 -drebin 6 -anti-free 6 -wojtek 6 -electroplating 6 -qiugen 6 -dustbowl 6 -ceinwen 6 -50ft-wide 6 -alem 6 -apposed 6 -nawton 6 -molesky 6 -cnn/time/opinion 6 -struff 6 -chailey 6 -moriyama 6 -andranik 6 -bladenboro 6 -electromagnetically 6 -partal 6 -pyjama-style 6 -pingtung 6 -224.6 6 -eight-bed 6 -anti-separation 6 -ninis 6 -ilga 6 -20-a-week 6 -vidulfo 6 -349,000 6 -25,000-a-week 6 -jemima-style 6 -celikbilek 6 -wluc 6 -kenickie 6 -self-management 6 -717,000 6 -powazki 6 -weatherly 6 -smartpen 6 -outside-the-box 6 -at800 6 -liberati 6 -subbie 6 -post-jail 6 -gennaco 6 -photosensitivity 6 -krotenberg 6 -moinssonm 6 -okemo 6 -nature-loving 6 -gerkin 6 -mfcs 6 -hardwire 6 -soft-pedal 6 -3,135 6 -3,130 6 -a472 6 -70,000-strong 6 -trans-shipment 6 -252mph 6 -re-sent 6 -29,029 6 -drop-ins 6 -e-mailers 6 -onthursday 6 -häagen-dazs 6 -all-williams 6 -mamajek 6 -late-1950s 6 -villainess 6 -faia 6 -scudetti 6 -restalrig 6 -markmann 6 -dazed-looking 6 -duhigg 6 -vex 6 -15,091 6 -ivermectin 6 -muck-raking 6 -badakshan 6 -konz 6 -hohl 6 -gummery 6 -bads 6 -corre 6 -loates 6 -suplee 6 -sand-free 6 -kurak 6 -stirkens 6 -85,454 6 -23,100 6 -sulfates 6 -saffy 6 -filiu 6 -bp-cnpc 6 -dandyish 6 -power-walking 6 -disarticulated 6 -bide-thomas 6 -n'duja 6 -lins 6 -josias 6 -tesfaye 6 -31percent 6 -last-hole 6 -brown-hunter 6 -soupçon 6 -cultivators 6 -anaglyph 6 -clean-out 6 -1,543 6 -1,547 6 -4g/lte 6 -mis-folded 6 -milagro 6 -@invisibleobama 6 -140bhp 6 -oaurovics 6 -beaver-like 6 -atapattu 6 -umeda 6 -mallord 6 -flutie 6 -tutorship 6 -mha 6 -stuckman 6 -10-race 6 -re-supply 6 -3mb 6 -tapiero 6 -marlons 6 -giampaoli 6 
-epdt 6 -derogative 6 -affordable-housing 6 -conversationally 6 -caris 6 -illmatic 6 -oldcorn 6 -inch-deep 6 -surveillance-camera 6 -albutt 6 -egypt-brokered 6 -over-dramatic 6 -blass 6 -1921-1923 6 -nikai 6 -avas 6 -triple-digits 6 -commie 6 -afro-colombians 6 -narahashi 6 -low-sulfur 6 -re-engineer 6 -asian-pacific 6 -twinkled 6 -qbiotics 6 -whittlesford 6 -1,891 6 -cervelo 6 -tosy 6 -restage 6 -great-gran 6 -gut-churning 6 -birbalsingh 6 -montour 6 -codecademy 6 -mikulas 6 -airfreight 6 -maddline 6 -pallenberg 6 -semi-synthetic 6 -riham 6 -captiol 6 -unscarred 6 -gondolfo 6 -mega-fights 6 -1,143 6 -daedalus 6 -caravaning 6 -captor-e 6 -mangelal 6 -chromed 6 -51-20 6 -mso-bidi-theme-font 6 -batman-themed 6 -strasshof 6 -cryos 6 -lopreto 6 -aerospike 6 -31-stone 6 -329million 6 -counternarrative 6 -sneakier 6 -nordens 6 -aliaga 6 -dekay 6 -richardon 6 -soppet 6 -imprudence 6 -well-coiffed 6 -krien 6 -bardemcilla 6 -palopo 6 -:36.0 6 -n.s. 6 -pelagia 6 -second-highest-ranking 6 -boo-boo 6 -286million 6 -glassborow 6 -137ft 6 -garcia-jauregui 6 -pharmaceutical-grade 6 -near-mythical 6 -muette 6 -looooong 6 -galezkij 6 -ghazaliya 6 -www.gov.uk 6 -forward-planning 6 -farzin 6 -kimmell 6 -pramono 6 -nivalis 6 -7-foot-1 6 -jannini 6 -solidiance 6 -alldritt 6 -viduka 6 -andalucía 6 -kapino 6 -hkd 6 -6.11 6 -repopulated 6 -scyler 6 -veniamin 6 -1336 6 -sunrisers 6 -#takedownjulienblanc 6 -de-escalated 6 -rigali 6 -green-collar 6 -us-soviet 6 -choat 6 -eyewriter 6 -8/5 6 -1.618 6 -pilat 6 -three-decade-long 6 -morna 6 -pilaf 6 -zasio 6 -204m 6 -b.o.b 6 -9.44 6 -digitally-altered 6 -'74 6 -grody 6 -8.01 6 -8.08 6 -birky 6 -money-conscious 6 -one-too-many 6 -kreidler 6 -18-to-1 6 -elzbieta 6 -riffa 6 -amoo 6 -amoz 6 -satarov 6 -43,300 6 -abdalhaleem 6 -c5n 6 -tamseel 6 -nanoflowcell 6 -cheesemaking 6 -hammerfest 6 -saucon 6 -footboards 6 -turbiville 6 -sharney 6 -vizina 6 -consumer-facing 6 -farmhands 6 -uekman 6 -yovanna 6 -sukabumi 6 -tangoed 6 -hesketh-harvey 6 -titov 6 -wontons 6 -brewerton 6 -self-actualization 6 -pay-night 6 -rule-book 6 -onstad 6 -powhite 6 -buildon 6 -lidded 6 -per-location 6 -falkenburg 6 -settelen 6 -cedrique 6 -monofilament 6 -boshintang 6 -vlatka 6 -micael 6 -quitmeyer 6 -craftster.org 6 -elmaghribi 6 -glink 6 -drug-trade 6 -donnachie 6 -realclearpolitics.com 6 -brumbley 6 -near-permanent 6 -re-injected 6 -holmbergh 6 -trapnell 6 -solarcity 6 -fikret 6 -richardbranson.xxx 6 -lise-lotte 6 -blu-ray/dvd 6 -de-stigmatise 6 -cocaine-filled 6 -arxan 6 -727-200 6 -xt 6 -crop-producing 6 -filoni 6 -gallah 6 -slowik 6 -untagging 6 -lanique 6 -stutman 6 -tanikka 6 -leusner 6 -shumsky 6 -agapakis 6 -kulayigye 6 -abelisaurid 6 -kokrajhar 6 -jolablot 6 -ostrovski 6 -30-a-week 6 -jahzara 6 -hynix 6 -taqueria 6 -p-e-t-a 6 -ketts 6 -sufa 6 -furies 6 -cranmer-brown 6 -thobani 6 -barrydale 6 -benchley 6 -ragnhild 6 -prabha 6 -goodfaith 6 -woertz 6 -91.1 6 -91.2 6 -pasparakis 6 -crj 6 -gtb/c 6 -f-350 6 -final-lap 6 -unbuckling 6 -rhodin 6 -scarola 6 -weird-looking 6 -roseacre 6 -tr4 6 -homegoing 6 -seoul-born 6 -al-ikhbaria 6 -allograft 6 -northumberlandia 6 -frable 6 -salwens 6 -okara 6 -fremington 6 -o'neil-baker 6 -cerrejonensis 6 -lemtongthai 6 -disease-fighting 6 -décolleté 6 -253,000 6 -pinco 6 -melena 6 -maidenform 6 -sunlamp 6 -pchr 6 -krimmer 6 -78-inch 6 -infantilised 6 -catalist 6 -rubberband 6 -ivig 6 -hannay 6 -raki 6 -1994-1999 6 -xr 6 -buszek 6 -openleaks 6 -non-sanctioned 6 -libourne 6 -comras 6 -3-pin 6 -too-high 6 -vandenbergh 6 -msop 6 -authorties 6 -reither 6 -quats 6 
-modupeh 6 -thumbelina 6 -adderson 6 -94.6 6 -gandhian 6 -ronay 6 -dillwynia 6 -dutro-boggess 6 -exning 6 -missanelli 6 -antigha 6 -1076 6 -107m 6 -107g 6 -delbarton 6 -1,447 6 -said.in 6 -gumbrell 6 -misspending 6 -1845-1849 6 -1.319 6 -single-page 6 -qf2 6 -copywriting 6 -clouden 6 -soyabean 6 -addair 6 -delzell 6 -riverboats 6 -aepyornis 6 -crummock 6 -courbessac 6 -zakrzewska 6 -kalamafoni 6 -willford 6 -gyp 6 -malielegaoi 6 -off-earth 6 -petley 6 -hydrophobia 6 -thumbell 6 -fleet-wide 6 -edable 6 -fsv 6 -serrat 6 -availing 6 -pogonophobia 6 -silky-smooth 6 -nories 6 -o-negative 6 -1,442 6 -bootes 6 -whitesnake 6 -midan 6 -vonda 6 -schilthorn 6 -chaulk 6 -fretboard 6 -p-e-t-e-r 6 -coulis 6 -intima 6 -carring 6 -84p 6 -193,049 6 -jeppesen 6 -sunu 6 -guhonda 6 -o'rawe 6 -homefree-usa 6 -ellner 6 -1471 6 -1479 6 -164.4 6 -bonnant 6 -easyfoodstore 6 -pclob 6 -ldk 6 -stahre 6 -weggen 6 -582,000 6 -barungi 6 -unairworthy 6 -arab-backed 6 -raybone 6 -ski-less 6 -lutwidge 6 -delliste 6 -stamen 6 -solipsistic 6 -visual-spatial 6 -scarefest 6 -benzenberg 6 -quoits 6 -school-children 6 -bronwynne 6 -ronfet 6 -mission-based 6 -girven 6 -skrobonja 6 -sundecks 6 -vanquisher 6 -evolver 6 -clisby 6 -teessiders 6 -cyrulnik 6 -65-inch 6 -thymes 6 -dickherber 6 -kilcreggan 6 -prelims 6 -frenier 6 -klawunn 6 -ddss 6 -band-mate 6 -zerona 6 -dakka 6 -abounaddara 6 -2million-plus 6 -jeev 6 -klucznik 6 -attaboy 6 -10.19 6 -reestablishment 6 -.2009 6 -.2005 6 -unconfined 6 -muhanad 6 -boydell 6 -al-alagi 6 -auto-icon 6 -clarinda 6 -mwenge 6 -zrinjski 6 -guseva 6 -6-an-hour 6 -andrin 6 -andric 6 -teruya 6 -andria 6 -#boycottexodusmovie 6 -989 6 -ocala.com 6 -ex-wba 6 -lascars 6 -imf-world 6 -konkola 6 -autoerotic 6 -tuqiri 6 -kjellberg 6 -visnu 6 -hardgrove 6 -tai-young 6 -ffmc 6 -arborists 6 -constantini 6 -refe 6 -anti-graffiti 6 -scutts 6 -plateosaurus 6 -ancre 6 -rubinson 6 -woolfsmith 6 -post-nup 6 -50.50 6 -leocal 6 -taymyr 6 -2-11 6 -2-18 6 -tranquilizing 6 -makhaela 6 -vicinanza 6 -cleator 6 -straphanger 6 -barchetti 6 -two-ounce 6 -overemphasis 6 -sklepkowski 6 -boardercross 6 -700,000-a-year 6 -yakopin 6 -6,500,000 6 -15-seater 6 -davios 6 -hvtn 6 -whitegoods 6 -38mm 6 -xiaojiangtun 6 -iraq-born 6 -episodically 6 -2,392 6 -grimness 6 -nufctv 6 -fscs 6 -wahlers 6 -egg-q-ber 6 -golos 6 -dead-bolted 6 -minibrake 6 -niesen 6 -luda 6 -non-verbally 6 -semanza 6 -tumnus 6 -ihejirika 6 -2,379 6 -exchange-rate 6 -sixth-forms 6 -rieders 6 -tafreshi 6 -80.0 6 -80.1 6 -lion-like 6 -critcising 6 -arrendondo 6 -6ft7in 6 -arrivederci 6 -montbovon 6 -steponavicius 6 -1,782 6 -pacaembu 6 -shoosmiths 6 -merkers 6 -odzala-kokoua 6 -gaspare 6 -silvie 6 -wikipedia-style 6 -24-27 6 -zip2 6 -gladioli 6 -half-truth 6 -site-wide 6 -mini-opera 6 -bridego 6 -al-misri 6 -speed-flying 6 -democratically-controlled 6 -3-15 6 -nio 6 -blithering 6 -zambon 6 -yasuhiro 6 -weinke 6 -service-industry 6 -dowayan 6 -stillhart 6 -zelník 6 -catholique 6 -tiffindell 6 -twinbrook 6 -johura 6 -espinet 6 -2,011 6 -#bostonstrong 6 -eola 6 -knabb 6 -niranjan 6 -7,403 6 -castner 6 -hypersexual 6 -oh-so-now 6 -self-adjust 6 -nvqs 6 -saddler 6 -seehofer 6 -1281 6 -cunliffes 6 -lurleen 6 -lasix 6 -fluoride-free 6 -wehrle 6 -shibuya-ku 6 -hommen 6 -leuco 6 -frons 6 -subotica 6 -gyro-sensor 6 -palagor 6 -ex-aston 6 -under-ice 6 -jon-un 6 -highest-end 6 -prebiotics 6 -neverwinter 6 -hayabusa2 6 -saracho 6 -zuccatti 6 -capitanich 6 -32nd-minute 6 -skiable 6 -font-size 6 -zozo 6 -2-under 6 -westland/hallmark 6 -supovitz 6 -desisa 6 -179,750 6 -sub-sahara 6 
-anadappa 6 -4636 6 -klyuchevskoy 6 -chimo 6 -ex-number 6 -post-2008 6 -20-times 6 -ticked-off 6 -dooney 6 -turist 6 -tunheim 6 -econlockhatchee 6 -mcenaney 6 -gruder 6 -exhaustingly 6 -paprocki 6 -knock-back 6 -grimmson 6 -freier 6 -dspd 6 -ohso 6 -gater 6 -cosens 6 -eaglescliffe 6 -longsands 6 -tajeddine 6 -griebel 6 -24-8 6 -khaldiya 6 -villavicincio 6 -portz 6 -time-stamp 6 -nizari 6 -aysultan 6 -43282 6 -frownies 6 -gun-metal 6 -mausoleum-like 6 -f-450 6 -inter-related 6 -dimorphism 6 -34250 6 -a111t 6 -bball 6 -kaliebe 6 -hoskinson 6 -joveer 6 -arkansan 6 -visioneering 6 -teerat 6 -pifer-bixler 6 -wagin 6 -golabbakhsh 6 -bedding-in 6 -laurynas 6 -azzuro 6 -seven-season 6 -bjornsdottir 6 -five-strand 6 -gamberini 6 -gurton 6 -seaspray 6 -earplug 6 -palace-headed 6 -second-to-none 6 -nahill 6 -senties 6 -nwofor 6 -chhetri 6 -twynholm 6 -furans 6 -niman 6 -laughrun 6 -carbendazim 6 -eyecare 6 -orb-like 6 -jyp 6 -smolnikov 6 -bread-winning 6 -dog-breeding 6 -vargas-silva 6 -corolle 6 -run-offs 6 -articulately 6 -mclaughlin-weber 6 -addley 6 -102billion 6 -w/monitor 6 -ex-hurricane 6 -abseilers 6 -whiskery 6 -newmont 6 -uninsulated 6 -wangled 6 -braziers 6 -voguing 6 -llanbedr 6 -hockessin 6 -obama-stare 6 -salischiker 6 -solidi 6 -salesianum 6 -mcpeak 6 -water-reclamation 6 -thuong 6 -boogying 6 -xeroderma 6 -four-foot-high 6 -chinnaswamy 6 -civardi 6 -eskander 6 -alonna 6 -checklight 6 -vornonov 6 -straight-talker 6 -adverts.getrsivalues 6 -saibai 6 -freehills 6 -arnwine 6 -#unbonjuif 6 -kristallis 6 -spotts 6 -kinsley 6 -tech-heads 6 -stolley 6 -fast-attack 6 -giusti 6 -koonce 6 -whileon 6 -ball-carrier 6 -hollender 6 -basswood 6 -take-back 6 -honh 6 -hony 6 -sonoma-marin 6 -kyphoplasty 6 -solesta 6 -haz 6 -25,100 6 -octocopters 6 -risoul 6 -rspo 6 -prancer 6 -self-neglect 6 -xenophobe 6 -munros 6 -kottabos 6 -#ripcw 6 -kolofata 6 -1,537 6 -gevorgyan 6 -deniece 6 -1,346 6 -calibur 6 -quagliaroli 6 -gallegly 6 -great-great-great-granddaughter 6 -uinta 6 -boeung 6 -16.35 6 -suntaj 6 -racially-insensitive 6 -yoshitake 6 -minus-30 6 -wrist-mounted 6 -cholesterol-reducing 6 -laurélie 6 -1,521 6 -1,525 6 -pohamba 6 -puttmann 6 -drone-bombs 6 -créme 6 -milevsky 6 -migbelis 6 -nerium 6 -lutfullah 6 -borlongan 6 -untidiness 6 -30070 6 -marcinko 6 -lyppard 6 -other-than-honorable 6 -hovanesian 6 -velpen 6 -mjj 6 -boorishness 6 -64ft 6 -mittag 6 -five-block 6 -5/8 6 -deconstructs 6 -kairat 6 -saami 6 -felicetti 6 -cmf 6 -marietas 6 -shakib 6 -52-minute 6 -leuzzi 6 -banlieue 6 -bosserman 6 -monpods 6 -gomshall 6 -harless 6 -4shared 6 -totaljobs.com 6 -gibraltar-bound 6 -halyburton 6 -o'bryant 6 -signore 6 -decrepitude 6 -earthier 6 -102,400 6 -planarian 6 -sagacity 6 -lellouche 6 -multi-candidate 6 -grigoriadou 6 -shanell 6 -saxophones 6 -industrialising 6 -ex-maoist 6 -chayo 6 -bernbach 6 -petrolul 6 -murco 6 -anomalocarids 6 -detoxifier 6 -colli 6 -gruffydd 6 -armour-piercing 6 -right-to-know 6 -consales 6 -teitelman 6 -gold-dust 6 -anti-blood 6 -xsara 6 -camusso 6 -marillyn 6 -well-settled 6 -cuitzeo 6 -four-decade-long 6 -boscastle 6 -reutte 6 -funnel-like 6 -nuestras 6 -counterstrike 6 -saralyn 6 -delmar4fun 6 -rs10 6 -hospitalet 6 -crf1 6 -nawalka 6 -raseluna 6 -cozette 6 -gerardmer 6 -miniero 6 -biophysical 6 -skywatch 6 -meep 6 -interviu 6 -westmoore 6 -truschel 6 -105billion 6 -lietenant 6 -sarmenti 6 -4,440 6 -optionally 6 -itbayat 6 -sibila 6 -9 6 -pelagos 6 -queensgate 6 -chock-a-block 6 -kutcha 6 -88-years-old 6 -prevage 6 -ayreshire 6 -showdog.com 6 -detestation 6 -mortagy 6 -marik 6 
-over-the-head 6 -quino 6 -abdullayeva 6 -92nd-minute 6 -margolick 6 -smriti 6 -dagger-like 6 -dictionary.com 6 -deyrolle 6 -bee-stung 6 -gilmerton 6 -nichia 6 -siha 6 -visionless 6 -32-years 6 -in-migration 6 -wheelchair-user 6 -mauselaine 6 -investment-friendly 6 -kayvan 6 -super-slow 6 -non-injury 6 -marfishes 6 -candy-floss 6 -popigai 6 -kudryk 6 -boy-girl 6 -regni 6 -6.76 6 -gundimore 6 -morkunas 6 -viagas 6 -wednesday-to-sunday 6 -crohy 6 -none-of-the-above 6 -bisgard 6 -91-years-old 6 -spofforth 6 -farecast 6 -yellow-shirted 6 -1312 6 -entscho 6 -reiz 6 -tekapo 6 -sackos 6 -waisman 6 -22-country 6 -mothershead 6 -odni 6 -uv-b 6 -wen-jing 6 -selfina 6 -2065 6 -ballygowan 6 -motors.co.uk 6 -nagimianov 6 -40/41 6 -opdorp 6 -gasification 6 -underwing 6 -pohnpei 6 -ex-basketball 6 -saudi-u.s. 6 -misérable 6 -bosphorous 6 -koat-tv 6 -french-spanish 6 -paekdu 6 -sea-worthy 6 -hand-craft 6 -benatouil 6 -pangeran 6 -systemes 6 -mcdouble 6 -wolobah 6 -control-wear 6 -dopping-hepenstal 6 -reacquired 6 -mafoumbi 6 -pivnik 6 -gogoleva 6 -winkett 6 -shs 6 -cristoph 6 -overfill 6 -flightpaths 6 -rometsch 6 -vetters 6 -grim-looking 6 -advisory/finance 6 -paint-spattered 6 -abductee 6 -conghaíle 6 -blues-rock 6 -bertschinger 6 -ehm 6 -kinberg 6 -gisenyi 6 -qattan 6 -giudici 6 -mesoderm 6 -greylag 6 -gerzmehle 6 -boogers 6 -choriocarcincoma 6 -#bringbackourboys 6 -barbini 6 -4.176 6 -spruiker 6 -pictorials 6 -wardroom 6 -moily 6 -46,432,285 6 -chandelles 6 -65g 6 -auwkit 6 -severodvinsk 6 -nishinaga 6 -hotelsweep 6 -wd40 6 -weartrons 6 -mcquay 6 -hyksos 6 -milbanke 6 -ferlito 6 -prebendary 6 -stuyvenbergh 6 -yois 6 -salafi-jihadi 6 -motton 6 -adf-nalu 6 -somerset-born 6 -dikov 6 -bardgett 6 -trinchet 6 -barbie-esque 6 -bohanon 6 -jonasson 6 -lambertucci 6 -apoe-e4 6 -ladetec 6 -on-orbit 6 -akiyuki 6 -reverb 6 -chatzky 6 -vibeke 6 -round-faced 6 -trs-80 6 -1,248 6 -row2recovery 6 -phylogenetic 6 -kabb 6 -sand-like 6 -co-operatively 6 -all-inclusives 6 -iraqi-kurdish 6 -diehl-armstrong 6 -schlinder 6 -3,077 6 -modest-looking 6 -givrins 6 -pillagers 6 -anti-elitist 6 -cardholding 6 -culotte 6 -west-to-east 6 -kapun 6 -therapods 6 -annalena 6 -@geniebouchard 6 -bieler 6 -pinel 6 -mcferrin 6 -sibusiso 6 -townsquare 6 -lusy 6 -troedyrhiw 6 -samii 6 -detailling 6 -jetskier 6 -novodevichy 6 -325-member 6 -cheron 6 -bogollagama 6 -tabanan 6 -sixty-year-old 6 -zec 6 -zep 6 -canjura 6 -yiruma 6 -kliewer 6 -bootmakers 6 -zárate 6 -tithecott 6 -stepanovich 6 -skivvy 6 -dayem 6 -million-person 6 -shellings 6 -in-excess 6 -kiyota 6 -pac-3 6 -fixer-uppers 6 -182cm 6 -nale 6 -ronco 6 -liquored 6 -velloza 6 -retyped 6 -cumbre 6 -larin 6 -quiron 6 -versilia 6 -ethiopian-backed 6 -waterbaby 6 -angelcare 6 -apurímac 6 -ontong 6 -fire-hit 6 -e23 6 -dichter 6 -ignatieff 6 -customisations 6 -mussies 6 -nativities 6 -rhd 6 -kicillof 6 -bear-h 6 -vileness 6 -3,935 6 -132.2 6 -agirnasli 6 -natasa 6 -catholic-affiliated 6 -airgo 6 -lochrist 6 -high-jinks 6 -hi-lo 6 -mozammel 6 -chueca 6 -strapper 6 -unsurpassable 6 -dhabi-owned 6 -laposta 6 -sercombe 6 -honozumo 6 -dorsolateral 6 -ribchester 6 -kitchen/dining 6 -montell 6 -lykkebak 6 -moodley 6 -gullino 6 -then-us 6 -megatrends 6 -onsie 6 -then-fbi 6 -alstory 6 -initative 6 -lydgate 6 -sukarno 6 -mugamu 6 -bromantic 6 -yamamota 6 -'26 6 -bricktop 6 -anansi 6 -kevi 6 -halfling 6 -greenbriar 6 -mencken 6 -peleteiro 6 -#fact 6 -jose-based 6 -rosaria 6 -xliv 6 -sgpc 6 -ishaque 6 -legonardo 6 -almost-identical 6 -1,067 6 -blankety 6 -stabaek 6 -greyscale 6 -polymorphisms 6 -742,000 
6 -rajpath 6 -titler 6 -f-117 6 -zahree 6 -anneli 6 -amry 6 -al-kasaesbeh 6 -5,500-mile 6 -7,740 6 -livestreaming 6 -remarkables 6 -yelpers 6 -kandie 6 -homebodies 6 -benigni 6 -nardo 6 -post-afghanistan 6 -microarray-based 6 -masayo 6 -drusille 6 -asymmetrically 6 -ghirardelli 6 -esv 6 -esu 6 -esq 6 -ishigami 6 -grrl 6 -colorado-boulder 6 -jor-el 6 -tweezing 6 -throat-grabbing 6 -fidelis 6 -35-count 6 -treignac 6 -bazomba 6 -dyyl 6 -turnquest 6 -hardcourts 6 -virtuality 6 -arvest 6 -pirutinsky 6 -finton 6 -mcdreamy 6 -www.lotterygoodcauses.org.uk 6 -325m 6 -beat-em-up 6 -57-day 6 -amesh 6 -jech 6 -vc-25 6 -wyithe 6 -palmal 6 -gateaux 6 -urologic 6 -hollowood 6 -jeromine 6 -curtsying 6 -end-of-school 6 -fursman 6 -szalay 6 -price-match 6 -itzik 6 -iron-nickel 6 -confirmable 6 -buttaccio 6 -jeanna 6 -pamirs 6 -uranium-235 6 -al-khansaa 6 -ganatra 6 -interlacing 6 -brown-like 6 -torshammere 6 -totzauer 6 -akt1 6 -tiggar 6 -froudakis 6 -tijernia 6 -2121 6 -turaab 6 -tonkotsu 6 -mehreen 6 -semi-homemade 6 -jutarnji 6 -chaggar 6 -lincoln-west 6 -gavilanes 6 -coronets 6 -kawa 6 -leonay 6 -sture 6 -b001 6 -fortna 6 -dehmer 6 -buzzo 6 -taitex 6 -appearence 6 -williams-paisley 6 -crazysexycool 6 -seibertron.com 6 -disfavored 6 -brimham 6 -switchfoot 6 -off-break 6 -ashooh 6 -shunichi 6 -sor 6 -aube 6 -mazzarella 6 -1300ft 6 -different-sex 6 -stold 6 -factory-made 6 -matute 6 -ermotti 6 -warrengate 6 -mastan 6 -prevelly 6 -pinarello 6 -wisn-tv 6 -parcelcopter 6 -time-lapsed 6 -socialist-style 6 -vummiti 6 -velvet-lined 6 -needier 6 -conservativeblackchick.com 6 -pitch-sized 6 -laerdalsoyri 6 -frivolously 6 -kakutani 6 -narcoanalytic 6 -three-michelin-starred 6 -pranikoff 6 -age-grade 6 -shipsey 6 -musumeci 6 -non-private 6 -nouni 6 -genital-to-genital 6 -tsaidamotherium 6 -simplicio 6 -55-mph 6 -rietmann 6 -jayyousi 6 -deducing 6 -bartling 6 -polanksi 6 -savaricas 6 -doctor-administered 6 -traidcraft 6 -41-years-old 6 -mehigan 6 -test-launch 6 -ill-starred 6 -upworthy 6 -weepers 6 -31,900 6 -inose 6 -khogyani 6 -nato/isaf 6 -kentucky-bred 6 -holkins 6 -farmersonly 6 -kynurenic 6 -blue-white 6 -news-making 6 -diarists 6 -wn 6 -wy 6 -borihanh 6 -civic-mindedness 6 -paeans 6 -vitrification 6 -ethnic-based 6 -parool 6 -ajijic 6 -aerovelo 6 -18-bedroom 6 -pintos 6 -ducats 6 -sulaco 6 -1,400-hectare 6 -buffalino 6 -mylifesuxnow 6 -breadcrumb 6 -shuncheng 6 -conquerer 6 -bomb-blast 6 -parupalli 6 -callies 6 -ectogenesis 6 -lamadrid 6 -steamrollering 6 -saensiri 6 -canzini 6 -w00t 6 -thriftiest 6 -boston-born 6 -spoodle 6 -mickel 6 -appelt 6 -slow-going 6 -hombres 6 -romanus 6 -xylella 6 -merisi 6 -29,600 6 -krager 6 -gutzman 6 -manbag 6 -sururul 6 -axhayes 6 -175.2 6 -pitchay 6 -9-point 6 -ferrett 6 -21.5-inch 6 -dusatoir 6 -cuene-grandidier 6 -lorbeer 6 -callighan 6 -hallet 6 -versaille 6 -renin 6 -missle 6 -stablisation 6 -clinton-dix 6 -axlerod 6 -prostatectomy 6 -kokal 6 -tasso 6 -hegeler 6 -lwt 6 -cassowary 6 -shogan 6 -quickshift 6 -make-work 6 -schmaing 6 -p50 6 -copps 6 -fibre-reinforced 6 -back-down 6 -kix 6 -kis 6 -bilpin 6 -sirevag 6 -issler 6 -mickelsen 6 -airtrain 6 -scott-directed 6 -bramschreiber 6 -bioethicists 6 -one-stop-shop 6 -mostert 6 -,43 6 -,41 6 -latabe 6 -recognisably 6 -bourgin 6 -ju-young 6 -tempranillo 6 -441lbs 6 -darton 6 -foaled 6 -post-courier 6 -bloeser 6 -higher-dose 6 -androphy 6 -haarp 6 -titshall 6 -partida 6 -easybase 6 -88-strong 6 -over-sexualized 6 -cabi 6 -tee-shirts 6 -windbreaks 6 -gomel 6 -50-year-olds 6 -tear-drop 6 -muehl 6 -over-representation 6 -corot-7b 6 
-200-member 6 -izetbegovic 6 -ausmat 6 -kulaybi 6 -argenta 6 -duporte 6 -gtlds 6 -509mw 6 -miraikan 6 -papakouli 6 -ufs 6 -ki-suk 6 -jeffersonian 6 -cybulski 6 -earth-observation 6 -alik 6 -low-pay 6 -zaghah 6 -singuluma 6 -mervat 6 -lindsie 6 -karanovs 6 -career-ender 6 -bransons 6 -rhoton 6 -all-age 6 -astrocytoma 6 -tanorexia 6 -tamra 6 -self-repairing 6 -builtvisible 6 -quippy 6 -distention 6 -selbyville 6 -nanosuit 6 -vitz 6 -jefferey 6 -arm-mounted 6 -much-repeated 6 -kostanay 6 -baader-meinhof 6 -hallowell 6 -trini 6 -ucsc 6 -helicopter-borne 6 -mariscal 6 -simspon 6 -murcer 6 -inveigled 6 -pessary 6 -elviria 6 -itm 6 -preventively 6 -lenina 6 -marineking 6 -al-jazirah 6 -martyne 6 -shirey 6 -hand-warmers 6 -squeegees 6 -91-page 6 -lindens 6 -delwin 6 -blackpos 6 -illict 6 -jsut 6 -pugilists 6 -balkrishnan 6 -violo 6 -muick 6 -p.p.s. 6 -41lbs 6 -two-mile-long 6 -surburb 6 -gokyo 6 -oladeji 6 -dillane 6 -immunity-boosting 6 -bistrot 6 -hartig 6 -honeyhill 6 -freudenstein 6 -anti-consumerism 6 -heyer 6 -marine-derived 6 -laishley 6 -suphi 6 -11/12/13 6 -just-caught 6 -filamentous 6 -manhunter 6 -uhersky 6 -arlnow.com 6 -sim/elwa 6 -spheramid 6 -hipstory 6 -glioblastomas 6 -non-criminals 6 -fania 6 -non-dangerous 6 -beaute 6 -blaquart 6 -chamberlayne 6 -school-owned 6 -whir 6 -halona 6 -shanghai-born 6 -peric 6 -seven-bed 6 -grapeseed 6 -hornworm 6 -abertridwr 6 -v-bomber 6 -last-standing 6 -towriss 6 -92-85 6 -marriam 6 -shofique 6 -ethology 6 -verheiden 6 -szarewski 6 -wasat 6 -streets/you 6 -highwire 6 -roesch 6 -flager 6 -flagey 6 -mid-face 6 -crvena 6 -anti-cruelty 6 -martineaus 6 -700-square-foot 6 -abrego 6 -kilbourne 6 -heli-ski 6 -asayish 6 -elspet 6 -alchornea 6 -barnell 6 -jcq 6 -khalas 6 -22-date 6 -ship-based 6 -motor-home 6 -1110 6 -1112 6 -brelis 6 -mid-trial 6 -1,509 6 -1,503 6 -vote-by-vote 6 -preposition 6 -groundnuts 6 -kurdish-dominated 6 -wmbd 6 -mengistu 6 -mislan 6 -goriest 6 -doogle 6 -malone-guerbaa 6 -tunnell 6 -shoulder-pads 6 -telacia 6 -ever-smiling 6 -lickable 6 -reshot 6 -binoua 6 -lorenc 6 -gagneux 6 -ciapperini 6 -carme 6 -domachowski 6 -globalist 6 -liverpoool 6 -ekaterinburg 6 -spallanzani 6 -james-collier 6 -neufield 6 -jahrling 6 -fotopedia 6 -1996/97 6 -hoogenband 6 -poquiz 6 -down-to-the-wire 6 -andrology 6 -dimatteo 6 -medomsley 6 -muxfeldt 6 -mailbags 6 -cressoti 6 -52-acre 6 -beepers 6 -daiten 6 -burchetta 6 -2,057 6 -knowable 6 -howieson 6 -cc398 6 -mehgrabi 6 -cecconi 6 -quarterbacked 6 -salutatorian 6 -ransil 6 -hartswick 6 -borobudur 6 -exterminations 6 -kerrin 6 -1000-year-old 6 -denguin 6 -vvv-venlo 6 -ippolito 6 -windjammer 6 -recinos 6 -silviu 6 -liquidized 6 -mirny 6 -mirna 6 -permethrin 6 -post-impact 6 -eynden 6 -handwringing 6 -girion 6 -shagadelic 6 -soufees 6 -31-story 6 -bench-pressing 6 -madelynn 6 -tymkiw 6 -megs 6 -samanata 6 -ogunleye 6 -arabianbusiness.com 6 -chang-soo 6 -awwww 6 -bratislav 6 -devaluations 6 -dan-dan 6 -inuring 6 -30th-anniversary 6 -shrewbot 6 -jollies 6 -vesicular 6 -ballestero 6 -comprehends 6 -two-weeks 6 -eberson 6 -margi 6 -rokatenda 6 -calmette-guerin 6 -connectu 6 -tour-leading 6 -nobleworks 6 -jaywalk 6 -jessamy 6 -hirano 6 -delta-mendota 6 -haga 6 -gounon 6 -afghan/pakistan 6 -michaelle 6 -difficult-to-treat 6 -then-alaska 6 -siff 6 -well-furnished 6 -joyrider 6 -vahl 6 -rave-style 6 -pepeijn 6 -midship 6 -mccunn 6 -mcsteamy 6 -9708 6 -aleutians 6 -6.57 6 -6.53 6 -transdermal 6 -flopsy 6 -then-royal 6 -ad70 6 -anti-gay-marriage 6 -livres 6 -city-st 6 -ortis 6 -an26 6 -13/2 6 -beer-guzzling 6 
-indymedia 6 -suttie 6 -blankness 6 -tawakul 6 -15inch 6 -achurch 6 -less-than-impressed 6 -89.3 6 -aktenzeichen 6 -drug-users 6 -non-interventionist 6 -lucha 6 -then-apartment 6 -57958 6 -dystopias 6 -szikszai 6 -exoneree 6 -cbc.ca 6 -tempelman 6 -6,000-pound 6 -b3075 6 -rudd-rockford-marble 6 -teesville 6 -birol 6 -eventualis 6 -thaljieh 6 -abdullah-hassan 6 -faced-off 6 -011-52/744 6 -begining 6 -villicana 6 -re-calibrated 6 -volksline 6 -grapefruit-sized 6 -45th-floor 6 -285million 6 -trijicon 6 -siziwe 6 -challege 6 -5,087 6 -alred 6 -a.m.-11 6 -8-july 6 -2,235 6 -sajil-2 6 -altan 6 -rinka 6 -fan-boy 6 -nyers 6 -righetti 6 -timber-clad 6 -dauntsey 6 -sstc 6 -janakpur 6 -paibi 6 -590m 6 -trotro 6 -ago.the 6 -fathoms 6 -i-don 6 -dearmond 6 -well-tuned 6 -delker 6 -dem-controlled 6 -1945-1953 6 -markgraf 6 -flighttrack 6 -xojet 6 -34.50 6 -intraday 6 -diebenkorn 6 -j1023 6 -rocketeers 6 -ranasia 6 -hollifield 6 -half-step 6 -argentina-based 6 -20-season 6 -weeped 6 -multirole 6 -freeload 6 -stiners 6 -temidayo 6 -yowling 6 -etrit 6 -daniel.piotrowski@mailonline.com 6 -clap-off 6 -arrol 6 -masow 6 -deeper-lying 6 -noncriminals 6 -552,000 6 -derji 6 -icecreamists 6 -founder/ceo 6 -10-ounce 6 -druzkowska 6 -testings 6 -anghel 6 -eatonville 6 -swangstu 6 -20-week-old 6 -ecocina 6 -pento 6 -detectible 6 -tvc 6 -refold 6 -braziliense 6 -wilcomes 6 -3,057 6 -capocchiano 6 -crowdfund 6 -menominee 6 -ipatova 6 -sinopoda 6 -5,490 6 -dcns 6 -wafl 6 -kobata 6 -s/s13 6 -meal-replacement 6 -wikileak 6 -patroller 6 -kalathas 6 -house-grown 6 -kaati 6 -brazil-croatia 6 -kasit 6 -nuanquan 6 -psen1 6 -henretig 6 -mawazine 6 -n.w. 6 -spelsbury 6 -consuela 6 -industry-standard 6 -castiglioncello 6 -tenenti 6 -bonnington 6 -face-like 6 -arlauskis 6 -589,165 6 -concertinaed 6 -baikie 6 -choupo 6 -overcooking 6 -ljubisa 6 -portuguese-american 6 -320kg 6 -non-breeding 6 -kukena 6 -beeding 6 -stutchbury 6 -preoperative 6 -yudyohono 6 -63879 6 -brauns 6 -#islamicstate 6 -brackensick 6 -ronel 6 -tajima 6 -preindustrial 6 -ipsos/mori 6 -court-room 6 -6-13 6 -crash-and-burn 6 -face-plant 6 -kripke 6 -juluca 6 -guideposts 6 -jerold 6 -re-staged 6 -just-opened 6 -triple-amputee 6 -50-60mph 6 -fargnoli 6 -intrade 6 -bcuz 6 -skill-sets 6 -zaharris 6 -levothyroxine 6 -five-metres 6 -midgut 6 -qbd 6 -seminude 6 -green-themed 6 -dibrugarh 6 -geolocated 6 -xuehong 6 -4,000-pound 6 -ramrods 6 -rhucroft 6 -inglis-jones 6 -mattiello 6 -@adamschefter 6 -l'anse 6 -derechoes 6 -fundly 6 -shark-fishing 6 -mcconway 6 -ansicar 6 -carannante 6 -halab 6 -scheib 6 -b-s 6 -noris 6 -24,923 6 -chrapkowski 6 -launch-pad 6 -73mins 6 -rovinsky 6 -parth 6 -160,000-a-week 6 -oilmen 6 -cañizares 6 -agitations 6 -555,000 6 -martellozzo 6 -meth-making 6 -merkava 6 -huallhua 6 -flamin 6 -ibori-ibie 6 -boilermaker 6 -weather-resistant 6 -bedimo 6 -114,950 6 -narwhals 6 -huntbach 6 -motherâ 6 -iqua 6 -oft-used 6 -jalaa'a 6 -africaread 6 -girotto 6 -brecksville-northfield 6 -seghill 6 -montsouris 6 -phytokinetic 6 -gunbu 6 -3,728 6 -jiemin 6 -anti-aid 6 -l'ouverture 6 -maumee 6 -zubi 6 -3,271 6 -11.60 6 -tinyscreen 6 -wakeskating 6 -aileron 6 -osbourn 6 -bhpd 6 -neuraminidase 6 -re-mastered 6 -birthrights 6 -piracy-related 6 -renotta 6 -pestano 6 -jaques-mcmillin 6 -al-basheer 6 -grindcore 6 -airscouter 6 -10pc 6 -istavrioglou 6 -mainstage 6 -arminda 6 -nittaya 6 -toupin 6 -nipon 6 -jenzen 6 -baliffs 6 -365million 6 -hatang 6 -10-spot 6 -chromoly 6 -foxct 6 -yuhe 6 -delashmit 6 -dipendra 6 -ifetch 6 -oasthouse 6 -corfidi 6 -hedtler 6 -bellitto 6 
-priapic 6 -four-ton 6 -annotating 6 -boquet 6 -once-prominent 6 -levita 6 -arbel 6 -pain-killers 6 -spyhole 6 -home-bringer 6 -profepa 6 -cleaveland 6 -wahlburgers 6 -de-boned 6 -1218 6 -mawlah 6 -mool 6 -shipbroker 6 -most-prized 6 -darío 6 -laser-printed 6 -mdlankomo 6 -poll-tested 6 -eegs 6 -eung-tae 6 -1,100-kilometer 6 -devestated 6 -antillon 6 -dar-es-salaam 6 -ramsby 6 -bounmy 6 -mulchrone 6 -mini-defibrillator 6 -kassewitz 6 -freesurfer 6 -4.4-magnitude 6 -facca 6 -lavishly-decorated 6 -surveillance-broadcast 6 -tannat 6 -188m 6 -gowerton 6 -daehlie 6 -aryanisation 6 -honey-baked 6 -brovent 6 -prypiat 6 -shekel 6 -undie 6 -waisel 6 -burnhope 6 -rcips 6 -paulsboro 6 -lambastes 6 -bonfadini 6 -birkenhauer 6 -5,385 6 -momin 6 -momii 6 -thimbles 6 -al-awadhi 6 -bronzeville 6 -sex-crime 6 -diet-conscious 6 -bafétimbi 6 -atonio 6 -uncombed 6 -mohand 6 -paperlater 6 -chazelle 6 -parabellum 6 -kaige 6 -ipe 6 -nazri 6 -#icezilla 6 -pindara 6 -crushers 6 -kensil 6 -sick-minded 6 -iveco 6 -pieczenik 6 -al-fath 6 -ardron 6 -yammer 6 -weretilneck 6 -skyjacking 6 -putih 6 -nonghyup 6 -82mins 6 -fire-sale 6 -sunstar 6 -razz 6 -fontella 6 -wisbeys 6 -out-classed 6 -fruitfully 6 -respondees 6 -45-64 6 -daidone 6 -malinois-german 6 -world-building 6 -color-changing 6 -boese 6 -leisinger 6 -chanot 6 -yoshiaki 6 -knee-level 6 -iterate 6 -10.14 6 -rafiqullah 6 -kenleigh 6 -fredonia 6 -nones 6 -man-of-the-people 6 -tech-focused 6 -55km 6 -adom 6 -fazlic 6 -48km 6 -outstaying 6 -pradal 6 -beauty-wise 6 -sowder 6 -bullet-hole 6 -re-hear 6 -footmarks 6 -vailati 6 -learning-disabled 6 -pommier 6 -56,300 6 -osmans 6 -fourmost 6 -lindie 6 -haselau 6 -55-years-old 6 -benhoff 6 -kühn 6 -14-19 6 -implosions 6 -mountsorrel 6 -rubha 6 -10-lane 6 -continuances 6 -mobile-payment 6 -masharah 6 -ndrc 6 -extra-hot 6 -#bringbackourhumvee 6 -putrefying 6 -mortell 6 -@beyonce 6 -drontal 6 -knaidel 6 -beachsafe 6 -backfill 6 -ing-wen 6 -innovisor 6 -angloamerican 6 -airline-style 6 -anett 6 -sherstyuk 6 -saint-making 6 -tahawwur 6 -city-killer 6 -juried 6 -moulvi 6 -41.50 6 -serape 6 -xining 6 -amantova 6 -odd-eyed 6 -tahoes 6 -vgastro 6 -buckelew 6 -lafleur 6 -bisson 6 -nicotext 6 -joumblatt 6 -pemulwuy 6 -bctga 6 -geo-tag 6 -pre-nups 6 -138.4 6 -subrata 6 -sonatas 6 -espalmador 6 -10,000-1 6 -87,360 6 -pipettes 6 -cortexica 6 -izmailovsky 6 -visijet 6 -bowlsby 6 -elmina 6 -gay-bashing 6 -westgate-style 6 -hauducoeur 6 -bladerunner 6 -hoppie 6 -tiddles 6 -starfire 6 -irotatheri 6 -lukoil 6 -kutti 6 -gakuen 6 -adforton 6 -venders 6 -konishi 6 -wal-marts 6 -a379 6 -sala-i-martin 6 -anti-cybercrime 6 -kadison 6 -co-chairperson 6 -landjahr 6 -lata 6 -uhb 6 -eveready 6 -potty-mouthed 6 -taguman 6 -koebbe 6 -civilizational 6 -anti-matter 6 -police-approved 6 -charsley 6 -muja 6 -axillary 6 -mirifica 6 -himmelstrand 6 -dilday 6 -fiord 6 -re-locate 6 -dunnings 6 -gustine 6 -publicis 6 -dred.com 6 -equalisation 6 -synagro 6 -anythings 6 -aqualandia 6 -keepie 6 -rubinfeld 6 -green-hued 6 -giallo 6 -wdaf-tv 6 -alioth 6 -securitas 6 -midlander 6 -lupfer 6 -terror-attack 6 -medicalising 6 -hoenlein 6 -4pts 6 -ema401 6 -grandparents-to-be 6 -decolonisation 6 -camera-loving 6 -clubf 6 -sulemans 6 -fratello 6 -1,385 6 -876,000 6 -55.1 6 -arcadio 6 -shellen 6 -doonbeg 6 -obscurior 6 -@ajkeen 6 -dlugash 6 -kontaveit 6 -skopelos 6 -kruman 6 -nadolski 6 -kace 6 -guessgen 6 -graw 6 -23-storey 6 -mutangana 6 -l'isle-verte 6 -ambuklao 6 -moment-to-moment 6 -biodegradation 6 -malopo 6 -torresdale 6 -jannet 6 -pradaxa 6 -industrywide 6 
-poertschach 6 -grandson-in-law 6 -19-man 6 -bottled-water 6 -sonni 6 -shalini 6 -linboom 6 -1,931 6 -ongchu 6 -scarfia 6 -one-ring 6 -wanging 6 -vaper 6 -disaronno 6 -metallo 6 -#nothappy 6 -unremittingly 6 -come-on 6 -weilin 6 -nieh 6 -mckenzy 6 -hobe 6 -half-mexican 6 -dement 6 -hme 6 -oxfordshire-based 6 -maladaptive 6 -mignonette 6 -goralnick 6 -lithopedion 6 -jaeger.co.uk 6 -erzsebet 6 -segerstrom 6 -apple-branded 6 -24-room 6 -885,000 6 -#arsenal 6 -faerie 6 -grimoldby 6 -kalli 6 -flaggs 6 -viatcheslav 6 -oponyo 6 -electrx 6 -traeger 6 -chiuso 6 -schrieffer 6 -challand 6 -ailena 6 -nearly-complete 6 -ruinously 6 -borgsten 6 -centenario 6 -arsena 6 -azman 6 -dihydroxyacetone 6 -jugend 6 -hand-gun 6 -conflict-ending 6 -bareknuckle 6 -orianne 6 -biava 6 -8.4-inch 6 -gazetted 6 -counter-demonstrations 6 -marcoullier 6 -clutters 6 -stevendale 6 -pernambucano 6 -para-table 6 -flipswap 6 -rabih 6 -tsrnetwork.com 6 -cim 6 -cil 6 -respray 6 -blokland 6 -tail-enders 6 -half-deaf 6 -lightpaper 6 -book-smart 6 -dahlholzli 6 -globe-nominated 6 -eastward-moving 6 -ermey 6 -shovel-shaped 6 -ikoyi 6 -pamporovo 6 -seesawed 6 -arriaza 6 -cnnic 6 -zhiyun 6 -samb 6 -goucher 6 -seybold 6 -2,075 6 -mbarek 6 -weijie 6 -watch-style 6 -evloev 6 -littleredbunny 6 -over-expansion 6 -magaliesberg 6 -97.87 6 -app-store 6 -ndjida 6 -gavels 6 -hockx 6 -streeps 6 -#one2eleven 6 -reallocation 6 -#yeswecode 6 -zatopkova 6 -inswing 6 -loungepac 6 -bossart 6 -bluescope 6 -al-tabqa 6 -solidos 6 -kaliakra 6 -pest-free 6 -kanunnikov 6 -one-track 6 -merrymakers 6 -nandipati 6 -now-signature 6 -mackechnie 6 -compartmentalizing 6 -haskew 6 -safari-goers 6 -lakeman 6 -oppo 6 -hamima 6 -less-than-impressive 6 -biopark 6 -dropdown 6 -cloud-connected 6 -damphousse 6 -abigaille 6 -pulmonology 6 -yayasan 6 -microwedges 6 -funnell 6 -aql 6 -indian-held 6 -absher 6 -barbari 6 -landesberg 6 -hyper-intelligent 6 -kgw.com 6 -gorringe 6 -jonction 6 -god-ordained 6 -busy-ness 6 -trostel 6 -792nd 6 -birth-weight 6 -glasto 6 -tahsin 6 -varginha 6 -kolbjorn 6 -preciosa 6 -uhnwi 6 -awassa 6 -hanson-abbott 6 -gianello 6 -i.b. 
6 -fogbow 6 -stenroos 6 -100,000-a-day 6 -grimaldo 6 -karaliova 6 -non-metal 6 -kinara 6 -pakpourtabrizi 6 -olear 6 -zerrillo 6 -oblates 6 -ianto 6 -2032/33 6 -mendouo 6 -saltine 6 -chervenka 6 -13-4 6 -13-8 6 -rashia 6 -balin 6 -bocchetti 6 -121.8 6 -121.5 6 -animal-derived 6 -mcbaguette 6 -tricho 6 -lawrance 6 -fitness-to-practise 6 -piëch 6 -hosain 6 -mckuen 6 -over-staying 6 -bangguo 6 -tangney 6 -brutzman 6 -relabel 6 -oathcarn 6 -lohia 6 -jipa 6 -macguffin 6 -alekseeva 6 -proffers 6 -wir 6 -mini-submarines 6 -u.s.-german 6 -@oxmas_tree 6 -sabik 6 -wichien 6 -shrubsole 6 -cloakrooms 6 -pge 6 -hexamine 6 -drinmore 6 -fox9 6 -gypos 6 -ongyal 6 -kilbirnie 6 -transhumance 6 -schottenhamel 6 -pan-hellenic 6 -whovian 6 -oam 6 -dobruskii 6 -dawie 6 -cutolo 6 -multi-hour 6 -bebionic3 6 -manozzi 6 -dexion 6 -tanglin 6 -gruja 6 -mygoodness.com 6 -etchison 6 -short-chain 6 -refashioning 6 -birch-machin 6 -creatinine 6 -aytug 6 -sotshole 6 -avowal 6 -blythswood 6 -yeouido 6 -koya 6 -hoys 6 -jottings 6 -yuendumu 6 -essick 6 -19-piece 6 -cyberview 6 -drylaw 6 -road-mobile 6 -safety-critical 6 -borve 6 -farnsfield 6 -qinyuan 6 -21-11 6 -tuffnell 6 -salonga 6 -kianerci 6 -kones 6 -frasers 6 -r1200gs 6 -chaffed 6 -off-time 6 -handoffs 6 -licence-holders 6 -aptitudes 6 -birbeck 6 -t-33 6 -264m 6 -rendle 6 -cambage 6 -protogeo 6 -shihadeh 6 -bresloff 6 -idelson 6 -moxy 6 -kelmscott 6 -bramer 6 -mat-su 6 -precession 6 -camera-trap 6 -banni 6 -mittimus 6 -love-rat 6 -critised 6 -urich 6 -70,000-a-year 6 -howrse 6 -ultra-secret 6 -stazione 6 -737-400 6 -myf 6 -500-a-day 6 -9,750 6 -good-news 6 -bequia 6 -chlorine-free 6 -winden 6 -lucienne 6 -easthope 6 -sweet-and-sour 6 -paasewe 6 -muico 6 -toback 6 -checked-bag 6 -sturgill 6 -hackling 6 -mimmy 6 -nailia 6 -stone-walled 6 -herzig 6 -hanting 6 -tacopino 6 -mehring 6 -visisted 6 -tto 6 -ttb 6 -illogically 6 -hao-ching 6 -bld 6 -gastrobus 6 -youku.com 6 -consensus-builder 6 -hospita 6 -sefer 6 -blenheims 6 -nicolls 6 -dust-coated 6 -wangchuck 6 -kapitan 6 -foix 6 -qiblawi 6 -mammas 6 -mamman 6 -183million 6 -hich 6 -streambed 6 -over-supply 6 -+78 6 -iyegbe 6 -umeano 6 -understudied 6 -lynley 6 -storfer 6 -budke 6 -ubotddstarl 6 -fegan 6 -masur 6 -second-century 6 -open-sourced 6 -feed-in 6 -erbie 6 -mazdack 6 -scornavacchi 6 -ladurée 6 -bearian 6 -keep-away 6 -yesua 6 -turkish-controlled 6 -al-jarida 6 -ikey 6 -dayal 6 -kpk 6 -osironke 6 -poison-pen 6 -33.99 6 -stephanopoulus 6 -strike-partner 6 -bocouture 6 -pepiezep 6 -obama-appointed 6 -virgitti 6 -lounibos 6 -8secs 6 -injury-interrupted 6 -trilobites 6 -mavisa 6 -rila 6 -rill 6 -cnn-us 6 -hemeryck 6 -101f 6 -amusement-park 6 -galinhas 6 -british-inspired 6 -telerobotics 6 -jiaxing-shaoxing 6 -5.0.1 6 -takebayashi 6 -deveri 6 -used-by 6 -pixel-by-pixel 6 -cheggers 6 -pressvess 6 -mannai 6 -mannar 6 -bakowski 6 -valentinian 6 -40,200 6 -non-sterilized 6 -ataxic 6 -ostojic 6 -maybee 6 -london-new 6 -blasnek 6 -djemal 6 -murisciano 6 -madagascar-type 6 -195mph 6 -contestable 6 -teneo 6 -wallsten 6 -imette 6 -16-under-par 6 -clobbers 6 -folllowing 6 -harmander 6 -gsv 6 -craigholme 6 -blinged-up 6 -drug-involved 6 -conciliazione 6 -yoong 6 -unclipping 6 -depreciating 6 -halkirk 6 -rahder 6 -vachan 6 -youth-obsessed 6 -emmel 6 -thirty-somethings 6 -'66 6 -'67 6 -userbase 6 -taslima 6 -october/november 6 -gangster-style 6 -cdu/csu 6 -bovingdon 6 -re-positioning 6 -popovka 6 -balkwill 6 -deal-maker 6 -karmal 6 -pazen 6 -trave 6 -aliments 6 -mashregh 6 -decerega 6 -ruthman 6 -marckenson 6 -eco-minded 6 -pamiris 6 
-multiscreen 6 -mulitple 6 -lee-grace 6 -everychild 6 -anti-trolling 6 -susteran 6 -35-a-head 6 -three-foot-long 6 -kerneels 6 -tulsyan 6 -tiesel 6 -gläce 6 -rotherwick 6 -non-ticketed 6 -year-old-man 6 -byrant 6 -hsiang 6 -atrash 6 -baitfish 6 -coffee-maker 6 -fully-licensed 6 -stumpnuts 6 -teuns 6 -ex-villa 6 -reacquire 6 -ews 6 -lachance 6 -simasiku 6 -multilayer 6 -uk/u 6 -anglians 6 -papd 6 -petrolatum 6 -i10 6 -integro 6 -integra 6 -anthee 6 -beatties 6 -winlaton 6 -lukqun 6 -strensham 6 -mount/haram 6 -front-three 6 -52,600 6 -micato 6 -higashi 6 -154kg 6 -willgoose 6 -burlakoffs 6 -topliffe 6 -theravada 6 -feijoo 6 -agnolo 6 -keywest 6 -not-to-be-missed 6 -cussins 6 -carnac 6 -anomalocaridids 6 -eckstrand 6 -pengelley 6 -forst 6 -taumalolo 6 -donat 6 -over-30 6 -whitemore 6 -ferrill 6 -bernick 6 -gesticulations 6 -ineluctable 6 -1236 6 -villagra-garzon 6 -ampner 6 -procreative 6 -bradie 6 -dasna 6 -mbangwa 6 -specially-arranged 6 -harriot 6 -botkinburg 6 -nasery 6 -arbitrageur 6 -23,000-a-year 6 -prochadzkova 6 -netters 6 -two-edged 6 -011-52/669 6 -56.19 6 -meyerbeer 6 -sumi-e 6 -smith-hughes 6 -fondevrider 6 -kovner 6 -baydon 6 -spe 6 -ivanovna 6 -quickquid 6 -redmire 6 -oldsmar 6 -fishhook 6 -kobza 6 -appropriates 6 -cloake 6 -sku 6 -skg 6 -halfway-line 6 -sobecki 6 -u.s.-launched 6 -4,672 6 -4,670 6 -135mm 6 -teamsky.com 6 -fuera 6 -stohl 6 -#freefreya 6 -stepehen 6 -stylecycle 6 -etim 6 -irv 6 -en-us 6 -d'etudes 6 -barcleona 6 -sompie 6 -eight-meter 6 -anti-acid 6 -novasure 6 -rapey 6 -creuset 6 -flat-top 6 -phripp 6 -mechaphilia 6 -denburn 6 -amplatz 6 -260-mile 6 -soulagnet 6 -wolpert 6 -pimply 6 -340m 6 -dirik 6 -monari 6 -213ft 6 -mikeala 6 -herkes 6 -34-15 6 -ceiling-mounted 6 -amparo 6 -jarmal 6 -10.78 6 -deysher 6 -ranegie 6 -kanhai 6 -gonnella 6 -instant-on 6 -#runforboston 6 -skillshot 6 -100,500 6 -novecento 6 -busboys 6 -unscrupulously 6 -fenske 6 -breann 6 -revuln 6 -genck 6 -brey 6 -oropharynx 6 -krawitz 6 -miss-hits 6 -high-standard 6 -unleavened 6 -2001-04 6 -cundiff 6 -92,200 6 -prison-based 6 -deqa 6 -well-above 6 -impeller 6 -família 6 -verdery 6 -sambol 6 -lyzhina 6 -ecologic 6 -kohona 6 -brietbart 6 -gnp 6 -huxham 6 -transoral 6 -@manutd 6 -xactware 6 -lavao 6 -icenetwork 6 -nimrods 6 -75-story 6 -wubby 6 -cowser 6 -soka 6 -fulhamish 6 -79per 6 -amanecer 6 -carboard 6 -acadian 6 -y-ers 6 -rabinovich 6 -yuyuan 6 -sutarman 6 -set-in-stone 6 -syphoned 6 -jordan-based 6 -pettler 6 -tayar 6 -neom 6 -manion-borek 6 -sullie 6 -poice 6 -biafran 6 -american-iranian 6 -sabara 6 -sackin 6 -for-and-against 6 -ex-blackwater 6 -desribed 6 -frenzel 6 -yssel-richards 6 -eacott 6 -neo-pagan 6 -silvery-white 6 -molinares 6 -arachnologist 6 -koekohe 6 -ferdi 6 -olestra 6 -politecnico 6 -highly-ranked 6 -5,000-word 6 -logline 6 -sq/km 6 -almudaina 6 -robot-astronaut 6 -preparator 6 -ibrahimovich 6 -manneken 6 -berrigan 6 -ellick 6 -dehler 6 -perdidos 6 -kez 6 -vorobyov 6 -17.20 6 -chongquing 6 -democracy.com 6 -karumbé 6 -bodewits 6 -broadwall 6 -cleggmania 6 -elegy 6 -chopticon 6 -aqrab 6 -sundstrom 6 -aboukir 6 -hogshire 6 -victimes 6 -62-0 6 -schira 6 -digesters 6 -coxed 6 -menchov 6 -non-racial 6 -matroshka 6 -bionnassay 6 -alveda 6 -roanne 6 -gabinetto 6 -chinaâ 6 -childwall 6 -dowe 6 -www.nypdcrimestoppers.com 6 -goldens 6 -margulis-ohnuma 6 -winsconsin 6 -24-team 6 -asdrubal 6 -catadore 6 -cocksedge 6 -coppersmiths 6 -ex-justice 6 -cartograms 6 -vasealli 6 -facetracker 6 -of-two 6 -mendeleev 6 -5,675 6 -eighth-degree 6 -uk-mean 6 -pro-freedom 6 -shaposhnikov 6 
-croupiers 6 -kazzan 6 -7.01 6 -laist 6 -wabel 6 -sanderholm 6 -hadzovic 6 -ultra-high-definition 6 -servicer 6 -quereshi 6 -askja 6 -teaspoonful 6 -overlappers 6 -nimko 6 -flea-market 6 -lieras 6 -sea-floor 6 -poseyville 6 -nonworking 6 -efstathios 6 -25-kilogram 6 -akhil 6 -akey 6 -gierek 6 -3,540 6 -aesthetician 6 -dyneema 6 -j0855-0714 6 -zinzanni 6 -kiravan 6 -digeorge 6 -lion-tiger 6 -carrer 6 -zangana 6 -cambio 6 -dechane 6 -organohalogens 6 -moonflask 6 -22-26 6 -22-27 6 -22-28 6 -ngouboua 6 -anti-hacking 6 -schelbert 6 -tanka 6 -hotelied 6 -unsent 6 -intrasquad 6 -heliotail 6 -phthalate-free 6 -mountain-like 6 -kievan 6 -mirimskaya 6 -deigo 6 -nwaolisa 6 -chocolate-flavored 6 -patrich 6 -water-main 6 -estimator 6 -frenetically 6 -147mph 6 -gurkhan 6 -papilio 6
diff --git a/reproduction/Summarization/Baseline/test_data.py b/reproduction/Summarization/Baseline/test_data.py
deleted file mode 100644
index d7430d59..00000000
--- a/reproduction/Summarization/Baseline/test_data.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# __author__="Danqing Wang"
-
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-
-import os
-import sys
-
-sys.path.append('/remote-home/dqwang/FastNLP/fastNLP_brxx/')
-
-from fastNLP.core.const import Const
-
-from data.dataloader import SummarizationLoader
-from tools.data import ExampleSet, Vocab
-
-vocab_size = 100000
-vocab_path = "test/testdata/vocab"
-sent_max_len = 100
-doc_max_timesteps = 50
-
-# paths = {"train": "test/testdata/train.jsonl", "valid": "test/testdata/val.jsonl"}
-paths = {"train": "/remote-home/dqwang/Datasets/CNNDM/train.label.jsonl", "valid": "/remote-home/dqwang/Datasets/CNNDM/val.label.jsonl"}
-sum_loader = SummarizationLoader()
-dataInfo = sum_loader.process(paths=paths, vocab_size=vocab_size, vocab_path=vocab_path, sent_max_len=sent_max_len, doc_max_timesteps=doc_max_timesteps, load_vocab_file=True)
-trainset = dataInfo.datasets["train"]
-
-vocab = Vocab(vocab_path, vocab_size)
-dataset = ExampleSet(paths["train"], vocab, doc_max_timesteps, sent_max_len)
-
-# print(trainset[0]["text"])
-# print(dataset.get_example(0).original_article_sents)
-# print(trainset[0]["words"])
-# print(dataset[0][0].numpy().tolist())
-b_size = len(trainset)
-for i in range(b_size):
-    if i <= 7327:  # skip the first 7328 examples
-        continue
-    print(trainset[i][Const.INPUT])
-    print(dataset[i][0].numpy().tolist())
-    assert trainset[i][Const.INPUT] == dataset[i][0].numpy().tolist(), i
-    assert trainset[i][Const.INPUT_LEN] == dataset[i][2].numpy().tolist(), i
-    assert trainset[i][Const.TARGET] == dataset[i][1].numpy().tolist(), i
\ No newline at end of file
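The deleted script above is a parity check: it asserts, example by example, that the fastNLP `SummarizationLoader` pipeline and the original `ExampleSet` reader produce identical inputs, lengths and targets. A bare `assert ..., i` only reports the failing index; a helper that returns the first point of divergence is easier to debug with. This is a sketch, not part of the deleted code; `get_a` and `get_b` are hypothetical accessors standing in for the two pipelines:

```python
def first_divergence(get_a, get_b, n):
    """Return (index, a, b) for the first example where the two pipelines disagree, else None."""
    for i in range(n):
        a, b = get_a(i), get_b(i)
        if a != b:
            return i, a, b
    return None

# Hypothetical usage with the two readers from the script above:
# diff = first_divergence(lambda i: trainset[i][Const.INPUT],
#                         lambda i: dataset[i][0].numpy().tolist(),
#                         len(trainset))
# assert diff is None, "pipelines diverge at index %d" % diff[0]
```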
Wang" - -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -import os -import sys -import time -import numpy as np - -import torch - -from fastNLP.core.const import Const -from fastNLP.io.model_io import ModelSaver -from fastNLP.core.callback import Callback, EarlyStopError - -from fastNLP.core._logger import logger - -class TrainCallback(Callback): - def __init__(self, hps, patience=3, quit_all=True): - super().__init__() - self._hps = hps - self.patience = patience - self.wait = 0 - self.train_loss = 0.0 - self.prev_train_avg_loss = 1000.0 - self.train_dir = os.path.join(self._hps.save_root, "train") - - if type(quit_all) != bool: - raise ValueError("In KeyBoardInterrupt, quit_all arguemnt must be a bool.") - self.quit_all = quit_all - - def on_epoch_begin(self): - self.epoch_start_time = time.time() - self.model.Train = True - - def on_backward_begin(self, loss): - """ - - :param loss: [] - :return: - """ - if not (np.isfinite(loss.data)).numpy(): - logger.error("train Loss is not finite. Stopping.") - logger.info(loss) - for name, param in self.model.named_parameters(): - if param.requires_grad: - logger.info(name) - logger.info(param.grad.data.sum()) - raise Exception("train Loss is not finite. 
Stopping.") - self.train_loss += loss.data - - - def on_backward_end(self): - if self._hps.grad_clip: - torch.nn.utils.clip_grad_norm_(self.model.parameters(), self._hps.max_grad_norm) - torch.cuda.empty_cache() - - def on_epoch_end(self): - epoch_avg_loss = self.train_loss / self.n_steps - logger.info(' | end of epoch {:3d} | time: {:5.2f}s | train loss: {:5.6f}' - .format(self.epoch, (time.time() - self.epoch_start_time), epoch_avg_loss)) - if self.prev_train_avg_loss < epoch_avg_loss: - save_file = os.path.join(self.train_dir, "earlystop.pkl") - self.save_model(save_file) - else: - self.prev_train_avg_loss = epoch_avg_loss - self.train_loss = 0.0 - - # save epoch - save_file = os.path.join(self.train_dir, "epoch_%d.pkl" % self.epoch) - self.save_model(save_file) - - - - def on_valid_begin(self): - self.valid_start_time = time.time() - self.model.Train = False - - def on_valid_end(self, eval_result, metric_key, optimizer, is_better_eval): - logger.info(' | end of valid {:3d} | time: {:5.2f}s | ' - .format(self.epoch, (time.time() - self.valid_start_time))) - - # early stop - if not is_better_eval: - if self.wait == self.patience: - train_dir = os.path.join(self._hps.save_root, "train") - save_file = os.path.join(train_dir, "earlystop.pkl") - self.save_model(save_file) - raise EarlyStopError("Early stopping raised.") - else: - self.wait += 1 - else: - self.wait = 0 - - # lr descent - if self._hps.lr_descent: - new_lr = max(5e-6, self._hps.lr / (self.epoch + 1)) - for param_group in list(optimizer.param_groups): - param_group['lr'] = new_lr - logger.info("[INFO] The learning rate now is %f", new_lr) - - - def on_exception(self, exception): - if isinstance(exception, KeyboardInterrupt): - logger.error("[Error] Caught keyboard interrupt on worker. Stopping supervisor...") - save_file = os.path.join(self.train_dir, "earlystop.pkl") - self.save_model(save_file) - - if self.quit_all is True: - sys.exit(0) # 直接退出程序 - else: - pass - else: - raise exception # 抛出陌生Error - - def save_model(self, save_file): - saver = ModelSaver(save_file) - saver.save_pytorch(self.model) - logger.info('[INFO] Saving model to %s', save_file) - - - - - - - diff --git a/reproduction/Summarization/Baseline/tools/PositionEmbedding.py b/reproduction/Summarization/Baseline/tools/PositionEmbedding.py deleted file mode 100644 index 985223bb..00000000 --- a/reproduction/Summarization/Baseline/tools/PositionEmbedding.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- - -# __author__="Danqing Wang" - -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
diff --git a/reproduction/Summarization/Baseline/tools/PositionEmbedding.py b/reproduction/Summarization/Baseline/tools/PositionEmbedding.py
deleted file mode 100644
index 985223bb..00000000
--- a/reproduction/Summarization/Baseline/tools/PositionEmbedding.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# __author__="Danqing Wang"
-
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-
-import torch
-import numpy as np
-
-def get_sinusoid_encoding_table(n_position, d_hid, padding_idx=None):
-    ''' Sinusoid position encoding table '''
-
-    def cal_angle(position, hid_idx):
-        return position / np.power(10000, 2 * (hid_idx // 2) / d_hid)
-
-    def get_posi_angle_vec(position):
-        return [cal_angle(position, hid_j) for hid_j in range(d_hid)]
-
-    sinusoid_table = np.array([get_posi_angle_vec(pos_i) for pos_i in range(n_position)])
-
-    sinusoid_table[:, 0::2] = np.sin(sinusoid_table[:, 0::2])  # dim 2i
-    sinusoid_table[:, 1::2] = np.cos(sinusoid_table[:, 1::2])  # dim 2i+1
-
-    if padding_idx is not None:
-        # zero vector for padding dimension
-        sinusoid_table[padding_idx] = 0.
-
-    return torch.FloatTensor(sinusoid_table)
\ No newline at end of file
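The deleted helper implements the fixed sinusoidal position encodings of Vaswani et al. (2017): even columns carry sin(pos / 10000^(2i/d_hid)), odd columns the corresponding cos, and an optional padding row is zeroed. A short usage sketch; the sizes are illustrative, and the frozen-embedding wrapper is a common pattern rather than something the deleted code does itself:

```python
import torch
from tools.PositionEmbedding import get_sinusoid_encoding_table

# Build a table for 50 positions and hidden size 8, zeroing the padding row.
table = get_sinusoid_encoding_table(n_position=50, d_hid=8, padding_idx=0)
print(table.shape)               # torch.Size([50, 8])
assert torch.all(table[0] == 0)  # padding position gets a zero vector

# Typical use: wrap the table as a frozen embedding looked up by position ids.
pos_emb = torch.nn.Embedding.from_pretrained(table, freeze=True)
positions = torch.tensor([[1, 2, 3, 0]])  # 0 marks padding
print(pos_emb(positions).shape)  # torch.Size([1, 4, 8])
```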
- :param vocab_file: string; path to the vocab file, which is assumed to contain "<word> <frequency>" on each line, sorted with most frequent word first. This code doesn't actually use the frequencies, though. - :param max_size: int; The maximum size of the resulting Vocabulary. - """ - self._word_to_id = {} - self._id_to_word = {} - self._count = 0 # keeps track of total number of words in the Vocab - - # [PAD], [UNK], [START] and [STOP] get the ids 0, 1, 2, 3. - for w in [PAD_TOKEN, UNKNOWN_TOKEN, START_DECODING, STOP_DECODING]: - self._word_to_id[w] = self._count - self._id_to_word[self._count] = w - self._count += 1 - - # Read the vocab file and add words up to max_size - with open(vocab_file, 'r', encoding='utf8') as vocab_f: # NOTE: utf8 encoding prevents decode errors - cnt = 0 - for line in vocab_f: - cnt += 1 - pieces = line.split("\t") - # pieces = line.split() - w = pieces[0] - # print(w) - if w in [SENTENCE_START, SENTENCE_END, UNKNOWN_TOKEN, PAD_TOKEN, START_DECODING, STOP_DECODING]: - raise Exception('<s>, </s>, [UNK], [PAD], [START] and [STOP] shouldn\'t be in the vocab file, but %s is' % w) - if w in self._word_to_id: - logger.error('Duplicated word in vocabulary file Line %d : %s' % (cnt, w)) - continue - self._word_to_id[w] = self._count - self._id_to_word[self._count] = w - self._count += 1 - if max_size != 0 and self._count >= max_size: - logger.info("[INFO] max_size of vocab was specified as %i; we now have %i words. Stopping reading." % (max_size, self._count)) - break - logger.info("[INFO] Finished constructing vocabulary of %i total words. Last word added: %s", self._count, self._id_to_word[self._count-1]) - - def word2id(self, word): - """Returns the id (integer) of a word (string). Returns [UNK] id if word is OOV.""" - if word not in self._word_to_id: - return self._word_to_id[UNKNOWN_TOKEN] - return self._word_to_id[word] - - def id2word(self, word_id): - """Returns the word (string) corresponding to an id (integer).""" - if word_id not in self._id_to_word: - raise ValueError('Id not found in vocab: %d' % word_id) - return self._id_to_word[word_id] - - def size(self): - """Returns the total size of the vocabulary""" - return self._count - - def word_list(self): - """Return the word list of the vocabulary""" - return self._word_to_id.keys() - -class Word_Embedding(object): - def __init__(self, path, vocab): - """ - :param path: string; the path of the word embedding file - :param vocab: Vocab object - """ - logger.info("[INFO] Loading external word embedding...") - self._path = path - self._vocablist = vocab.word_list() - self._vocab = vocab - - def load_my_vecs(self, k=200): - """Load word embeddings, keeping the first k dimensions of each vector""" - word_vecs = {} - with open(self._path, encoding="utf-8") as f: - count = 0 - lines = f.readlines()[1:] - for line in lines: - values = line.split(" ") - word = values[0] - count += 1 - if word in self._vocablist: # only keep vectors of words that appear in the vocab - vector = [] - for idx, val in enumerate(values): - if idx == 0: - continue - if idx <= k: - vector.append(float(val)) - word_vecs[word] = vector - return word_vecs - - def add_unknown_words_by_zero(self, word_vecs, k=200): - """Initialize unknown words with zero vectors""" - zero = [0.0] * k - list_word2vec = [] - oov = 0 - iov = 0 - for i in range(self._vocab.size()): - word = self._vocab.id2word(i) - if word not in word_vecs: - oov += 1 - word_vecs[word] = zero - list_word2vec.append(word_vecs[word]) - else: - iov += 1 - list_word2vec.append(word_vecs[word]) - logger.info("[INFO] oov count %d, iov count %d", oov, iov) - return list_word2vec - - def 
add_unknown_words_by_avg(self, word_vecs, k=200): - """Initialize unknown words with the average of the known embeddings""" - # unknown words are replaced by the average vector of all pretrained embeddings - word_vecs_numpy = [] - for word in self._vocablist: - if word in word_vecs: - word_vecs_numpy.append(word_vecs[word]) - col = [] - for i in range(k): - total = 0.0 - for j in range(int(len(word_vecs_numpy))): - total += word_vecs_numpy[j][i] - total = round(total, 6) - col.append(total) - zero = [] - for m in range(k): - avg = col[m] / int(len(word_vecs_numpy)) - avg = round(avg, 6) - zero.append(float(avg)) - - list_word2vec = [] - oov = 0 - iov = 0 - for i in range(self._vocab.size()): - word = self._vocab.id2word(i) - if word not in word_vecs: - oov += 1 - word_vecs[word] = zero - list_word2vec.append(word_vecs[word]) - else: - iov += 1 - list_word2vec.append(word_vecs[word]) - logger.info("[INFO] External Word Embedding iov count: %d, oov count: %d", iov, oov) - return list_word2vec - - def add_unknown_words_by_uniform(self, word_vecs, uniform=0.25, k=200): - """Initialize unknown words from uniform(-uniform, uniform)""" - list_word2vec = [] - oov = 0 - iov = 0 - for i in range(self._vocab.size()): - word = self._vocab.id2word(i) - if word not in word_vecs: - oov += 1 - word_vecs[word] = np.random.uniform(-1 * uniform, uniform, k).round(6).tolist() - list_word2vec.append(word_vecs[word]) - else: - iov += 1 - list_word2vec.append(word_vecs[word]) - logger.info("[INFO] oov count %d, iov count %d", oov, iov) - return list_word2vec - - # load word embeddings, randomly dropping words that occur only once - def load_my_vecs_freq1(self, freqs, pro): - word_vecs = {} - with open(self._path, encoding="utf-8") as f: - freq = 0 - lines = f.readlines()[1:] - for line in lines: - values = line.split(" ") - word = values[0] - if word in self._vocablist: # only keep vectors of words that appear in the vocab - if freqs[word] == 1: - a = np.random.uniform(0, 1, 1).round(2) - if pro < a: - continue - vector = [] - for count, val in enumerate(values): - if count == 0: - continue - vector.append(float(val)) - word_vecs[word] = vector - return word_vecs - -class DomainDict(object): - """Domain dictionary for Newsroom""" - def __init__(self, path): - self.domain_list = self.readDomainlist(path) - # self.domain_list = ["foxnews.com", "cnn.com", "mashable.com", "nytimes.com", "washingtonpost.com"] - self.domain_number = len(self.domain_list) - self._domain_to_id = {} - self._id_to_domain = {} - self._cnt = 0 - - self._domain_to_id["X"] = self._cnt - self._id_to_domain[self._cnt] = "X" - self._cnt += 1 - - for i in range(self.domain_number): - domain = self.domain_list[i] - self._domain_to_id[domain] = self._cnt - self._id_to_domain[self._cnt] = domain - self._cnt += 1 - - def readDomainlist(self, path): - domain_list = [] - with open(path) as f: - for line in f: - domain_list.append(line.split("\t")[0].strip()) - logger.info(domain_list) - return domain_list - - def domain2id(self, domain): - """ Returns the id (integer) of a domain (string). Returns the id of "X" for an unknown domain. - :param domain: string - :return: id; int - """ - if domain in self.domain_list: - return self._domain_to_id[domain] - else: - logger.info(domain) - return self._domain_to_id["X"] - - def id2domain(self, domain_id): - """ Returns the domain (string) corresponding to an id (integer). 
- :param domain_id: int; - :return: domain: string - """ - if domain_id not in self._id_to_domain: - raise ValueError('Id not found in DomainDict: %d' % domain_id) - return self._id_to_domain[domain_id] - - def size(self): - return self._cnt - - -class Example(object): - """Class representing a train/val/test example for text summarization.""" - def __init__(self, article_sents, abstract_sents, vocab, sent_max_len, label, domainid=None): - """ Initializes the Example, performing tokenization and truncation to produce the encoder, decoder and target sequences, which are stored in self. - - :param article_sents: list of strings; one per article sentence. Each token is separated by a single space. - :param abstract_sents: list of strings; one per abstract sentence. In each sentence, each token is separated by a single space. - :param domainid: int; publication (domain) of the example - :param vocab: Vocabulary object - :param sent_max_len: int; the maximum length of each sentence; all sentences are padded to this length - :param label: list of int; the indices of the selected sentences - """ - - self.sent_max_len = sent_max_len - self.enc_sent_len = [] - self.enc_sent_input = [] - self.enc_sent_input_pad = [] - - # origin_cnt = len(article_sents) - # article_sents = [re.sub(r"\n+\t+", " ", sent) for sent in article_sents] - # assert origin_cnt == len(article_sents) - - # Process the article - for sent in article_sents: - article_words = sent.split() - self.enc_sent_len.append(len(article_words)) # store the raw sentence length; truncation and padding happen in _pad_encoder_input - # self.enc_sent_input.append([vocab.word2id(w) for w in article_words]) # list of word ids; OOVs are represented by the id for UNK token - self.enc_sent_input.append([vocab.word2id(w.lower()) for w in article_words]) # list of word ids; OOVs are represented by the id for UNK token - self._pad_encoder_input(vocab.word2id('[PAD]')) - - # Store the original strings - self.original_article = " ".join(article_sents) - self.original_article_sents = article_sents - - if isinstance(abstract_sents[0], list): - logger.debug("[INFO] Multi Reference summaries!") - self.original_abstract_sents = [] - self.original_abstract = [] - for summary in abstract_sents: - self.original_abstract_sents.append([sent.strip() for sent in summary]) - self.original_abstract.append("\n".join([sent.replace("\n", "") for sent in summary])) - else: - self.original_abstract_sents = [sent.replace("\n", "") for sent in abstract_sents] - self.original_abstract = "\n".join(self.original_abstract_sents) - - # Store the label - self.label = np.zeros(len(article_sents), dtype=int) - if label != []: - self.label[np.array(label)] = 1 - self.label = list(self.label) - - # Store the publication - if domainid is not None: - if domainid == 0: - logger.debug("domain id = 0!") - self.domain = domainid - - def _pad_encoder_input(self, pad_id): - """ - :param pad_id: int; token pad id - :return: - """ - max_len = self.sent_max_len - for i in range(len(self.enc_sent_input)): - article_words = self.enc_sent_input[i] - if len(article_words) > max_len: - article_words = article_words[:max_len] - while len(article_words) < max_len: - article_words.append(pad_id) - self.enc_sent_input_pad.append(article_words) - -class ExampleSet(torch.utils.data.Dataset): - """Dataset of Example objects""" - def __init__(self, data_path, vocab, doc_max_timesteps, sent_max_len, domaindict=None, randomX=False, usetag=False): - """ Initializes the ExampleSet with the path of data - - :param data_path: string; the path of the data - :param vocab: 
Vocab object; - :param doc_max_timesteps: int; the maximum number of sentences in a document; each example is padded to this length - :param sent_max_len: int; the maximum number of tokens in a sentence; each sentence is padded to this length - :param domaindict: object; the domain dict used to embed the domain - """ - self.domaindict = domaindict - if domaindict: - logger.info("[INFO] Use domain information in the dataset!") - if randomX: - logger.info("[INFO] Randomly remap some examples to the unknown domain X!") - self.randomP = 0.1 - logger.info("[INFO] Start reading ExampleSet") - start = time.time() - self.example_list = [] - self.doc_max_timesteps = doc_max_timesteps - cnt = 0 - with open(data_path, 'r') as reader: - for line in reader: - try: - e = json.loads(line) - article_sent = e['text'] - tag = e["tag"][0] if usetag else e['publication'] - # logger.info(tag) - if "duc" in data_path: - abstract_sent = e["summaryList"] if "summaryList" in e.keys() else [e['summary']] - else: - abstract_sent = e['summary'] - if domaindict: - if randomX: - p = np.random.rand() - if p <= self.randomP: - domainid = domaindict.domain2id("X") - else: - domainid = domaindict.domain2id(tag) - else: - domainid = domaindict.domain2id(tag) - else: - domainid = None - logger.debug((tag, domainid)) - except (ValueError, EOFError) as e: - logger.debug(e) - break - else: - example = Example(article_sent, abstract_sent, vocab, sent_max_len, e["label"], domainid) # Process into an Example. - self.example_list.append(example) - cnt += 1 - # print(cnt) - logger.info("[INFO] Finish reading ExampleSet. Total time is %f, Total size is %d", time.time() - start, len(self.example_list)) - self.size = len(self.example_list) - - # self.example_list.sort(key=lambda ex: ex.domain) - - def get_example(self, index): - return self.example_list[index] - - def __getitem__(self, index): - """ - :param index: int; the index of the example - :return - input_pad: [N, seq_len] - label: [N] - input_mask: [N] - domain: [1] - """ - item = self.example_list[index] - input = np.array(item.enc_sent_input_pad) - label = np.array(item.label, dtype=int) - # pad input to doc_max_timesteps - if len(input) < self.doc_max_timesteps: - pad_number = self.doc_max_timesteps - len(input) - pad_matrix = np.zeros((pad_number, len(input[0]))) - input_pad = np.vstack((input, pad_matrix)) - label = np.append(label, np.zeros(pad_number, dtype=int)) - input_mask = np.append(np.ones(len(input)), np.zeros(pad_number)) - else: - input_pad = input[:self.doc_max_timesteps] - label = label[:self.doc_max_timesteps] - input_mask = np.ones(self.doc_max_timesteps) - if self.domaindict: - return torch.from_numpy(input_pad).long(), torch.from_numpy(label).long(), torch.from_numpy(input_mask).long(), item.domain - return torch.from_numpy(input_pad).long(), torch.from_numpy(label).long(), torch.from_numpy(input_mask).long() - - def __len__(self): - return self.size - -class MultiExampleSet(): - def __init__(self, data_dir, vocab, doc_max_timesteps, sent_max_len, domaindict=None, randomX=False, usetag=False): - self.datasets = [None] * (domaindict.size() - 1) - data_path_list = [os.path.join(data_dir, s) for s in os.listdir(data_dir) if s.endswith("label.jsonl")] - for data_path in data_path_list: - fname = data_path.split("/")[-1] # e.g. cnn.com.label.jsonl - dataname = ".".join(fname.split(".")[:-2]) - domainid = domaindict.domain2id(dataname) - logger.info("[INFO] domain name: %s, domain id: %d" % (dataname, domainid)) - self.datasets[domainid - 1] = ExampleSet(data_path, vocab, doc_max_timesteps, sent_max_len, domaindict, randomX, usetag) - - def get(self, id): - return self.datasets[id]
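ExampleSet.__getitem__ above pads short documents up to doc_max_timesteps with all-zero sentence rows and marks the real sentences in input_mask; a toy run of the same arithmetic (standalone and purely illustrative, with made-up shapes):

    import numpy as np

    doc_max_timesteps, sent_max_len = 5, 4
    input = np.ones((3, sent_max_len), dtype=int)       # a document with 3 real (already padded) sentences
    pad_number = doc_max_timesteps - len(input)
    input_pad = np.vstack((input, np.zeros((pad_number, sent_max_len))))
    input_mask = np.append(np.ones(len(input)), np.zeros(pad_number))
    print(input_pad.shape, input_mask)                  # (5, 4) [1. 1. 1. 0. 0.]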
 -from torch.utils.data.dataloader import default_collate -def my_collate_fn(batch): - ''' - :param batch: list of (input_pad, label, input_mask, domain) tuples - :return: the batch filtered down to the domain of its first element, collated - ''' - start_domain = batch[0][-1] - # for i in range(len(batch)): - # print(batch[i][-1], end=',') - batch = list(filter(lambda x: x[-1] == start_domain, batch)) - print("start_domain %d" % start_domain) - print("batch_len %d" % len(batch)) - if len(batch) == 0: return torch.Tensor() - return default_collate(batch) # collate the filtered batch with the default collate function - diff --git a/reproduction/Summarization/Baseline/tools/logger.py b/reproduction/Summarization/Baseline/tools/logger.py deleted file mode 100644 index 0c6ca0e0..00000000 --- a/reproduction/Summarization/Baseline/tools/logger.py +++ /dev/null @@ -1,27 +0,0 @@ -# -*- coding: utf-8 -*- - -import logging -import sys - -# get a logger instance; logging.getLogger() without a name would return the root logger -logger = logging.getLogger("Summarization logger") -# logger = logging.getLogger() - -# output format of the logger -formatter = logging.Formatter('%(asctime)s %(levelname)-8s: %(message)s') - -# # file logging -# file_handler = logging.FileHandler("test.log") -# file_handler.setFormatter(formatter) # the output format can be set via setFormatter - -# console logging -console_handler = logging.StreamHandler(sys.stdout) -console_handler.formatter = formatter # the formatter attribute can also be assigned directly -console_handler.setLevel(logging.INFO) - -# attach the handlers to the logger -# logger.addHandler(file_handler) -logger.addHandler(console_handler) - -# minimum level that will be emitted; the default would be WARNING -logger.setLevel(logging.DEBUG) diff --git a/reproduction/Summarization/Baseline/tools/utils.py b/reproduction/Summarization/Baseline/tools/utils.py deleted file mode 100644 index f49339ee..00000000 --- a/reproduction/Summarization/Baseline/tools/utils.py +++ /dev/null @@ -1,297 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -import re -import os -import shutil -import copy -import datetime -import numpy as np - -from rouge import Rouge - -from .logger import * -# from data import * - -import sys -sys.setrecursionlimit(10000) - -REMAP = {"-lrb-": "(", "-rrb-": ")", "-lcb-": "{", "-rcb-": "}", - "-lsb-": "[", "-rsb-": "]", "``": '"', "''": '"'} - -def clean(x): - return re.sub( - r"-lrb-|-rrb-|-lcb-|-rcb-|-lsb-|-rsb-|``|''", - lambda m: REMAP.get(m.group()), x) - - -def rouge_eval(hyps, refer): - rouge = Rouge() - # print(hyps) - # print(refer) - # print(rouge.get_scores(hyps, refer)) - try: - score = rouge.get_scores(hyps, refer)[0] - mean_score = np.mean([score["rouge-1"]["f"], score["rouge-2"]["f"], score["rouge-l"]["f"]]) - except Exception: - mean_score = 0.0 - return mean_score - -def rouge_all(hyps, refer): - rouge = Rouge() - score = rouge.get_scores(hyps, refer)[0] - # mean_score = np.mean([score["rouge-1"]["f"], score["rouge-2"]["f"], score["rouge-l"]["f"]]) - return score - -def eval_label(match_true, pred, true, total, match): - match_true, pred, true, match = match_true.float(), pred.float(), true.float(), match.float() - try: - accu = match / total - precision = match_true / pred - recall = match_true / true - F = 2 * precision * recall / (precision + recall) - except ZeroDivisionError: - F = 0.0 - logger.error("[Error] float division by zero") - return accu, precision, recall, F - - -def pyrouge_score(hyps, refer, remap = True): - from pyrouge import Rouge155 - nowTime=datetime.datetime.now().strftime('%Y%m%d_%H%M%S') - PYROUGE_ROOT = os.path.join('/remote-home/dqwang/', nowTime) - SYSTEM_PATH = os.path.join(PYROUGE_ROOT,'gold') - MODEL_PATH = 
os.path.join(PYROUGE_ROOT,'system') - if os.path.exists(SYSTEM_PATH): - shutil.rmtree(SYSTEM_PATH) - os.makedirs(SYSTEM_PATH) - if os.path.exists(MODEL_PATH): - shutil.rmtree(MODEL_PATH) - os.makedirs(MODEL_PATH) - - if remap == True: - refer = clean(refer) - hyps = clean(hyps) - - system_file = os.path.join(SYSTEM_PATH, 'Reference.0.txt') - model_file = os.path.join(MODEL_PATH, 'Model.A.0.txt') - with open(system_file, 'wb') as f: - f.write(refer.encode('utf-8')) - with open(model_file, 'wb') as f: - f.write(hyps.encode('utf-8')) - - r = Rouge155('/home/dqwang/ROUGE/RELEASE-1.5.5') - - r.system_dir = SYSTEM_PATH - r.model_dir = MODEL_PATH - r.system_filename_pattern = 'Reference.(\d+).txt' - r.model_filename_pattern = 'Model.[A-Z].#ID#.txt' - - output = r.convert_and_evaluate(rouge_args="-e /home/dqwang/ROUGE/RELEASE-1.5.5/data -a -m -n 2 -d") - output_dict = r.output_to_dict(output) - - shutil.rmtree(PYROUGE_ROOT) - - scores = {} - scores['rouge-1'], scores['rouge-2'], scores['rouge-l'] = {}, {}, {} - scores['rouge-1']['p'], scores['rouge-1']['r'], scores['rouge-1']['f'] = output_dict['rouge_1_precision'], output_dict['rouge_1_recall'], output_dict['rouge_1_f_score'] - scores['rouge-2']['p'], scores['rouge-2']['r'], scores['rouge-2']['f'] = output_dict['rouge_2_precision'], output_dict['rouge_2_recall'], output_dict['rouge_2_f_score'] - scores['rouge-l']['p'], scores['rouge-l']['r'], scores['rouge-l']['f'] = output_dict['rouge_l_precision'], output_dict['rouge_l_recall'], output_dict['rouge_l_f_score'] - return scores - -def pyrouge_score_all(hyps_list, refer_list, remap = True): - from pyrouge import Rouge155 - nowTime=datetime.datetime.now().strftime('%Y%m%d_%H%M%S') - PYROUGE_ROOT = os.path.join('/remote-home/dqwang/', nowTime) - SYSTEM_PATH = os.path.join(PYROUGE_ROOT,'gold') - MODEL_PATH = os.path.join(PYROUGE_ROOT,'system') - if os.path.exists(SYSTEM_PATH): - shutil.rmtree(SYSTEM_PATH) - os.makedirs(SYSTEM_PATH) - if os.path.exists(MODEL_PATH): - shutil.rmtree(MODEL_PATH) - os.makedirs(MODEL_PATH) - - assert len(hyps_list) == len(refer_list) - for i in range(len(hyps_list)): - system_file = os.path.join(SYSTEM_PATH, 'Reference.%d.txt' % i) - model_file = os.path.join(MODEL_PATH, 'Model.A.%d.txt' % i) - - refer = clean(refer_list[i]) if remap else refer_list[i] - hyps = clean(hyps_list[i]) if remap else hyps_list[i] - - with open(system_file, 'wb') as f: - f.write(refer.encode('utf-8')) - with open(model_file, 'wb') as f: - f.write(hyps.encode('utf-8')) - - r = Rouge155('/remote-home/dqwang/ROUGE/RELEASE-1.5.5') - - r.system_dir = SYSTEM_PATH - r.model_dir = MODEL_PATH - r.system_filename_pattern = 'Reference.(\d+).txt' - r.model_filename_pattern = 'Model.[A-Z].#ID#.txt' - - output = r.convert_and_evaluate(rouge_args="-e /remote-home/dqwang/ROUGE/RELEASE-1.5.5/data -a -m -n 2 -d") - output_dict = r.output_to_dict(output) - - shutil.rmtree(PYROUGE_ROOT) - - scores = {} - scores['rouge-1'], scores['rouge-2'], scores['rouge-l'] = {}, {}, {} - scores['rouge-1']['p'], scores['rouge-1']['r'], scores['rouge-1']['f'] = output_dict['rouge_1_precision'], output_dict['rouge_1_recall'], output_dict['rouge_1_f_score'] - scores['rouge-2']['p'], scores['rouge-2']['r'], scores['rouge-2']['f'] = output_dict['rouge_2_precision'], output_dict['rouge_2_recall'], output_dict['rouge_2_f_score'] - scores['rouge-l']['p'], scores['rouge-l']['r'], scores['rouge-l']['f'] = output_dict['rouge_l_precision'], output_dict['rouge_l_recall'], output_dict['rouge_l_f_score'] - return scores - - -def 
pyrouge_score_all_multi(hyps_list, refer_list, remap = True): - from pyrouge import Rouge155 - nowTime = datetime.datetime.now().strftime('%Y%m%d_%H%M%S') - PYROUGE_ROOT = os.path.join('/remote-home/dqwang/', nowTime) - SYSTEM_PATH = os.path.join(PYROUGE_ROOT, 'system') - MODEL_PATH = os.path.join(PYROUGE_ROOT, 'gold') - if os.path.exists(SYSTEM_PATH): - shutil.rmtree(SYSTEM_PATH) - os.makedirs(SYSTEM_PATH) - if os.path.exists(MODEL_PATH): - shutil.rmtree(MODEL_PATH) - os.makedirs(MODEL_PATH) - - assert len(hyps_list) == len(refer_list) - for i in range(len(hyps_list)): - system_file = os.path.join(SYSTEM_PATH, 'Model.%d.txt' % i) - # model_file = os.path.join(MODEL_PATH, 'Reference.A.%d.txt' % i) - - hyps = clean(hyps_list[i]) if remap else hyps_list[i] - - with open(system_file, 'wb') as f: - f.write(hyps.encode('utf-8')) - - referType = ["A", "B", "C", "D", "E", "F", "G"] - for j in range(len(refer_list[i])): - model_file = os.path.join(MODEL_PATH, "Reference.%s.%d.txt" % (referType[j], i)) - refer = clean(refer_list[i][j]) if remap else refer_list[i][j] - with open(model_file, 'wb') as f: - f.write(refer.encode('utf-8')) - - r = Rouge155('/remote-home/dqwang/ROUGE/RELEASE-1.5.5') - - r.system_dir = SYSTEM_PATH - r.model_dir = MODEL_PATH - r.system_filename_pattern = 'Model.(\d+).txt' - r.model_filename_pattern = 'Reference.[A-Z].#ID#.txt' - - output = r.convert_and_evaluate(rouge_args="-e /remote-home/dqwang/ROUGE/RELEASE-1.5.5/data -a -m -n 2 -d") - output_dict = r.output_to_dict(output) - - shutil.rmtree(PYROUGE_ROOT) - - scores = {} - scores['rouge-1'], scores['rouge-2'], scores['rouge-l'] = {}, {}, {} - scores['rouge-1']['p'], scores['rouge-1']['r'], scores['rouge-1']['f'] = output_dict['rouge_1_precision'], output_dict['rouge_1_recall'], output_dict['rouge_1_f_score'] - scores['rouge-2']['p'], scores['rouge-2']['r'], scores['rouge-2']['f'] = output_dict['rouge_2_precision'], output_dict['rouge_2_recall'], output_dict['rouge_2_f_score'] - scores['rouge-l']['p'], scores['rouge-l']['r'], scores['rouge-l']['f'] = output_dict['rouge_l_precision'], output_dict['rouge_l_recall'], output_dict['rouge_l_f_score'] - return scores - -def cal_label(article, abstract): - hyps_list = article - - refer = abstract - scores = [] - for hyps in hyps_list: - mean_score = rouge_eval(hyps, refer) - scores.append(mean_score) - - selected = [] - selected.append(int(np.argmax(scores))) - selected_sent_cnt = 1 - - best_rouge = np.max(scores) - while selected_sent_cnt < len(hyps_list): - cur_max_rouge = 0.0 - cur_max_idx = -1 - for i in range(len(hyps_list)): - if i not in selected: - temp = copy.deepcopy(selected) - temp.append(i) - hyps = "\n".join([hyps_list[idx] for idx in np.sort(temp)]) - cur_rouge = rouge_eval(hyps, refer) - if cur_rouge > cur_max_rouge: - cur_max_rouge = cur_rouge - cur_max_idx = i - if cur_max_rouge != 0.0 and cur_max_rouge >= best_rouge: - selected.append(cur_max_idx) - selected_sent_cnt += 1 - best_rouge = cur_max_rouge - else: - break - - # label = np.zeros(len(hyps_list), dtype=int) - # label[np.array(selected)] = 1 - # return list(label) - return selected - -def cal_label_limited3(article, abstract): - hyps_list = article - - refer = abstract - scores = [] - for hyps in hyps_list: - try: - mean_score = rouge_eval(hyps, refer) - scores.append(mean_score) - except ValueError: - scores.append(0.0) - - selected = [] - selected.append(np.argmax(scores)) - selected_sent_cnt = 1 - - best_rouge = np.max(scores) - while selected_sent_cnt < len(hyps_list) and selected_sent_cnt < 3: - 
cur_max_rouge = 0.0 - cur_max_idx = -1 - for i in range(len(hyps_list)): - if i not in selected: - temp = copy.deepcopy(selected) - temp.append(i) - hyps = "\n".join([hyps_list[idx] for idx in np.sort(temp)]) - cur_rouge = rouge_eval(hyps, refer) - if cur_rouge > cur_max_rouge: - cur_max_rouge = cur_rouge - cur_max_idx = i - selected.append(cur_max_idx) - selected_sent_cnt += 1 - best_rouge = cur_max_rouge - - # logger.info(selected) - # label = np.zeros(len(hyps_list), dtype=int) - # label[np.array(selected)] = 1 - # return list(label) - return selected - -import torch -def flip(x, dim): - xsize = x.size() - dim = x.dim() + dim if dim < 0 else dim - x = x.contiguous() - x = x.view(-1, *xsize[dim:]).contiguous() - x = x.view(x.size(0), x.size(1), -1)[:, getattr(torch.arange(x.size(1)-1, - -1, -1), ('cpu','cuda')[x.is_cuda])().long(), :] - return x.view(xsize) - -def get_attn_key_pad_mask(seq_k, seq_q): - ''' For masking out the padding part of key sequence. ''' - - # Expand to fit the shape of key query attention matrix. - len_q = seq_q.size(1) - padding_mask = seq_k.eq(0.0) - padding_mask = padding_mask.unsqueeze(1).expand(-1, len_q, -1) # b x lq x lk - - return padding_mask - -def get_non_pad_mask(seq): - - assert seq.dim() == 2 - - return seq.ne(0.0).type(torch.float).unsqueeze(-1) diff --git a/reproduction/Summarization/Baseline/train.py b/reproduction/Summarization/Baseline/train.py deleted file mode 100644 index fa45a6fc..00000000 --- a/reproduction/Summarization/Baseline/train.py +++ /dev/null @@ -1,269 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- - -# __author__="Danqing Wang" - -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
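Before the training scripts, a quick sanity check of the two mask helpers defined at the end of tools/utils.py above (a sketch assuming tools.utils is importable; the toy tensor values are made up):

    import torch
    from tools.utils import get_attn_key_pad_mask, get_non_pad_mask

    seq = torch.tensor([[5, 3, 0, 0]])            # batch of one sequence; id 0 marks padding
    attn_mask = get_attn_key_pad_mask(seq, seq)   # [1, 4, 4]; True where the key position is padding
    pad_mask = get_non_pad_mask(seq)              # [1, 4, 1]; 1.0 at real tokens, 0.0 at padding
    print(attn_mask[0, 0])                        # tensor([False, False,  True,  True])
    print(pad_mask[0, :, 0])                      # tensor([1., 1., 0., 0.])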
-# ============================================================================== - -"""Train Model1: baseline model""" -import os -import sys -import json -import shutil -import argparse -import datetime - -import torch -import torch.nn - -os.environ['FASTNLP_BASE_URL'] = 'http://10.141.222.118:8888/file/download/' -os.environ['FASTNLP_CACHE_DIR'] = '/remote-home/hyan01/fastnlp_caches' -sys.path.append('/remote-home/dqwang/FastNLP/fastNLP_brxx/') - - -from fastNLP.core._logger import logger -# from fastNLP.core._logger import _init_logger -from fastNLP.core.const import Const -from fastNLP.core.trainer import Trainer, Tester -from fastNLP.io.pipe.summarization import ExtCNNDMPipe -from fastNLP.io.model_io import ModelLoader, ModelSaver -from fastNLP.io.embed_loader import EmbedLoader - -# from tools.logger import * -# from model.TransformerModel import TransformerModel -from model.TForiginal import TransformerModel -from model.LSTMModel import SummarizationModel -from model.Metric import LossMetric, LabelFMetric, FastRougeMetric, PyRougeMetric -from model.Loss import MyCrossEntropyLoss -from tools.Callback import TrainCallback - - - - -def setup_training(model, train_loader, valid_loader, hps): - """Does setup before starting training (run_training)""" - - train_dir = os.path.join(hps.save_root, "train") - if not os.path.exists(train_dir): os.makedirs(train_dir) - - if hps.restore_model != 'None': - logger.info("[INFO] Restoring %s for training...", hps.restore_model) - bestmodel_file = os.path.join(train_dir, hps.restore_model) - loader = ModelLoader() - loader.load_pytorch(model, bestmodel_file) - else: - logger.info("[INFO] Create new model for training...") - - run_training(model, train_loader, valid_loader, hps) # this is an infinite loop until interrupted - -def run_training(model, train_loader, valid_loader, hps): - logger.info("[INFO] Starting run_training") - - train_dir = os.path.join(hps.save_root, "train") - if os.path.exists(train_dir): shutil.rmtree(train_dir) - os.makedirs(train_dir) - eval_dir = os.path.join(hps.save_root, "eval") # make a subdir of the root dir for eval data - if not os.path.exists(eval_dir): os.makedirs(eval_dir) - - optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()), lr=hps.lr) - criterion = MyCrossEntropyLoss(pred = "p_sent", target=Const.TARGET, mask=Const.INPUT_LEN, reduce='none') - - trainer = Trainer(model=model, train_data=train_loader, optimizer=optimizer, loss=criterion, - n_epochs=hps.n_epochs, print_every=100, dev_data=valid_loader, metrics=[LossMetric(pred = "p_sent", target=Const.TARGET, mask=Const.INPUT_LEN, reduce='none'), LabelFMetric(pred="prediction"), FastRougeMetric(hps, pred="prediction")], - metric_key="loss", validate_every=-1, save_path=eval_dir, - callbacks=[TrainCallback(hps, patience=5)], use_tqdm=False) - - train_info = trainer.train(load_best_model=True) - logger.info(' | end of Train | time: {:5.2f}s | '.format(train_info["seconds"])) - logger.info('[INFO] best eval model in epoch %d and iter %d', train_info["best_epoch"], train_info["best_step"]) - logger.info(train_info["best_eval"]) - - bestmodel_save_path = os.path.join(eval_dir, 'bestmodel.pkl') # this is where checkpoints of best models are saved - saver = ModelSaver(bestmodel_save_path) - saver.save_pytorch(model) - logger.info('[INFO] Saving eval best model to %s', bestmodel_save_path) - - -def run_test(model, loader, hps): - test_dir = os.path.join(hps.save_root, "test") # make a subdir of the root dir for eval data - eval_dir = 
os.path.join(hps.save_root, "eval") - if not os.path.exists(test_dir) : os.makedirs(test_dir) - if not os.path.exists(eval_dir) : - logger.exception("[Error] eval_dir %s doesn't exist. Run in train mode to create it.", eval_dir) - raise Exception("[Error] eval_dir %s doesn't exist. Run in train mode to create it." % (eval_dir)) - - if hps.test_model == "evalbestmodel": - bestmodel_load_path = os.path.join(eval_dir, 'bestmodel.pkl') # this is where checkpoints of best models are saved - elif hps.test_model == "earlystop": - train_dir = os.path.join(hps.save_root, "train") - bestmodel_load_path = os.path.join(train_dir, 'earlystop.pkl') - else: - logger.error("None of such model! Must be one of evalbestmodel/earlystop") - raise ValueError("None of such model! Must be one of evalbestmodel/earlystop") - logger.info("[INFO] Restoring %s for testing...The path is %s", hps.test_model, bestmodel_load_path) - - modelloader = ModelLoader() - modelloader.load_pytorch(model, bestmodel_load_path) - - if hps.use_pyrouge: - logger.info("[INFO] Use PyRougeMetric for testing") - tester = Tester(data=loader, model=model, - metrics=[LabelFMetric(pred="prediction"), PyRougeMetric(hps, pred="prediction")], - batch_size=hps.batch_size) - else: - logger.info("[INFO] Use FastRougeMetric for testing") - tester = Tester(data=loader, model=model, - metrics=[LabelFMetric(pred="prediction"), FastRougeMetric(hps, pred="prediction")], - batch_size=hps.batch_size) - test_info = tester.test() - logger.info(test_info) - -def main(): - parser = argparse.ArgumentParser(description='Summarization Model') - - # Where to find data - parser.add_argument('--data_path', type=str, default='/remote-home/dqwang/Datasets/CNNDM/train.label.jsonl', help='Path expression to pickle datafiles.') - parser.add_argument('--valid_path', type=str, default='/remote-home/dqwang/Datasets/CNNDM/val.label.jsonl', help='Path expression to pickle valid datafiles.') - parser.add_argument('--vocab_path', type=str, default='/remote-home/dqwang/Datasets/CNNDM/vocab', help='Path expression to text vocabulary file.') - - # Important settings - parser.add_argument('--mode', choices=['train', 'test'], default='train', help='must be one of train/test') - parser.add_argument('--embedding', type=str, default='glove', choices=['word2vec', 'glove', 'elmo', 'bert'], help='must be one of word2vec/glove/elmo/bert') - parser.add_argument('--sentence_encoder', type=str, default='transformer', choices=['bilstm', 'deeplstm', 'transformer'], help='must be one of LSTM/Transformer') - parser.add_argument('--sentence_decoder', type=str, default='SeqLab', choices=['PN', 'SeqLab'], help='must be one of PN/SeqLab') - parser.add_argument('--restore_model', type=str , default='None', help='Restore model for further training. [bestmodel/bestFmodel/earlystop/None]') - - # Where to save output - parser.add_argument('--save_root', type=str, default='save/', help='Root directory for all model.') - parser.add_argument('--log_root', type=str, default='log/', help='Root directory for all logging.') - - # Hyperparameters - parser.add_argument('--gpu', type=str, default='0', help='GPU ID to use. For cpu, set -1 [default: -1]') - parser.add_argument('--cuda', action='store_true', default=False, help='use cuda') - parser.add_argument('--vocab_size', type=int, default=100000, help='Size of vocabulary. These will be read from the vocabulary file in order. 
If the vocabulary file contains fewer words than this number, or if this number is set to 0, will take all words in the vocabulary file.') - parser.add_argument('--n_epochs', type=int, default=20, help='Number of epochs [default: 20]') - parser.add_argument('--batch_size', type=int, default=32, help='Mini batch size [default: 128]') - - parser.add_argument('--word_embedding', action='store_true', default=True, help='whether to use Word embedding') - parser.add_argument('--embedding_path', type=str, default='/remote-home/dqwang/Glove/glove.42B.300d.txt', help='Path expression to external word embedding.') - parser.add_argument('--word_emb_dim', type=int, default=300, help='Word embedding size [default: 200]') - parser.add_argument('--embed_train', action='store_true', default=False, help='whether to train Word embedding [default: False]') - parser.add_argument('--min_kernel_size', type=int, default=1, help='kernel min length for CNN [default:1]') - parser.add_argument('--max_kernel_size', type=int, default=7, help='kernel max length for CNN [default:7]') - parser.add_argument('--output_channel', type=int, default=50, help='output channel: repeated times for one kernel') - parser.add_argument('--use_orthnormal_init', action='store_true', default=True, help='use orthnormal init for lstm [default: true]') - parser.add_argument('--sent_max_len', type=int, default=100, help='max length of sentences (max source text sentence tokens)') - parser.add_argument('--doc_max_timesteps', type=int, default=50, help='max length of documents (max timesteps of documents)') - parser.add_argument('--save_label', action='store_true', default=False, help='require multihead attention') - - # Training - parser.add_argument('--lr', type=float, default=0.0001, help='learning rate') - parser.add_argument('--lr_descent', action='store_true', default=False, help='learning rate descent') - parser.add_argument('--grad_clip', action='store_true', default=False, help='for gradient clipping') - parser.add_argument('--max_grad_norm', type=float, default=10, help='for gradient clipping max gradient normalization') - - # test - parser.add_argument('-m', type=int, default=3, help='decode summary length') - parser.add_argument('--test_model', type=str, default='evalbestmodel', help='choose different model to test [evalbestmodel/evalbestFmodel/trainbestmodel/trainbestFmodel/earlystop]') - parser.add_argument('--use_pyrouge', action='store_true', default=False, help='use_pyrouge') - - args = parser.parse_args() - - os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu - torch.set_printoptions(threshold=50000) - - # File paths - DATA_FILE = args.data_path - VALID_FILE = args.valid_path - VOCAL_FILE = args.vocab_path - LOG_PATH = args.log_root - - # # train_log setting - if not os.path.exists(LOG_PATH): - if args.mode == "train": - os.makedirs(LOG_PATH) - else: - raise Exception("[Error] Logdir %s doesn't exist. Run in train mode to create it." 
% (LOG_PATH)) - nowTime=datetime.datetime.now().strftime('%Y%m%d_%H%M%S') - log_path = os.path.join(LOG_PATH, args.mode + "_" + nowTime) - # logger = _init_logger(path=log_path) - # file_handler = logging.FileHandler(log_path) - # file_handler.setFormatter(formatter) - # logger.addHandler(file_handler) - - logger.info("Pytorch %s", torch.__version__) - - # dataset - hps = args - dbPipe = ExtCNNDMPipe(vocab_size=hps.vocab_size, - vocab_path=VOCAL_FILE, - sent_max_len=hps.sent_max_len, - doc_max_timesteps=hps.doc_max_timesteps) - if hps.mode == 'test': - hps.recurrent_dropout_prob = 0.0 - hps.atten_dropout_prob = 0.0 - hps.ffn_dropout_prob = 0.0 - logger.info(hps) - paths = {"test": DATA_FILE} - db = dbPipe.process_from_file(paths) - else: - paths = {"train": DATA_FILE, "valid": VALID_FILE} - db = dbPipe.process_from_file(paths) - - - # embedding - if args.embedding == "glove": - vocab = db.get_vocab("vocab") - embed = torch.nn.Embedding(len(vocab), hps.word_emb_dim) - if hps.word_embedding: - embed_loader = EmbedLoader() - pretrained_weight = embed_loader.load_with_vocab(hps.embedding_path, vocab) # words not found in the file keep their random init - embed.weight.data.copy_(torch.from_numpy(pretrained_weight)) - embed.weight.requires_grad = hps.embed_train - else: - logger.error("[ERROR] embedding To Be Continued!") - sys.exit(1) - - # model - if args.sentence_encoder == "transformer" and args.sentence_decoder == "SeqLab": - model_param = json.load(open("config/transformer.config", "rb")) - hps.__dict__.update(model_param) - model = TransformerModel(hps, embed) - elif args.sentence_encoder == "deeplstm" and args.sentence_decoder == "SeqLab": - model_param = json.load(open("config/deeplstm.config", "rb")) - hps.__dict__.update(model_param) - model = SummarizationModel(hps, embed) - else: - logger.error("[ERROR] Model To Be Continued!") - sys.exit(1) - if hps.cuda: - model = model.cuda() - logger.info("[INFO] Use cuda") - - logger.info(hps) - - if hps.mode == 'train': - db.get_dataset("valid").set_target("text", "summary") - setup_training(model, db.get_dataset("train"), db.get_dataset("valid"), hps) - elif hps.mode == 'test': - logger.info("[INFO] Decoding...") - db.get_dataset("test").set_target("text", "summary") - run_test(model, db.get_dataset("test"), hps) - else: - logger.error("The 'mode' flag must be one of train/test") - raise ValueError("The 'mode' flag must be one of train/test") - -if __name__ == '__main__': - main() diff --git a/reproduction/Summarization/Baseline/train_origin.py b/reproduction/Summarization/Baseline/train_origin.py deleted file mode 100644 index e1248025..00000000 --- a/reproduction/Summarization/Baseline/train_origin.py +++ /dev/null @@ -1,706 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- - -# __author__="Danqing Wang" - -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
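For orientation before the older train_origin.py below: the data loading that train.py's main() above performs reduces to the following (a minimal sketch; the jsonl and vocab paths are placeholders, and the values mirror the argparse defaults above):

    from fastNLP.io.pipe.summarization import ExtCNNDMPipe

    pipe = ExtCNNDMPipe(vocab_size=100000,
                        vocab_path="path/to/vocab",          # placeholder
                        sent_max_len=100,
                        doc_max_timesteps=50)
    db = pipe.process_from_file({"train": "path/to/train.label.jsonl",   # placeholders
                                 "valid": "path/to/val.label.jsonl"})
    train_set, vocab = db.get_dataset("train"), db.get_vocab("vocab")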
-# ============================================================================== - -"""Train Model1: baseline model""" - -import os -import sys -import time -import copy -import pickle -import datetime -import argparse -import logging - -import numpy as np - - -import torch -import torch.nn as nn -from torch.autograd import Variable - -from rouge import Rouge - -sys.path.append('/remote-home/dqwang/FastNLP/fastNLP/') - -from fastNLP.core.batch import DataSetIter -from fastNLP.core.const import Const -from fastNLP.io.model_io import ModelLoader, ModelSaver -from fastNLP.core.sampler import BucketSampler - -from tools import utils -from tools.logger import * -from data.dataloader import SummarizationLoader -from model.TForiginal import TransformerModel - -def setup_training(model, train_loader, valid_loader, hps): - """Does setup before starting training (run_training)""" - - train_dir = os.path.join(hps.save_root, "train") - if not os.path.exists(train_dir): os.makedirs(train_dir) - - if hps.restore_model != 'None': - logger.info("[INFO] Restoring %s for training...", hps.restore_model) - bestmodel_file = os.path.join(train_dir, hps.restore_model) - loader = ModelLoader() - loader.load_pytorch(model, bestmodel_file) - else: - logger.info("[INFO] Create new model for training...") - - try: - run_training(model, train_loader, valid_loader, hps) # this is an infinite loop until interrupted - except KeyboardInterrupt: - logger.error("[Error] Caught keyboard interrupt on worker. Stopping supervisor...") - save_file = os.path.join(train_dir, "earlystop.pkl") - saver = ModelSaver(save_file) - saver.save_pytorch(model) - logger.info('[INFO] Saving early stop model to %s', save_file) - -def run_training(model, train_loader, valid_loader, hps): - """Repeatedly runs training iterations, logging loss to screen and writing summaries""" - logger.info("[INFO] Starting run_training") - - train_dir = os.path.join(hps.save_root, "train") - if not os.path.exists(train_dir): os.makedirs(train_dir) - - lr = hps.lr - # optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()), lr=lr, betas=(0.9, 0.98), - # eps=1e-09) - optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()), lr=lr) - criterion = torch.nn.CrossEntropyLoss(reduction='none') - - best_train_loss = None - best_train_F= None - best_loss = None - best_F = None - step_num = 0 - non_descent_cnt = 0 - for epoch in range(1, hps.n_epochs + 1): - epoch_loss = 0.0 - train_loss = 0.0 - total_example_num = 0 - match, pred, true, match_true = 0.0, 0.0, 0.0, 0.0 - epoch_start_time = time.time() - for i, (batch_x, batch_y) in enumerate(train_loader): - # if i > 10: - # break - model.train() - - iter_start_time=time.time() - - input, input_len = batch_x[Const.INPUT], batch_x[Const.INPUT_LEN] - label = batch_y[Const.TARGET] - - # logger.info(batch_x["text"][0]) - # logger.info(input[0,:,:]) - # logger.info(input_len[0:5,:]) - # logger.info(batch_y["summary"][0:5]) - # logger.info(label[0:5,:]) - - # logger.info((len(batch_x["text"][0]), sum(input[0].sum(-1) != 0))) - - batch_size, N, seq_len = input.size() - - if hps.cuda: - input = input.cuda() # [batch, N, seq_len] - label = label.cuda() - input_len = input_len.cuda() - - input = Variable(input) - label = Variable(label) - input_len = Variable(input_len) - - model_outputs = model.forward(input, input_len) # [batch, N, 2] - - outputs = model_outputs["p_sent"].view(-1, 2) - - label = label.view(-1) - - loss = criterion(outputs, label) # [batch_size, 
doc_max_timesteps] - # input_len = input_len.float().view(-1) - loss = loss.view(batch_size, -1) - loss = loss.masked_fill(input_len.eq(0), 0) - loss = loss.sum(1).mean() - logger.debug("loss %f", loss) - - if not np.isfinite(loss.item()): - logger.error("train Loss is not finite. Stopping.") - logger.info(loss) - for name, param in model.named_parameters(): - if param.requires_grad: - logger.info(name) - logger.info(param.grad.data.sum()) - raise Exception("train Loss is not finite. Stopping.") - - optimizer.zero_grad() - loss.backward() - if hps.grad_clip: - torch.nn.utils.clip_grad_norm_(model.parameters(), hps.max_grad_norm) - - optimizer.step() - step_num += 1 - - train_loss += float(loss.data) - epoch_loss += float(loss.data) - - if i % 100 == 0: - # start debugger - # import pdb; pdb.set_trace() - for name, param in model.named_parameters(): - if param.requires_grad: - logger.debug(name) - logger.debug(param.grad.data.sum()) - logger.info(' | end of iter {:3d} | time: {:5.2f}s | train loss {:5.4f} | ' - .format(i, (time.time() - iter_start_time), - float(train_loss / 100))) - train_loss = 0.0 - - # calculate precision, recall and F - prediction = outputs.max(1)[1] - prediction = prediction.data - label = label.data - pred += prediction.sum() - true += label.sum() - match_true += ((prediction == label) & (prediction == 1)).sum() - match += (prediction == label).sum() - total_example_num += int(batch_size * N) - - if hps.lr_descent: - # new_lr = pow(hps.hidden_size, -0.5) * min(pow(step_num, -0.5), - # step_num * pow(hps.warmup_steps, -1.5)) - new_lr = max(5e-6, lr / (epoch + 1)) - for param_group in list(optimizer.param_groups): - param_group['lr'] = new_lr - logger.info("[INFO] The learning rate now is %f", new_lr) - - epoch_avg_loss = epoch_loss / len(train_loader) - logger.info(' | end of epoch {:3d} | time: {:5.2f}s | epoch train loss {:5.4f} | ' - .format(epoch, (time.time() - epoch_start_time), - float(epoch_avg_loss))) - - logger.info("[INFO] Trainset match_true %d, pred %d, true %d, total %d, match %d", match_true, pred, true, total_example_num, match) - accu, precision, recall, F = utils.eval_label(match_true, pred, true, total_example_num, match) - logger.info("[INFO] The size of totalset is %d, accu is %f, precision is %f, recall is %f, F is %f", total_example_num / hps.doc_max_timesteps, accu, precision, recall, F) - - if not best_train_loss or epoch_avg_loss < best_train_loss: - save_file = os.path.join(train_dir, "bestmodel.pkl") - logger.info('[INFO] Found new best model with %.3f running_train_loss. Saving to %s', float(epoch_avg_loss), save_file) - saver = ModelSaver(save_file) - saver.save_pytorch(model) - best_train_loss = epoch_avg_loss - elif epoch_avg_loss > best_train_loss: - logger.error("[Error] training loss did not decrease. Stopping supervisor...") - save_file = os.path.join(train_dir, "earlystop.pkl") - saver = ModelSaver(save_file) - saver.save_pytorch(model) - logger.info('[INFO] Saving early stop model to %s', save_file) - return - - if not best_train_F or F > best_train_F: - save_file = os.path.join(train_dir, "bestFmodel.pkl") - logger.info('[INFO] Found new best model with %.3f F score. Saving to %s', float(F), save_file) - saver = ModelSaver(save_file) - saver.save_pytorch(model) - best_train_F = F - - best_loss, best_F, non_descent_cnt = run_eval(model, valid_loader, hps, best_loss, best_F, non_descent_cnt) - - if non_descent_cnt >= 3: - logger.error("[Error] val loss has not decreased three times in a row. 
Stopping supervisor...") - save_file = os.path.join(train_dir, "earlystop") - saver = ModelSaver(save_file) - saver.save_pytorch(model) - logger.info('[INFO] Saving early stop model to %s', save_file) - return - -def run_eval(model, loader, hps, best_loss, best_F, non_descent_cnt): - """Repeatedly runs eval iterations, logging to screen and writing summaries. Saves the model with the best loss seen so far.""" - logger.info("[INFO] Starting eval for this model ...") - eval_dir = os.path.join(hps.save_root, "eval") # make a subdir of the root dir for eval data - if not os.path.exists(eval_dir): os.makedirs(eval_dir) - - model.eval() - - running_loss = 0.0 - match, pred, true, match_true = 0.0, 0.0, 0.0, 0.0 - pairs = {} - pairs["hyps"] = [] - pairs["refer"] = [] - total_example_num = 0 - criterion = torch.nn.CrossEntropyLoss(reduction='none') - iter_start_time = time.time() - - with torch.no_grad(): - for i, (batch_x, batch_y) in enumerate(loader): - # if i > 10: - # break - - input, input_len = batch_x[Const.INPUT], batch_x[Const.INPUT_LEN] - label = batch_y[Const.TARGET] - - if hps.cuda: - input = input.cuda() # [batch, N, seq_len] - label = label.cuda() - input_len = input_len.cuda() - - batch_size, N, _ = input.size() - - input = Variable(input, requires_grad=False) - label = Variable(label) - input_len = Variable(input_len, requires_grad=False) - - model_outputs = model.forward(input, input_len) # [batch, N, 2] - outputs = model_outputs["p_sent"] - prediction = model_outputs["prediction"] - - outputs = outputs.view(-1, 2) # [batch * N, 2] - label = label.view(-1) # [batch * N] - loss = criterion(outputs, label) - loss = loss.view(batch_size, -1) - loss = loss.masked_fill(input_len.eq(0), 0) - loss = loss.sum(1).mean() - logger.debug("loss %f", loss) - running_loss += float(loss.data) - - label = label.data.view(batch_size, -1) - pred += prediction.sum() - true += label.sum() - match_true += ((prediction == label) & (prediction == 1)).sum() - match += (prediction == label).sum() - total_example_num += batch_size * N - - # rouge - prediction = prediction.view(batch_size, -1) - for j in range(batch_size): - original_article_sents = batch_x["text"][j] - sent_max_number = len(original_article_sents) - refer = "\n".join(batch_x["summary"][j]) - hyps = "\n".join(original_article_sents[id] for id in range(len(prediction[j])) if prediction[j][id]==1 and id < sent_max_number) - if sent_max_number < hps.m and len(hyps) <= 1: - logger.error("sent_max_number is too short %d, Skip!" , sent_max_number) - continue - - if len(hyps) >= 1 and hyps != '.': - # logger.debug(prediction[j]) - pairs["hyps"].append(hyps) - pairs["refer"].append(refer) - elif refer == "." or refer == "": - logger.error("Refer is None!") - logger.debug("label:") - logger.debug(label[j]) - logger.debug(refer) - elif hyps == "." 
or hyps == "": - logger.error("hyps is None!") - logger.debug("sent_max_number:%d", sent_max_number) - logger.debug("prediction:") - logger.debug(prediction[j]) - logger.debug(hyps) - else: - logger.error("Do not select any sentences!") - logger.debug("sent_max_number:%d", sent_max_number) - logger.debug(original_article_sents) - logger.debug("label:") - logger.debug(label[j]) - continue - - running_avg_loss = running_loss / len(loader) - - if hps.use_pyrouge: - logger.info("The number of pairs is %d", len(pairs["hyps"])) - logging.getLogger('global').setLevel(logging.WARNING) - if not len(pairs["hyps"]): - logger.error("During testing, no hyps is selected!") - return - if isinstance(pairs["refer"][0], list): - logger.info("Multi Reference summaries!") - scores_all = utils.pyrouge_score_all_multi(pairs["hyps"], pairs["refer"]) - else: - scores_all = utils.pyrouge_score_all(pairs["hyps"], pairs["refer"]) - else: - if len(pairs["hyps"]) == 0 or len(pairs["refer"]) == 0 : - logger.error("During testing, no hyps is selected!") - return - rouge = Rouge() - scores_all = rouge.get_scores(pairs["hyps"], pairs["refer"], avg=True) - # try: - # scores_all = rouge.get_scores(pairs["hyps"], pairs["refer"], avg=True) - # except ValueError as e: - # logger.error(repr(e)) - # scores_all = [] - # for idx in range(len(pairs["hyps"])): - # try: - # scores = rouge.get_scores(pairs["hyps"][idx], pairs["refer"][idx])[0] - # scores_all.append(scores) - # except ValueError as e: - # logger.error(repr(e)) - # logger.debug("HYPS:\t%s", pairs["hyps"][idx]) - # logger.debug("REFER:\t%s", pairs["refer"][idx]) - # finally: - # logger.error("During testing, some errors happen!") - # logger.error(len(scores_all)) - # exit(1) - - logger.info('[INFO] End of valid | time: {:5.2f}s | valid loss {:5.4f} | ' - .format((time.time() - iter_start_time), - float(running_avg_loss))) - - logger.info("[INFO] Validset match_true %d, pred %d, true %d, total %d, match %d", match_true, pred, true, total_example_num, match) - accu, precision, recall, F = utils.eval_label(match_true, pred, true, total_example_num, match) - logger.info("[INFO] The size of totalset is %d, accu is %f, precision is %f, recall is %f, F is %f", - total_example_num / hps.doc_max_timesteps, accu, precision, recall, F) - - res = "Rouge1:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores_all['rouge-1']['p'], scores_all['rouge-1']['r'], scores_all['rouge-1']['f']) \ - + "Rouge2:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores_all['rouge-2']['p'], scores_all['rouge-2']['r'], scores_all['rouge-2']['f']) \ - + "Rougel:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores_all['rouge-l']['p'], scores_all['rouge-l']['r'], scores_all['rouge-l']['f']) - logger.info(res) - - # If running_avg_loss is best so far, save this checkpoint (early stopping). - # These checkpoints will appear as bestmodel- in the eval dir - if best_loss is None or running_avg_loss < best_loss: - bestmodel_save_path = os.path.join(eval_dir, 'bestmodel.pkl') # this is where checkpoints of best models are saved - if best_loss is not None: - logger.info('[INFO] Found new best model with %.6f running_avg_loss. The original loss is %.6f, Saving to %s', float(running_avg_loss), float(best_loss), bestmodel_save_path) - else: - logger.info('[INFO] Found new best model with %.6f running_avg_loss. 
The original loss is None, Saving to %s', float(running_avg_loss), bestmodel_save_path) - saver = ModelSaver(bestmodel_save_path) - saver.save_pytorch(model) - best_loss = running_avg_loss - non_descent_cnt = 0 - else: - non_descent_cnt += 1 - - if best_F is None or best_F < F: - bestmodel_save_path = os.path.join(eval_dir, 'bestFmodel.pkl') # this is where checkpoints of best models are saved - if best_F is not None: - logger.info('[INFO] Found new best model with %.6f F. The original F is %.6f, Saving to %s', float(F), float(best_F), bestmodel_save_path) - else: - logger.info('[INFO] Found new best model with %.6f F. The original F is None, Saving to %s', float(F), bestmodel_save_path) - saver = ModelSaver(bestmodel_save_path) - saver.save_pytorch(model) - best_F = F - - return best_loss, best_F, non_descent_cnt - -def run_test(model, loader, hps, limited=False): - """Runs decoding on the test set, writes hypotheses and references to disk, and reports label and ROUGE scores.""" - test_dir = os.path.join(hps.save_root, "test") # make a subdir of the root dir for test data - eval_dir = os.path.join(hps.save_root, "eval") - if not os.path.exists(test_dir): os.makedirs(test_dir) - if not os.path.exists(eval_dir): - logger.exception("[Error] eval_dir %s doesn't exist. Run in train mode to create it.", eval_dir) - raise Exception("[Error] eval_dir %s doesn't exist. Run in train mode to create it." % (eval_dir)) - - if hps.test_model == "evalbestmodel": - bestmodel_load_path = os.path.join(eval_dir, 'bestmodel.pkl') # this is where checkpoints of best models are saved - elif hps.test_model == "evalbestFmodel": - bestmodel_load_path = os.path.join(eval_dir, 'bestFmodel.pkl') - elif hps.test_model == "trainbestmodel": - train_dir = os.path.join(hps.save_root, "train") - bestmodel_load_path = os.path.join(train_dir, 'bestmodel.pkl') - elif hps.test_model == "trainbestFmodel": - train_dir = os.path.join(hps.save_root, "train") - bestmodel_load_path = os.path.join(train_dir, 'bestFmodel.pkl') - elif hps.test_model == "earlystop": - train_dir = os.path.join(hps.save_root, "train") - bestmodel_load_path = os.path.join(train_dir, 'earlystop.pkl') - else: - logger.error("No such model! Must be one of evalbestmodel/trainbestmodel/earlystop") - raise ValueError("No such model! 
Must be one of evalbestmodel/trainbestmodel/earlystop") - logger.info("[INFO] Restoring %s for testing... The path is %s", hps.test_model, bestmodel_load_path) - - modelloader = ModelLoader() - modelloader.load_pytorch(model, bestmodel_load_path) - - import datetime - nowTime = datetime.datetime.now().strftime('%Y%m%d_%H%M%S') # timestamp used to name the result file - if hps.save_label: - log_dir = os.path.join(test_dir, hps.data_path.split("/")[-1]) - resfile = open(log_dir, "w") - else: - log_dir = os.path.join(test_dir, nowTime) - resfile = open(log_dir, "wb") - logger.info("[INFO] Write the Evaluation into %s", log_dir) - - model.eval() - - match, pred, true, match_true = 0.0, 0.0, 0.0, 0.0 - total_example_num = 0.0 - pairs = {} - pairs["hyps"] = [] - pairs["refer"] = [] - pred_list = [] - iter_start_time=time.time() - with torch.no_grad(): - for i, (batch_x, batch_y) in enumerate(loader): - - input, input_len = batch_x[Const.INPUT], batch_x[Const.INPUT_LEN] - label = batch_y[Const.TARGET] - - if hps.cuda: - input = input.cuda() # [batch, N, seq_len] - label = label.cuda() - input_len = input_len.cuda() - - batch_size, N, _ = input.size() - - input = Variable(input) - input_len = Variable(input_len, requires_grad=False) - - model_outputs = model.forward(input, input_len) # [batch, N, 2] - prediction = model_outputs["prediction"] - - if hps.save_label: - pred_list.extend(model_outputs["pred_idx"].data.cpu().view(-1).tolist()) - continue - - pred += prediction.sum() - true += label.sum() - match_true += ((prediction == label) & (prediction == 1)).sum() - match += (prediction == label).sum() - total_example_num += batch_size * N - - for j in range(batch_size): - original_article_sents = batch_x["text"][j] - sent_max_number = len(original_article_sents) - refer = "\n".join(batch_x["summary"][j]) - hyps = "\n".join(original_article_sents[id].replace("\n", "") for id in range(len(prediction[j])) if prediction[j][id]==1 and id < sent_max_number) - if limited: - k = len(refer.split()) - hyps = " ".join(hyps.split()[:k]) - logger.info((len(refer.split()), len(hyps.split()))) - resfile.write(b"Original_article:") - resfile.write("\n".join(batch_x["text"][j]).encode('utf-8')) - resfile.write(b"\n") - resfile.write(b"Reference:") - if isinstance(refer, list): - for ref in refer: - resfile.write(ref.encode('utf-8')) - resfile.write(b"\n") - resfile.write(b'*' * 40) - resfile.write(b"\n") - else: - resfile.write(refer.encode('utf-8')) - resfile.write(b"\n") - resfile.write(b"hypothesis:") - resfile.write(hyps.encode('utf-8')) - resfile.write(b"\n") - - if hps.use_pyrouge: - pairs["hyps"].append(hyps) - pairs["refer"].append(refer) - else: - try: - scores = utils.rouge_all(hyps, refer) - pairs["hyps"].append(hyps) - pairs["refer"].append(refer) - except ValueError: - logger.error("Do not select any sentences!") - logger.debug("sent_max_number:%d", sent_max_number) - logger.debug(original_article_sents) - logger.debug("label:") - logger.debug(label[j]) - continue - - # per-example result writer - res = "Rouge1:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores['rouge-1']['p'], scores['rouge-1']['r'], scores['rouge-1']['f']) \ - + "Rouge2:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores['rouge-2']['p'], scores['rouge-2']['r'], scores['rouge-2']['f']) \ - + "Rougel:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores['rouge-l']['p'], scores['rouge-l']['r'], scores['rouge-l']['f']) - - resfile.write(res.encode('utf-8')) - resfile.write(b'-' * 89) - resfile.write(b"\n") - - if hps.save_label: - import json - json.dump(pred_list, resfile) - logger.info(' | end of test | time: 
{:5.2f}s | '.format((time.time() - iter_start_time))) - return - - resfile.write(b"\n") - resfile.write(b'=' * 89) - resfile.write(b"\n") - - if hps.use_pyrouge: - logger.info("The number of pairs is %d", len(pairs["hyps"])) - if not len(pairs["hyps"]): - logger.error("During testing, no hyps is selected!") - return - if isinstance(pairs["refer"][0], list): - logger.info("Multi Reference summaries!") - scores_all = utils.pyrouge_score_all_multi(pairs["hyps"], pairs["refer"]) - else: - scores_all = utils.pyrouge_score_all(pairs["hyps"], pairs["refer"]) - else: - logger.info("The number of pairs is %d", len(pairs["hyps"])) - if not len(pairs["hyps"]): - logger.error("During testing, no hyps is selected!") - return - rouge = Rouge() - scores_all = rouge.get_scores(pairs["hyps"], pairs["refer"], avg=True) - - # the whole model res writer - resfile.write(b"The total testset is:") - res = "Rouge1:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores_all['rouge-1']['p'], scores_all['rouge-1']['r'], scores_all['rouge-1']['f']) \ - + "Rouge2:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores_all['rouge-2']['p'], scores_all['rouge-2']['r'], scores_all['rouge-2']['f']) \ - + "Rougel:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores_all['rouge-l']['p'], scores_all['rouge-l']['r'], scores_all['rouge-l']['f']) - resfile.write(res.encode("utf-8")) - logger.info(res) - logger.info(' | end of test | time: {:5.2f}s | ' - .format((time.time() - iter_start_time))) - - - - # label prediction - logger.info("match_true %d, pred %d, true %d, total %d, match %d", match, pred, true, total_example_num, match) - accu, precision, recall, F = utils.eval_label(match_true, pred, true, total_example_num, match) - res = "The size of totalset is %d, accu is %f, precision is %f, recall is %f, F is %f" % (total_example_num / hps.doc_max_timesteps, accu, precision, recall, F) - resfile.write(res.encode('utf-8')) - logger.info("The size of totalset is %d, accu is %f, precision is %f, recall is %f, F is %f", len(loader), accu, precision, recall, F) - - -def main(): - parser = argparse.ArgumentParser(description='Transformer Model') - - # Where to find data - parser.add_argument('--data_path', type=str, default='/remote-home/dqwang/Datasets/CNNDM/train.label.jsonl', help='Path expression to pickle datafiles.') - parser.add_argument('--valid_path', type=str, default='/remote-home/dqwang/Datasets/CNNDM/val.label.jsonl', help='Path expression to pickle valid datafiles.') - parser.add_argument('--vocab_path', type=str, default='/remote-home/dqwang/Datasets/CNNDM/vocab', help='Path expression to text vocabulary file.') - parser.add_argument('--embedding_path', type=str, default='/remote-home/dqwang/Glove/glove.42B.300d.txt', help='Path expression to external word embedding.') - - # Important settings - parser.add_argument('--mode', type=str, default='train', help='must be one of train/test') - parser.add_argument('--restore_model', type=str , default='None', help='Restore model for further training. 
[bestmodel/bestFmodel/earlystop/None]') - parser.add_argument('--test_model', type=str, default='evalbestmodel', help='choose different model to test [evalbestmodel/evalbestFmodel/trainbestmodel/trainbestFmodel/earlystop]') - parser.add_argument('--use_pyrouge', action='store_true', default=False, help='use_pyrouge') - - # Where to save output - parser.add_argument('--save_root', type=str, default='save/', help='Root directory for all model.') - parser.add_argument('--log_root', type=str, default='log/', help='Root directory for all logging.') - - # Hyperparameters - parser.add_argument('--gpu', type=str, default='0', help='GPU ID to use. For cpu, set -1 [default: -1]') - parser.add_argument('--cuda', action='store_true', default=False, help='use cuda') - parser.add_argument('--vocab_size', type=int, default=100000, help='Size of vocabulary. These will be read from the vocabulary file in order. If the vocabulary file contains fewer words than this number, or if this number is set to 0, will take all words in the vocabulary file.') - parser.add_argument('--n_epochs', type=int, default=20, help='Number of epochs [default: 20]') - parser.add_argument('--batch_size', type=int, default=32, help='Mini batch size [default: 128]') - - parser.add_argument('--word_embedding', action='store_true', default=True, help='whether to use Word embedding') - parser.add_argument('--word_emb_dim', type=int, default=300, help='Word embedding size [default: 200]') - parser.add_argument('--embed_train', action='store_true', default=False, help='whether to train Word embedding [default: False]') - parser.add_argument('--min_kernel_size', type=int, default=1, help='kernel min length for CNN [default:1]') - parser.add_argument('--max_kernel_size', type=int, default=7, help='kernel max length for CNN [default:7]') - parser.add_argument('--output_channel', type=int, default=50, help='output channel: repeated times for one kernel') - parser.add_argument('--n_layers', type=int, default=12, help='Number of deeplstm layers') - parser.add_argument('--hidden_size', type=int, default=512, help='hidden size [default: 512]') - parser.add_argument('--ffn_inner_hidden_size', type=int, default=2048, help='PositionwiseFeedForward inner hidden size [default: 2048]') - parser.add_argument('--n_head', type=int, default=8, help='multihead attention number [default: 8]') - parser.add_argument('--recurrent_dropout_prob', type=float, default=0.1, help='recurrent dropout prob [default: 0.1]') - parser.add_argument('--atten_dropout_prob', type=float, default=0.1,help='attention dropout prob [default: 0.1]') - parser.add_argument('--ffn_dropout_prob', type=float, default=0.1, help='PositionwiseFeedForward dropout prob [default: 0.1]') - parser.add_argument('--use_orthnormal_init', action='store_true', default=True, help='use orthnormal init for lstm [default: true]') - parser.add_argument('--sent_max_len', type=int, default=100, help='max length of sentences (max source text sentence tokens)') - parser.add_argument('--doc_max_timesteps', type=int, default=50, help='max length of documents (max timesteps of documents)') - parser.add_argument('--save_label', action='store_true', default=False, help='require multihead attention') - - # Training - parser.add_argument('--lr', type=float, default=0.0001, help='learning rate') - parser.add_argument('--lr_descent', action='store_true', default=False, help='learning rate descent') - parser.add_argument('--warmup_steps', type=int, default=4000, help='warmup_steps') - 
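
Aside: --warmup_steps is only referenced by a Noam-style schedule that is left commented out in run_training (the companion train_transformer.py below shows the same commented-out formula); the active --lr_descent path simply decays the rate per epoch. A minimal sketch of that commented-out schedule, with an illustrative function name and the flag defaults above:

def noam_lr(step_num, hidden_size=512, warmup_steps=4000):
    # Linear warmup for the first warmup_steps updates, then inverse-square-root
    # decay, scaled by hidden_size ** -0.5 as in "Attention Is All You Need".
    step_num = max(step_num, 1)  # guard against step 0 (sketch only)
    return hidden_size ** -0.5 * min(step_num ** -0.5, step_num * warmup_steps ** -1.5)
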
parser.add_argument('--grad_clip', action='store_true', default=False, help='for gradient clipping') - parser.add_argument('--max_grad_norm', type=float, default=1.0, help='for gradient clipping max gradient normalization') - - parser.add_argument('-m', type=int, default=3, help='decode summary length') - parser.add_argument('--limited', action='store_true', default=False, help='limited decode summary length') - - args = parser.parse_args() - - os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu - torch.set_printoptions(threshold=50000) - - hps = args - - # File paths - DATA_FILE = args.data_path - VALID_FILE = args.valid_path - VOCAL_FILE = args.vocab_path - LOG_PATH = args.log_root - - # train_log setting - if not os.path.exists(LOG_PATH): - if hps.mode == "train": - os.makedirs(LOG_PATH) - else: - logger.exception("[Error] Logdir %s doesn't exist. Run in train mode to create it.", LOG_PATH) - raise Exception("[Error] Logdir %s doesn't exist. Run in train mode to create it." % (LOG_PATH)) - nowTime=datetime.datetime.now().strftime('%Y%m%d_%H%M%S') - log_path = os.path.join(LOG_PATH, hps.mode + "_" + nowTime) - file_handler = logging.FileHandler(log_path) - file_handler.setFormatter(formatter) - logger.addHandler(file_handler) - - logger.info("Pytorch %s", torch.__version__) - logger.info(args) - logger.info(args) - - sum_loader = SummarizationLoader() - - - if hps.mode == 'test': - paths = {"test": DATA_FILE} - hps.recurrent_dropout_prob = 0.0 - hps.atten_dropout_prob = 0.0 - hps.ffn_dropout_prob = 0.0 - logger.info(hps) - else: - paths = {"train": DATA_FILE, "valid": VALID_FILE} - - dataInfo = sum_loader.process(paths=paths, vocab_size=hps.vocab_size, vocab_path=VOCAL_FILE, sent_max_len=hps.sent_max_len, doc_max_timesteps=hps.doc_max_timesteps, load_vocab=os.path.exists(VOCAL_FILE)) - - vocab = dataInfo.vocabs["vocab"] - model = TransformerModel(hps, vocab) - - if len(hps.gpu) > 1: - gpuid = hps.gpu.split(',') - gpuid = [int(s) for s in gpuid] - model = nn.DataParallel(model,device_ids=gpuid) - logger.info("[INFO] Use Multi-gpu: %s", hps.gpu) - if hps.cuda: - model = model.cuda() - logger.info("[INFO] Use cuda") - - if hps.mode == 'train': - trainset = dataInfo.datasets["train"] - train_sampler = BucketSampler(batch_size=hps.batch_size, seq_len_field_name=Const.INPUT) - train_batch = DataSetIter(dataset=trainset, batch_size=hps.batch_size, sampler=train_sampler) - validset = dataInfo.datasets["valid"] - validset.set_input("text", "summary") - valid_batch = DataSetIter(dataset=validset, batch_size=hps.batch_size) - setup_training(model, train_batch, valid_batch, hps) - elif hps.mode == 'test': - logger.info("[INFO] Decoding...") - testset = dataInfo.datasets["test"] - testset.set_input("text", "summary") - test_batch = DataSetIter(dataset=testset, batch_size=hps.batch_size) - run_test(model, test_batch, hps, limited=hps.limited) - else: - logger.error("The 'mode' flag must be one of train/eval/test") - raise ValueError("The 'mode' flag must be one of train/eval/test") - -if __name__ == '__main__': - main() diff --git a/reproduction/Summarization/Baseline/train_transformer.py b/reproduction/Summarization/Baseline/train_transformer.py deleted file mode 100644 index e838a803..00000000 --- a/reproduction/Summarization/Baseline/train_transformer.py +++ /dev/null @@ -1,705 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- - -# __author__="Danqing Wang" - -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -"""Train Model1: baseline model""" - -import os -import sys -import time -import copy -import pickle -import datetime -import argparse -import logging - -import numpy as np - - -import torch -import torch.nn as nn -from torch.autograd import Variable - -from rouge import Rouge - -sys.path.append('/remote-home/dqwang/FastNLP/fastNLP/') - -from fastNLP.core.batch import Batch -from fastNLP.core.const import Const -from fastNLP.io.model_io import ModelLoader, ModelSaver -from fastNLP.core.sampler import BucketSampler - -from tools import utils -from tools.logger import * -from data.dataloader import SummarizationLoader -from model.TransformerModel import TransformerModel - -def setup_training(model, train_loader, valid_loader, hps): - """Does setup before starting training (run_training)""" - - train_dir = os.path.join(hps.save_root, "train") - if not os.path.exists(train_dir): os.makedirs(train_dir) - - if hps.restore_model != 'None': - logger.info("[INFO] Restoring %s for training...", hps.restore_model) - bestmodel_file = os.path.join(train_dir, hps.restore_model) - loader = ModelLoader() - loader.load_pytorch(model, bestmodel_file) - else: - logger.info("[INFO] Create new model for training...") - - try: - run_training(model, train_loader, valid_loader, hps) # this is an infinite loop until interrupted - except KeyboardInterrupt: - logger.error("[Error] Caught keyboard interrupt on worker. 
Stopping supervisor...") - save_file = os.path.join(train_dir, "earlystop.pkl") - saver = ModelSaver(save_file) - saver.save_pytorch(model) - logger.info('[INFO] Saving early stop model to %s', save_file) - -def run_training(model, train_loader, valid_loader, hps): - """Repeatedly runs training iterations, logging loss to screen and writing summaries""" - logger.info("[INFO] Starting run_training") - - train_dir = os.path.join(hps.save_root, "train") - if not os.path.exists(train_dir): os.makedirs(train_dir) - - lr = hps.lr - # optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()), lr=lr, betas=(0.9, 0.98), - # eps=1e-09) - optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()), lr=lr) - criterion = torch.nn.CrossEntropyLoss(reduction='none') - - best_train_loss = None - best_train_F= None - best_loss = None - best_F = None - step_num = 0 - non_descent_cnt = 0 - for epoch in range(1, hps.n_epochs + 1): - epoch_loss = 0.0 - train_loss = 0.0 - total_example_num = 0 - match, pred, true, match_true = 0.0, 0.0, 0.0, 0.0 - epoch_start_time = time.time() - for i, (batch_x, batch_y) in enumerate(train_loader): - # if i > 10: - # break - model.train() - - iter_start_time=time.time() - - input, input_len = batch_x[Const.INPUT], batch_x[Const.INPUT_LEN] - label = batch_y[Const.TARGET] - - # logger.info(batch_x["text"][0]) - # logger.info(input[0,:,:]) - # logger.info(input_len[0:5,:]) - # logger.info(batch_y["summary"][0:5]) - # logger.info(label[0:5,:]) - - # logger.info((len(batch_x["text"][0]), sum(input[0].sum(-1) != 0))) - - batch_size, N, seq_len = input.size() - - if hps.cuda: - input = input.cuda() # [batch, N, seq_len] - label = label.cuda() - input_len = input_len.cuda() - - input = Variable(input) - label = Variable(label) - input_len = Variable(input_len) - - model_outputs = model.forward(input, input_len) # [batch, N, 2] - - outputs = model_outputs[Const.OUTPUT].view(-1, 2) - - label = label.view(-1) - - loss = criterion(outputs, label) # [batch_size, doc_max_timesteps] - input_len = input_len.float().view(-1) - loss = loss * input_len - loss = loss.view(batch_size, -1) - loss = loss.sum(1).mean() - - if not (np.isfinite(loss.data)).numpy(): - logger.error("train Loss is not finite. Stopping.") - logger.info(loss) - for name, param in model.named_parameters(): - if param.requires_grad: - logger.info(name) - logger.info(param.grad.data.sum()) - raise Exception("train Loss is not finite. 
Stopping.") - - optimizer.zero_grad() - loss.backward() - if hps.grad_clip: - torch.nn.utils.clip_grad_norm_(model.parameters(), hps.max_grad_norm) - - optimizer.step() - step_num += 1 - - train_loss += float(loss.data) - epoch_loss += float(loss.data) - - if i % 100 == 0: - # start debugger - # import pdb; pdb.set_trace() - for name, param in model.named_parameters(): - if param.requires_grad: - logger.debug(name) - logger.debug(param.grad.data.sum()) - logger.info(' | end of iter {:3d} | time: {:5.2f}s | train loss {:5.4f} | ' - .format(i, (time.time() - iter_start_time), - float(train_loss / 100))) - train_loss = 0.0 - - # calculate the precision, recall and F - prediction = outputs.max(1)[1] - prediction = prediction.data - label = label.data - pred += prediction.sum() - true += label.sum() - match_true += ((prediction == label) & (prediction == 1)).sum() - match += (prediction == label).sum() - total_example_num += int(batch_size * N) - - if hps.lr_descent: - # new_lr = pow(hps.hidden_size, -0.5) * min(pow(step_num, -0.5), - # step_num * pow(hps.warmup_steps, -1.5)) - new_lr = max(5e-6, lr / (epoch + 1)) - for param_group in list(optimizer.param_groups): - param_group['lr'] = new_lr - logger.info("[INFO] The learning rate now is %f", new_lr) - - epoch_avg_loss = epoch_loss / len(train_loader) - logger.info(' | end of epoch {:3d} | time: {:5.2f}s | epoch train loss {:5.4f} | ' - .format(epoch, (time.time() - epoch_start_time), - float(epoch_avg_loss))) - - logger.info("[INFO] Trainset match_true %d, pred %d, true %d, total %d, match %d", match_true, pred, true, total_example_num, match) - accu, precision, recall, F = utils.eval_label(match_true, pred, true, total_example_num, match) - logger.info("[INFO] The size of totalset is %d, accu is %f, precision is %f, recall is %f, F is %f", total_example_num / hps.doc_max_timesteps, accu, precision, recall, F) - - if not best_train_loss or epoch_avg_loss < best_train_loss: - save_file = os.path.join(train_dir, "bestmodel.pkl") - logger.info('[INFO] Found new best model with %.3f running_train_loss. Saving to %s', float(epoch_avg_loss), save_file) - saver = ModelSaver(save_file) - saver.save_pytorch(model) - best_train_loss = epoch_avg_loss - elif epoch_avg_loss > best_train_loss: - logger.error("[Error] training loss does not descent. Stopping supervisor...") - save_file = os.path.join(train_dir, "earlystop.pkl") - saver = ModelSaver(save_file) - saver.save_pytorch(model) - logger.info('[INFO] Saving early stop model to %s', save_file) - return - - if not best_train_F or F > best_train_F: - save_file = os.path.join(train_dir, "bestFmodel.pkl") - logger.info('[INFO] Found new best model with %.3f F score. Saving to %s', float(F), save_file) - saver = ModelSaver(save_file) - saver.save_pytorch(model) - best_train_F = F - - best_loss, best_F, non_descent_cnt = run_eval(model, valid_loader, hps, best_loss, best_F, non_descent_cnt) - - if non_descent_cnt >= 3: - logger.error("[Error] val loss does not descent for three times. Stopping supervisor...") - save_file = os.path.join(train_dir, "earlystop") - saver = ModelSaver(save_file) - saver.save_pytorch(model) - logger.info('[INFO] Saving early stop model to %s', save_file) - return - -def run_eval(model, loader, hps, best_loss, best_F, non_descent_cnt): - """Repeatedly runs eval iterations, logging to screen and writing summaries. 
Saves the model with the best loss seen so far.""" - logger.info("[INFO] Starting eval for this model ...") - eval_dir = os.path.join(hps.save_root, "eval") # make a subdir of the root dir for eval data - if not os.path.exists(eval_dir): os.makedirs(eval_dir) - - model.eval() - - running_loss = 0.0 - match, pred, true, match_true = 0.0, 0.0, 0.0, 0.0 - pairs = {} - pairs["hyps"] = [] - pairs["refer"] = [] - total_example_num = 0 - criterion = torch.nn.CrossEntropyLoss(reduction='none') - iter_start_time = time.time() - - with torch.no_grad(): - for i, (batch_x, batch_y) in enumerate(loader): - # if i > 10: - # break - - input, input_len = batch_x[Const.INPUT], batch_x[Const.INPUT_LEN] - label = batch_y[Const.TARGET] - - if hps.cuda: - input = input.cuda() # [batch, N, seq_len] - label = label.cuda() - input_len = input_len.cuda() - - batch_size, N, _ = input.size() - - input = Variable(input, requires_grad=False) - label = Variable(label) - input_len = Variable(input_len, requires_grad=False) - - model_outputs = model.forward(input, input_len) # [batch, N, 2] - outputs = model_outputs[Const.OUTPUTS] - prediction = model_outputs["prediction"] - - outputs = outputs.view(-1, 2) # [batch * N, 2] - label = label.view(-1) # [batch * N] - loss = criterion(outputs, label) - input_len = input_len.float().view(-1) - loss = loss * input_len - loss = loss.view(batch_size, -1) - loss = loss.sum(1).mean() - running_loss += float(loss.data) - - label = label.data - pred += prediction.sum() - true += label.sum() - match_true += ((prediction == label) & (prediction == 1)).sum() - match += (prediction == label).sum() - total_example_num += batch_size * N - - # rouge - prediction = prediction.view(batch_size, -1) - for j in range(batch_size): - original_article_sents = batch_x["text"][j] - sent_max_number = len(original_article_sents) - refer = "\n".join(batch_x["summary"][j]) - hyps = "\n".join(original_article_sents[id] for id in range(len(prediction[j])) if prediction[j][id]==1 and id < sent_max_number) - if sent_max_number < hps.m and len(hyps) <= 1: - logger.error("sent_max_number is too short %d, Skip!" , sent_max_number) - continue - - if len(hyps) >= 1 and hyps != '.': - # logger.debug(prediction[j]) - pairs["hyps"].append(hyps) - pairs["refer"].append(refer) - elif refer == "." or refer == "": - logger.error("Refer is None!") - logger.debug("label:") - logger.debug(label[j]) - logger.debug(refer) - elif hyps == "." 
or hyps == "": - logger.error("hyps is None!") - logger.debug("sent_max_number:%d", sent_max_number) - logger.debug("prediction:") - logger.debug(prediction[j]) - logger.debug(hyps) - else: - logger.error("Do not select any sentences!") - logger.debug("sent_max_number:%d", sent_max_number) - logger.debug(original_article_sents) - logger.debug("label:") - logger.debug(label[j]) - continue - - running_avg_loss = running_loss / len(loader) - - if hps.use_pyrouge: - logger.info("The number of pairs is %d", len(pairs["hyps"])) - logging.getLogger('global').setLevel(logging.WARNING) - if not len(pairs["hyps"]): - logger.error("During testing, no hyps is selected!") - return - if isinstance(pairs["refer"][0], list): - logger.info("Multi Reference summaries!") - scores_all = utils.pyrouge_score_all_multi(pairs["hyps"], pairs["refer"]) - else: - scores_all = utils.pyrouge_score_all(pairs["hyps"], pairs["refer"]) - else: - if len(pairs["hyps"]) == 0 or len(pairs["refer"]) == 0 : - logger.error("During testing, no hyps is selected!") - return - rouge = Rouge() - scores_all = rouge.get_scores(pairs["hyps"], pairs["refer"], avg=True) - # try: - # scores_all = rouge.get_scores(pairs["hyps"], pairs["refer"], avg=True) - # except ValueError as e: - # logger.error(repr(e)) - # scores_all = [] - # for idx in range(len(pairs["hyps"])): - # try: - # scores = rouge.get_scores(pairs["hyps"][idx], pairs["refer"][idx])[0] - # scores_all.append(scores) - # except ValueError as e: - # logger.error(repr(e)) - # logger.debug("HYPS:\t%s", pairs["hyps"][idx]) - # logger.debug("REFER:\t%s", pairs["refer"][idx]) - # finally: - # logger.error("During testing, some errors happen!") - # logger.error(len(scores_all)) - # exit(1) - - logger.info('[INFO] End of valid | time: {:5.2f}s | valid loss {:5.4f} | ' - .format((time.time() - iter_start_time), - float(running_avg_loss))) - - logger.info("[INFO] Validset match_true %d, pred %d, true %d, total %d, match %d", match_true, pred, true, total_example_num, match) - accu, precision, recall, F = utils.eval_label(match_true, pred, true, total_example_num, match) - logger.info("[INFO] The size of totalset is %d, accu is %f, precision is %f, recall is %f, F is %f", - total_example_num / hps.doc_max_timesteps, accu, precision, recall, F) - - res = "Rouge1:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores_all['rouge-1']['p'], scores_all['rouge-1']['r'], scores_all['rouge-1']['f']) \ - + "Rouge2:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores_all['rouge-2']['p'], scores_all['rouge-2']['r'], scores_all['rouge-2']['f']) \ - + "Rougel:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores_all['rouge-l']['p'], scores_all['rouge-l']['r'], scores_all['rouge-l']['f']) - logger.info(res) - - # If running_avg_loss is best so far, save this checkpoint (early stopping). - # These checkpoints will appear as bestmodel- in the eval dir - if best_loss is None or running_avg_loss < best_loss: - bestmodel_save_path = os.path.join(eval_dir, 'bestmodel.pkl') # this is where checkpoints of best models are saved - if best_loss is not None: - logger.info('[INFO] Found new best model with %.6f running_avg_loss. The original loss is %.6f, Saving to %s', float(running_avg_loss), float(best_loss), bestmodel_save_path) - else: - logger.info('[INFO] Found new best model with %.6f running_avg_loss. 
The original loss is None, Saving to %s', float(running_avg_loss), bestmodel_save_path) - saver = ModelSaver(bestmodel_save_path) - saver.save_pytorch(model) - best_loss = running_avg_loss - non_descent_cnt = 0 - else: - non_descent_cnt += 1 - - if best_F is None or best_F < F: - bestmodel_save_path = os.path.join(eval_dir, 'bestFmodel.pkl') # this is where checkpoints of best models are saved - if best_F is not None: - logger.info('[INFO] Found new best model with %.6f F. The original F is %.6f, Saving to %s', float(F), float(best_F), bestmodel_save_path) - else: - logger.info('[INFO] Found new best model with %.6f F. The original F is None, Saving to %s', float(F), bestmodel_save_path) - saver = ModelSaver(bestmodel_save_path) - saver.save_pytorch(model) - best_F = F - - return best_loss, best_F, non_descent_cnt - -def run_test(model, loader, hps, limited=False): - """Loads the checkpoint selected by hps.test_model, decodes the test set, and writes hypotheses, references, and ROUGE scores to the test directory.""" - test_dir = os.path.join(hps.save_root, "test") # make a subdir of the root dir for test output - eval_dir = os.path.join(hps.save_root, "eval") - if not os.path.exists(test_dir): os.makedirs(test_dir) - if not os.path.exists(eval_dir): - logger.exception("[Error] eval_dir %s doesn't exist. Run in train mode to create it.", eval_dir) - raise Exception("[Error] eval_dir %s doesn't exist. Run in train mode to create it." % (eval_dir)) - - if hps.test_model == "evalbestmodel": - bestmodel_load_path = os.path.join(eval_dir, 'bestmodel.pkl') # this is where checkpoints of best models are saved - elif hps.test_model == "evalbestFmodel": - bestmodel_load_path = os.path.join(eval_dir, 'bestFmodel.pkl') - elif hps.test_model == "trainbestmodel": - train_dir = os.path.join(hps.save_root, "train") - bestmodel_load_path = os.path.join(train_dir, 'bestmodel.pkl') - elif hps.test_model == "trainbestFmodel": - train_dir = os.path.join(hps.save_root, "train") - bestmodel_load_path = os.path.join(train_dir, 'bestFmodel.pkl') - elif hps.test_model == "earlystop": - train_dir = os.path.join(hps.save_root, "train") - bestmodel_load_path = os.path.join(train_dir, 'earlystop.pkl') - else: - logger.error("No such model! Must be one of evalbestmodel/evalbestFmodel/trainbestmodel/trainbestFmodel/earlystop") - raise ValueError("No such model! 
Must be one of evalbestmodel/trainbestmodel/earlystop") - logger.info("[INFO] Restoring %s for testing...The path is %s", hps.test_model, bestmodel_load_path) - - modelloader = ModelLoader() - modelloader.load_pytorch(model, bestmodel_load_path) - - import datetime - nowTime=datetime.datetime.now().strftime('%Y%m%d_%H%M%S')#现在 - if hps.save_label: - log_dir = os.path.join(test_dir, hps.data_path.split("/")[-1]) - resfile = open(log_dir, "w") - else: - log_dir = os.path.join(test_dir, nowTime) - resfile = open(log_dir, "wb") - logger.info("[INFO] Write the Evaluation into %s", log_dir) - - model.eval() - - match, pred, true, match_true = 0.0, 0.0, 0.0, 0.0 - total_example_num = 0.0 - pairs = {} - pairs["hyps"] = [] - pairs["refer"] = [] - pred_list = [] - iter_start_time=time.time() - with torch.no_grad(): - for i, (batch_x, batch_y) in enumerate(loader): - - input, input_len = batch_x[Const.INPUT], batch_x[Const.INPUT_LEN] - label = batch_y[Const.TARGET] - - if hps.cuda: - input = input.cuda() # [batch, N, seq_len] - label = label.cuda() - input_len = input_len.cuda() - - batch_size, N, _ = input.size() - - input = Variable(input) - input_len = Variable(input_len, requires_grad=False) - - model_outputs = model.forward(input, input_len) # [batch, N, 2] - prediction = model_outputs["pred"] - - if hps.save_label: - pred_list.extend(model_outputs["pred_idx"].data.cpu().view(-1).tolist()) - continue - - pred += prediction.sum() - true += label.sum() - match_true += ((prediction == label) & (prediction == 1)).sum() - match += (prediction == label).sum() - total_example_num += batch_size * N - - for j in range(batch_size): - original_article_sents = batch_x["text"][j] - sent_max_number = len(original_article_sents) - refer = "\n".join(batch_x["summary"][j]) - hyps = "\n".join(original_article_sents[id].replace("\n", "") for id in range(len(prediction[j])) if prediction[j][id]==1 and id < sent_max_number) - if limited: - k = len(refer.split()) - hyps = " ".join(hyps.split()[:k]) - logger.info((len(refer.split()),len(hyps.split()))) - resfile.write(b"Original_article:") - resfile.write("\n".join(batch_x["text"][j]).encode('utf-8')) - resfile.write(b"\n") - resfile.write(b"Reference:") - if isinstance(refer, list): - for ref in refer: - resfile.write(ref.encode('utf-8')) - resfile.write(b"\n") - resfile.write(b'*' * 40) - resfile.write(b"\n") - else: - resfile.write(refer.encode('utf-8')) - resfile.write(b"\n") - resfile.write(b"hypothesis:") - resfile.write(hyps.encode('utf-8')) - resfile.write(b"\n") - - if hps.use_pyrouge: - pairs["hyps"].append(hyps) - pairs["refer"].append(refer) - else: - try: - scores = utils.rouge_all(hyps, refer) - pairs["hyps"].append(hyps) - pairs["refer"].append(refer) - except ValueError: - logger.error("Do not select any sentences!") - logger.debug("sent_max_number:%d", sent_max_number) - logger.debug(original_article_sents) - logger.debug("label:") - logger.debug(label[j]) - continue - - # single example res writer - res = "Rouge1:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores['rouge-1']['p'], scores['rouge-1']['r'], scores['rouge-1']['f']) \ - + "Rouge2:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores['rouge-2']['p'], scores['rouge-2']['r'], scores['rouge-2']['f']) \ - + "Rougel:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores['rouge-l']['p'], scores['rouge-l']['r'], scores['rouge-l']['f']) - - resfile.write(res.encode('utf-8')) - resfile.write(b'-' * 89) - resfile.write(b"\n") - - if hps.save_label: - import json - json.dump(pred_list, resfile) - logger.info(' | end of test | time: {:5.2f}s 
| '.format((time.time() - iter_start_time))) - return - - resfile.write(b"\n") - resfile.write(b'=' * 89) - resfile.write(b"\n") - - if hps.use_pyrouge: - logger.info("The number of pairs is %d", len(pairs["hyps"])) - if not len(pairs["hyps"]): - logger.error("During testing, no hyps is selected!") - return - if isinstance(pairs["refer"][0], list): - logger.info("Multi Reference summaries!") - scores_all = utils.pyrouge_score_all_multi(pairs["hyps"], pairs["refer"]) - else: - scores_all = utils.pyrouge_score_all(pairs["hyps"], pairs["refer"]) - else: - logger.info("The number of pairs is %d", len(pairs["hyps"])) - if not len(pairs["hyps"]): - logger.error("During testing, no hyps is selected!") - return - rouge = Rouge() - scores_all = rouge.get_scores(pairs["hyps"], pairs["refer"], avg=True) - - # the whole model res writer - resfile.write(b"The total testset is:") - res = "Rouge1:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores_all['rouge-1']['p'], scores_all['rouge-1']['r'], scores_all['rouge-1']['f']) \ - + "Rouge2:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores_all['rouge-2']['p'], scores_all['rouge-2']['r'], scores_all['rouge-2']['f']) \ - + "Rougel:\n\tp:%.6f, r:%.6f, f:%.6f\n" % (scores_all['rouge-l']['p'], scores_all['rouge-l']['r'], scores_all['rouge-l']['f']) - resfile.write(res.encode("utf-8")) - logger.info(res) - logger.info(' | end of test | time: {:5.2f}s | ' - .format((time.time() - iter_start_time))) - - - - # label prediction - logger.info("match_true %d, pred %d, true %d, total %d, match %d", match, pred, true, total_example_num, match) - accu, precision, recall, F = utils.eval_label(match_true, pred, true, total_example_num, match) - res = "The size of totalset is %d, accu is %f, precision is %f, recall is %f, F is %f" % (total_example_num / hps.doc_max_timesteps, accu, precision, recall, F) - resfile.write(res.encode('utf-8')) - logger.info("The size of totalset is %d, accu is %f, precision is %f, recall is %f, F is %f", len(loader), accu, precision, recall, F) - - -def main(): - parser = argparse.ArgumentParser(description='Transformer Model') - - # Where to find data - parser.add_argument('--data_path', type=str, default='/remote-home/dqwang/Datasets/CNNDM/train.label.jsonl', help='Path expression to pickle datafiles.') - parser.add_argument('--valid_path', type=str, default='/remote-home/dqwang/Datasets/CNNDM/val.label.jsonl', help='Path expression to pickle valid datafiles.') - parser.add_argument('--vocab_path', type=str, default='/remote-home/dqwang/Datasets/CNNDM/vocab', help='Path expression to text vocabulary file.') - parser.add_argument('--embedding_path', type=str, default='/remote-home/dqwang/Glove/glove.42B.300d.txt', help='Path expression to external word embedding.') - - # Important settings - parser.add_argument('--mode', type=str, default='train', help='must be one of train/test') - parser.add_argument('--restore_model', type=str , default='None', help='Restore model for further training. 
[bestmodel/bestFmodel/earlystop/None]') - parser.add_argument('--test_model', type=str, default='evalbestmodel', help='choose different model to test [evalbestmodel/evalbestFmodel/trainbestmodel/trainbestFmodel/earlystop]') - parser.add_argument('--use_pyrouge', action='store_true', default=False, help='use_pyrouge') - - # Where to save output - parser.add_argument('--save_root', type=str, default='save/', help='Root directory for all model.') - parser.add_argument('--log_root', type=str, default='log/', help='Root directory for all logging.') - - # Hyperparameters - parser.add_argument('--gpu', type=str, default='0', help='GPU ID to use. For cpu, set -1 [default: -1]') - parser.add_argument('--cuda', action='store_true', default=False, help='use cuda') - parser.add_argument('--vocab_size', type=int, default=100000, help='Size of vocabulary. These will be read from the vocabulary file in order. If the vocabulary file contains fewer words than this number, or if this number is set to 0, will take all words in the vocabulary file.') - parser.add_argument('--n_epochs', type=int, default=20, help='Number of epochs [default: 20]') - parser.add_argument('--batch_size', type=int, default=32, help='Mini batch size [default: 128]') - - parser.add_argument('--word_embedding', action='store_true', default=True, help='whether to use Word embedding') - parser.add_argument('--word_emb_dim', type=int, default=300, help='Word embedding size [default: 200]') - parser.add_argument('--embed_train', action='store_true', default=False, help='whether to train Word embedding [default: False]') - parser.add_argument('--min_kernel_size', type=int, default=1, help='kernel min length for CNN [default:1]') - parser.add_argument('--max_kernel_size', type=int, default=7, help='kernel max length for CNN [default:7]') - parser.add_argument('--output_channel', type=int, default=50, help='output channel: repeated times for one kernel') - parser.add_argument('--n_layers', type=int, default=12, help='Number of deeplstm layers') - parser.add_argument('--hidden_size', type=int, default=512, help='hidden size [default: 512]') - parser.add_argument('--ffn_inner_hidden_size', type=int, default=2048, help='PositionwiseFeedForward inner hidden size [default: 2048]') - parser.add_argument('--n_head', type=int, default=8, help='multihead attention number [default: 8]') - parser.add_argument('--recurrent_dropout_prob', type=float, default=0.1, help='recurrent dropout prob [default: 0.1]') - parser.add_argument('--atten_dropout_prob', type=float, default=0.1,help='attention dropout prob [default: 0.1]') - parser.add_argument('--ffn_dropout_prob', type=float, default=0.1, help='PositionwiseFeedForward dropout prob [default: 0.1]') - parser.add_argument('--use_orthnormal_init', action='store_true', default=True, help='use orthnormal init for lstm [default: true]') - parser.add_argument('--sent_max_len', type=int, default=100, help='max length of sentences (max source text sentence tokens)') - parser.add_argument('--doc_max_timesteps', type=int, default=50, help='max length of documents (max timesteps of documents)') - parser.add_argument('--save_label', action='store_true', default=False, help='require multihead attention') - - # Training - parser.add_argument('--lr', type=float, default=0.0001, help='learning rate') - parser.add_argument('--lr_descent', action='store_true', default=False, help='learning rate descent') - parser.add_argument('--warmup_steps', type=int, default=4000, help='warmup_steps') - 
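
Aside: the --grad_clip and --max_grad_norm flags defined next are consumed in run_training above through PyTorch's standard clip_grad_norm_ utility; the parameter update reduces to this fragment:

optimizer.zero_grad()
loss.backward()
if hps.grad_clip:
    # Rescale all gradients in place so their global L2 norm is at most max_grad_norm.
    torch.nn.utils.clip_grad_norm_(model.parameters(), hps.max_grad_norm)
optimizer.step()
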
parser.add_argument('--grad_clip', action='store_true', default=False, help='for gradient clipping') - parser.add_argument('--max_grad_norm', type=float, default=1.0, help='for gradient clipping max gradient normalization') - - parser.add_argument('-m', type=int, default=3, help='decode summary length') - parser.add_argument('--limited', action='store_true', default=False, help='limited decode summary length') - - args = parser.parse_args() - - os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu - torch.set_printoptions(threshold=50000) - - hps = args - - # File paths - DATA_FILE = args.data_path - VALID_FILE = args.valid_path - VOCAL_FILE = args.vocab_path - LOG_PATH = args.log_root - - # train_log setting - if not os.path.exists(LOG_PATH): - if hps.mode == "train": - os.makedirs(LOG_PATH) - else: - logger.exception("[Error] Logdir %s doesn't exist. Run in train mode to create it.", LOG_PATH) - raise Exception("[Error] Logdir %s doesn't exist. Run in train mode to create it." % (LOG_PATH)) - nowTime=datetime.datetime.now().strftime('%Y%m%d_%H%M%S') - log_path = os.path.join(LOG_PATH, hps.mode + "_" + nowTime) - file_handler = logging.FileHandler(log_path) - file_handler.setFormatter(formatter) - logger.addHandler(file_handler) - - logger.info("Pytorch %s", torch.__version__) - logger.info(args) - logger.info(args) - - sum_loader = SummarizationLoader() - - - if hps.mode == 'test': - paths = {"test": DATA_FILE} - hps.recurrent_dropout_prob = 0.0 - hps.atten_dropout_prob = 0.0 - hps.ffn_dropout_prob = 0.0 - logger.info(hps) - else: - paths = {"train": DATA_FILE, "valid": VALID_FILE} - - dataInfo = sum_loader.process(paths=paths, vocab_size=hps.vocab_size, vocab_path=VOCAL_FILE, sent_max_len=hps.sent_max_len, doc_max_timesteps=hps.doc_max_timesteps, load_vocab=os.path.exists(VOCAL_FILE)) - - vocab = dataInfo.vocabs["vocab"] - model = TransformerModel(hps, vocab) - - if len(hps.gpu) > 1: - gpuid = hps.gpu.split(',') - gpuid = [int(s) for s in gpuid] - model = nn.DataParallel(model,device_ids=gpuid) - logger.info("[INFO] Use Multi-gpu: %s", hps.gpu) - if hps.cuda: - model = model.cuda() - logger.info("[INFO] Use cuda") - - if hps.mode == 'train': - trainset = dataInfo.datasets["train"] - train_sampler = BucketSampler(batch_size=hps.batch_size, seq_len_field_name=Const.INPUT) - train_batch = Batch(batch_size=hps.batch_size, dataset=trainset, sampler=train_sampler) - validset = dataInfo.datasets["valid"] - validset.set_input("text", "summary") - valid_batch = Batch(batch_size=hps.batch_size, dataset=validset) - setup_training(model, train_batch, valid_batch, hps) - elif hps.mode == 'test': - logger.info("[INFO] Decoding...") - testset = dataInfo.datasets["test"] - testset.set_input("text", "summary") - test_batch = Batch(batch_size=hps.batch_size, dataset=testset) - run_test(model, test_batch, hps, limited=hps.limited) - else: - logger.error("The 'mode' flag must be one of train/eval/test") - raise ValueError("The 'mode' flag must be one of train/eval/test") - -if __name__ == '__main__': - main() diff --git a/reproduction/Summarization/Baseline/transformer/Beam.py b/reproduction/Summarization/Baseline/transformer/Beam.py deleted file mode 100644 index 127b14f4..00000000 --- a/reproduction/Summarization/Baseline/transformer/Beam.py +++ /dev/null @@ -1,103 +0,0 @@ -""" Manage beam search info structure. - - Heavily borrowed from OpenNMT-py. 
- For code in OpenNMT-py, please check the following link: - https://github.com/OpenNMT/OpenNMT-py/blob/master/onmt/Beam.py -""" - -import torch -import numpy as np -import transformer.Constants as Constants - -class Beam(): - ''' Beam search ''' - - def __init__(self, size, device=False): - - self.size = size - self._done = False - - # The score for each translation on the beam. - self.scores = torch.zeros((size,), dtype=torch.float, device=device) - self.all_scores = [] - - # The backpointers at each time-step. - self.prev_ks = [] - - # The outputs at each time-step. - self.next_ys = [torch.full((size,), Constants.PAD, dtype=torch.long, device=device)] - self.next_ys[0][0] = Constants.BOS - - def get_current_state(self): - "Get the outputs for the current timestep." - return self.get_tentative_hypothesis() - - def get_current_origin(self): - "Get the backpointers for the current timestep." - return self.prev_ks[-1] - - @property - def done(self): - return self._done - - def advance(self, word_prob): - "Update beam status and check if finished or not." - num_words = word_prob.size(1) - - # Sum the previous scores. - if len(self.prev_ks) > 0: - beam_lk = word_prob + self.scores.unsqueeze(1).expand_as(word_prob) - else: - beam_lk = word_prob[0] - - flat_beam_lk = beam_lk.view(-1) - - best_scores, best_scores_id = flat_beam_lk.topk(self.size, 0, True, True) # 1st sort - best_scores, best_scores_id = flat_beam_lk.topk(self.size, 0, True, True) # 2nd sort - - self.all_scores.append(self.scores) - self.scores = best_scores - - # bestScoresId is flattened as a (beam x word) array, - # so we need to calculate which word and beam each score came from - prev_k = best_scores_id / num_words - self.prev_ks.append(prev_k) - self.next_ys.append(best_scores_id - prev_k * num_words) - - # End condition is when top-of-beam is EOS. - if self.next_ys[-1][0].item() == Constants.EOS: - self._done = True - self.all_scores.append(self.scores) - - return self._done - - def sort_scores(self): - "Sort the scores." - return torch.sort(self.scores, 0, True) - - def get_the_best_score_and_idx(self): - "Get the score of the best in the beam." - scores, ids = self.sort_scores() - return scores[1], ids[1] - - def get_tentative_hypothesis(self): - "Get the decoded sequence for the current timestep." - - if len(self.next_ys) == 1: - dec_seq = self.next_ys[0].unsqueeze(1) - else: - _, keys = self.sort_scores() - hyps = [self.get_hypothesis(k) for k in keys] - hyps = [[Constants.BOS] + h for h in hyps] - dec_seq = torch.LongTensor(hyps) - - return dec_seq - - def get_hypothesis(self, k): - """ Walk back to construct the full hypothesis. 
""" - hyp = [] - for j in range(len(self.prev_ks) - 1, -1, -1): - hyp.append(self.next_ys[j+1][k]) - k = self.prev_ks[j][k] - - return list(map(lambda x: x.item(), hyp[::-1])) diff --git a/reproduction/Summarization/Baseline/transformer/Constants.py b/reproduction/Summarization/Baseline/transformer/Constants.py deleted file mode 100644 index d805b03c..00000000 --- a/reproduction/Summarization/Baseline/transformer/Constants.py +++ /dev/null @@ -1,10 +0,0 @@ - -PAD = 0 -UNK = 1 -BOS = 2 -EOS = 3 - -PAD_WORD = '' -UNK_WORD = '' -BOS_WORD = '' -EOS_WORD = '' diff --git a/reproduction/Summarization/Baseline/transformer/Layers.py b/reproduction/Summarization/Baseline/transformer/Layers.py deleted file mode 100644 index f1b45bed..00000000 --- a/reproduction/Summarization/Baseline/transformer/Layers.py +++ /dev/null @@ -1,49 +0,0 @@ -''' Define the Layers ''' -import torch.nn as nn -from transformer.SubLayers import MultiHeadAttention, PositionwiseFeedForward - -__author__ = "Yu-Hsiang Huang" - - -class EncoderLayer(nn.Module): - ''' Compose with two layers ''' - - def __init__(self, d_model, d_inner, n_head, d_k, d_v, dropout=0.1): - super(EncoderLayer, self).__init__() - self.slf_attn = MultiHeadAttention( - n_head, d_model, d_k, d_v, dropout=dropout) - self.pos_ffn = PositionwiseFeedForward(d_model, d_inner, dropout=dropout) - - def forward(self, enc_input, non_pad_mask=None, slf_attn_mask=None): - enc_output, enc_slf_attn = self.slf_attn( - enc_input, enc_input, enc_input, mask=slf_attn_mask) - enc_output *= non_pad_mask - - enc_output = self.pos_ffn(enc_output) - enc_output *= non_pad_mask - - return enc_output, enc_slf_attn - - -class DecoderLayer(nn.Module): - ''' Compose with three layers ''' - - def __init__(self, d_model, d_inner, n_head, d_k, d_v, dropout=0.1): - super(DecoderLayer, self).__init__() - self.slf_attn = MultiHeadAttention(n_head, d_model, d_k, d_v, dropout=dropout) - self.enc_attn = MultiHeadAttention(n_head, d_model, d_k, d_v, dropout=dropout) - self.pos_ffn = PositionwiseFeedForward(d_model, d_inner, dropout=dropout) - - def forward(self, dec_input, enc_output, non_pad_mask=None, slf_attn_mask=None, dec_enc_attn_mask=None): - dec_output, dec_slf_attn = self.slf_attn( - dec_input, dec_input, dec_input, mask=slf_attn_mask) - dec_output *= non_pad_mask - - dec_output, dec_enc_attn = self.enc_attn( - dec_output, enc_output, enc_output, mask=dec_enc_attn_mask) - dec_output *= non_pad_mask - - dec_output = self.pos_ffn(dec_output) - dec_output *= non_pad_mask - - return dec_output, dec_slf_attn, dec_enc_attn diff --git a/reproduction/Summarization/Baseline/transformer/Models.py b/reproduction/Summarization/Baseline/transformer/Models.py deleted file mode 100644 index 2d928f96..00000000 --- a/reproduction/Summarization/Baseline/transformer/Models.py +++ /dev/null @@ -1,215 +0,0 @@ -''' Define the Transformer model ''' -import torch -import torch.nn as nn -import numpy as np -import transformer.Constants as Constants -from transformer.Layers import EncoderLayer, DecoderLayer - -__author__ = "Yu-Hsiang Huang" - - -def get_non_pad_mask(seq): - assert seq.dim() == 2 - return seq.ne(Constants.PAD).type(torch.float).unsqueeze(-1) - - -def get_sinusoid_encoding_table(n_position, d_hid, padding_idx=None): - ''' Sinusoid position encoding table ''' - - def cal_angle(position, hid_idx): - return position / np.power(10000, 2 * (hid_idx // 2) / d_hid) - - def get_posi_angle_vec(position): - return [cal_angle(position, hid_j) for hid_j in range(d_hid)] - - sinusoid_table = 
np.array([get_posi_angle_vec(pos_i) for pos_i in range(n_position)]) - - sinusoid_table[:, 0::2] = np.sin(sinusoid_table[:, 0::2]) # dim 2i - sinusoid_table[:, 1::2] = np.cos(sinusoid_table[:, 1::2]) # dim 2i+1 - - if padding_idx is not None: - # zero vector for padding dimension - sinusoid_table[padding_idx] = 0. - - return torch.FloatTensor(sinusoid_table) - - -def get_attn_key_pad_mask(seq_k, seq_q): - ''' For masking out the padding part of key sequence. ''' - - # Expand to fit the shape of key query attention matrix. - len_q = seq_q.size(1) - padding_mask = seq_k.eq(Constants.PAD) - padding_mask = padding_mask.unsqueeze(1).expand(-1, len_q, -1) # b x lq x lk - - return padding_mask - - -def get_subsequent_mask(seq): - ''' For masking out the subsequent info. ''' - - sz_b, len_s = seq.size() - subsequent_mask = torch.triu( - torch.ones((len_s, len_s), device=seq.device, dtype=torch.uint8), diagonal=1) - subsequent_mask = subsequent_mask.unsqueeze(0).expand(sz_b, -1, -1) # b x ls x ls - - return subsequent_mask - - -class Encoder(nn.Module): - ''' A encoder model with self attention mechanism. ''' - - def __init__( - self, - n_src_vocab, len_max_seq, d_word_vec, - n_layers, n_head, d_k, d_v, - d_model, d_inner, dropout=0.1): - - super().__init__() - - n_position = len_max_seq + 1 - - self.src_word_emb = nn.Embedding( - n_src_vocab, d_word_vec, padding_idx=Constants.PAD) - - self.position_enc = nn.Embedding.from_pretrained( - get_sinusoid_encoding_table(n_position, d_word_vec, padding_idx=0), - freeze=True) - - self.layer_stack = nn.ModuleList([ - EncoderLayer(d_model, d_inner, n_head, d_k, d_v, dropout=dropout) - for _ in range(n_layers)]) - - def forward(self, src_seq, src_pos, return_attns=False): - - enc_slf_attn_list = [] - - # -- Prepare masks - slf_attn_mask = get_attn_key_pad_mask(seq_k=src_seq, seq_q=src_seq) - non_pad_mask = get_non_pad_mask(src_seq) - - # -- Forward - enc_output = self.src_word_emb(src_seq) + self.position_enc(src_pos) - - for enc_layer in self.layer_stack: - enc_output, enc_slf_attn = enc_layer( - enc_output, - non_pad_mask=non_pad_mask, - slf_attn_mask=slf_attn_mask) - if return_attns: - enc_slf_attn_list += [enc_slf_attn] - - if return_attns: - return enc_output, enc_slf_attn_list - return enc_output, - - -class Decoder(nn.Module): - ''' A decoder model with self attention mechanism. 
''' - - def __init__( - self, - n_tgt_vocab, len_max_seq, d_word_vec, - n_layers, n_head, d_k, d_v, - d_model, d_inner, dropout=0.1): - - super().__init__() - n_position = len_max_seq + 1 - - self.tgt_word_emb = nn.Embedding( - n_tgt_vocab, d_word_vec, padding_idx=Constants.PAD) - - self.position_enc = nn.Embedding.from_pretrained( - get_sinusoid_encoding_table(n_position, d_word_vec, padding_idx=0), - freeze=True) - - self.layer_stack = nn.ModuleList([ - DecoderLayer(d_model, d_inner, n_head, d_k, d_v, dropout=dropout) - for _ in range(n_layers)]) - - def forward(self, tgt_seq, tgt_pos, src_seq, enc_output, return_attns=False): - - dec_slf_attn_list, dec_enc_attn_list = [], [] - - # -- Prepare masks - non_pad_mask = get_non_pad_mask(tgt_seq) - - slf_attn_mask_subseq = get_subsequent_mask(tgt_seq) - slf_attn_mask_keypad = get_attn_key_pad_mask(seq_k=tgt_seq, seq_q=tgt_seq) - slf_attn_mask = (slf_attn_mask_keypad + slf_attn_mask_subseq).gt(0) - - dec_enc_attn_mask = get_attn_key_pad_mask(seq_k=src_seq, seq_q=tgt_seq) - - # -- Forward - dec_output = self.tgt_word_emb(tgt_seq) + self.position_enc(tgt_pos) - - for dec_layer in self.layer_stack: - dec_output, dec_slf_attn, dec_enc_attn = dec_layer( - dec_output, enc_output, - non_pad_mask=non_pad_mask, - slf_attn_mask=slf_attn_mask, - dec_enc_attn_mask=dec_enc_attn_mask) - - if return_attns: - dec_slf_attn_list += [dec_slf_attn] - dec_enc_attn_list += [dec_enc_attn] - - if return_attns: - return dec_output, dec_slf_attn_list, dec_enc_attn_list - return dec_output, - - -class Transformer(nn.Module): - ''' A sequence to sequence model with attention mechanism. ''' - - def __init__( - self, - n_src_vocab, n_tgt_vocab, len_max_seq, - d_word_vec=512, d_model=512, d_inner=2048, - n_layers=6, n_head=8, d_k=64, d_v=64, dropout=0.1, - tgt_emb_prj_weight_sharing=True, - emb_src_tgt_weight_sharing=True): - - super().__init__() - - self.encoder = Encoder( - n_src_vocab=n_src_vocab, len_max_seq=len_max_seq, - d_word_vec=d_word_vec, d_model=d_model, d_inner=d_inner, - n_layers=n_layers, n_head=n_head, d_k=d_k, d_v=d_v, - dropout=dropout) - - self.decoder = Decoder( - n_tgt_vocab=n_tgt_vocab, len_max_seq=len_max_seq, - d_word_vec=d_word_vec, d_model=d_model, d_inner=d_inner, - n_layers=n_layers, n_head=n_head, d_k=d_k, d_v=d_v, - dropout=dropout) - - self.tgt_word_prj = nn.Linear(d_model, n_tgt_vocab, bias=False) - nn.init.xavier_normal_(self.tgt_word_prj.weight) - - assert d_model == d_word_vec, \ - 'To facilitate the residual connections, \ - the dimensions of all module outputs shall be the same.' - - if tgt_emb_prj_weight_sharing: - # Share the weight matrix between target word embedding & the final logit dense layer - self.tgt_word_prj.weight = self.decoder.tgt_word_emb.weight - self.x_logit_scale = (d_model ** -0.5) - else: - self.x_logit_scale = 1. - - if emb_src_tgt_weight_sharing: - # Share the weight matrix between source & target word embeddings - assert n_src_vocab == n_tgt_vocab, \ - "To share word embedding table, the vocabulary size of src/tgt shall be the same." 
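- # Note: assigning the decoder's Parameter object (next line) rather than copying
- # its data ties the two embedding tables to a single shared tensor, so encoder
- # and decoder receive the same gradient updates.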
- self.encoder.src_word_emb.weight = self.decoder.tgt_word_emb.weight - - def forward(self, src_seq, src_pos, tgt_seq, tgt_pos): - - tgt_seq, tgt_pos = tgt_seq[:, :-1], tgt_pos[:, :-1] - - enc_output, *_ = self.encoder(src_seq, src_pos) - dec_output, *_ = self.decoder(tgt_seq, tgt_pos, src_seq, enc_output) - seq_logit = self.tgt_word_prj(dec_output) * self.x_logit_scale - - return seq_logit.view(-1, seq_logit.size(2)) diff --git a/reproduction/Summarization/Baseline/transformer/Modules.py b/reproduction/Summarization/Baseline/transformer/Modules.py deleted file mode 100644 index c711f44b..00000000 --- a/reproduction/Summarization/Baseline/transformer/Modules.py +++ /dev/null @@ -1,28 +0,0 @@ -import torch -import torch.nn as nn -import numpy as np - -__author__ = "Yu-Hsiang Huang" - -class ScaledDotProductAttention(nn.Module): - ''' Scaled Dot-Product Attention ''' - - def __init__(self, temperature, attn_dropout=0.1): - super().__init__() - self.temperature = temperature - self.dropout = nn.Dropout(attn_dropout) - self.softmax = nn.Softmax(dim=2) - - def forward(self, q, k, v, mask=None): - - attn = torch.bmm(q, k.transpose(1, 2)) - attn = attn / self.temperature - - if mask is not None: - attn = attn.masked_fill(mask, -np.inf) - - attn = self.softmax(attn) - attn = self.dropout(attn) - output = torch.bmm(attn, v) - - return output, attn diff --git a/reproduction/Summarization/Baseline/transformer/Optim.py b/reproduction/Summarization/Baseline/transformer/Optim.py deleted file mode 100644 index 8ad4458a..00000000 --- a/reproduction/Summarization/Baseline/transformer/Optim.py +++ /dev/null @@ -1,35 +0,0 @@ -'''A wrapper class for optimizer ''' -import numpy as np - -class ScheduledOptim(): - '''A simple wrapper class for learning rate scheduling''' - - def __init__(self, optimizer, d_model, n_warmup_steps): - self._optimizer = optimizer - self.n_warmup_steps = n_warmup_steps - self.n_current_steps = 0 - self.init_lr = np.power(d_model, -0.5) - - def step_and_update_lr(self): - "Step with the inner optimizer" - self._update_learning_rate() - self._optimizer.step() - - def zero_grad(self): - "Zero out the gradients by the inner optimizer" - self._optimizer.zero_grad() - - def _get_lr_scale(self): - return np.min([ - np.power(self.n_current_steps, -0.5), - np.power(self.n_warmup_steps, -1.5) * self.n_current_steps]) - - def _update_learning_rate(self): - ''' Learning rate scheduling per step ''' - - self.n_current_steps += 1 - lr = self.init_lr * self._get_lr_scale() - - for param_group in self._optimizer.param_groups: - param_group['lr'] = lr - diff --git a/reproduction/Summarization/Baseline/transformer/SubLayers.py b/reproduction/Summarization/Baseline/transformer/SubLayers.py deleted file mode 100644 index 42b7259d..00000000 --- a/reproduction/Summarization/Baseline/transformer/SubLayers.py +++ /dev/null @@ -1,82 +0,0 @@ -''' Define the sublayers in encoder/decoder layer ''' -import numpy as np -import torch.nn as nn -import torch.nn.functional as F -from transformer.Modules import ScaledDotProductAttention - -__author__ = "Yu-Hsiang Huang" - -class MultiHeadAttention(nn.Module): - ''' Multi-Head Attention module ''' - - def __init__(self, n_head, d_model, d_k, d_v, dropout=0.1): - super().__init__() - - self.n_head = n_head - self.d_k = d_k - self.d_v = d_v - - self.w_qs = nn.Linear(d_model, n_head * d_k) - self.w_ks = nn.Linear(d_model, n_head * d_k) - self.w_vs = nn.Linear(d_model, n_head * d_v) - nn.init.xavier_normal_(self.w_qs.weight) - nn.init.xavier_normal_(self.w_ks.weight) - 
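# Xavier init gives the q/k/v projections comparable variance before the
# scaled dot-product attention below. -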
nn.init.xavier_normal_(self.w_vs.weight) - - self.attention = ScaledDotProductAttention(temperature=np.power(d_k, 0.5)) - self.layer_norm = nn.LayerNorm(d_model) - - self.fc = nn.Linear(n_head * d_v, d_model) - nn.init.xavier_normal_(self.fc.weight) - - self.dropout = nn.Dropout(dropout) - - - def forward(self, q, k, v, mask=None): - - d_k, d_v, n_head = self.d_k, self.d_v, self.n_head - - sz_b, len_q, _ = q.size() - sz_b, len_k, _ = k.size() - sz_b, len_v, _ = v.size() - - residual = q - - q = self.w_qs(q).view(sz_b, len_q, n_head, d_k) - k = self.w_ks(k).view(sz_b, len_k, n_head, d_k) - v = self.w_vs(v).view(sz_b, len_v, n_head, d_v) - - q = q.permute(2, 0, 1, 3).contiguous().view(-1, len_q, d_k) # (n*b) x lq x dk - k = k.permute(2, 0, 1, 3).contiguous().view(-1, len_k, d_k) # (n*b) x lk x dk - v = v.permute(2, 0, 1, 3).contiguous().view(-1, len_v, d_v) # (n*b) x lv x dv - - if mask is not None: - mask = mask.repeat(n_head, 1, 1) # (n*b) x .. x .. - output, attn = self.attention(q, k, v, mask=mask) - - output = output.view(n_head, sz_b, len_q, d_v) - output = output.permute(1, 2, 0, 3).contiguous().view(sz_b, len_q, -1) # b x lq x (n*dv) - - output = self.dropout(self.fc(output)) - output = self.layer_norm(output + residual) - - return output, attn - -class PositionwiseFeedForward(nn.Module): - ''' A two-feed-forward-layer module ''' - - def __init__(self, d_in, d_hid, dropout=0.1): - super().__init__() - self.w_1 = nn.Conv1d(d_in, d_hid, 1) # position-wise - self.w_2 = nn.Conv1d(d_hid, d_in, 1) # position-wise - self.layer_norm = nn.LayerNorm(d_in) - self.dropout = nn.Dropout(dropout) - - def forward(self, x): - residual = x - output = x.transpose(1, 2) - output = self.w_2(F.relu(self.w_1(output))) - output = output.transpose(1, 2) - output = self.dropout(output) - output = self.layer_norm(output + residual) - return output diff --git a/reproduction/Summarization/Baseline/transformer/Translator.py b/reproduction/Summarization/Baseline/transformer/Translator.py deleted file mode 100644 index b22feabe..00000000 --- a/reproduction/Summarization/Baseline/transformer/Translator.py +++ /dev/null @@ -1,166 +0,0 @@ -''' This module will handle the text generation with beam search. ''' - -import torch -import torch.nn as nn -import torch.nn.functional as F - -from transformer.Models import Transformer -from transformer.Beam import Beam - -class Translator(object): - ''' Load with trained model and handle the beam search ''' - - def __init__(self, opt): - self.opt = opt - self.device = torch.device('cuda' if opt.cuda else 'cpu') - - checkpoint = torch.load(opt.model) - model_opt = checkpoint['settings'] - self.model_opt = model_opt - - model = Transformer( - model_opt.src_vocab_size, - model_opt.tgt_vocab_size, - model_opt.max_token_seq_len, - tgt_emb_prj_weight_sharing=model_opt.proj_share_weight, - emb_src_tgt_weight_sharing=model_opt.embs_share_weight, - d_k=model_opt.d_k, - d_v=model_opt.d_v, - d_model=model_opt.d_model, - d_word_vec=model_opt.d_word_vec, - d_inner=model_opt.d_inner_hid, - n_layers=model_opt.n_layers, - n_head=model_opt.n_head, - dropout=model_opt.dropout) - - model.load_state_dict(checkpoint['model']) - print('[Info] Trained model state loaded.') - - model.word_prob_prj = nn.LogSoftmax(dim=1) - - model = model.to(self.device) - - self.model = model - self.model.eval() - - def translate_batch(self, src_seq, src_pos): - ''' Translation work in one batch ''' - - def get_inst_idx_to_tensor_position_map(inst_idx_list): - ''' Indicate the position of an instance in a tensor. 
''' - return {inst_idx: tensor_position for tensor_position, inst_idx in enumerate(inst_idx_list)} - - def collect_active_part(beamed_tensor, curr_active_inst_idx, n_prev_active_inst, n_bm): - ''' Collect tensor parts associated to active instances. ''' - - _, *d_hs = beamed_tensor.size() - n_curr_active_inst = len(curr_active_inst_idx) - new_shape = (n_curr_active_inst * n_bm, *d_hs) - - beamed_tensor = beamed_tensor.view(n_prev_active_inst, -1) - beamed_tensor = beamed_tensor.index_select(0, curr_active_inst_idx) - beamed_tensor = beamed_tensor.view(*new_shape) - - return beamed_tensor - - def collate_active_info( - src_seq, src_enc, inst_idx_to_position_map, active_inst_idx_list): - # Sentences which are still active are collected, - # so the decoder will not run on completed sentences. - n_prev_active_inst = len(inst_idx_to_position_map) - active_inst_idx = [inst_idx_to_position_map[k] for k in active_inst_idx_list] - active_inst_idx = torch.LongTensor(active_inst_idx).to(self.device) - - active_src_seq = collect_active_part(src_seq, active_inst_idx, n_prev_active_inst, n_bm) - active_src_enc = collect_active_part(src_enc, active_inst_idx, n_prev_active_inst, n_bm) - active_inst_idx_to_position_map = get_inst_idx_to_tensor_position_map(active_inst_idx_list) - - return active_src_seq, active_src_enc, active_inst_idx_to_position_map - - def beam_decode_step( - inst_dec_beams, len_dec_seq, src_seq, enc_output, inst_idx_to_position_map, n_bm): - ''' Decode and update beam status, and then return active beam idx ''' - - def prepare_beam_dec_seq(inst_dec_beams, len_dec_seq): - dec_partial_seq = [b.get_current_state() for b in inst_dec_beams if not b.done] - dec_partial_seq = torch.stack(dec_partial_seq).to(self.device) - dec_partial_seq = dec_partial_seq.view(-1, len_dec_seq) - return dec_partial_seq - - def prepare_beam_dec_pos(len_dec_seq, n_active_inst, n_bm): - dec_partial_pos = torch.arange(1, len_dec_seq + 1, dtype=torch.long, device=self.device) - dec_partial_pos = dec_partial_pos.unsqueeze(0).repeat(n_active_inst * n_bm, 1) - return dec_partial_pos - - def predict_word(dec_seq, dec_pos, src_seq, enc_output, n_active_inst, n_bm): - dec_output, *_ = self.model.decoder(dec_seq, dec_pos, src_seq, enc_output) - dec_output = dec_output[:, -1, :] # Pick the last step: (bh * bm) * d_h - word_prob = F.log_softmax(self.model.tgt_word_prj(dec_output), dim=1) - word_prob = word_prob.view(n_active_inst, n_bm, -1) - - return word_prob - - def collect_active_inst_idx_list(inst_beams, word_prob, inst_idx_to_position_map): - active_inst_idx_list = [] - for inst_idx, inst_position in inst_idx_to_position_map.items(): - is_inst_complete = inst_beams[inst_idx].advance(word_prob[inst_position]) - if not is_inst_complete: - active_inst_idx_list += [inst_idx] - - return active_inst_idx_list - - n_active_inst = len(inst_idx_to_position_map) - - dec_seq = prepare_beam_dec_seq(inst_dec_beams, len_dec_seq) - dec_pos = prepare_beam_dec_pos(len_dec_seq, n_active_inst, n_bm) - word_prob = predict_word(dec_seq, dec_pos, src_seq, enc_output, n_active_inst, n_bm) - - # Update the beam with predicted word prob information and collect incomplete instances - active_inst_idx_list = collect_active_inst_idx_list( - inst_dec_beams, word_prob, inst_idx_to_position_map) - - return active_inst_idx_list - - def collect_hypothesis_and_scores(inst_dec_beams, n_best): - all_hyp, all_scores = [], [] - for inst_idx in range(len(inst_dec_beams)): - scores, tail_idxs = inst_dec_beams[inst_idx].sort_scores() - all_scores += 
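The helpers `get_inst_idx_to_tensor_position_map` and `collect_active_part` above drive the beam-search bookkeeping: a map from instance id to row position, plus an `index_select` that keeps only the rows of still-active instances. A toy run under assumed sizes (three instances, beam size 3, instance 1 finished):

```python
import torch

def get_inst_idx_to_tensor_position_map(inst_idx_list):
    # Same helper as above: instance id -> row position in the packed tensor.
    return {inst_idx: pos for pos, inst_idx in enumerate(inst_idx_list)}

# Three instances were active; instance 1 has just finished decoding.
pos_map = get_inst_idx_to_tensor_position_map([0, 1, 2])
active = [0, 2]
rows = torch.LongTensor([pos_map[i] for i in active])

n_bm = 3  # beam size: each instance owns n_bm consecutive beam entries
beamed = torch.arange(3 * n_bm).view(3, n_bm)  # stand-in for src_seq / src_enc
kept = beamed.index_select(0, rows)            # rows of instances 0 and 2 survive
print(kept)
```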
[scores[:n_best]] - - hyps = [inst_dec_beams[inst_idx].get_hypothesis(i) for i in tail_idxs[:n_best]] - all_hyp += [hyps] - return all_hyp, all_scores - - with torch.no_grad(): - #-- Encode - src_seq, src_pos = src_seq.to(self.device), src_pos.to(self.device) - src_enc, *_ = self.model.encoder(src_seq, src_pos) - - #-- Repeat data for beam search - n_bm = self.opt.beam_size - n_inst, len_s, d_h = src_enc.size() - src_seq = src_seq.repeat(1, n_bm).view(n_inst * n_bm, len_s) - src_enc = src_enc.repeat(1, n_bm, 1).view(n_inst * n_bm, len_s, d_h) - - #-- Prepare beams - inst_dec_beams = [Beam(n_bm, device=self.device) for _ in range(n_inst)] - - #-- Bookkeeping for active or not - active_inst_idx_list = list(range(n_inst)) - inst_idx_to_position_map = get_inst_idx_to_tensor_position_map(active_inst_idx_list) - - #-- Decode - for len_dec_seq in range(1, self.model_opt.max_token_seq_len + 1): - - active_inst_idx_list = beam_decode_step( - inst_dec_beams, len_dec_seq, src_seq, src_enc, inst_idx_to_position_map, n_bm) - - if not active_inst_idx_list: - break # all instances have finished their path to <EOS> - - src_seq, src_enc, inst_idx_to_position_map = collate_active_info( - src_seq, src_enc, inst_idx_to_position_map, active_inst_idx_list) - - batch_hyp, batch_scores = collect_hypothesis_and_scores(inst_dec_beams, self.opt.n_best) - - return batch_hyp, batch_scores diff --git a/reproduction/Summarization/Baseline/transformer/__init__.py b/reproduction/Summarization/Baseline/transformer/__init__.py deleted file mode 100644 index 901dfa1f..00000000 --- a/reproduction/Summarization/Baseline/transformer/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -import transformer.Constants -import transformer.Modules -import transformer.Layers -import transformer.SubLayers -import transformer.Models -import transformer.Translator -import transformer.Beam -import transformer.Optim - -__all__ = [ - 'Constants', 'Modules', 'Layers', - 'SubLayers', 'Models', 'Optim', - 'Translator', 'Beam'] diff --git a/reproduction/Summarization/BertSum/callback.py b/reproduction/Summarization/BertSum/callback.py deleted file mode 100644 index a1bb4f54..00000000 --- a/reproduction/Summarization/BertSum/callback.py +++ /dev/null @@ -1,129 +0,0 @@ -import os -import torch -import sys -from torch import nn - -from fastNLP.core.callback import Callback -from fastNLP.core.utils import _get_model_device - -class MyCallback(Callback): - def __init__(self, args): - super(MyCallback, self).__init__() - self.args = args - self.real_step = 0 - - def on_step_end(self): - if self.step % self.update_every == 0 and self.step > 0: - self.real_step += 1 - cur_lr = self.args.max_lr * 100 * min(self.real_step ** (-0.5), self.real_step * self.args.warmup_steps**(-1.5)) - for param_group in self.optimizer.param_groups: - param_group['lr'] = cur_lr - - if self.real_step % 1000 == 0: - self.pbar.write('Current learning rate is {:.8f}, real_step: {}'.format(cur_lr, self.real_step)) - - def on_epoch_end(self): - self.pbar.write('Epoch {} is done !!!'.format(self.epoch)) - -def _save_model(model, model_name, save_dir, only_param=False): - """ Save the state_dict or the whole model with device information stripped - :param model: - :param model_name: - :param save_dir: directory to save into - :param only_param: - :return: - """ - model_path = os.path.join(save_dir, model_name) - if not os.path.isdir(save_dir): - os.makedirs(save_dir, exist_ok=True) - if isinstance(model, nn.DataParallel): - model = model.module - if only_param:
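`MyCallback.on_step_end` above rescales the learning rate with the same warmup shape, times a constant factor of 100 over `max_lr`. A sketch of the resulting curve, using the training script's default hyper-parameters:

```python
# The same curve as MyCallback.on_step_end above, with the training
# script's defaults (max_lr=2e-5, warmup_steps=10000).
def bertsum_lr(real_step, max_lr=2e-5, warmup_steps=10000):
    return max_lr * 100 * min(real_step ** (-0.5), real_step * warmup_steps ** (-1.5))

for step in (100, 1000, 10000, 40000):
    print(step, bertsum_lr(step))
```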
state_dict = model.state_dict() - for key in state_dict: - state_dict[key] = state_dict[key].cpu() - torch.save(state_dict, model_path) - else: - _model_device = _get_model_device(model) - model.cpu() - torch.save(model, model_path) - model.to(_model_device) - -class SaveModelCallback(Callback): - """ - Since the Trainer only keeps the best model during training, this callback supports saving checkpoints in several other ways. - It creates a folder named after the training start timestamp under save_dir and stores several models inside: - -save_dir - -2019-07-03-15-06-36 - -epoch:0_step:20_{metric_key}:{evaluate_performance}.pt # metric_key is the given metric name, evaluate_performance its value on dev - -epoch:1_step:40_{metric_key}:{evaluate_performance}.pt - -2019-07-03-15-10-00 - -epoch:0_step:20_{metric_key}:{evaluate_performance}.pt - :param str save_dir: directory to store models in; a subdirectory named after the timestamp is created inside it - :param int top: keep the models with the top-n dev performances; -1 keeps every model - :param bool only_param: whether to save only the model weights - :param save_on_exception: whether to save a snapshot of the current model when an exception occurs - """ - def __init__(self, save_dir, top=5, only_param=False, save_on_exception=False): - super().__init__() - - if not os.path.isdir(save_dir): - raise NotADirectoryError("{} is not a directory.".format(save_dir)) - self.save_dir = save_dir - if top < 0: - self.top = sys.maxsize - else: - self.top = top - self._ordered_save_models = [] # List[Tuple]; Tuple[0] is the metric, Tuple[1] the path. Metrics improve along the list, so eviction starts at the head - - self.only_param = only_param - self.save_on_exception = save_on_exception - - def on_train_begin(self): - self.save_dir = os.path.join(self.save_dir, self.trainer.start_time) - - def on_valid_end(self, eval_result, metric_key, optimizer, is_better_eval): - metric_value = list(eval_result.values())[0][metric_key] - self._save_this_model(metric_value) - - def _insert_into_ordered_save_models(self, pair): - # pair: (metric_value, model_name) - # returns the pair to save and the pair to delete; the first element of a pair is the metric value, the second the model name - index = -1 - for _pair in self._ordered_save_models: - if _pair[0]>=pair[0] and self.trainer.increase_better: - break - if not self.trainer.increase_better and _pair[0]<=pair[0]: - break - index += 1 - save_pair = None - if len(self._ordered_save_models)<self.top or (len(self._ordered_save_models)>=self.top and index!=-1): - save_pair = pair - self._ordered_save_models.insert(index+1, pair) - delete_pair = None - if len(self._ordered_save_models)>self.top: - delete_pair = self._ordered_save_models.pop(0) - return save_pair, delete_pair - - def _save_this_model(self, metric_value): - name = "epoch:{}_step:{}_{}:{:.6f}.pt".format(self.epoch, self.step, self.trainer.metric_key, metric_value) - save_pair, delete_pair = self._insert_into_ordered_save_models((metric_value, name)) - if save_pair: - try: - _save_model(self.model, model_name=name, save_dir=self.save_dir, only_param=self.only_param) - except Exception as e: - print(f"The following exception: {e} happened while saving the model to {self.save_dir}.") - if delete_pair: - try: - delete_model_path = os.path.join(self.save_dir, delete_pair[1]) - if os.path.exists(delete_model_path): - os.remove(delete_model_path) - except Exception as e: - print(f"Failed to delete model {name} at {self.save_dir} because of exception: {e}.") - - def on_exception(self, exception): - if self.save_on_exception: - name = "epoch:{}_step:{}_Exception:{}.pt".format(self.epoch, self.step, exception.__class__.__name__) - _save_model(self.model, model_name=name, save_dir=self.save_dir, only_param=self.only_param) - - diff --git a/reproduction/Summarization/BertSum/dataloader.py b/reproduction/Summarization/BertSum/dataloader.py deleted file mode 100644 index 6af797e4..00000000 --- a/reproduction/Summarization/BertSum/dataloader.py +++ /dev/null @@ -1,157 +0,0 @@ -from time
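The top-k bookkeeping in `_insert_into_ordered_save_models` is compact; a stand-alone rehearsal of the same logic (for the `increase_better=True` case, with an assumed `top=2`) shows which checkpoints get written and which get evicted:

```python
# Rehearsal of SaveModelCallback's bookkeeping for increase_better=True:
# _ordered_save_models stays sorted worst -> best and holds at most `top` entries.
top = 2
ordered = []  # list of (metric_value, model_name)

def offer(pair):
    index = -1
    for _pair in ordered:
        if _pair[0] >= pair[0]:
            break
        index += 1
    save_pair = None
    if len(ordered) < top or (len(ordered) >= top and index != -1):
        save_pair = pair
        ordered.insert(index + 1, pair)
    delete_pair = None
    if len(ordered) > top:
        delete_pair = ordered.pop(0)  # evict the worst checkpoint
    return save_pair, delete_pair

for metric in (0.3, 0.5, 0.4, 0.6):
    print(offer((metric, 'model_{}.pt'.format(metric))), ordered)
```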
import time -from datetime import timedelta - -from fastNLP.io.dataset_loader import JsonLoader -from fastNLP.modules.encoder._bert import BertTokenizer -from fastNLP.io.data_bundle import DataBundle -from fastNLP.core.const import Const - -class BertData(JsonLoader): - - def __init__(self, max_nsents=60, max_ntokens=100, max_len=512): - - fields = {'article': 'article', - 'label': 'label'} - super(BertData, self).__init__(fields=fields) - - self.max_nsents = max_nsents - self.max_ntokens = max_ntokens - self.max_len = max_len - - self.tokenizer = BertTokenizer.from_pretrained('/path/to/uncased_L-12_H-768_A-12') - self.cls_id = self.tokenizer.vocab['[CLS]'] - self.sep_id = self.tokenizer.vocab['[SEP]'] - self.pad_id = self.tokenizer.vocab['[PAD]'] - - def _load(self, paths): - dataset = super(BertData, self)._load(paths) - return dataset - - def process(self, paths): - - def truncate_articles(instance, max_nsents=self.max_nsents, max_ntokens=self.max_ntokens): - article = [' '.join(sent.lower().split()[:max_ntokens]) for sent in instance['article']] - return article[:max_nsents] - - def truncate_labels(instance): - label = list(filter(lambda x: x < len(instance['article']), instance['label'])) - return label - - def bert_tokenize(instance, tokenizer, max_len, pad_value): - article = instance['article'] - article = ' [SEP] [CLS] '.join(article) - word_pieces = tokenizer.tokenize(article)[:(max_len - 2)] - word_pieces = ['[CLS]'] + word_pieces + ['[SEP]'] - token_ids = tokenizer.convert_tokens_to_ids(word_pieces) - while len(token_ids) < max_len: - token_ids.append(pad_value) - assert len(token_ids) == max_len - return token_ids - - def get_seg_id(instance, max_len, sep_id): - _segs = [-1] + [i for i, idx in enumerate(instance['article']) if idx == sep_id] - segs = [_segs[i] - _segs[i - 1] for i in range(1, len(_segs))] - segment_id = [] - for i, length in enumerate(segs): - if i % 2 == 0: - segment_id += length * [0] - else: - segment_id += length * [1] - while len(segment_id) < max_len: - segment_id.append(0) - return segment_id - - def get_cls_id(instance, cls_id): - classification_id = [i for i, idx in enumerate(instance['article']) if idx == cls_id] - return classification_id - - def get_labels(instance): - labels = [0] * len(instance['cls_id']) - label_idx = list(filter(lambda x: x < len(instance['cls_id']), instance['label'])) - for idx in label_idx: - labels[idx] = 1 - return labels - - datasets = {} - for name in paths: - datasets[name] = self._load(paths[name]) - - # remove empty samples - datasets[name].drop(lambda ins: len(ins['article']) == 0 or len(ins['label']) == 0) - - # truncate articles - datasets[name].apply(lambda ins: truncate_articles(ins, self.max_nsents, self.max_ntokens), new_field_name='article') - - # truncate labels - datasets[name].apply(truncate_labels, new_field_name='label') - - # tokenize and convert tokens to id - datasets[name].apply(lambda ins: bert_tokenize(ins, self.tokenizer, self.max_len, self.pad_id), new_field_name='article') - - # get segment id - datasets[name].apply(lambda ins: get_seg_id(ins, self.max_len, self.sep_id), new_field_name='segment_id') - - # get classification id - datasets[name].apply(lambda ins: get_cls_id(ins, self.cls_id), new_field_name='cls_id') - - # get label - datasets[name].apply(get_labels, new_field_name='label') - - # rename filed - datasets[name].rename_field('article', Const.INPUTS(0)) - datasets[name].rename_field('segment_id', Const.INPUTS(1)) - datasets[name].rename_field('cls_id', Const.INPUTS(2)) - 
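`get_seg_id` above derives alternating 0/1 segment ids from the `[SEP]` positions. A toy walk-through on made-up token ids (91 stands in for the `[SEP]` id; real ids come from the BERT vocabulary):

```python
# Toy walk-through of get_seg_id above, on made-up token ids.
sep_id, max_len = 91, 12
token_ids = [101, 7, 8, 91, 102, 9, 91, 102, 10, 91]

_segs = [-1] + [i for i, idx in enumerate(token_ids) if idx == sep_id]
segs = [_segs[i] - _segs[i - 1] for i in range(1, len(_segs))]
segment_id = []
for i, length in enumerate(segs):
    segment_id += length * [i % 2]               # alternate 0/1 per sentence
segment_id += [0] * (max_len - len(segment_id))  # pad with 0
print(segment_id)  # [0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0]
```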
datasets[name].rename_field('label', Const.TARGET) - - # set input and target - datasets[name].set_input(Const.INPUTS(0), Const.INPUTS(1), Const.INPUTS(2)) - datasets[name].set_target(Const.TARGET) - - # set padding value - datasets[name].set_pad_val('article', 0) - - return DataBundle(datasets=datasets) - - -class BertSumLoader(JsonLoader): - - def __init__(self): - fields = {'article': 'article', - 'segment_id': 'segment_id', - 'cls_id': 'cls_id', - 'label': Const.TARGET - } - super(BertSumLoader, self).__init__(fields=fields) - - def _load(self, paths): - dataset = super(BertSumLoader, self)._load(paths) - return dataset - - def process(self, paths): - - def get_seq_len(instance): - return len(instance['article']) - - print('Start loading datasets !!!') - start = time() - - # load datasets - datasets = {} - for name in paths: - datasets[name] = self._load(paths[name]) - - datasets[name].apply(get_seq_len, new_field_name='seq_len') - - # set input and target - datasets[name].set_input('article', 'segment_id', 'cls_id') - datasets[name].set_target(Const.TARGET) - - # set padding value - datasets[name].set_pad_val('article', 0) - datasets[name].set_pad_val('segment_id', 0) - datasets[name].set_pad_val('cls_id', -1) - datasets[name].set_pad_val(Const.TARGET, 0) - - print('Finished in {}'.format(timedelta(seconds=time()-start))) - - return DataBundle(datasets=datasets) diff --git a/reproduction/Summarization/BertSum/metrics.py b/reproduction/Summarization/BertSum/metrics.py deleted file mode 100644 index 228f6789..00000000 --- a/reproduction/Summarization/BertSum/metrics.py +++ /dev/null @@ -1,178 +0,0 @@ -import numpy as np -import json -from os.path import join -import torch -import logging -import tempfile -import subprocess as sp -from datetime import timedelta -from time import time - -from pyrouge import Rouge155 -from pyrouge.utils import log - -from fastNLP.core.losses import LossBase -from fastNLP.core.metrics import MetricBase - -_ROUGE_PATH = '/path/to/RELEASE-1.5.5' - -class MyBCELoss(LossBase): - - def __init__(self, pred=None, target=None, mask=None): - super(MyBCELoss, self).__init__() - self._init_param_map(pred=pred, target=target, mask=mask) - self.loss_func = torch.nn.BCELoss(reduction='none') - - def get_loss(self, pred, target, mask): - loss = self.loss_func(pred, target.float()) - loss = (loss * mask.float()).sum() - return loss - -class LossMetric(MetricBase): - def __init__(self, pred=None, target=None, mask=None): - super(LossMetric, self).__init__() - self._init_param_map(pred=pred, target=target, mask=mask) - self.loss_func = torch.nn.BCELoss(reduction='none') - self.avg_loss = 0.0 - self.nsamples = 0 - - def evaluate(self, pred, target, mask): - batch_size = pred.size(0) - loss = self.loss_func(pred, target.float()) - loss = (loss * mask.float()).sum() - self.avg_loss += loss - self.nsamples += batch_size - - def get_metric(self, reset=True): - self.avg_loss = self.avg_loss / self.nsamples - eval_result = {'loss': self.avg_loss} - if reset: - self.avg_loss = 0 - self.nsamples = 0 - return eval_result - -class RougeMetric(MetricBase): - def __init__(self, data_path, dec_path, ref_path, n_total, n_ext=3, ngram_block=3, pred=None, target=None, mask=None): - super(RougeMetric, self).__init__() - self._init_param_map(pred=pred, target=target, mask=mask) - self.data_path = data_path - self.dec_path = dec_path - self.ref_path = ref_path - self.n_total = n_total - self.n_ext = n_ext - self.ngram_block = ngram_block - - self.cur_idx = 0 - self.ext = [] - self.start =
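`MyBCELoss` and `LossMetric` above apply the same masked binary cross-entropy, so padded sentence slots do not contribute to the loss. A minimal check on dummy tensors:

```python
import torch

# Padded sentence slots (mask == 0) contribute nothing to the loss.
loss_func = torch.nn.BCELoss(reduction='none')
pred = torch.tensor([[0.9, 0.2, 0.5]])
target = torch.tensor([[1, 0, 0]])
mask = torch.tensor([[1, 1, 0]])  # the last slot is padding

loss = loss_func(pred, target.float())
loss = (loss * mask.float()).sum()
print(loss)  # only the first two positions are counted
```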
time() - - @staticmethod - def eval_rouge(dec_dir, ref_dir): - assert _ROUGE_PATH is not None - log.get_global_console_logger().setLevel(logging.WARNING) - dec_pattern = '(\d+).dec' - ref_pattern = '#ID#.ref' - cmd = '-c 95 -r 1000 -n 2 -m' - with tempfile.TemporaryDirectory() as tmp_dir: - Rouge155.convert_summaries_to_rouge_format( - dec_dir, join(tmp_dir, 'dec')) - Rouge155.convert_summaries_to_rouge_format( - ref_dir, join(tmp_dir, 'ref')) - Rouge155.write_config_static( - join(tmp_dir, 'dec'), dec_pattern, - join(tmp_dir, 'ref'), ref_pattern, - join(tmp_dir, 'settings.xml'), system_id=1 - ) - cmd = (join(_ROUGE_PATH, 'ROUGE-1.5.5.pl') - + ' -e {} '.format(join(_ROUGE_PATH, 'data')) - + cmd - + ' -a {}'.format(join(tmp_dir, 'settings.xml'))) - output = sp.check_output(cmd.split(' '), universal_newlines=True) - R_1 = float(output.split('\n')[3].split(' ')[3]) - R_2 = float(output.split('\n')[7].split(' ')[3]) - R_L = float(output.split('\n')[11].split(' ')[3]) - print(output) - return R_1, R_2, R_L - - def evaluate(self, pred, target, mask): - pred = pred + mask.float() - pred = pred.cpu().data.numpy() - ext_ids = np.argsort(-pred, 1) - for sent_id in ext_ids: - self.ext.append(sent_id) - self.cur_idx += 1 - print('{}/{} ({:.2f}%) decoded in {} seconds\r'.format( - self.cur_idx, self.n_total, self.cur_idx/self.n_total*100, timedelta(seconds=int(time()-self.start)) - ), end='') - - def get_metric(self, use_ngram_block=True, reset=True): - - def check_n_gram(sentence, n, dic): - tokens = sentence.split(' ') - s_len = len(tokens) - for i in range(s_len): - if i + n > s_len: - break - if ' '.join(tokens[i: i + n]) in dic: - return False - return True # no n_gram overlap - - # load original data - data = [] - with open(self.data_path) as f: - for line in f: - cur_data = json.loads(line) - if 'text' in cur_data: - new_data = {} - new_data['article'] = cur_data['text'] - new_data['abstract'] = cur_data['summary'] - data.append(new_data) - else: - data.append(cur_data) - - # write decode sentences and references - if use_ngram_block == True: - print('\nStart {}-gram blocking !!!'.format(self.ngram_block)) - for i, ext_ids in enumerate(self.ext): - dec, ref = [], [] - if use_ngram_block == False: - n_sent = min(len(data[i]['article']), self.n_ext) - for j in range(n_sent): - idx = ext_ids[j] - dec.append(data[i]['article'][idx]) - else: - n_sent = len(ext_ids) - dic = {} - for j in range(n_sent): - sent = data[i]['article'][ext_ids[j]] - if check_n_gram(sent, self.ngram_block, dic) == True: - dec.append(sent) - # update dic - tokens = sent.split(' ') - s_len = len(tokens) - for k in range(s_len): - if k + self.ngram_block > s_len: - break - dic[' '.join(tokens[k: k + self.ngram_block])] = 1 - if len(dec) >= self.n_ext: - break - - for sent in data[i]['abstract']: - ref.append(sent) - - with open(join(self.dec_path, '{}.dec'.format(i)), 'w') as f: - for sent in dec: - print(sent, file=f) - with open(join(self.ref_path, '{}.ref'.format(i)), 'w') as f: - for sent in ref: - print(sent, file=f) - - print('\nStart evaluating ROUGE score !!!') - R_1, R_2, R_L = RougeMetric.eval_rouge(self.dec_path, self.ref_path) - eval_result = {'ROUGE-1': R_1, 'ROUGE-2': R_2, 'ROUGE-L':R_L} - - if reset == True: - self.cur_idx = 0 - self.ext = [] - self.start = time() - return eval_result diff --git a/reproduction/Summarization/BertSum/model.py b/reproduction/Summarization/BertSum/model.py deleted file mode 100644 index 34a05495..00000000 --- a/reproduction/Summarization/BertSum/model.py +++ /dev/null @@ -1,51 +0,0 
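`get_metric` uses `check_n_gram` for n-gram blocking: a ranked candidate sentence is skipped if it repeats any n-gram that an already selected sentence contains. A self-contained trigram-blocking example:

```python
def check_n_gram(sentence, n, dic):
    # Same predicate as above: True iff the sentence shares no n-gram with dic.
    tokens = sentence.split(' ')
    for i in range(len(tokens) - n + 1):
        if ' '.join(tokens[i:i + n]) in dic:
            return False
    return True

# Trigram blocking over (already ranked) candidate sentences.
ranked = ['the cat sat on the mat', 'a dog barked', 'cat sat on the mat again']
dic, selected, n = {}, [], 3
for sent in ranked:
    if check_n_gram(sent, n, dic):
        selected.append(sent)
        tokens = sent.split(' ')
        for k in range(len(tokens) - n + 1):
            dic[' '.join(tokens[k:k + n])] = 1
print(selected)  # the third sentence is blocked by the repeated 'cat sat on'
```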
@@ -import torch -from torch import nn -from torch.nn import init - -from fastNLP.modules.encoder.bert import BertModel - - -class Classifier(nn.Module): - def __init__(self, hidden_size): - super(Classifier, self).__init__() - self.linear = nn.Linear(hidden_size, 1) - self.sigmoid = nn.Sigmoid() - - def forward(self, inputs, mask_cls): - h = self.linear(inputs).squeeze(-1) # [batch_size, seq_len] - sent_scores = self.sigmoid(h) * mask_cls.float() - return sent_scores - - -class BertSum(nn.Module): - - def __init__(self, hidden_size=768): - super(BertSum, self).__init__() - - self.hidden_size = hidden_size - - self.encoder = BertModel.from_pretrained('/path/to/uncased_L-12_H-768_A-12') - self.decoder = Classifier(self.hidden_size) - - def forward(self, article, segment_id, cls_id): - - # print(article.device) - # print(segment_id.device) - # print(cls_id.device) - - input_mask = 1 - (article == 0).long() - mask_cls = 1 - (cls_id == -1).long() - assert input_mask.size() == article.size() - assert mask_cls.size() == cls_id.size() - - bert_out = self.encoder(article, token_type_ids=segment_id, attention_mask=input_mask) - bert_out = bert_out[0][-1] # last layer - - sent_emb = bert_out[torch.arange(bert_out.size(0)).unsqueeze(1), cls_id] - sent_emb = sent_emb * mask_cls.unsqueeze(-1).float() - assert sent_emb.size() == (article.size(0), cls_id.size(1), self.hidden_size) # [batch_size, seq_len, hidden_size] - - sent_scores = self.decoder(sent_emb, mask_cls) # [batch_size, seq_len] - assert sent_scores.size() == (article.size(0), cls_id.size(1)) - - return {'pred': sent_scores, 'mask': mask_cls} diff --git a/reproduction/Summarization/BertSum/train_BertSum.py b/reproduction/Summarization/BertSum/train_BertSum.py deleted file mode 100644 index d34fa0b9..00000000 --- a/reproduction/Summarization/BertSum/train_BertSum.py +++ /dev/null @@ -1,147 +0,0 @@ -import sys -import argparse -import os -import json -import torch -from time import time -from datetime import timedelta -from os.path import join, exists -from torch.optim import Adam - -from utils import get_data_path, get_rouge_path - -from dataloader import BertSumLoader -from model import BertSum -from fastNLP.core.optimizer import AdamW -from metrics import MyBCELoss, LossMetric, RougeMetric -from fastNLP.core.sampler import BucketSampler -from callback import MyCallback, SaveModelCallback -from fastNLP.core.trainer import Trainer -from fastNLP.core.tester import Tester - - -def configure_training(args): - devices = [int(gpu) for gpu in args.gpus.split(',')] - params = {} - params['label_type'] = args.label_type - params['batch_size'] = args.batch_size - params['accum_count'] = args.accum_count - params['max_lr'] = args.max_lr - params['warmup_steps'] = args.warmup_steps - params['n_epochs'] = args.n_epochs - params['valid_steps'] = args.valid_steps - return devices, params - -def train_model(args): - - # check if the data_path and save_path exists - data_paths = get_data_path(args.mode, args.label_type) - for name in data_paths: - assert exists(data_paths[name]) - if not exists(args.save_path): - os.makedirs(args.save_path) - - # load summarization datasets - datasets = BertSumLoader().process(data_paths) - print('Information of dataset is:') - print(datasets) - train_set = datasets.datasets['train'] - valid_set = datasets.datasets['val'] - - # configure training - devices, train_params = configure_training(args) - with open(join(args.save_path, 'params.json'), 'w') as f: - json.dump(train_params, f, indent=4) - print('Devices is:') - 
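The core of `BertSum.forward` is the advanced-indexing step that picks one hidden vector per `[CLS]` position and zeroes the padded slots (`cls_id == -1` indexes the last position and is then masked out). The same trick on dummy tensors:

```python
import torch

# Pick one hidden vector per [CLS] position; -1 pads cls_id and is masked out.
batch, seq_len, hidden = 2, 6, 4
bert_out = torch.randn(batch, seq_len, hidden)
cls_id = torch.tensor([[0, 3, 5], [0, 2, -1]])

sent_emb = bert_out[torch.arange(batch).unsqueeze(1), cls_id]
mask_cls = 1 - (cls_id == -1).long()
sent_emb = sent_emb * mask_cls.unsqueeze(-1).float()
print(sent_emb.shape)  # torch.Size([2, 3, 4]); the padded slot is zeroed
```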
print(devices) - - # configure model - model = BertSum() - optimizer = Adam(filter(lambda p: p.requires_grad, model.parameters()), lr=0) - callbacks = [MyCallback(args), SaveModelCallback(args.save_path)] - criterion = MyBCELoss() - val_metric = [LossMetric()] - # sampler = BucketSampler(num_buckets=32, batch_size=args.batch_size) - trainer = Trainer(train_data=train_set, model=model, optimizer=optimizer, - loss=criterion, batch_size=args.batch_size, # sampler=sampler, - update_every=args.accum_count, n_epochs=args.n_epochs, - print_every=100, dev_data=valid_set, metrics=val_metric, - metric_key='-loss', validate_every=args.valid_steps, - save_path=args.save_path, device=devices, callbacks=callbacks) - - print('Start training with the following hyper-parameters:') - print(train_params) - trainer.train() - -def test_model(args): - - models = os.listdir(args.save_path) - - # load dataset - data_paths = get_data_path(args.mode, args.label_type) - datasets = BertSumLoader().process(data_paths) - print('Information of dataset is:') - print(datasets) - test_set = datasets.datasets['test'] - - # only need 1 gpu for testing - device = int(args.gpus) - - args.batch_size = 1 - - for cur_model in models: - - print('Current model is {}'.format(cur_model)) - - # load model - model = torch.load(join(args.save_path, cur_model)) - - # configure testing - original_path, dec_path, ref_path = get_rouge_path(args.label_type) - test_metric = RougeMetric(data_path=original_path, dec_path=dec_path, - ref_path=ref_path, n_total = len(test_set)) - tester = Tester(data=test_set, model=model, metrics=[test_metric], - batch_size=args.batch_size, device=device) - tester.test() - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description='training/testing of BertSum(liu et al. 
2019)' - ) - parser.add_argument('--mode', required=True, - help='training or testing of BertSum', type=str) - - parser.add_argument('--label_type', default='greedy', - help='greedy/limit', type=str) - parser.add_argument('--save_path', required=True, - help='root of the model', type=str) - # example for gpus input: '0,1,2,3' - parser.add_argument('--gpus', required=True, - help='available gpus for training (separated by commas)', type=str) - - parser.add_argument('--batch_size', default=18, - help='the training batch size', type=int) - parser.add_argument('--accum_count', default=2, - help='number of update steps to accumulate before performing a backward/update pass.', type=int) - parser.add_argument('--max_lr', default=2e-5, - help='max learning rate for warm up', type=float) - parser.add_argument('--warmup_steps', default=10000, - help='warm up steps for training', type=int) - parser.add_argument('--n_epochs', default=10, - help='total number of training epochs', type=int) - parser.add_argument('--valid_steps', default=1000, - help='number of update steps for checkpoint and validation', type=int) - - args = parser.parse_args() - - if args.mode == 'train': - print('Training process of BertSum !!!') - train_model(args) - else: - print('Testing process of BertSum !!!') - test_model(args) - - - - diff --git a/reproduction/Summarization/BertSum/utils.py b/reproduction/Summarization/BertSum/utils.py deleted file mode 100644 index 2ba848b7..00000000 --- a/reproduction/Summarization/BertSum/utils.py +++ /dev/null @@ -1,24 +0,0 @@ -import os -from os.path import exists - -def get_data_path(mode, label_type): - paths = {} - if mode == 'train': - paths['train'] = 'data/' + label_type + '/bert.train.jsonl' - paths['val'] = 'data/' + label_type + '/bert.val.jsonl' - else: - paths['test'] = 'data/' + label_type + '/bert.test.jsonl' - return paths - -def get_rouge_path(label_type): - if label_type == 'others': - data_path = 'data/' + label_type + '/bert.test.jsonl' - else: - data_path = 'data/' + label_type + '/test.jsonl' - dec_path = 'dec' - ref_path = 'ref' - if not exists(ref_path): - os.makedirs(ref_path) - if not exists(dec_path): - os.makedirs(dec_path) - return data_path, dec_path, ref_path diff --git a/reproduction/Summarization/README.md b/reproduction/Summarization/README.md deleted file mode 100644 index 1df15d56..00000000 --- a/reproduction/Summarization/README.md +++ /dev/null @@ -1,141 +0,0 @@ -# Summarization - -## Extractive Summarization - - -### Models - -Models implemented in fastNLP include: - -1. Get To The Point: Summarization with Pointer-Generator Networks (See et al. 2017) -2. Searching for Effective Neural Extractive Summarization: What Works and What's Next (Zhong et al. 2019) -3. Fine-tune BERT for Extractive Summarization (Liu et al.
2019) - - - - -### Dataset - -The summarization datasets provided here include: - -- CNN/DailyMail ([Get To The Point: Summarization with Pointer-Generator Networks](http://arxiv.org/abs/1704.04368)) -- Newsroom -- The New York Times Annotated Corpus - - NYT - - NYT50 -- DUC - - 2002 Task4 - - 2003/2004 Task1 -- arXiv -- PubMed - - -Preprocessed versions of the public datasets (CNN/DailyMail, Newsroom, arXiv, PubMed) can be downloaded from: - -- [Baidu Netdisk](https://pan.baidu.com/s/11qWnDjK9lb33mFZ9vuYlzA) (extraction code: h1px) -- [Google Drive](https://drive.google.com/file/d/1uzeSdcLk5ilHaUTeJRNrf-_j59CQGe6r/view?usp=drivesdk) - -For the datasets that cannot be made public (NYT, NYT50, DUC), the data-processing scripts are placed in the data folder - - - -### Evaluation - -#### FastRougeMetric - -FastRougeMetric uses an unofficial Python implementation of ROUGE to compute fast, approximate ROUGE values during training. - The source code is available at [https://github.com/pltrdy/rouge](https://github.com/pltrdy/rouge) - -In fastNLP this method is wrapped as the FastRougeMetric class in Metric.py, so the trainer can use it directly; a usage sketch follows at the end of this README. -The rouge library must be installed with pip beforehand. - - pip install rouge - - -**Note: because of implementation differences, the results deviate from official ROUGE scores by 1-2 points; use them only as a rough estimate of the optimization trend during training.** - - - -#### PyRougeMetric - -PyRougeMetric uses the official ROUGE 1.5.5 evaluation library from the paper [*ROUGE: A Package for Automatic Evaluation of Summaries*](https://www.aclweb.org/anthology/W04-1013). - -Since the original ROUGE runs on a Perl interpreter, [pyrouge](https://github.com/bheinzerling/pyrouge) wraps it in Python, and PyRougeMetric wraps it further into a Metric class that the trainer can use directly. - -Using ROUGE 1.5.5 requires installing a series of dependencies with sudo privileges. - -1. Installing ROUGE itself on Ubuntu is covered in this [blog post](https://blog.csdn.net/Hay54/article/details/78744912) -2. Configuring WordNet: -```shell -$ cd ~/rouge/RELEASE-1.5.5/data/WordNet-2.0-Exceptions/ -$ ./buildExeptionDB.pl . exc WordNet-2.0.exc.db -$ cd ../ -$ ln -s WordNet-2.0-Exceptions/WordNet-2.0.exc.db WordNet-2.0.exc.db -``` -3. Installing pyrouge -```shell -$ git clone https://github.com/bheinzerling/pyrouge -$ cd pyrouge -$ python setup.py install -``` -4. Checking that ROUGE is installed correctly -```shell -$ pyrouge_set_rouge_path /absolute/path/to/ROUGE-1.5.5/directory -$ python -m pyrouge.test -``` - - - - -### Dataset_loader - -- SummarizationLoader: reads preprocessed datasets in jsonl format and returns the following fields - - text: the article body - - summary: the summary - - domain: optional, the site the article was published on - - tag: optional, content tags of the article - - labels: extractive sentence labels - -- BertSumLoader: reads datasets formatted as input for BertSum (Liu 2019) and returns the following fields: - - article: vocabulary IDs of each article after truncation to 512 - - segment_id: the 0/1 segment each sentence belongs to - - cls_id: the positions of '[CLS]' in the input - - label: extractive sentence labels - - - -### Train Cmdline - -#### Baseline - -LSTM + Sequence Labeling - - python train.py --cuda --gpu --sentence_encoder deeplstm --sentence_decoder SeqLab --save_root --log_root --lr_descent --grad_clip --max_grad_norm 10 - -Transformer + Sequence Labeling - - python train.py --cuda --gpu --sentence_encoder transformer --sentence_decoder SeqLab --save_root --log_root --lr_descent --grad_clip --max_grad_norm 10 - - - -#### BertSum - - - -### Performance and Hyperparameters - -| Model | ROUGE-1 | ROUGE-2 | ROUGE-L | Paper | -| :-----------------------------: | :-----: | :-----: | :-----: | :-----------------------------------------: | -| LEAD 3 | 40.11 | 17.64 | 36.32 | our data pre-process | -| ORACLE | 55.24 | 31.14 | 50.96 | our data pre-process | -| LSTM + Sequence Labeling | 40.72 | 18.27 | 36.98 | | -| Transformer + Sequence Labeling | 40.86 | 18.38 | 37.18 | | -| LSTM + Pointer Network | - | - | - | | -| Transformer + Pointer Network | - | - | - | | -| BERTSUM | 42.71 | 19.76 | 39.03 | Fine-tune BERT for Extractive Summarization | -| LSTM+PN+BERT+RL | - | - | - | | - - - -## Abstractive Summarization -Still in Progress...
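As a usage note for the FastRougeMetric section above: the unofficial `rouge` package it builds on can also be called directly. A minimal sketch (the hypothesis and reference sentences here are made up):

```python
# pip install rouge
from rouge import Rouge

hyp = 'the cat was found under the bed'
ref = 'the cat was under the bed'
print(Rouge().get_scores(hyp, ref, avg=True))  # rouge-1/rouge-2/rouge-l p/r/f
```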
\ No newline at end of file diff --git a/reproduction/__init__.py b/reproduction/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/reproduction/coreference_resolution/README.md b/reproduction/coreference_resolution/README.md deleted file mode 100644 index c1a286e5..00000000 --- a/reproduction/coreference_resolution/README.md +++ /dev/null @@ -1,49 +0,0 @@ -# Coreference Resolution Reproduction -## Introduction -Coreference resolution is the task of finding all expressions in a text that refer to the same real-world entity. -It is an important step for many higher-level NLP tasks that involve natural language understanding, -such as document summarization, question answering and information extraction. -The implementation is mainly based on [End-to-End Coreference Resolution (Lee et al, 2017)](https://arxiv.org/pdf/1707.07045). - - -## Data Acquisition and Preprocessing -The paper achieved the state-of-the-art result of its time on the [OntoNote5.0](https://allennlp.org/models) dataset. -Because of copyright restrictions this project cannot distribute the dataset; please download it yourself. -The raw dataset is in conll format; for details see the official description page of the dataset. - -The implementation adopts the preprocessing of the paper's author Lee; see [this script](https://github.com/kentonl/e2e-coref/blob/e2e/setup_training.sh) for the specifics. -The processed dataset is in json format, for example: -``` -{ - "clusters": [], - "doc_key": "nw", - "sentences": [["This", "is", "the", "first", "sentence", "."], ["This", "is", "the", "second", "."]], - "speakers": [["spk1", "spk1", "spk1", "spk1", "spk1", "spk1"], ["spk2", "spk2", "spk2", "spk2", "spk2"]] -} -``` - -### Embedding downloads -[turian embedding](https://lil.cs.washington.edu/coref/turian.50d.txt) - -[glove embedding](https://nlp.stanford.edu/data/glove.840B.300d.zip) - - - -## Running -```shell -# training -CUDA_VISIBLE_DEVICES=0 python train.py -# testing -CUDA_VISIBLE_DEVICES=0 python valid.py -``` - -## Results -The original authors report 67.2% on the test set; the AllenNLP reproduction reaches [63.0%](https://allennlp.org/models). -AllenNLP trains without speaker information and without variational dropout, and uses only 100 antecedents instead of 250. - -With the same hyper-parameters and configuration as AllenNLP, this reproduction reaches an F1 of 63.6%. - - -## Questions -If you have any questions or feedback, please open an issue or contact me by e-mail: -yexu_i@qq.com diff --git a/reproduction/coreference_resolution/__init__.py b/reproduction/coreference_resolution/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/reproduction/coreference_resolution/model/__init__.py b/reproduction/coreference_resolution/model/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/reproduction/coreference_resolution/model/config.py b/reproduction/coreference_resolution/model/config.py deleted file mode 100644 index 6011257b..00000000 --- a/reproduction/coreference_resolution/model/config.py +++ /dev/null @@ -1,54 +0,0 @@ -class Config(): - def __init__(self): - self.is_training = True - # path - self.glove = 'data/glove.840B.300d.txt.filtered' - self.turian = 'data/turian.50d.txt' - self.train_path = "data/train.english.jsonlines" - self.dev_path = "data/dev.english.jsonlines" - self.test_path = "data/test.english.jsonlines" - self.char_path = "data/char_vocab.english.txt" - - self.cuda = "0" - self.max_word = 1500 - self.epoch = 200 - - # config - # self.use_glove = True - # self.use_turian = True #No - self.use_elmo = False - self.use_CNN = True - self.model_heads = True #Yes - self.use_width = True # Yes - self.use_distance = True #Yes - self.use_metadata = True #Yes - - self.mention_ratio = 0.4 - self.max_sentences = 50 - self.span_width = 10 - self.feature_size = 20 # embedding size of the span-width feature - self.lr = 0.001 - self.lr_decay = 1e-3 - self.max_antecedents = 100 # this parameter is not used in mention detection - self.atten_hidden_size = 150 - self.mention_hidden_size = 150 - self.sa_hidden_size = 150 - - self.char_emb_size = 8 - self.filter = [3,4,5] - - - # decay = 1e-5 - - def __str__(self): - d = self.__dict__ - out = 'config==============\n' - for i in list(d): - out += i+":" - out += str(d[i])+"\n" - out+="config==============\n" - return out - -if __name__=="__main__": - config = Config()
- print(config) diff --git a/reproduction/coreference_resolution/model/metric.py b/reproduction/coreference_resolution/model/metric.py deleted file mode 100644 index 7687e685..00000000 --- a/reproduction/coreference_resolution/model/metric.py +++ /dev/null @@ -1,164 +0,0 @@ -from fastNLP.core.metrics import MetricBase - -import numpy as np - -from collections import Counter -from sklearn.utils.linear_assignment_ import linear_assignment - -""" -Mostly borrowed from https://github.com/clarkkev/deep-coref/blob/master/evaluation.py -""" - - - -class CRMetric(MetricBase): - def __init__(self): - super().__init__() - self.evaluators = [Evaluator(m) for m in (muc, b_cubed, ceafe)] - - # TODO 改名为evaluate,输入也 - def evaluate(self, predicted, mention_to_predicted,target): - clusters = target - for e in self.evaluators: - e.update(predicted,mention_to_predicted, clusters) - - def get_f1(self): - return sum(e.get_f1() for e in self.evaluators) / len(self.evaluators) - - def get_recall(self): - return sum(e.get_recall() for e in self.evaluators) / len(self.evaluators) - - def get_precision(self): - return sum(e.get_precision() for e in self.evaluators) / len(self.evaluators) - - # TODO 原本的getprf - def get_metric(self,reset=False): - res = {"pre":self.get_precision(), "rec":self.get_recall(), "f":self.get_f1()} - self.evaluators = [Evaluator(m) for m in (muc, b_cubed, ceafe)] - return res - - - - - - -class Evaluator(): - def __init__(self, metric, beta=1): - self.p_num = 0 - self.p_den = 0 - self.r_num = 0 - self.r_den = 0 - self.metric = metric - self.beta = beta - - def update(self, predicted,mention_to_predicted,gold): - gold = gold[0].tolist() - gold = [tuple(tuple(m) for m in gc) for gc in gold] - mention_to_gold = {} - for gc in gold: - for mention in gc: - mention_to_gold[mention] = gc - - if self.metric == ceafe: - pn, pd, rn, rd = self.metric(predicted, gold) - else: - pn, pd = self.metric(predicted, mention_to_gold) - rn, rd = self.metric(gold, mention_to_predicted) - self.p_num += pn - self.p_den += pd - self.r_num += rn - self.r_den += rd - - def get_f1(self): - return f1(self.p_num, self.p_den, self.r_num, self.r_den, beta=self.beta) - - def get_recall(self): - return 0 if self.r_num == 0 else self.r_num / float(self.r_den) - - def get_precision(self): - return 0 if self.p_num == 0 else self.p_num / float(self.p_den) - - def get_prf(self): - return self.get_precision(), self.get_recall(), self.get_f1() - - def get_counts(self): - return self.p_num, self.p_den, self.r_num, self.r_den - - - -def b_cubed(clusters, mention_to_gold): - num, dem = 0, 0 - - for c in clusters: - if len(c) == 1: - continue - - gold_counts = Counter() - correct = 0 - for m in c: - if m in mention_to_gold: - gold_counts[tuple(mention_to_gold[m])] += 1 - for c2, count in gold_counts.items(): - if len(c2) != 1: - correct += count * count - - num += correct / float(len(c)) - dem += len(c) - - return num, dem - - -def muc(clusters, mention_to_gold): - tp, p = 0, 0 - for c in clusters: - p += len(c) - 1 - tp += len(c) - linked = set() - for m in c: - if m in mention_to_gold: - linked.add(mention_to_gold[m]) - else: - tp -= 1 - tp -= len(linked) - return tp, p - - -def phi4(c1, c2): - return 2 * len([m for m in c1 if m in c2]) / float(len(c1) + len(c2)) - - -def ceafe(clusters, gold_clusters): - clusters = [c for c in clusters if len(c) != 1] - scores = np.zeros((len(gold_clusters), len(clusters))) - for i in range(len(gold_clusters)): - for j in range(len(clusters)): - scores[i, j] = phi4(gold_clusters[i], clusters[j]) - 
matching = linear_assignment(-scores) - similarity = sum(scores[matching[:, 0], matching[:, 1]]) - return similarity, len(clusters), similarity, len(gold_clusters) - - -def lea(clusters, mention_to_gold): - num, dem = 0, 0 - - for c in clusters: - if len(c) == 1: - continue - - common_links = 0 - all_links = len(c) * (len(c) - 1) / 2.0 - for i, m in enumerate(c): - if m in mention_to_gold: - for m2 in c[i + 1:]: - if m2 in mention_to_gold and mention_to_gold[m] == mention_to_gold[m2]: - common_links += 1 - - num += len(c) * common_links / float(all_links) - dem += len(c) - - return num, dem - -def f1(p_num, p_den, r_num, r_den, beta=1): - p = 0 if p_den == 0 else p_num / float(p_den) - r = 0 if r_den == 0 else r_num / float(r_den) - return 0 if p + r == 0 else (1 + beta * beta) * p * r / (beta * beta * p + r) diff --git a/reproduction/coreference_resolution/model/model_re.py b/reproduction/coreference_resolution/model/model_re.py deleted file mode 100644 index 92f9bc03..00000000 --- a/reproduction/coreference_resolution/model/model_re.py +++ /dev/null @@ -1,602 +0,0 @@ -import torch -import numpy as np -import torch.nn as nn -import torch.nn.functional as F - -from allennlp.commands.elmo import ElmoEmbedder -from fastNLP.models.base_model import BaseModel -from fastNLP.modules.encoder.variational_rnn import VarLSTM -from reproduction.coreference_resolution.model import preprocess -from fastNLP.io.embed_loader import EmbedLoader -from fastNLP.core.const import Const -import random - -# 设置seed -torch.manual_seed(0) # cpu -torch.cuda.manual_seed(0) # gpu -np.random.seed(0) # numpy -random.seed(0) - -class ffnn(nn.Module): - def __init__(self, input_size, hidden_size, output_size): - super(ffnn, self).__init__() - - self.f = nn.Sequential( - # 多少层数 - nn.Linear(input_size, hidden_size), - nn.ReLU(inplace=True), - nn.Dropout(p=0.2), - nn.Linear(hidden_size, hidden_size), - nn.ReLU(inplace=True), - nn.Dropout(p=0.2), - nn.Linear(hidden_size, output_size) - ) - self.reset_param() - - def reset_param(self): - for name, param in self.named_parameters(): - if param.dim() > 1: - nn.init.xavier_normal_(param) - # param.data = torch.tensor(np.random.randn(*param.shape)).float() - else: - nn.init.zeros_(param) - - def forward(self, input): - return self.f(input).squeeze() - - -class Model(BaseModel): - def __init__(self, vocab, config): - word2id = vocab.word2idx - super(Model, self).__init__() - vocab_num = len(word2id) - self.word2id = word2id - self.config = config - self.char_dict = preprocess.get_char_dict('data/char_vocab.english.txt') - self.genres = {g: i for i, g in enumerate(["bc", "bn", "mz", "nw", "pt", "tc", "wb"])} - self.device = torch.device("cuda:" + config.cuda) - - self.emb = nn.Embedding(vocab_num, 350) - - emb1 = EmbedLoader().load_with_vocab(config.glove, vocab,normalize=False) - emb2 = EmbedLoader().load_with_vocab(config.turian, vocab ,normalize=False) - pre_emb = np.concatenate((emb1, emb2), axis=1) - pre_emb /= (np.linalg.norm(pre_emb, axis=1, keepdims=True) + 1e-12) - - if pre_emb is not None: - self.emb.weight = nn.Parameter(torch.from_numpy(pre_emb).float()) - for param in self.emb.parameters(): - param.requires_grad = False - self.emb_dropout = nn.Dropout(inplace=True) - - - if config.use_elmo: - self.elmo = ElmoEmbedder(options_file='data/elmo/elmo_2x4096_512_2048cnn_2xhighway_options.json', - weight_file='data/elmo/elmo_2x4096_512_2048cnn_2xhighway_weights.hdf5', - cuda_device=int(config.cuda)) - print("elmo load over.") - self.elmo_args = torch.randn((3), 
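A portability note on `ceafe` above: `sklearn.utils.linear_assignment_` has been removed from recent scikit-learn releases; `scipy.optimize.linear_sum_assignment` solves the same Hungarian matching. A sketch of the alignment step under that substitution, on a made-up similarity matrix:

```python
import numpy as np
from scipy.optimize import linear_sum_assignment

# Toy similarity matrix between 3 gold and 2 predicted clusters.
scores = np.array([[0.8, 0.1],
                   [0.2, 0.9],
                   [0.0, 0.3]])
row, col = linear_sum_assignment(-scores)   # negate to maximize similarity
similarity = scores[row, col].sum()
print(row, col, similarity)                 # pairs (0,0) and (1,1); 1.7
```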
requires_grad=True).to(self.device) - - self.char_emb = nn.Embedding(len(self.char_dict), config.char_emb_size) - self.conv1 = nn.Conv1d(config.char_emb_size, 50, 3) - self.conv2 = nn.Conv1d(config.char_emb_size, 50, 4) - self.conv3 = nn.Conv1d(config.char_emb_size, 50, 5) - - self.feature_emb = nn.Embedding(config.span_width, config.feature_size) - self.feature_emb_dropout = nn.Dropout(p=0.2, inplace=True) - - self.mention_distance_emb = nn.Embedding(10, config.feature_size) - self.distance_drop = nn.Dropout(p=0.2, inplace=True) - - self.genre_emb = nn.Embedding(7, config.feature_size) - self.speaker_emb = nn.Embedding(2, config.feature_size) - - self.bilstm = VarLSTM(input_size=350+150*config.use_CNN+config.use_elmo*1024,hidden_size=200,bidirectional=True,batch_first=True,hidden_dropout=0.2) - # self.bilstm = nn.LSTM(input_size=500, hidden_size=200, bidirectional=True, batch_first=True) - self.h0 = nn.init.orthogonal_(torch.empty(2, 1, 200)).to(self.device) - self.c0 = nn.init.orthogonal_(torch.empty(2, 1, 200)).to(self.device) - self.bilstm_drop = nn.Dropout(p=0.2, inplace=True) - - self.atten = ffnn(input_size=400, hidden_size=config.atten_hidden_size, output_size=1) - self.mention_score = ffnn(input_size=1320, hidden_size=config.mention_hidden_size, output_size=1) - self.sa = ffnn(input_size=3980+40*config.use_metadata, hidden_size=config.sa_hidden_size, output_size=1) - self.mention_start_np = None - self.mention_end_np = None - - def _reorder_lstm(self, word_emb, seq_lens): - sort_ind = sorted(range(len(seq_lens)), key=lambda i: seq_lens[i], reverse=True) - seq_lens_re = [seq_lens[i] for i in sort_ind] - emb_seq = self.reorder_sequence(word_emb, sort_ind, batch_first=True) - packed_seq = nn.utils.rnn.pack_padded_sequence(emb_seq, seq_lens_re, batch_first=True) - - h0 = self.h0.repeat(1, len(seq_lens), 1) - c0 = self.c0.repeat(1, len(seq_lens), 1) - packed_out, final_states = self.bilstm(packed_seq, (h0, c0)) - - lstm_out, _ = nn.utils.rnn.pad_packed_sequence(packed_out, batch_first=True) - back_map = {ind: i for i, ind in enumerate(sort_ind)} - reorder_ind = [back_map[i] for i in range(len(seq_lens_re))] - lstm_out = self.reorder_sequence(lstm_out, reorder_ind, batch_first=True) - return lstm_out - - def reorder_sequence(self, sequence_emb, order, batch_first=True): - """ - sequence_emb: [T, B, D] if not batch_first - order: list of sequence length - """ - batch_dim = 0 if batch_first else 1 - assert len(order) == sequence_emb.size()[batch_dim] - - order = torch.LongTensor(order) - order = order.to(sequence_emb).long() - - sorted_ = sequence_emb.index_select(index=order, dim=batch_dim) - - del order - return sorted_ - - def flat_lstm(self, lstm_out, seq_lens): - batch = lstm_out.shape[0] - seq = lstm_out.shape[1] - dim = lstm_out.shape[2] - l = [j + i * seq for i, seq_len in enumerate(seq_lens) for j in range(seq_len)] - flatted = torch.index_select(lstm_out.view(batch * seq, dim), 0, torch.LongTensor(l).to(self.device)) - return flatted - - def potential_mention_index(self, word_index, max_sent_len): - # get mention index [3,2]:the first sentence is 3 and secend 2 - # [0,0,0,1,1] --> [[0, 0], [0, 1], [1, 1], [1, 2], [2, 2], [3, 3], [3, 4], [4, 4]] (max =2) - potential_mention = [] - for i in range(len(word_index)): - for j in range(i, i + max_sent_len): - if (j < len(word_index) and word_index[i] == word_index[j]): - potential_mention.append([i, j]) - return potential_mention - - def get_mention_start_end(self, seq_lens): - # 序列长度转换成mention - # [3,2] --> [0,0,0,1,1] - word_index = 
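`potential_mention_index` above enumerates every span of at most `span_width` words that stays inside a single sentence. A stand-alone copy, run on the example from the method's own comment:

```python
def potential_mention_index(word_index, max_width):
    # Same enumeration as above: all [i, j] spans that stay inside one
    # sentence and cover at most max_width words.
    spans = []
    for i in range(len(word_index)):
        for j in range(i, i + max_width):
            if j < len(word_index) and word_index[i] == word_index[j]:
                spans.append([i, j])
    return spans

# Two sentences of lengths 3 and 2, maximum span width 2:
print(potential_mention_index([0, 0, 0, 1, 1], 2))
# [[0, 0], [0, 1], [1, 1], [1, 2], [2, 2], [3, 3], [3, 4], [4, 4]]
```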
[0] * sum(seq_lens) - sent_index = 0 - index = 0 - for length in seq_lens: - for l in range(length): - word_index[index] = sent_index - index += 1 - sent_index += 1 - - # [0,0,0,1,1]-->[[0,0],[0,1],[0,2]....] - mention_id = self.potential_mention_index(word_index, self.config.span_width) - mention_start = np.array(mention_id, dtype=int)[:, 0] - mention_end = np.array(mention_id, dtype=int)[:, 1] - return mention_start, mention_end - - def get_mention_emb(self, flatten_lstm, mention_start, mention_end): - mention_start_tensor = torch.from_numpy(mention_start).to(self.device) - mention_end_tensor = torch.from_numpy(mention_end).to(self.device) - emb_start = flatten_lstm.index_select(dim=0, index=mention_start_tensor) # [mention_num,embed] - emb_end = flatten_lstm.index_select(dim=0, index=mention_end_tensor) # [mention_num,embed] - return emb_start, emb_end - - def get_mask(self, mention_start, mention_end): - # big mask for attention - mention_num = mention_start.shape[0] - mask = np.zeros((mention_num, self.config.span_width)) # [mention_num,span_width] - for i in range(mention_num): - start = mention_start[i] - end = mention_end[i] - # 实际上是宽度 - for j in range(end - start + 1): - mask[i][j] = 1 - mask = torch.from_numpy(mask) # [mention_num,max_mention] - # 0-->-inf 1-->0 - log_mask = torch.log(mask) - return log_mask - - def get_mention_index(self, mention_start, max_mention): - # TODO 后面可能要改 - assert len(mention_start.shape) == 1 - mention_start_tensor = torch.from_numpy(mention_start) - num_mention = mention_start_tensor.shape[0] - mention_index = mention_start_tensor.expand(max_mention, num_mention).transpose(0, - 1) # [num_mention,max_mention] - assert mention_index.shape[0] == num_mention - assert mention_index.shape[1] == max_mention - range_add = torch.arange(0, max_mention).expand(num_mention, max_mention).long() # [num_mention,max_mention] - mention_index = mention_index + range_add - mention_index = torch.min(mention_index, torch.LongTensor([mention_start[-1]]).expand(num_mention, max_mention)) - return mention_index.to(self.device) - - def sort_mention(self, mention_start, mention_end, candidate_mention_emb, candidate_mention_score, seq_lens): - # 排序记录,高分段在前面 - mention_score, mention_ids = torch.sort(candidate_mention_score, descending=True) - preserve_mention_num = int(self.config.mention_ratio * sum(seq_lens)) - mention_ids = mention_ids[0:preserve_mention_num] - mention_score = mention_score[0:preserve_mention_num] - - mention_start_tensor = torch.from_numpy(mention_start).to(self.device).index_select(dim=0, - index=mention_ids) # [lamda*word_num] - mention_end_tensor = torch.from_numpy(mention_end).to(self.device).index_select(dim=0, - index=mention_ids) # [lamda*word_num] - mention_emb = candidate_mention_emb.index_select(index=mention_ids, dim=0) # [lamda*word_num,emb] - assert mention_score.shape[0] == preserve_mention_num - assert mention_start_tensor.shape[0] == preserve_mention_num - assert mention_end_tensor.shape[0] == preserve_mention_num - assert mention_emb.shape[0] == preserve_mention_num - # TODO 不交叉没做处理 - - # 对start进行再排序,实际位置在前面 - # TODO 这里只考虑了start没有考虑end - mention_start_tensor, temp_index = torch.sort(mention_start_tensor) - mention_end_tensor = mention_end_tensor.index_select(dim=0, index=temp_index) - mention_emb = mention_emb.index_select(dim=0, index=temp_index) - mention_score = mention_score.index_select(dim=0, index=temp_index) - return mention_start_tensor, mention_end_tensor, mention_score, mention_emb - - def get_antecedents(self, mention_starts, 
max_antecedents): - num_mention = mention_starts.shape[0] - max_antecedents = min(max_antecedents, num_mention) - # mention和它是第几个mention之间的对应关系 - antecedents = np.zeros((num_mention, max_antecedents), dtype=int) # [num_mention,max_an] - # 记录长度 - antecedents_len = [0] * num_mention - for i in range(num_mention): - ante_count = 0 - for j in range(max(0, i - max_antecedents), i): - antecedents[i, ante_count] = j - ante_count += 1 - # 补位操作 - for j in range(ante_count, max_antecedents): - antecedents[i, j] = 0 - antecedents_len[i] = ante_count - assert antecedents.shape[1] == max_antecedents - return antecedents, antecedents_len - - def get_antecedents_score(self, span_represent, mention_score, antecedents, antecedents_len, mention_speakers_ids, - genre): - num_mention = mention_score.shape[0] - max_antecedent = antecedents.shape[1] - - pair_emb = self.get_pair_emb(span_represent, antecedents, mention_speakers_ids, genre) # [span_num,max_ant,emb] - antecedent_scores = self.sa(pair_emb) - mask01 = self.sequence_mask(antecedents_len, max_antecedent) - maskinf = torch.log(mask01).to(self.device) - assert maskinf.shape[1] <= max_antecedent - assert antecedent_scores.shape[0] == num_mention - antecedent_scores = antecedent_scores + maskinf - antecedents = torch.from_numpy(antecedents).to(self.device) - mention_scoreij = mention_score.unsqueeze(1) + torch.gather( - mention_score.unsqueeze(0).expand(num_mention, num_mention), dim=1, index=antecedents) - antecedent_scores += mention_scoreij - - antecedent_scores = torch.cat([torch.zeros([mention_score.shape[0], 1]).to(self.device), antecedent_scores], - 1) # [num_mentions, max_ant + 1] - return antecedent_scores - - ############################## - def distance_bin(self, mention_distance): - bins = torch.zeros(mention_distance.size()).byte().to(self.device) - rg = [[1, 1], [2, 2], [3, 3], [4, 4], [5, 7], [8, 15], [16, 31], [32, 63], [64, 300]] - for t, k in enumerate(rg): - i, j = k[0], k[1] - b = torch.LongTensor([i]).unsqueeze(-1).expand(mention_distance.size()).to(self.device) - m1 = torch.ge(mention_distance, b) - e = torch.LongTensor([j]).unsqueeze(-1).expand(mention_distance.size()).to(self.device) - m2 = torch.le(mention_distance, e) - bins = bins + (t + 1) * (m1 & m2) - return bins.long() - - def get_distance_emb(self, antecedents_tensor): - num_mention = antecedents_tensor.shape[0] - max_ant = antecedents_tensor.shape[1] - - assert max_ant <= self.config.max_antecedents - source = torch.arange(0, num_mention).expand(max_ant, num_mention).transpose(0,1).to(self.device) # [num_mention,max_ant] - mention_distance = source - antecedents_tensor - mention_distance_bin = self.distance_bin(mention_distance) - distance_emb = self.mention_distance_emb(mention_distance_bin) - distance_emb = self.distance_drop(distance_emb) - return distance_emb - - def get_pair_emb(self, span_emb, antecedents, mention_speakers_ids, genre): - emb_dim = span_emb.shape[1] - num_span = span_emb.shape[0] - max_ant = antecedents.shape[1] - assert span_emb.shape[0] == antecedents.shape[0] - antecedents = torch.from_numpy(antecedents).to(self.device) - - # [num_span,max_ant,emb] - antecedent_emb = torch.gather(span_emb.unsqueeze(0).expand(num_span, num_span, emb_dim), dim=1, - index=antecedents.unsqueeze(2).expand(num_span, max_ant, emb_dim)) - # [num_span,max_ant,emb] - target_emb_tiled = span_emb.expand((max_ant, num_span, emb_dim)) - target_emb_tiled = target_emb_tiled.transpose(0, 1) - - similarity_emb = antecedent_emb * target_emb_tiled - - pair_emb_list = 
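`distance_bin` above squashes mention distances into nine coarse buckets before embedding them. The same bucketing in plain Python (the bucket boundaries are copied from the method):

```python
# The distance buckets used by distance_bin above, in plain Python:
# antecedent offsets are squashed into 9 coarse bins before embedding.
RANGES = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 7), (8, 15), (16, 31), (32, 63), (64, 300)]

def bucket(distance):
    for b, (lo, hi) in enumerate(RANGES, start=1):
        if lo <= distance <= hi:
            return b
    return 0  # distances outside every range (e.g. 0 or > 300)

print([bucket(d) for d in (1, 2, 6, 10, 100)])  # [1, 2, 5, 6, 9]
```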
[target_emb_tiled, antecedent_emb, similarity_emb] - - # get speakers and genre - if self.config.use_metadata: - antecedent_speaker_ids = mention_speakers_ids.unsqueeze(0).expand(num_span, num_span).gather(dim=1, - index=antecedents) - same_speaker = torch.eq(mention_speakers_ids.unsqueeze(1).expand(num_span, max_ant), - antecedent_speaker_ids) # [num_mention,max_ant] - speaker_embedding = self.speaker_emb(same_speaker.long().to(self.device)) # [mention_num.max_ant,emb] - genre_embedding = self.genre_emb( - torch.LongTensor([genre]).expand(num_span, max_ant).to(self.device)) # [mention_num,max_ant,emb] - pair_emb_list.append(speaker_embedding) - pair_emb_list.append(genre_embedding) - - # get distance emb - if self.config.use_distance: - distance_emb = self.get_distance_emb(antecedents) - pair_emb_list.append(distance_emb) - - pair_emb = torch.cat(pair_emb_list, 2) - return pair_emb - - def sequence_mask(self, len_list, max_len): - x = np.zeros((len(len_list), max_len)) - for i in range(len(len_list)): - l = len_list[i] - for j in range(l): - x[i][j] = 1 - return torch.from_numpy(x).float() - - def logsumexp(self, value, dim=None, keepdim=False): - """Numerically stable implementation of the operation - - value.exp().sum(dim, keepdim).log() - """ - # TODO: torch.max(value, dim=None) threw an error at time of writing - if dim is not None: - m, _ = torch.max(value, dim=dim, keepdim=True) - value0 = value - m - if keepdim is False: - m = m.squeeze(dim) - return m + torch.log(torch.sum(torch.exp(value0), - dim=dim, keepdim=keepdim)) - else: - m = torch.max(value) - sum_exp = torch.sum(torch.exp(value - m)) - - return m + torch.log(sum_exp) - - def softmax_loss(self, antecedent_scores, antecedent_labels): - antecedent_labels = torch.from_numpy(antecedent_labels * 1).to(self.device) - gold_scores = antecedent_scores + torch.log(antecedent_labels.float()) # [num_mentions, max_ant + 1] - marginalized_gold_scores = self.logsumexp(gold_scores, 1) # [num_mentions] - log_norm = self.logsumexp(antecedent_scores, 1) # [num_mentions] - return torch.sum(log_norm - marginalized_gold_scores) # [num_mentions]reduce_logsumexp - - def get_predicted_antecedents(self, antecedents, antecedent_scores): - predicted_antecedents = [] - for i, index in enumerate(np.argmax(antecedent_scores.detach(), axis=1) - 1): - if index < 0: - predicted_antecedents.append(-1) - else: - predicted_antecedents.append(antecedents[i, index]) - return predicted_antecedents - - def get_predicted_clusters(self, mention_starts, mention_ends, predicted_antecedents): - mention_to_predicted = {} - predicted_clusters = [] - for i, predicted_index in enumerate(predicted_antecedents): - if predicted_index < 0: - continue - assert i > predicted_index - predicted_antecedent = (int(mention_starts[predicted_index]), int(mention_ends[predicted_index])) - if predicted_antecedent in mention_to_predicted: - predicted_cluster = mention_to_predicted[predicted_antecedent] - else: - predicted_cluster = len(predicted_clusters) - predicted_clusters.append([predicted_antecedent]) - mention_to_predicted[predicted_antecedent] = predicted_cluster - - mention = (int(mention_starts[i]), int(mention_ends[i])) - predicted_clusters[predicted_cluster].append(mention) - mention_to_predicted[mention] = predicted_cluster - - predicted_clusters = [tuple(pc) for pc in predicted_clusters] - mention_to_predicted = {m: predicted_clusters[i] for m, i in mention_to_predicted.items()} - - return predicted_clusters, mention_to_predicted - - def evaluate_coref(self, mention_starts, 
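`softmax_loss` above is the marginal log-likelihood over antecedents: the scores of all gold antecedents are log-sum-exp'ed, then normalized over all candidates. A toy version using `torch.logsumexp` in place of the hand-rolled helper:

```python
import torch

# One mention, four candidates; column 0 is the dummy "no antecedent".
antecedent_scores = torch.tensor([[0.0, 2.0, -1.0, 0.5]])
gold = torch.tensor([[0.0, 1.0, 0.0, 1.0]])  # two gold antecedents

gold_scores = antecedent_scores + torch.log(gold)     # -inf on non-gold entries
marginalized = torch.logsumexp(gold_scores, dim=1)    # total prob. of gold links
log_norm = torch.logsumexp(antecedent_scores, dim=1)  # normalizer
print((log_norm - marginalized).sum())                # the loss
```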
def get_predicted_antecedents(self, antecedents, antecedent_scores): - predicted_antecedents = [] - for i, index in enumerate(np.argmax(antecedent_scores.detach(), axis=1) - 1): - if index < 0: - predicted_antecedents.append(-1) - else: - predicted_antecedents.append(antecedents[i, index]) - return predicted_antecedents - - def get_predicted_clusters(self, mention_starts, mention_ends, predicted_antecedents): - mention_to_predicted = {} - predicted_clusters = [] - for i, predicted_index in enumerate(predicted_antecedents): - if predicted_index < 0: - continue - assert i > predicted_index - predicted_antecedent = (int(mention_starts[predicted_index]), int(mention_ends[predicted_index])) - if predicted_antecedent in mention_to_predicted: - predicted_cluster = mention_to_predicted[predicted_antecedent] - else: - predicted_cluster = len(predicted_clusters) - predicted_clusters.append([predicted_antecedent]) - mention_to_predicted[predicted_antecedent] = predicted_cluster - - mention = (int(mention_starts[i]), int(mention_ends[i])) - predicted_clusters[predicted_cluster].append(mention) - mention_to_predicted[mention] = predicted_cluster - - predicted_clusters = [tuple(pc) for pc in predicted_clusters] - mention_to_predicted = {m: predicted_clusters[i] for m, i in mention_to_predicted.items()} - - return predicted_clusters, mention_to_predicted - - def evaluate_coref(self, mention_starts, mention_ends, predicted_antecedents, gold_clusters, evaluator): - gold_clusters = [tuple(tuple(m) for m in gc) for gc in gold_clusters] - mention_to_gold = {} - for gc in gold_clusters: - for mention in gc: - mention_to_gold[mention] = gc - predicted_clusters, mention_to_predicted = self.get_predicted_clusters(mention_starts, mention_ends, - predicted_antecedents) - evaluator.update(predicted_clusters, gold_clusters, mention_to_predicted, mention_to_gold) - return predicted_clusters - - - def forward(self, words1 , words2, words3, words4, chars, seq_len): - """ - All inputs are actually tensors. - :param sentences: the sentences, converted to numpy by fastNLP - :param doc_np: converted to a Tensor by fastNLP - :param speaker_ids_np: converted to a Tensor by fastNLP - :param genre: converted to a Tensor by fastNLP - :param char_index: converted to a Tensor by fastNLP - :param seq_len: converted to a Tensor by fastNLP - :return: - """ - - sentences = words3 - doc_np = words4 - speaker_ids_np = words2 - genre = words1 - char_index = chars - - - # change for fastNLP - sentences = sentences[0].tolist() - doc_tensor = doc_np[0] - speakers_tensor = speaker_ids_np[0] - genre = genre[0].item() - char_index = char_index[0] - seq_len = seq_len[0].cpu().numpy() - - # type conversions - - # doc_tensor = torch.from_numpy(doc_np).to(self.device) - # speakers_tensor = torch.from_numpy(speaker_ids_np).to(self.device) - mention_emb_list = [] - - word_emb = self.emb(doc_tensor) - word_emb_list = [word_emb] - if self.config.use_CNN: - # [batch, length, char_length, char_dim] - char = self.char_emb(char_index) - char_size = char.size() - # first transform to [batch *length, char_length, char_dim] - # then transpose to [batch * length, char_dim, char_length] - char = char.view(char_size[0] * char_size[1], char_size[2], char_size[3]).transpose(1, 2) - - # put into cnn [batch*length, char_filters, char_length] - # then put into maxpooling [batch * length, char_filters] - char_over_cnn, _ = self.conv1(char).max(dim=2) - # reshape to [batch, length, char_filters] - char_over_cnn = torch.tanh(char_over_cnn).view(char_size[0], char_size[1], -1) - word_emb_list.append(char_over_cnn) - - char_over_cnn, _ = self.conv2(char).max(dim=2) - char_over_cnn = torch.tanh(char_over_cnn).view(char_size[0], char_size[1], -1) - word_emb_list.append(char_over_cnn) - - char_over_cnn, _ = self.conv3(char).max(dim=2) - char_over_cnn = torch.tanh(char_over_cnn).view(char_size[0], char_size[1], -1) - word_emb_list.append(char_over_cnn) - - # word_emb = torch.cat(word_emb_list, dim=2) - - # use elmo or not - if self.config.use_elmo: - # if the document was actually truncated - if doc_tensor.shape[0] == 50 and len(sentences) > 50: - sentences = sentences[0:50] - elmo_embedding, elmo_mask = self.elmo.batch_to_embeddings(sentences) - elmo_embedding = elmo_embedding.to( - self.device) # [sentence_num,max_sent_len,3,1024]--[sentence_num,max_sent,1024] - elmo_embedding = elmo_embedding[:, 0, :, :] * self.elmo_args[0] + elmo_embedding[:, 1, :, :] * \ - self.elmo_args[1] + elmo_embedding[:, 2, :, :] * self.elmo_args[2] - word_emb_list.append(elmo_embedding) - # print(word_emb_list[0].shape) - # print(word_emb_list[1].shape) - # print(word_emb_list[2].shape) - # print(word_emb_list[3].shape) - # print(word_emb_list[4].shape) - - word_emb = torch.cat(word_emb_list, dim=2) - - word_emb = self.emb_dropout(word_emb) - # word_emb_elmo = self.emb_dropout(word_emb_elmo) - lstm_out = self._reorder_lstm(word_emb, seq_len) - flatten_lstm = self.flat_lstm(lstm_out, seq_len) # [word_num,emb] - flatten_lstm = self.bilstm_drop(flatten_lstm) - # TODO not implemented exactly as in the paper - flatten_word_emb = self.flat_lstm(word_emb, seq_len) # [word_num,emb]
- - mention_start, mention_end = self.get_mention_start_end(seq_len) # [mention_num] - self.mention_start_np = mention_start # [mention_num] np - self.mention_end_np = mention_end - mention_num = mention_start.shape[0] - emb_start, emb_end = self.get_mention_emb(flatten_lstm, mention_start, mention_end) # [mention_num,emb] - - # list - mention_emb_list.append(emb_start) - mention_emb_list.append(emb_end) - - if self.config.use_width: - mention_width_index = mention_end - mention_start - mention_width_tensor = torch.from_numpy(mention_width_index).to(self.device) # [mention_num] - mention_width_emb = self.feature_emb(mention_width_tensor) - mention_width_emb = self.feature_emb_dropout(mention_width_emb) - mention_emb_list.append(mention_width_emb) - - if self.config.model_heads: - mention_index = self.get_mention_index(mention_start, self.config.span_width) # [mention_num,max_mention] - log_mask_tensor = self.get_mask(mention_start, mention_end).float().to( - self.device) # [mention_num,max_mention] - alpha = self.atten(flatten_lstm).to(self.device) # [word_num] - - # compute the attention weights - mention_head_score = torch.gather(alpha.expand(mention_num, -1), 1, - mention_index).float().to(self.device) # [mention_num,max_mention] - mention_attention = F.softmax(mention_head_score + log_mask_tensor, dim=1) # [mention_num,max_mention] - - # TODO flatten lstm - word_num = flatten_lstm.shape[0] - lstm_emb = flatten_lstm.shape[1] - emb_num = flatten_word_emb.shape[1] - - # [num_mentions, max_mention_width, emb] - mention_text_emb = torch.gather( - flatten_word_emb.unsqueeze(1).expand(word_num, self.config.span_width, emb_num), - 0, mention_index.unsqueeze(2).expand(mention_num, self.config.span_width, - emb_num)) - # [mention_num,emb] - mention_head_emb = torch.sum( - mention_attention.unsqueeze(2).expand(mention_num, self.config.span_width, emb_num) * mention_text_emb, - dim=1) - mention_emb_list.append(mention_head_emb) - - candidate_mention_emb = torch.cat(mention_emb_list, 1) # [candidate_mention_num,emb] - candidate_mention_score = self.mention_score(candidate_mention_emb) # [candidate_mention_num] - - antecedent_scores, antecedents, mention_start_tensor, mention_end_tensor = (None, None, None, None) - mention_start_tensor, mention_end_tensor, mention_score, mention_emb = \ - self.sort_mention(mention_start, mention_end, candidate_mention_emb, candidate_mention_score, seq_len) - mention_speakers_ids = speakers_tensor.index_select(dim=0, index=mention_start_tensor) # num_mention - - antecedents, antecedents_len = self.get_antecedents(mention_start_tensor, self.config.max_antecedents) - antecedent_scores = self.get_antecedents_score(mention_emb, mention_score, antecedents, antecedents_len, - mention_speakers_ids, genre) - - ans = {"candidate_mention_score": candidate_mention_score, "antecedent_scores": antecedent_scores, - "antecedents": antecedents, "mention_start_tensor": mention_start_tensor, - "mention_end_tensor": mention_end_tensor} - - return ans
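The `model_heads` branch above builds an attention-weighted "head word" vector per candidate span. A condensed sketch of the same pooling with toy dimensions invented for the demo (plain fancy indexing instead of `gather`/`expand`):

```python
import torch
import torch.nn.functional as F

word_num, emb_dim, span_width, mention_num = 20, 8, 3, 5
token_emb = torch.randn(word_num, emb_dim)            # flattened token representations
alpha = torch.randn(word_num)                         # one attention logit per token
mention_index = torch.randint(0, word_num, (mention_num, span_width))
log_mask = torch.zeros(mention_num, span_width)       # 0 = valid slot, -inf = padding

head_scores = alpha[mention_index]                    # [mention_num, span_width]
attention = F.softmax(head_scores + log_mask, dim=1)  # masked softmax inside each span
span_tokens = token_emb[mention_index]                # [mention_num, span_width, emb_dim]
head_emb = (attention.unsqueeze(2) * span_tokens).sum(dim=1)  # [mention_num, emb_dim]
```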
- - def predict(self, words1 , words2, words3, words4, chars, seq_len): - """ - All inputs are actually tensors. - :param sentences: the sentences, converted to numpy by fastNLP - :param doc_np: converted to a Tensor by fastNLP - :param speaker_ids_np: converted to a Tensor by fastNLP - :param genre: converted to a Tensor by fastNLP - :param char_index: converted to a Tensor by fastNLP - :param seq_len: converted to a Tensor by fastNLP - :return: - """ - - sentences = words1 - doc_np = words2 - speaker_ids_np = words3 - genre = words4 - char_index = chars - - # def predict(self, sentences, doc_np, speaker_ids_np, genre, char_index, seq_len): - ans = self(sentences, - doc_np, - speaker_ids_np, - genre, - char_index, - seq_len) - predicted_antecedents = self.get_predicted_antecedents(ans["antecedents"], ans["antecedent_scores"].cpu()) - predicted_clusters, mention_to_predicted = self.get_predicted_clusters(ans["mention_start_tensor"].cpu(), - ans["mention_end_tensor"].cpu(), - predicted_antecedents) - - - return {'predicted':predicted_clusters,"mention_to_predicted":mention_to_predicted} - - -if __name__ == '__main__': - pass
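To see how the two decoding helpers interact, here is a toy walk-through with made-up numbers: column 0 of the score matrix is the dummy "no antecedent" option, so an argmax of 0 becomes -1 and starts a new entity.

```python
import numpy as np

antecedents = np.array([[0, 0], [0, 0], [0, 1]])  # candidate antecedent ids per mention
scores = np.array([[0.9, 0.1, 0.0],               # mention 0: dummy wins -> -1
                   [0.2, 0.8, 0.0],               # mention 1: links to mention 0
                   [0.1, 0.2, 0.7]])              # mention 2: links to mention 1
predicted = [-1 if idx < 0 else antecedents[i, idx]
             for i, idx in enumerate(np.argmax(scores, axis=1) - 1)]
print(predicted)  # [-1, 0, 1]: the three mentions chain into a single cluster
```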
diff --git a/reproduction/coreference_resolution/model/preprocess.py b/reproduction/coreference_resolution/model/preprocess.py deleted file mode 100644 index d97fcb4d..00000000 --- a/reproduction/coreference_resolution/model/preprocess.py +++ /dev/null @@ -1,225 +0,0 @@ -import json -import numpy as np -from . import util -import collections - -def load(path): - """ - load the file from jsonline - :param path: - :return: examples with many example(dict): {"clusters":[[[mention],[mention]],[another cluster]], - "doc_key":"str","speakers":[[,,,],[]...],"sentence":[[][]]} - """ - with open(path) as f: - train_examples = [json.loads(jsonline) for jsonline in f.readlines()] - return train_examples - -def get_vocab(): - """ - Build the final vocabulary from all sentences; called by main. Covers not only train, but also dev and test. - :param examples: - :return: word2id & id2word - """ - word2id = {'PAD':0,'UNK':1} - id2word = {0:'PAD',1:'UNK'} - index = 2 - data = [load("../data/train.english.jsonlines"),load("../data/dev.english.jsonlines"),load("../data/test.english.jsonlines")] - for examples in data: - for example in examples: - for sent in example["sentences"]: - for word in sent: - if(word not in word2id): - word2id[word]=index - id2word[index] = word - index += 1 - return word2id,id2word - -def normalize(v): - norm = np.linalg.norm(v) - if norm > 0: - return v / norm - else: - return v - -# load glove to build the embedding matrix -def get_emb(id2word,embedding_size): - glove_oov = 0 - turian_oov = 0 - both = 0 - glove_emb_path = "../data/glove.840B.300d.txt.filtered" - turian_emb_path = "../data/turian.50d.txt" - word_num = len(id2word) - emb = np.zeros((word_num,embedding_size)) - glove_emb_dict = util.load_embedding_dict(glove_emb_path,300,"txt") - turian_emb_dict = util.load_embedding_dict(turian_emb_path,50,"txt") - for i in range(word_num): - if id2word[i] in glove_emb_dict: - word_embedding = glove_emb_dict.get(id2word[i]) - emb[i][0:300] = np.array(word_embedding) - else: - # print(id2word[i]) - glove_oov += 1 - if id2word[i] in turian_emb_dict: - word_embedding = turian_emb_dict.get(id2word[i]) - emb[i][300:350] = np.array(word_embedding) - else: - # print(id2word[i]) - turian_oov += 1 - if id2word[i] not in glove_emb_dict and id2word[i] not in turian_emb_dict: - both += 1 - emb[i] = normalize(emb[i]) - print("embedding num:"+str(word_num)) - print("glove num:"+str(glove_oov)) - print("glove oov rate:"+str(glove_oov/word_num)) - print("turian num:"+str(turian_oov)) - print("turian oov rate:"+str(turian_oov/word_num)) - print("both num:"+str(both)) - return emb - - -def _doc2vec(doc,word2id,char_dict,max_filter,max_sentences,is_train): - max_len = 0 - max_word_length = 0 - docvex = [] - length = [] - if is_train: - sent_num = min(max_sentences,len(doc)) - else: - sent_num = len(doc) - - for i in range(sent_num): - sent = doc[i] - length.append(len(sent)) - if (len(sent) > max_len): - max_len = len(sent) - sent_vec =[] - for j,word in enumerate(sent): - if len(word)>max_word_length: - max_word_length = len(word) - if word in word2id: - sent_vec.append(word2id[word]) - else: - sent_vec.append(word2id["UNK"]) - docvex.append(sent_vec) - - char_index = np.zeros((sent_num, max_len, max_word_length),dtype=int) - for i in range(sent_num): - sent = doc[i] - for j,word in enumerate(sent): - char_index[i, j, :len(word)] = [char_dict[c] for c in word] - - return docvex,char_index,length,max_len - -# TODO the interface was changed; make sure every affected call site has been updated -def doc2numpy(doc,word2id,chardict,max_filter,max_sentences,is_train): - docvec, char_index, length, max_len = _doc2vec(doc,word2id,chardict,max_filter,max_sentences,is_train) - assert max(length) == max_len - assert char_index.shape[0]==len(length) - assert char_index.shape[1]==max_len - doc_np = np.zeros((len(docvec), max_len), int) - for i in range(len(docvec)): - for j in range(len(docvec[i])): - doc_np[i][j] = docvec[i][j] - return doc_np,char_index,length - -# TODO not tested yet -def speaker2numpy(speakers_raw,max_sentences,is_train): - if is_train and len(speakers_raw)> max_sentences: - speakers_raw = speakers_raw[0:max_sentences] - speakers = flatten(speakers_raw) - speaker_dict = {s: i for i, s in enumerate(set(speakers))} - speaker_ids = np.array([speaker_dict[s] for s in speakers]) - return speaker_ids - - -def flat_cluster(clusters): - flatted = [] - for cluster in clusters: - for item in cluster: - flatted.append(item) - return flatted - -def get_right_mention(clusters,mention_start_np,mention_end_np): - flatted = flat_cluster(clusters) - cluster_num = len(flatted) - mention_num = mention_start_np.shape[0] - right_mention = np.zeros(mention_num,dtype=int) - for i in range(mention_num): - if [mention_start_np[i],mention_end_np[i]] in flatted: - right_mention[i]=1 - return right_mention,cluster_num - -def handle_cluster(clusters): - gold_mentions = sorted(tuple(m) for m in flatten(clusters)) - gold_mention_map = {m: i for i, m in enumerate(gold_mentions)} - cluster_ids = np.zeros(len(gold_mentions), dtype=int) - for cluster_id, cluster in enumerate(clusters): - for mention in cluster: - cluster_ids[gold_mention_map[tuple(mention)]] = cluster_id - gold_starts, gold_ends = tensorize_mentions(gold_mentions) - return cluster_ids, gold_starts, gold_ends - -# flatten a nested list -def flatten(l): - return [item for sublist in l for item in sublist] - -# split mentions into starts and ends -def tensorize_mentions(mentions): - if len(mentions) > 0: - starts, ends = zip(*mentions) - else: - starts, ends = [], [] - return np.array(starts), np.array(ends) - -def get_char_dict(path): - vocab = [""] - with open(path) as f: - vocab.extend(c.strip() for c in f.readlines()) - char_dict = collections.defaultdict(int) - char_dict.update({c: i for i, c in enumerate(vocab)}) - return char_dict - -def get_labels(clusters,mention_starts,mention_ends,max_antecedents): - cluster_ids, gold_starts, gold_ends = handle_cluster(clusters) - num_mention = mention_starts.shape[0] - num_gold = gold_starts.shape[0] - max_antecedents = min(max_antecedents, num_mention) - mention_indices = {} - - for i in range(num_mention): - mention_indices[(mention_starts[i].detach().item(), mention_ends[i].detach().item())] = i - # records which mentions are correct: -1 means wrong, a non-negative value is the id of the gold cluster this mention actually belongs to - mention_cluster_ids = [-1] * num_mention - # test - right_mention_count = 0 - for i in range(num_gold): - right_mention = mention_indices.get((gold_starts[i], gold_ends[i])) - if (right_mention != None): - right_mention_count += 1 - mention_cluster_ids[right_mention] = cluster_ids[i] - - # whether i and j belong to the same cluster - labels = np.zeros((num_mention, max_antecedents + 1), dtype=bool) # [num_mention,max_an+1] - for i in range(num_mention): - ante_count = 0 - null_label = True - for j in range(max(0, i - max_antecedents), i): - if (mention_cluster_ids[i] >= 0 and mention_cluster_ids[i] == mention_cluster_ids[j]): - labels[i, ante_count + 1] = True - null_label = False - else: - labels[i, ante_count + 1] = False - ante_count += 1 - for j in range(ante_count, max_antecedents): - labels[i, j + 1] = False - labels[i, 0] = null_label - return labels - -# test=========================== - - -if __name__=="__main__": - word2id,id2word = get_vocab() - get_emb(id2word,350) - -
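A hypothetical mini-run of the helpers above, with made-up vocabularies (assumes the module is importable as `preprocess`):

```python
import collections
from reproduction.coreference_resolution.model import preprocess

word2id = {'PAD': 0, 'UNK': 1, 'I': 2, 'saw': 3, 'her': 4}
char_dict = collections.defaultdict(int, {c: i + 1 for i, c in enumerate('Isawher')})
doc = [['I', 'saw', 'her'], ['her', 'dog']]   # 'dog' is out-of-vocabulary -> UNK

doc_np, char_index, length = preprocess.doc2numpy(
    doc, word2id, char_dict, max_filter=5, max_sentences=50, is_train=True)
# doc_np:     [2, 3] id matrix, second row zero-padded to the longest sentence
# char_index: [2, 3, 3] per-character ids (the longest word here has 3 characters)
# length:     [3, 2] true sentence lengths
```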
diff --git a/reproduction/coreference_resolution/model/softmax_loss.py b/reproduction/coreference_resolution/model/softmax_loss.py deleted file mode 100644 index 1c1fcc69..00000000 --- a/reproduction/coreference_resolution/model/softmax_loss.py +++ /dev/null @@ -1,32 +0,0 @@ -from fastNLP.core.losses import LossBase - -from reproduction.coreference_resolution.model.preprocess import get_labels -from reproduction.coreference_resolution.model.config import Config -import torch - - -class SoftmaxLoss(LossBase): - """ - Cross-entropy loss. - Allows multi-label classification. - """ - - def __init__(self, antecedent_scores=None, target=None, mention_start_tensor=None, mention_end_tensor=None): - """ - - :param pred: - :param target: - """ - super().__init__() - self._init_param_map(antecedent_scores=antecedent_scores, target=target, - mention_start_tensor=mention_start_tensor, mention_end_tensor=mention_end_tensor) - - def get_loss(self, antecedent_scores, target, mention_start_tensor, mention_end_tensor): - antecedent_labels = get_labels(target[0], mention_start_tensor, mention_end_tensor, - Config().max_antecedents) - - antecedent_labels = torch.from_numpy(antecedent_labels*1).to(torch.device("cuda:" + Config().cuda)) - gold_scores = antecedent_scores + torch.log(antecedent_labels.float()).to(torch.device("cuda:" + Config().cuda)) # [num_mentions, max_ant + 1] - marginalized_gold_scores = gold_scores.logsumexp(dim=1) # [num_mentions] - log_norm = antecedent_scores.logsumexp(dim=1) # [num_mentions] - return torch.sum(log_norm - marginalized_gold_scores) diff --git a/reproduction/coreference_resolution/model/util.py b/reproduction/coreference_resolution/model/util.py deleted file mode 100644 index 42cd09fe..00000000 --- a/reproduction/coreference_resolution/model/util.py +++ /dev/null @@ -1,101 +0,0 @@ -import os -import errno -import collections -import torch -import numpy as np -import pyhocon - - - -# flatten the list -def flatten(l): - return [item for sublist in l for item in sublist] - - -def get_config(filename): - return pyhocon.ConfigFactory.parse_file(filename) - - -# safely make directories -def mkdirs(path): - try: - os.makedirs(path) - except OSError as exception: - if exception.errno != errno.EEXIST: - raise - return path - - -def load_char_dict(char_vocab_path): - vocab = [""] - with open(char_vocab_path) as f: - vocab.extend(c.strip() for c in f.readlines()) - char_dict = collections.defaultdict(int) - char_dict.update({c: i for i, c in enumerate(vocab)}) - return char_dict - -# load pretrained embeddings -def load_embedding_dict(embedding_path, embedding_size, embedding_format): - print("Loading word embeddings from {}...".format(embedding_path)) - default_embedding = np.zeros(embedding_size) - embedding_dict = collections.defaultdict(lambda: default_embedding) - skip_first = embedding_format == "vec" - with open(embedding_path) as f: - for i, line in enumerate(f.readlines()): - if skip_first and i == 0: - continue - splits = line.split() - assert len(splits) == embedding_size + 1 - word = splits[0]
embedding = np.array([float(s) for s in splits[1:]]) - embedding_dict[word] = embedding - print("Done loading word embeddings.") - return embedding_dict - - -# safe divide -def maybe_divide(x, y): - return 0 if y == 0 else x / float(y) - - -def shape(x, dim): - return x.get_shape()[dim].value or torch.shape(x)[dim] - - -def normalize(v): - norm = np.linalg.norm(v) - if norm > 0: - return v / norm - else: - return v - - -class RetrievalEvaluator(object): - def __init__(self): - self._num_correct = 0 - self._num_gold = 0 - self._num_predicted = 0 - - def update(self, gold_set, predicted_set): - self._num_correct += len(gold_set & predicted_set) - self._num_gold += len(gold_set) - self._num_predicted += len(predicted_set) - - def recall(self): - return maybe_divide(self._num_correct, self._num_gold) - - def precision(self): - return maybe_divide(self._num_correct, self._num_predicted) - - def metrics(self): - recall = self.recall() - precision = self.precision() - f1 = maybe_divide(2 * recall * precision, precision + recall) - return recall, precision, f1 - - - -if __name__=="__main__": - print(load_char_dict("../data/char_vocab.english.txt")) - embedding_dict = load_embedding_dict("../data/glove.840B.300d.txt.filtered",300,"txt") - print("hello")
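`RetrievalEvaluator` above is a plain micro precision/recall/F1 counter over sets; a quick toy check (the mention spans are invented for the demo):

```python
ev = RetrievalEvaluator()
ev.update(gold_set={(0, 1), (3, 4)}, predicted_set={(0, 1), (5, 6)})
recall, precision, f1 = ev.metrics()
print(recall, precision, f1)  # 0.5 0.5 0.5: one of two gold spans retrieved
```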
diff --git a/reproduction/coreference_resolution/requirements.txt b/reproduction/coreference_resolution/requirements.txt deleted file mode 100644 index a8f04f04..00000000 --- a/reproduction/coreference_resolution/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -prettytable==0.7.2 -allennlp==0.8.2 -scikit-learn==0.22.2 -pyhocon==0.3.50 -torch==1.1 diff --git a/reproduction/coreference_resolution/train.py b/reproduction/coreference_resolution/train.py deleted file mode 100644 index bf3ea624..00000000 --- a/reproduction/coreference_resolution/train.py +++ /dev/null @@ -1,62 +0,0 @@ -import sys -sys.path.append('../..') - -import torch -from torch.optim import Adam - -from fastNLP.core.callback import Callback, GradientClipCallback -from fastNLP.core.trainer import Trainer - -from fastNLP.io.pipe.coreference import CoReferencePipe -from fastNLP.core.const import Const - -from reproduction.coreference_resolution.model.config import Config -from reproduction.coreference_resolution.model.model_re import Model -from reproduction.coreference_resolution.model.softmax_loss import SoftmaxLoss -from reproduction.coreference_resolution.model.metric import CRMetric - - -class LRCallback(Callback): - def __init__(self, parameters, decay_rate=1e-3): - super().__init__() - self.paras = parameters - self.decay_rate = decay_rate - - def on_step_end(self): - if self.step % 100 == 0: - for para in self.paras: - para['lr'] = para['lr'] * (1 - self.decay_rate) - - -if __name__ == "__main__": - config = Config() - - print(config) - - def cache(): - bundle = CoReferencePipe(config).process_from_file({'train': config.train_path, 'dev': config.dev_path, - 'test': config.test_path}) - return bundle - data_bundle = cache() - print(data_bundle) - model = Model(data_bundle.get_vocab(Const.INPUTS(0)), config) - print(model) - - loss = SoftmaxLoss() - - metric = CRMetric() - - optim = Adam(model.parameters(), lr=config.lr) - - lr_decay_callback = LRCallback(optim.param_groups, config.lr_decay) - - trainer = Trainer(model=model, train_data=data_bundle.datasets["train"], dev_data=data_bundle.datasets["dev"], - loss=loss, metrics=metric, check_code_level=-1, sampler=None, - batch_size=1, device=torch.device("cuda:" + config.cuda) if torch.cuda.is_available() else None, - metric_key='f', n_epochs=config.epoch, - optimizer=optim, - save_path=None, - callbacks=[lr_decay_callback, GradientClipCallback(clip_value=5)]) - print() - - trainer.train() diff --git a/reproduction/coreference_resolution/valid.py b/reproduction/coreference_resolution/valid.py deleted file mode 100644 index e79642b8..00000000 --- a/reproduction/coreference_resolution/valid.py +++ /dev/null @@ -1,24 +0,0 @@ -import torch -from reproduction.coreference_resolution.model.config import Config -from reproduction.coreference_resolution.model.metric import CRMetric -from fastNLP.io.pipe.coreference import CoReferencePipe - -from fastNLP import Tester -import argparse - - -if __name__=='__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--path') - args = parser.parse_args() - - config = Config() - bundle = CoReferencePipe(Config()).process_from_file( - {'train': config.train_path, 'dev': config.dev_path, 'test': config.test_path}) - metric = CRMetric() - model = torch.load(args.path) - tester = Tester(bundle.get_dataset("test"),model,metric,batch_size=1,device="cuda:0") - tester.test() - print('test over') - - diff --git a/reproduction/joint_cws_parse/README.md b/reproduction/joint_cws_parse/README.md deleted file mode 100644 index 7fe77b47..00000000 --- a/reproduction/joint_cws_parse/README.md +++ /dev/null @@ -1,16 +0,0 @@ -Code for paper [A Unified Model for Chinese Word Segmentation and Dependency Parsing](https://arxiv.org/abs/1904.04697) - -### Data preparation -1. The data should be in conll format; columns 1, 3, 6 and 7 correspond to 'words', 'pos_tags', 'heads' and 'labels'. -2. Put train, dev and test in the same folder, and set the data_folder variable in train.py to that folder's path. -3. Download the pretrained vectors from [Baidu Cloud](https://pan.baidu.com/s/1uXnAZpYecYJITCiqgAjjjA) (extraction code: ua53), put them in the same folder, and set the vector_folder variable in train.py accordingly. - - -### Running the code -``` -python train.py -``` - -### Notes -With the default hyper-parameters above, ctb5 should reproduce the results reported in the paper (likely a bit higher); on ctb7 the defaults come out roughly 0.1% lower, and the learning rate scheduler needs tuning. \ No newline at end of file
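For step 1 of the README, this is what the relevant conll columns look like once split (the sample row is taken from the loader docstring below; columns 1/3/6/7, 0-indexed):

```python
line = "3\t授予\t_\tVV\tVV\t_\t0\troot\t_\t_"
cols = line.split('\t')
word, pos_tag, head, label = cols[1], cols[3], int(cols[6]), cols[7]
print(word, pos_tag, head, label)  # 授予 VV 0 root
```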
- - """ - def __init__(self): - self._loader = ConllLoader(headers=['words', 'pos_tags', 'heads', 'labels'], indexes=[1, 3, 6, 7]) - - def load(self, path:str): - """ - 给定一个文件路径,将数据读取为DataSet格式。DataSet中包含以下的内容 - words: list[str] - pos_tags: list[str] - heads: list[int] - labels: list[str] - - :param path: - :return: - """ - dataset = self._loader.load(path) - dataset.heads.int() - return dataset - - def process(self, paths): - """ - - :param paths: - :return: - Dataset包含以下的field - chars: - bigrams: - trigrams: - pre_chars: - pre_bigrams: - pre_trigrams: - seg_targets: - seg_masks: - seq_lens: - char_labels: - char_heads: - gold_word_pairs: - seg_targets: - seg_masks: - char_labels: - char_heads: - pun_masks: - gold_label_word_pairs: - """ - paths = check_dataloader_paths(paths) - data = DataBundle() - - for name, path in paths.items(): - dataset = self.load(path) - data.datasets[name] = dataset - - char_labels_vocab = Vocabulary(padding=None, unknown=None) - - def process(dataset, char_label_vocab): - dataset.apply(add_word_lst, new_field_name='word_lst') - dataset.apply(lambda x: list(chain(*x['word_lst'])), new_field_name='chars') - dataset.apply(add_bigram, field_name='chars', new_field_name='bigrams') - dataset.apply(add_trigram, field_name='chars', new_field_name='trigrams') - dataset.apply(add_char_heads, new_field_name='char_heads') - dataset.apply(add_char_labels, new_field_name='char_labels') - dataset.apply(add_segs, new_field_name='seg_targets') - dataset.apply(add_mask, new_field_name='seg_masks') - dataset.add_seq_len('chars', new_field_name='seq_lens') - dataset.apply(add_pun_masks, new_field_name='pun_masks') - if len(char_label_vocab.word_count)==0: - char_label_vocab.from_dataset(dataset, field_name='char_labels') - char_label_vocab.index_dataset(dataset, field_name='char_labels') - new_dataset = add_root(dataset) - new_dataset.apply(add_word_pairs, new_field_name='gold_word_pairs', ignore_type=True) - global add_label_word_pairs - add_label_word_pairs = partial(add_label_word_pairs, label_vocab=char_label_vocab) - new_dataset.apply(add_label_word_pairs, new_field_name='gold_label_word_pairs', ignore_type=True) - - new_dataset.set_pad_val('char_labels', -1) - new_dataset.set_pad_val('char_heads', -1) - - return new_dataset - - for name in list(paths.keys()): - dataset = data.datasets[name] - dataset = process(dataset, char_labels_vocab) - data.datasets[name] = dataset - - data.vocabs['char_labels'] = char_labels_vocab - - char_vocab = Vocabulary(min_freq=2).from_dataset(data.datasets['train'], field_name='chars') - bigram_vocab = Vocabulary(min_freq=5).from_dataset(data.datasets['train'], field_name='bigrams') - trigram_vocab = Vocabulary(min_freq=5).from_dataset(data.datasets['train'], field_name='trigrams') - - for name in ['chars', 'bigrams', 'trigrams']: - vocab = Vocabulary().from_dataset(field_name=name, no_create_entry_dataset=list(data.datasets.values())) - vocab.index_dataset(*data.datasets.values(), field_name=name, new_field_name='pre_' + name) - data.vocabs['pre_{}'.format(name)] = vocab - - for name, vocab in zip(['chars', 'bigrams', 'trigrams'], - [char_vocab, bigram_vocab, trigram_vocab]): - vocab.index_dataset(*data.datasets.values(), field_name=name, new_field_name=name) - data.vocabs[name] = vocab - - for name, dataset in data.datasets.items(): - dataset.set_input('chars', 'bigrams', 'trigrams', 'seq_lens', 'char_labels', 'char_heads', 'pre_chars', - 'pre_bigrams', 'pre_trigrams') - dataset.set_target('gold_word_pairs', 'seq_lens', 'seg_targets', 
'seg_masks', 'char_labels', - 'char_heads', - 'pun_masks', 'gold_label_word_pairs') - - return data - - -def add_label_word_pairs(instance, label_vocab): - # List[List[((head_start, head_end], (dep_start, dep_end]), ...]] - word_end_indexes = np.array(list(map(len, instance['word_lst']))) - word_end_indexes = np.cumsum(word_end_indexes).tolist() - word_end_indexes.insert(0, 0) - word_pairs = [] - labels = instance['labels'] - pos_tags = instance['pos_tags'] - for idx, head in enumerate(instance['heads']): - if pos_tags[idx]=='PU': # skip punctuation - continue - label = label_vocab.to_index(labels[idx]) - if head==0: - word_pairs.append((('root', label, (word_end_indexes[idx], word_end_indexes[idx+1])))) - else: - word_pairs.append(((word_end_indexes[head-1], word_end_indexes[head]), label, - (word_end_indexes[idx], word_end_indexes[idx + 1]))) - return word_pairs - -def add_word_pairs(instance): - # List[List[((head_start, head_end], (dep_start, dep_end]), ...]] - word_end_indexes = np.array(list(map(len, instance['word_lst']))) - word_end_indexes = np.cumsum(word_end_indexes).tolist() - word_end_indexes.insert(0, 0) - word_pairs = [] - pos_tags = instance['pos_tags'] - for idx, head in enumerate(instance['heads']): - if pos_tags[idx]=='PU': # skip punctuation - continue - if head==0: - word_pairs.append((('root', (word_end_indexes[idx], word_end_indexes[idx+1])))) - else: - word_pairs.append(((word_end_indexes[head-1], word_end_indexes[head]), - (word_end_indexes[idx], word_end_indexes[idx + 1]))) - return word_pairs - -def add_root(dataset): - new_dataset = DataSet() - for sample in dataset: - chars = ['char_root'] + sample['chars'] - bigrams = ['bigram_root'] + sample['bigrams'] - trigrams = ['trigram_root'] + sample['trigrams'] - seq_lens = sample['seq_lens']+1 - char_labels = [0] + sample['char_labels'] - char_heads = [0] + sample['char_heads'] - sample['chars'] = chars - sample['bigrams'] = bigrams - sample['trigrams'] = trigrams - sample['seq_lens'] = seq_lens - sample['char_labels'] = char_labels - sample['char_heads'] = char_heads - new_dataset.append(sample) - return new_dataset - -def add_pun_masks(instance): - tags = instance['pos_tags'] - pun_masks = [] - for word, tag in zip(instance['words'], tags): - if tag=='PU': - pun_masks.extend([1]*len(word)) - else: - pun_masks.extend([0]*len(word)) - return pun_masks - -def add_word_lst(instance): - words = instance['words'] - word_lst = [list(word) for word in words] - return word_lst - -def add_bigram(instance): - chars = instance['chars'] - length = len(chars) - chars = chars + [''] - bigrams = [] - for i in range(length): - bigrams.append(''.join(chars[i:i + 2])) - return bigrams - -def add_trigram(instance): - chars = instance['chars'] - length = len(chars) - chars = chars + [''] * 2 - trigrams = [] - for i in range(length): - trigrams.append(''.join(chars[i:i + 3])) - return trigrams
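A toy check of the two n-gram helpers above (the characters are invented for the demo; trailing positions are padded with the empty string, as in the code):

```python
instance = {'chars': list('复旦大学')}
print(add_bigram(instance))   # ['复旦', '旦大', '大学', '学']
print(add_trigram(instance))  # ['复旦大', '旦大学', '大学', '学']
```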
- -def add_char_heads(instance): - words = instance['word_lst'] - heads = instance['heads'] - char_heads = [] - char_index = 1 # start from 1 because there is a root node - head_end_indexes = np.cumsum(list(map(len, words))).tolist() + [0] # root is 0, and 0-1=-1 - for word, head in zip(words, heads): - char_head = [] - if len(word)>1: - char_head.append(char_index+1) - char_index += 1 - for _ in range(len(word)-2): - char_index += 1 - char_head.append(char_index) - char_index += 1 - char_head.append(head_end_indexes[head-1]) - char_heads.extend(char_head) - return char_heads - -def add_char_labels(instance): - """ - Assign labels to the data in word_lst as follows: - e.g. for "复旦大学 位于" the segmentation is "B M M E B E", so the dependencies are "复(dep)->旦(head)", "旦(dep)->大(head)", ..., - with labels 'app', 'app', 'app', while the label of 学 is the dependency label of the whole word 复旦大学 - :param instance: - :return: - """ - words = instance['word_lst'] - labels = instance['labels'] - char_labels = [] - for word, label in zip(words, labels): - for _ in range(len(word)-1): - char_labels.append('APP') - char_labels.append(label) - return char_labels - -# add seg_targets -def add_segs(instance): - words = instance['word_lst'] - segs = [0]*len(instance['chars']) - index = 0 - for word in words: - index = index + len(word) - 1 - segs[index] = len(word)-1 - index = index + 1 - return segs - -# add target_masks -def add_mask(instance): - words = instance['word_lst'] - mask = [] - for word in words: - mask.extend([0] * (len(word) - 1)) - mask.append(1) - return mask diff --git a/reproduction/joint_cws_parse/models/CharParser.py b/reproduction/joint_cws_parse/models/CharParser.py deleted file mode 100644 index bfb5da4e..00000000 --- a/reproduction/joint_cws_parse/models/CharParser.py +++ /dev/null @@ -1,311 +0,0 @@ - - - -from fastNLP.models.biaffine_parser import BiaffineParser -from fastNLP.models.biaffine_parser import ArcBiaffine, LabelBilinear - -import numpy as np -import torch -from torch import nn -from torch.nn import functional as F - -from fastNLP.modules.dropout import TimestepDropout -from fastNLP.modules.encoder.variational_rnn import VarLSTM -from fastNLP import seq_len_to_mask -from fastNLP.embeddings import Embedding - - -def drop_input_independent(word_embeddings, dropout_emb): - batch_size, seq_length, _ = word_embeddings.size() - word_masks = word_embeddings.new(batch_size, seq_length).fill_(1 - dropout_emb) - word_masks = torch.bernoulli(word_masks) - word_masks = word_masks.unsqueeze(dim=2) - word_embeddings = word_embeddings * word_masks - - return word_embeddings - - -class CharBiaffineParser(BiaffineParser): - def __init__(self, char_vocab_size, - emb_dim, - bigram_vocab_size, - trigram_vocab_size, - num_label, - rnn_layers=3, - rnn_hidden_size=800, # size per direction - arc_mlp_size=500, - label_mlp_size=100, - dropout=0.3, - encoder='lstm', - use_greedy_infer=False, - app_index = 0, - pre_chars_embed=None, - pre_bigrams_embed=None, - pre_trigrams_embed=None): - - - super(BiaffineParser, self).__init__() - rnn_out_size = 2 * rnn_hidden_size - self.char_embed = Embedding((char_vocab_size, emb_dim)) - self.bigram_embed = Embedding((bigram_vocab_size, emb_dim)) - self.trigram_embed = Embedding((trigram_vocab_size, emb_dim)) - if pre_chars_embed: - self.pre_char_embed = Embedding(pre_chars_embed) - self.pre_char_embed.requires_grad = False - if pre_bigrams_embed: - self.pre_bigram_embed = Embedding(pre_bigrams_embed) - self.pre_bigram_embed.requires_grad = False - if pre_trigrams_embed: - self.pre_trigram_embed = Embedding(pre_trigrams_embed) - self.pre_trigram_embed.requires_grad = False - self.timestep_drop = TimestepDropout(dropout) - self.encoder_name = encoder - - if encoder == 'var-lstm': - self.encoder = VarLSTM(input_size=emb_dim*3, - hidden_size=rnn_hidden_size, - num_layers=rnn_layers, - bias=True, - batch_first=True, - input_dropout=dropout, - hidden_dropout=dropout, - bidirectional=True) - elif encoder == 'lstm': - self.encoder = nn.LSTM(input_size=emb_dim*3, - hidden_size=rnn_hidden_size, - num_layers=rnn_layers, - bias=True, - batch_first=True, - dropout=dropout, - bidirectional=True) - - else: - raise ValueError('unsupported encoder type: {}'.format(encoder)) - - self.mlp = nn.Sequential(nn.Linear(rnn_out_size, arc_mlp_size * 2 + 
label_mlp_size * 2), - nn.LeakyReLU(0.1), - TimestepDropout(p=dropout),) - self.arc_mlp_size = arc_mlp_size - self.label_mlp_size = label_mlp_size - self.arc_predictor = ArcBiaffine(arc_mlp_size, bias=True) - self.label_predictor = LabelBilinear(label_mlp_size, label_mlp_size, num_label, bias=True) - self.use_greedy_infer = use_greedy_infer - self.reset_parameters() - self.dropout = dropout - - self.app_index = app_index - self.num_label = num_label - if self.app_index != 0: - raise ValueError("app_index must currently be 0") - - def reset_parameters(self): - for name, m in self.named_modules(): - if 'embed' in name: - pass - elif hasattr(m, 'reset_parameters') or hasattr(m, 'init_param'): - pass - else: - for p in m.parameters(): - if len(p.size())>1: - nn.init.xavier_normal_(p, gain=0.1) - else: - nn.init.uniform_(p, -0.1, 0.1) - - def forward(self, chars, bigrams, trigrams, seq_lens, gold_heads=None, pre_chars=None, pre_bigrams=None, - pre_trigrams=None): - """ - max_len includes the root. - :param chars: batch_size x max_len - :param ngrams: batch_size x max_len*ngram_per_char - :param seq_lens: batch_size - :param gold_heads: batch_size x max_len - :param pre_chars: batch_size x max_len - :param pre_ngrams: batch_size x max_len*ngram_per_char - :return dict: parsing results - arc_pred: [batch_size, seq_len, seq_len] - label_pred: [batch_size, seq_len, seq_len] - mask: [batch_size, seq_len] - head_pred: [batch_size, seq_len] if gold_heads is not provided, predicting the heads - """ - # prepare embeddings - batch_size, seq_len = chars.shape - # print('forward {} {}'.format(batch_size, seq_len)) - - # get sequence mask - mask = seq_len_to_mask(seq_lens).long() - - chars = self.char_embed(chars) # [N,L] -> [N,L,C_0] - bigrams = self.bigram_embed(bigrams) # [N,L] -> [N,L,C_1] - trigrams = self.trigram_embed(trigrams) - - if pre_chars is not None: - pre_chars = self.pre_char_embed(pre_chars) - # pre_chars = self.pre_char_fc(pre_chars) - chars = pre_chars + chars - if pre_bigrams is not None: - pre_bigrams = self.pre_bigram_embed(pre_bigrams) - # pre_bigrams = self.pre_bigram_fc(pre_bigrams) - bigrams = bigrams + pre_bigrams - if pre_trigrams is not None: - pre_trigrams = self.pre_trigram_embed(pre_trigrams) - # pre_trigrams = self.pre_trigram_fc(pre_trigrams) - trigrams = trigrams + pre_trigrams - - x = torch.cat([chars, bigrams, trigrams], dim=2) # -> [N,L,C] - - # encoder, extract features - if self.training: - x = drop_input_independent(x, self.dropout) - sort_lens, sort_idx = torch.sort(seq_lens, dim=0, descending=True) - x = x[sort_idx] - x = nn.utils.rnn.pack_padded_sequence(x, sort_lens, batch_first=True) - feat, _ = self.encoder(x) # -> [N,L,C] - feat, _ = nn.utils.rnn.pad_packed_sequence(feat, batch_first=True) - _, unsort_idx = torch.sort(sort_idx, dim=0, descending=False) - feat = feat[unsort_idx] - feat = self.timestep_drop(feat) - - # for arc biaffine - # mlp, reduce dim - feat = self.mlp(feat) - arc_sz, label_sz = self.arc_mlp_size, self.label_mlp_size - arc_dep, arc_head = feat[:,:,:arc_sz], feat[:,:,arc_sz:2*arc_sz] - label_dep, label_head = feat[:,:,2*arc_sz:2*arc_sz+label_sz], feat[:,:,2*arc_sz+label_sz:] - - # biaffine arc classifier - arc_pred = self.arc_predictor(arc_head, arc_dep) # [N, L, L] - - # use gold or predicted arc to predict label - if gold_heads is None or not self.training: - # use greedy decoding in training - if self.training or self.use_greedy_infer: - heads = self.greedy_decoder(arc_pred, mask) - else: - heads = self.mst_decoder(arc_pred, mask) - head_pred = heads - else: 
assert self.training # must be training mode - if gold_heads is None: - heads = self.greedy_decoder(arc_pred, mask) - head_pred = heads - else: - head_pred = None - heads = gold_heads - # heads: batch_size x max_len - - batch_range = torch.arange(start=0, end=batch_size, dtype=torch.long, device=chars.device).unsqueeze(1) - label_head = label_head[batch_range, heads].contiguous() - label_pred = self.label_predictor(label_head, label_dep) # [N, max_len, num_label] - # restriction: the app label may only be predicted when the head is the next character - arange_index = torch.arange(1, seq_len+1, dtype=torch.long, device=chars.device).unsqueeze(0)\ - .repeat(batch_size, 1) # batch_size x max_len - app_masks = heads.ne(arange_index) # batch_size x max_len; positions with value 1 must not predict app - app_masks = app_masks.unsqueeze(2).repeat(1, 1, self.num_label) - app_masks[:, :, 1:] = 0 - label_pred = label_pred.masked_fill(app_masks, -np.inf) - - res_dict = {'arc_pred': arc_pred, 'label_pred': label_pred, 'mask': mask} - if head_pred is not None: - res_dict['head_pred'] = head_pred - return res_dict - - @staticmethod - def loss(arc_pred, label_pred, arc_true, label_true, mask): - """ - Compute loss. - - :param arc_pred: [batch_size, seq_len, seq_len] - :param label_pred: [batch_size, seq_len, n_tags] - :param arc_true: [batch_size, seq_len] - :param label_true: [batch_size, seq_len] - :param mask: [batch_size, seq_len] - :return: loss value - """ - - batch_size, seq_len, _ = arc_pred.shape - flip_mask = (mask.eq(False)) - # _arc_pred = arc_pred.clone() - _arc_pred = arc_pred.masked_fill(flip_mask.unsqueeze(1), -float('inf')) - - arc_true.data[:, 0].fill_(-1) - label_true.data[:, 0].fill_(-1) - - arc_nll = F.cross_entropy(_arc_pred.view(-1, seq_len), arc_true.view(-1), ignore_index=-1) - label_nll = F.cross_entropy(label_pred.view(-1, label_pred.size(-1)), label_true.view(-1), ignore_index=-1) - - return arc_nll + label_nll
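The APP-masking step inside `forward` is easy to check in isolation; a small sketch with toy sizes (only label index 0, i.e. APP, is ever masked):

```python
import numpy as np
import torch

batch_size, seq_len, num_label = 1, 4, 3
heads = torch.tensor([[0, 2, 3, 2]])                      # head of each character
label_logits = torch.zeros(batch_size, seq_len, num_label)

arange_index = torch.arange(1, seq_len + 1).unsqueeze(0)  # index of the next character
app_masks = heads.ne(arange_index)                        # True where head != next char
app_masks = app_masks.unsqueeze(2).repeat(1, 1, num_label)
app_masks[:, :, 1:] = 0                                   # keep all non-APP labels legal
label_logits = label_logits.masked_fill(app_masks, -np.inf)
print(label_logits[0, :, 0])  # tensor([-inf, 0., 0., -inf]): APP allowed only at positions 1 and 2
```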
- - def predict(self, chars, bigrams, trigrams, seq_lens, pre_chars, pre_bigrams, pre_trigrams): - """ - - max_len includes the root. - - :param chars: batch_size x max_len - :param ngrams: batch_size x max_len*ngram_per_char - :param seq_lens: batch_size - :param pre_chars: batch_size x max_len - :param pre_ngrams: batch_size x max_len*ngram_per_char - :return: - """ - res = self(chars, bigrams, trigrams, seq_lens, pre_chars=pre_chars, pre_bigrams=pre_bigrams, - pre_trigrams=pre_trigrams, gold_heads=None) - output = {} - output['arc_pred'] = res.pop('head_pred') - _, label_pred = res.pop('label_pred').max(2) - output['label_pred'] = label_pred - return output - -class CharParser(nn.Module): - def __init__(self, char_vocab_size, - emb_dim, - bigram_vocab_size, - trigram_vocab_size, - num_label, - rnn_layers=3, - rnn_hidden_size=400, # size per direction - arc_mlp_size=500, - label_mlp_size=100, - dropout=0.3, - encoder='var-lstm', - use_greedy_infer=False, - app_index = 0, - pre_chars_embed=None, - pre_bigrams_embed=None, - pre_trigrams_embed=None): - super().__init__() - - self.parser = CharBiaffineParser(char_vocab_size, - emb_dim, - bigram_vocab_size, - trigram_vocab_size, - num_label, - rnn_layers, - rnn_hidden_size, # size per direction - arc_mlp_size, - label_mlp_size, - dropout, - encoder, - use_greedy_infer, - app_index, - pre_chars_embed=pre_chars_embed, - pre_bigrams_embed=pre_bigrams_embed, - pre_trigrams_embed=pre_trigrams_embed) - - def forward(self, chars, bigrams, trigrams, seq_lens, char_heads, char_labels, pre_chars=None, pre_bigrams=None, - pre_trigrams=None): - res_dict = self.parser(chars, bigrams, trigrams, seq_lens, gold_heads=char_heads, pre_chars=pre_chars, - pre_bigrams=pre_bigrams, pre_trigrams=pre_trigrams) - arc_pred = res_dict['arc_pred'] - label_pred = res_dict['label_pred'] - masks = res_dict['mask'] - loss = self.parser.loss(arc_pred, label_pred, char_heads, char_labels, masks) - return {'loss': loss} - - def predict(self, chars, bigrams, trigrams, seq_lens, pre_chars=None, pre_bigrams=None, pre_trigrams=None): - res = self.parser(chars, bigrams, trigrams, seq_lens, gold_heads=None, pre_chars=pre_chars, - pre_bigrams=pre_bigrams, pre_trigrams=pre_trigrams) - output = {} - output['head_preds'] = res.pop('head_pred') - _, label_pred = res.pop('label_pred').max(2) - output['label_preds'] = label_pred - return output diff --git a/reproduction/joint_cws_parse/models/__init__.py b/reproduction/joint_cws_parse/models/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/reproduction/joint_cws_parse/models/callbacks.py b/reproduction/joint_cws_parse/models/callbacks.py deleted file mode 100644 index 8de01109..00000000 --- a/reproduction/joint_cws_parse/models/callbacks.py +++ /dev/null @@ -1,65 +0,0 @@ - -from fastNLP.core.callback import Callback -import torch -from torch import nn - -class OptimizerCallback(Callback): - def __init__(self, optimizer, scheduler, update_every=4): - super().__init__() - - self._optimizer = optimizer - self.scheduler = scheduler - self._update_every = update_every - - def on_backward_end(self): - if self.step % self._update_every==0: - # nn.utils.clip_grad.clip_grad_norm_(self.model.parameters(), 5) - # self._optimizer.step() - self.scheduler.step() - # self.model.zero_grad() - - -class DevCallback(Callback): - def __init__(self, tester, metric_key='u_f1'): - super().__init__() - self.tester = tester - setattr(tester, 'verbose', 0) - - self.metric_key = metric_key - - self.record_best = False - self.best_eval_value = 0 - self.best_eval_res = None - - self.best_dev_res = None # stores the dev performance - - def on_valid_begin(self): - eval_res = self.tester.test() - metric_name = self.tester.metrics[0].__class__.__name__ - metric_value = eval_res[metric_name][self.metric_key] - if metric_value>self.best_eval_value: - self.best_eval_value = metric_value - self.best_epoch = self.trainer.epoch - self.record_best = True - self.best_eval_res = eval_res - self.test_eval_res = eval_res - eval_str = "Epoch {}/{}. 
\n".format(self.trainer.epoch, self.n_epochs) + \ - self.tester._format_eval_results(eval_res) - self.pbar.write(eval_str) - - def on_valid_end(self, eval_result, metric_key, optimizer, is_better_eval): - if self.record_best: - self.best_dev_res = eval_result - self.record_best = False - if is_better_eval: - self.best_dev_res_on_dev = eval_result - self.best_test_res_on_dev = self.test_eval_res - self.dev_epoch = self.epoch - - def on_train_end(self): - print("Got best test performance in epoch:{}\n Test: {}\n Dev:{}\n".format(self.best_epoch, - self.tester._format_eval_results(self.best_eval_res), - self.tester._format_eval_results(self.best_dev_res))) - print("Got best dev performance in epoch:{}\n Test: {}\n Dev:{}\n".format(self.dev_epoch, - self.tester._format_eval_results(self.best_test_res_on_dev), - self.tester._format_eval_results(self.best_dev_res_on_dev))) \ No newline at end of file diff --git a/reproduction/joint_cws_parse/models/metrics.py b/reproduction/joint_cws_parse/models/metrics.py deleted file mode 100644 index bf0f0622..00000000 --- a/reproduction/joint_cws_parse/models/metrics.py +++ /dev/null @@ -1,184 +0,0 @@ -from fastNLP.core.metrics import MetricBase -from fastNLP.core.utils import seq_len_to_mask -import torch - - -class SegAppCharParseF1Metric(MetricBase): - # - def __init__(self, app_index): - super().__init__() - self.app_index = app_index - - self.parse_head_tp = 0 - self.parse_label_tp = 0 - self.rec_tol = 0 - self.pre_tol = 0 - - def evaluate(self, gold_word_pairs, gold_label_word_pairs, head_preds, label_preds, seq_lens, - pun_masks): - """ - - max_len是不包含root的character的长度 - :param gold_word_pairs: List[List[((head_start, head_end), (dep_start, dep_end)), ...]], batch_size - :param gold_label_word_pairs: List[List[((head_start, head_end), label, (dep_start, dep_end)), ...]], batch_size - :param head_preds: batch_size x max_len - :param label_preds: batch_size x max_len - :param seq_lens: - :param pun_masks: batch_size x - :return: - """ - # 去掉root - head_preds = head_preds[:, 1:].tolist() - label_preds = label_preds[:, 1:].tolist() - seq_lens = (seq_lens - 1).tolist() - - # 先解码出words,POS,heads, labels, 对应的character范围 - for b in range(len(head_preds)): - seq_len = seq_lens[b] - head_pred = head_preds[b][:seq_len] - label_pred = label_preds[b][:seq_len] - - words = [] # 存放[word_start, word_end),相对起始位置,不考虑root - heads = [] - labels = [] - ranges = [] # 对应该char是第几个word,长度是seq_len+1 - word_idx = 0 - word_start_idx = 0 - for idx, (label, head) in enumerate(zip(label_pred, head_pred)): - ranges.append(word_idx) - if label == self.app_index: - pass - else: - labels.append(label) - heads.append(head) - words.append((word_start_idx, idx+1)) - word_start_idx = idx+1 - word_idx += 1 - - head_dep_tuple = [] # head在前面 - head_label_dep_tuple = [] - for idx, head in enumerate(heads): - span = words[idx] - if span[0]==span[1]-1 and pun_masks[b, span[0]]: - continue # exclude punctuations - if head == 0: - head_dep_tuple.append((('root', words[idx]))) - head_label_dep_tuple.append(('root', labels[idx], words[idx])) - else: - head_word_idx = ranges[head-1] - head_word_span = words[head_word_idx] - head_dep_tuple.append(((head_word_span, words[idx]))) - head_label_dep_tuple.append((head_word_span, labels[idx], words[idx])) - - gold_head_dep_tuple = set(gold_word_pairs[b]) - gold_head_label_dep_tuple = set(gold_label_word_pairs[b]) - - for head_dep, head_label_dep in zip(head_dep_tuple, head_label_dep_tuple): - if head_dep in gold_head_dep_tuple: - self.parse_head_tp += 1 - 
if head_label_dep in gold_head_label_dep_tuple: - self.parse_label_tp += 1 - self.pre_tol += len(head_dep_tuple) - self.rec_tol += len(gold_head_dep_tuple) - - def get_metric(self, reset=True): - u_p = self.parse_head_tp / self.pre_tol - u_r = self.parse_head_tp / self.rec_tol - u_f = 2*u_p*u_r/(1e-6 + u_p + u_r) - l_p = self.parse_label_tp / self.pre_tol - l_r = self.parse_label_tp / self.rec_tol - l_f = 2*l_p*l_r/(1e-6 + l_p + l_r) - - if reset: - self.parse_head_tp = 0 - self.parse_label_tp = 0 - self.rec_tol = 0 - self.pre_tol = 0 - - return {'u_f1': round(u_f, 4), 'u_p': round(u_p, 4), 'u_r/uas':round(u_r, 4), - 'l_f1': round(l_f, 4), 'l_p': round(l_p, 4), 'l_r/las': round(l_r, 4)} - - -class CWSMetric(MetricBase): - def __init__(self, app_index): - super().__init__() - self.app_index = app_index - self.pre = 0 - self.rec = 0 - self.tp = 0 - - def evaluate(self, seg_targets, seg_masks, label_preds, seq_lens): - """ - - :param seg_targets: batch_size x max_len; at each word-final position the target is that word's length minus 1 - :param seg_masks: batch_size x max_len; 1 only at word-final positions - :param label_preds: batch_size x max_len - :param seq_lens: batch_size - :return: - """ - - pred_masks = torch.zeros_like(seg_masks) - pred_segs = torch.zeros_like(seg_targets) - - seq_lens = (seq_lens - 1).tolist() - for idx, label_pred in enumerate(label_preds[:, 1:].tolist()): - seq_len = seq_lens[idx] - label_pred = label_pred[:seq_len] - word_len = 0 - for l_i, label in enumerate(label_pred): - if label==self.app_index and l_i!=len(label_pred)-1: - word_len += 1 - else: - pred_segs[idx, l_i] = word_len # this word has length word_len - pred_masks[idx, l_i] = 1 - word_len = 0 - - right_mask = seg_targets.eq(pred_segs) # the length predictions agree - self.rec += seg_masks.sum().item() - self.pre += pred_masks.sum().item() - # and pred and target fire at the same positions - self.tp += (right_mask.__and__(pred_masks.byte().__and__(seg_masks.byte()))).sum().item() - - def get_metric(self, reset=True): - res = {} - res['rec'] = round(self.tp/(self.rec+1e-6), 4) - res['pre'] = round(self.tp/(self.pre+1e-6), 4) - res['f1'] = round(2*res['rec']*res['pre']/(res['pre'] + res['rec'] + 1e-6), 4) - - if reset: - self.pre = 0 - self.rec = 0 - self.tp = 0 - - return res
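The bookkeeping in `CWSMetric.get_metric` reduces to ordinary boundary-level precision/recall/F1; toy counts for illustration:

```python
tp, rec, pre = 3, 4, 5   # 3 correct word-final positions, 4 gold, 5 predicted
recall = round(tp / (rec + 1e-6), 4)
precision = round(tp / (pre + 1e-6), 4)
f1 = round(2 * recall * precision / (precision + recall + 1e-6), 4)
print(recall, precision, f1)  # 0.75 0.6 0.6667
```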
- """ - if seq_lens is None: - seq_mask = head_preds.new_ones(head_preds.size(), dtype=torch.byte) - else: - seq_mask = seq_len_to_mask(seq_lens.long(), float=False) - # mask out tag - seq_mask[:, 0] = 0 - head_pred_correct = (head_preds == heads).__and__(seq_mask) - label_pred_correct = (label_preds == labels).__and__(head_pred_correct) - self.num_arc += head_pred_correct.float().sum().item() - self.num_label += label_pred_correct.float().sum().item() - self.num_sample += seq_mask.sum().item() - diff --git a/reproduction/joint_cws_parse/train.py b/reproduction/joint_cws_parse/train.py deleted file mode 100644 index ed4b07f0..00000000 --- a/reproduction/joint_cws_parse/train.py +++ /dev/null @@ -1,137 +0,0 @@ -import sys -sys.path.append('../..') - -from reproduction.joint_cws_parse.data.data_loader import CTBxJointLoader -from fastNLP.embeddings.static_embedding import StaticEmbedding -from torch import nn -from functools import partial -from reproduction.joint_cws_parse.models.CharParser import CharParser -from reproduction.joint_cws_parse.models.metrics import SegAppCharParseF1Metric, CWSMetric -from fastNLP import BucketSampler, Trainer -from torch import optim -from reproduction.joint_cws_parse.models.callbacks import DevCallback -from torch.optim.lr_scheduler import StepLR -from fastNLP import Tester -from fastNLP import GradientClipCallback, LRScheduler -import os -from fastNLP import cache_results - -def set_random_seed(random_seed=666): - import random, numpy, torch - random.seed(random_seed) - numpy.random.seed(random_seed) - torch.cuda.manual_seed(random_seed) - torch.random.manual_seed(random_seed) - -uniform_init = partial(nn.init.normal_, std=0.02) - -################################################### -# 需要变动的超参放到这里 -lr = 0.002 # 0.01~0.001 -dropout = 0.33 # 0.3~0.6 -weight_decay = 0 # 1e-5, 1e-6, 0 -arc_mlp_size = 500 # 200, 300 -rnn_hidden_size = 400 # 200, 300, 400 -rnn_layers = 3 # 2, 3 -encoder = 'var-lstm' # var-lstm, lstm -emb_size = 100 # 64 , 100 -label_mlp_size = 100 - -batch_size = 32 -update_every = 4 -n_epochs = 100 -data_name = 'new_ctb7' -#################################################### -data_folder = f'/remote-home/hyan01/exps/JointCwsPosParser/data/{data_name}/output' # 填写在数据所在文件夹, 文件夹下应该有train, dev, test等三个文件 -vector_folder = '/remote-home/hyan01/exps/CWS/pretrain/vectors' # 预训练的vector,下面应该包含三个文件: 1grams_t3_m50_corpus.txt, 2grams_t3_m50_corpus.txt, 3grams_t3_m50_corpus.txt - -set_random_seed(1234) -device = 0 - -@cache_results('caches/{}.pkl'.format(data_name)) -def get_data(): - data = CTBxJointLoader().process(data_folder) - char_labels_vocab = data.vocabs['char_labels'] - - pre_chars_vocab = data.vocabs['pre_chars'] - pre_bigrams_vocab = data.vocabs['pre_bigrams'] - pre_trigrams_vocab = data.vocabs['pre_trigrams'] - - chars_vocab = data.vocabs['chars'] - bigrams_vocab = data.vocabs['bigrams'] - trigrams_vocab = data.vocabs['trigrams'] - pre_chars_embed = StaticEmbedding(pre_chars_vocab, - model_dir_or_name=os.path.join(vector_folder, '1grams_t3_m50_corpus.txt'), - init_method=uniform_init, normalize=False) - pre_chars_embed.embedding.weight.data = pre_chars_embed.embedding.weight.data / pre_chars_embed.embedding.weight.data.std() - pre_bigrams_embed = StaticEmbedding(pre_bigrams_vocab, - model_dir_or_name=os.path.join(vector_folder, '2grams_t3_m50_corpus.txt'), - init_method=uniform_init, normalize=False) - pre_bigrams_embed.embedding.weight.data = pre_bigrams_embed.embedding.weight.data / pre_bigrams_embed.embedding.weight.data.std() - 
pre_trigrams_embed = StaticEmbedding(pre_trigrams_vocab, - model_dir_or_name=os.path.join(vector_folder, '3grams_t3_m50_corpus.txt'), - init_method=uniform_init, normalize=False) - pre_trigrams_embed.embedding.weight.data = pre_trigrams_embed.embedding.weight.data / pre_trigrams_embed.embedding.weight.data.std() - - return chars_vocab, bigrams_vocab, trigrams_vocab, char_labels_vocab, pre_chars_embed, pre_bigrams_embed, pre_trigrams_embed, data - -chars_vocab, bigrams_vocab, trigrams_vocab, char_labels_vocab, pre_chars_embed, pre_bigrams_embed, pre_trigrams_embed, data = get_data() - -print(data) -model = CharParser(char_vocab_size=len(chars_vocab), - emb_dim=emb_size, - bigram_vocab_size=len(bigrams_vocab), - trigram_vocab_size=len(trigrams_vocab), - num_label=len(char_labels_vocab), - rnn_layers=rnn_layers, - rnn_hidden_size=rnn_hidden_size, - arc_mlp_size=arc_mlp_size, - label_mlp_size=label_mlp_size, - dropout=dropout, - encoder=encoder, - use_greedy_infer=False, - app_index=char_labels_vocab['APP'], - pre_chars_embed=pre_chars_embed, - pre_bigrams_embed=pre_bigrams_embed, - pre_trigrams_embed=pre_trigrams_embed) - -metric1 = SegAppCharParseF1Metric(char_labels_vocab['APP']) -metric2 = CWSMetric(char_labels_vocab['APP']) -metrics = [metric1, metric2] - -optimizer = optim.Adam([param for param in model.parameters() if param.requires_grad], lr=lr, - weight_decay=weight_decay, betas=[0.9, 0.9]) - -sampler = BucketSampler(seq_len_field_name='seq_lens') -callbacks = [] - -from fastNLP.core.callback import Callback -from torch.optim.lr_scheduler import LambdaLR -class SchedulerCallback(Callback): - def __init__(self, scheduler): - super().__init__() - self.scheduler = scheduler - - def on_backward_end(self): - if self.step % self.update_every==0: - self.scheduler.step() - -scheduler = LambdaLR(optimizer, lr_lambda=lambda step:(0.75)**(step//5000)) -# scheduler = LambdaLR(optimizer, lr_lambda=lambda step:(0.75)**(step//5000)) -# scheduler = StepLR(optimizer, step_size=18, gamma=0.75) -scheduler_callback = SchedulerCallback(scheduler) -# callbacks.append(optim_callback) -# scheduler_callback = LRScheduler(scheduler) -callbacks.append(scheduler_callback) -callbacks.append(GradientClipCallback(clip_type='value', clip_value=5)) - -tester = Tester(data=data.datasets['test'], model=model, metrics=metrics, - batch_size=64, device=device, verbose=0) -dev_callback = DevCallback(tester) -callbacks.append(dev_callback) - -trainer = Trainer(data.datasets['train'], model, loss=None, metrics=metrics, n_epochs=n_epochs, batch_size=batch_size, print_every=3, - validate_every=-1, dev_data=data.datasets['dev'], save_path=None, optimizer=optimizer, - check_code_level=0, metric_key='u_f1', sampler=sampler, num_workers=2, use_tqdm=True, - device=device, callbacks=callbacks, update_every=update_every) -trainer.train() \ No newline at end of file diff --git a/reproduction/legacy/Biaffine_parser/cfg.cfg b/reproduction/legacy/Biaffine_parser/cfg.cfg deleted file mode 100644 index 03040600..00000000 --- a/reproduction/legacy/Biaffine_parser/cfg.cfg +++ /dev/null @@ -1,34 +0,0 @@ -[train] -n_epochs = 20 -batch_size = 32 -use_cuda = true -use_tqdm=true -validate_every = 1000 -use_golden_train=true - -[test] -save_output = true -validate_in_training = true -save_dev_input = false -save_loss = true -batch_size = 64 -pickle_path = "./save/" -use_cuda = true - -[model] -word_vocab_size = -1 -word_emb_dim = 300 -pos_vocab_size = -1 -pos_emb_dim = 100 -rnn_layers = 3 -rnn_hidden_size = 256 -arc_mlp_size = 500 -label_mlp_size 
= 100 -num_label = -1 -dropout = 0.3 -encoder="var-lstm" -use_greedy_infer=false - -[optim] -lr = 2e-3 -;weight_decay = 3e-5 diff --git a/reproduction/legacy/Biaffine_parser/infer.py b/reproduction/legacy/Biaffine_parser/infer.py deleted file mode 100644 index 8ebfa91c..00000000 --- a/reproduction/legacy/Biaffine_parser/infer.py +++ /dev/null @@ -1,83 +0,0 @@ -import os -import sys - -sys.path.extend(['/home/yfshao/workdir/dev_fastnlp']) - -from fastNLP.api.processor import * -from fastNLP.models.biaffine_parser import BiaffineParser -from fastNLP.io.config_io import ConfigSection, ConfigLoader - -import _pickle as pickle -import torch - -def _load(path): - with open(path, 'rb') as f: - obj = pickle.load(f) - return obj - -def _load_all(src): - model_path = src - src = os.path.dirname(src) - - word_v = _load(src+'/word_v.pkl') - pos_v = _load(src+'/pos_v.pkl') - tag_v = _load(src+'/tag_v.pkl') - pos_pp = torch.load(src+'/pos_pp.pkl')['pipeline'] - - model_args = ConfigSection() - ConfigLoader.load_config('cfg.cfg', {'model': model_args}) - model_args['word_vocab_size'] = len(word_v) - model_args['pos_vocab_size'] = len(pos_v) - model_args['num_label'] = len(tag_v) - - model = BiaffineParser(**model_args.data) - model.load_state_dict(torch.load(model_path)) - return { - 'word_v': word_v, - 'pos_v': pos_v, - 'tag_v': tag_v, - 'model': model, - 'pos_pp':pos_pp, - } - -def build(load_path, save_path): - BOS = '' - NUM = '' - _dict = _load_all(load_path) - word_vocab = _dict['word_v'] - pos_vocab = _dict['pos_v'] - tag_vocab = _dict['tag_v'] - pos_pp = _dict['pos_pp'] - model = _dict['model'] - print('load model from {}'.format(load_path)) - word_seq = 'raw_word_seq' - pos_seq = 'raw_pos_seq' - - # build pipeline - # input - pipe = pos_pp - pipe.pipeline.pop(-1) - pipe.add_processor(Num2TagProcessor(NUM, 'word_list', word_seq)) - pipe.add_processor(PreAppendProcessor(BOS, word_seq)) - pipe.add_processor(PreAppendProcessor(BOS, 'pos_list', pos_seq)) - pipe.add_processor(IndexerProcessor(word_vocab, word_seq, 'word_seq')) - pipe.add_processor(IndexerProcessor(pos_vocab, pos_seq, 'pos_seq')) - pipe.add_processor(SeqLenProcessor('word_seq', 'word_seq_origin_len')) - pipe.add_processor(SetTensorProcessor({'word_seq':True, 'pos_seq':True, 'word_seq_origin_len':True}, default=False)) - pipe.add_processor(ModelProcessor(model, 'word_seq_origin_len')) - pipe.add_processor(SliceProcessor(1, None, None, 'head_pred', 'heads')) - pipe.add_processor(SliceProcessor(1, None, None, 'label_pred', 'label_pred')) - pipe.add_processor(Index2WordProcessor(tag_vocab, 'label_pred', 'labels')) - if not os.path.exists(save_path): - os.makedirs(save_path) - with open(save_path+'/pipeline.pkl', 'wb') as f: - torch.save({'pipeline': pipe}, f) - print('save pipeline in {}'.format(save_path)) - - -import argparse -parser = argparse.ArgumentParser(description='build pipeline for parser.') -parser.add_argument('--src', type=str, default='/home/yfshao/workdir/dev_fastnlp/reproduction/Biaffine_parser/save') -parser.add_argument('--dst', type=str, default='/home/yfshao/workdir/dev_fastnlp/reproduction/Biaffine_parser/pipe') -args = parser.parse_args() -build(args.src, args.dst) diff --git a/reproduction/legacy/Biaffine_parser/main.py b/reproduction/legacy/Biaffine_parser/main.py deleted file mode 100644 index f4fd5836..00000000 --- a/reproduction/legacy/Biaffine_parser/main.py +++ /dev/null @@ -1,114 +0,0 @@ -import sys - -sys.path.extend(['/home/yfshao/workdir/dev_fastnlp']) - -import torch -import argparse - -from 
fastNLP.io.dataset_loader import ConllxDataLoader, add_seg_tag -from fastNLP.core.dataset import DataSet -from fastNLP.core.instance import Instance - -parser = argparse.ArgumentParser() -parser.add_argument('--pipe', type=str, default='') -parser.add_argument('--gold_data', type=str, default='') -parser.add_argument('--new_data', type=str) -args = parser.parse_args() - -pipe = torch.load(args.pipe)['pipeline'] -for p in pipe: - if p.field_name == 'word_list': - print(p.field_name) - p.field_name = 'gold_words' - elif p.field_name == 'pos_list': - print(p.field_name) - p.field_name = 'gold_pos' - - -data = ConllxDataLoader().load(args.gold_data) -ds = DataSet() -for ins1, ins2 in zip(add_seg_tag(data), data): - ds.append(Instance(words=ins1[0], tag=ins1[1], - gold_words=ins2[0], gold_pos=ins2[1], - gold_heads=ins2[2], gold_head_tags=ins2[3])) - -ds = pipe(ds) - -seg_threshold = 0. -pos_threshold = 0. -parse_threshold = 0.74 - - -def get_heads(ins, head_f, word_f): - head_pred = [] - for i, idx in enumerate(ins[head_f]): - j = idx - 1 if idx != 0 else i - head_pred.append(ins[word_f][j]) - return head_pred - -def evaluate(ins): - seg_count = sum([1 for i, j in zip(ins['word_list'], ins['gold_words']) if i == j]) - pos_count = sum([1 for i, j in zip(ins['pos_list'], ins['gold_pos']) if i == j]) - head_count = sum([1 for i, j in zip(ins['heads'], ins['gold_heads']) if i == j]) - total = len(ins['gold_words']) - return seg_count / total, pos_count / total, head_count / total - -def is_ok(x): - seg, pos, head = x[1] - return seg > seg_threshold and pos > pos_threshold and head > parse_threshold - -res_list = [] - -for i, ins in enumerate(ds): - res_list.append((i, evaluate(ins))) - -res_list = list(filter(is_ok, res_list)) -print('{} {}'.format(len(ds), len(res_list))) - -seg_cor, pos_cor, head_cor, label_cor, total = 0,0,0,0,0 -for i, _ in res_list: - ins = ds[i] - # print(i) - # print('gold_words:\t', ins['gold_words']) - # print('predict_words:\t', ins['word_list']) - # print('gold_tag:\t', ins['gold_pos']) - # print('predict_tag:\t', ins['pos_list']) - # print('gold_heads:\t', ins['gold_heads']) - # print('predict_heads:\t', ins['heads'].tolist()) - # print('gold_head_tags:\t', ins['gold_head_tags']) - # print('predict_labels:\t', ins['labels']) - # print() - - head_pred = ins['heads'] - head_gold = ins['gold_heads'] - label_pred = ins['labels'] - label_gold = ins['gold_head_tags'] - total += len(head_gold) - seg_cor += sum([1 for i, j in zip(ins['word_list'], ins['gold_words']) if i == j]) - pos_cor += sum([1 for i, j in zip(ins['pos_list'], ins['gold_pos']) if i == j]) - length = len(head_gold) - for i in range(length): - head_cor += 1 if head_pred[i] == head_gold[i] else 0 - label_cor += 1 if head_pred[i] == head_gold[i] and label_gold[i] == label_pred[i] else 0 - - -print('SEG: {}, POS: {}, UAS: {}, LAS: {}'.format(seg_cor/total, pos_cor/total, head_cor/total, label_cor/total)) - -colln_path = args.gold_data -new_colln_path = args.new_data - -index_list = [x[0] for x in res_list] - -with open(colln_path, 'r', encoding='utf-8') as f1, \ - open(new_colln_path, 'w', encoding='utf-8') as f2: - for idx, ins in enumerate(ds): - if idx in index_list: - length = len(ins['gold_words']) - pad = ['_' for _ in range(length)] - for x in zip( - map(str, range(1, length+1)), ins['gold_words'], ins['gold_words'], ins['gold_pos'], - pad, pad, map(str, ins['gold_heads']), ins['gold_head_tags']): - new_lines = '\t'.join(x) - f2.write(new_lines) - f2.write('\n') - f2.write('\n') diff --git 
a/reproduction/legacy/Biaffine_parser/run.py b/reproduction/legacy/Biaffine_parser/run.py deleted file mode 100644 index 13c79b83..00000000 --- a/reproduction/legacy/Biaffine_parser/run.py +++ /dev/null @@ -1,248 +0,0 @@ -import os -import sys - -sys.path.append(os.path.join(os.path.dirname(__file__), '../..')) - -import fastNLP - -from fastNLP.core.trainer import Trainer -from fastNLP.core.instance import Instance -from fastNLP.api.pipeline import Pipeline -from fastNLP.models.biaffine_parser import BiaffineParser, ParserMetric, ParserLoss -from fastNLP.core.tester import Tester -from fastNLP.io.config_io import ConfigLoader, ConfigSection -from fastNLP.io.model_io import ModelLoader -from fastNLP.io.dataset_loader import ConllxDataLoader -from fastNLP.api.processor import * -from fastNLP.io.embed_loader import EmbedLoader -from fastNLP.core.callback import Callback - -BOS = '' -EOS = '' -UNK = '' -PAD = '' -NUM = '' -ENG = '' - -# not in the file's dir -if len(os.path.dirname(__file__)) != 0: - os.chdir(os.path.dirname(__file__)) - -def convert(data): - dataset = DataSet() - for sample in data: - word_seq = [BOS] + sample['words'] - pos_seq = [BOS] + sample['pos_tags'] - heads = [0] + sample['heads'] - head_tags = [BOS] + sample['labels'] - dataset.append(Instance(raw_words=word_seq, - pos=pos_seq, - gold_heads=heads, - arc_true=heads, - tags=head_tags)) - return dataset - - -def load(path): - data = ConllxDataLoader().load(path) - return convert(data) - - -datadir = "/remote-home/yfshao/workdir/ctb9.0/" -train_data_name = "train.conllx" -dev_data_name = "dev.conllx" -test_data_name = "test.conllx" -emb_file_name = "/remote-home/yfshao/workdir/word_vector/cc.zh.300.vec" - -cfgfile = './cfg.cfg' -processed_datadir = './save' - -# Config Loader -train_args = ConfigSection() -model_args = ConfigSection() -optim_args = ConfigSection() -ConfigLoader.load_config(cfgfile, {"train": train_args, "model": model_args, "optim": optim_args}) -print('trainre Args:', train_args.data) -print('model Args:', model_args.data) -print('optim_args', optim_args.data) - - -# Pickle Loader -def save_data(dirpath, **kwargs): - import _pickle - if not os.path.exists(dirpath): - os.mkdir(dirpath) - for name, data in kwargs.items(): - with open(os.path.join(dirpath, name+'.pkl'), 'wb') as f: - _pickle.dump(data, f) - - -def load_data(dirpath): - import _pickle - datas = {} - for f_name in os.listdir(dirpath): - if not f_name.endswith('.pkl'): - continue - name = f_name[:-4] - with open(os.path.join(dirpath, f_name), 'rb') as f: - datas[name] = _pickle.load(f) - return datas - -def P2(data, field, length): - ds = [ins for ins in data if len(ins[field]) >= length] - data.clear() - data.extend(ds) - return ds - -def update_v(vocab, data, field): - data.apply(lambda x: vocab.add_word_lst(x[field]), new_field_name=None) - - -# use pretrain embedding -word_v = Vocabulary(unknown=UNK, padding=PAD) -pos_v = Vocabulary(unknown=None, padding=PAD) -tag_v = Vocabulary(unknown=None, padding=None) -train_data = load(os.path.join(datadir, train_data_name)) -dev_data = load(os.path.join(datadir, dev_data_name)) -test_data = load(os.path.join(datadir, test_data_name)) -print('load raw data and preprocess') - -num_p = Num2TagProcessor(tag=NUM, field_name='raw_words', new_added_field_name='words') -for ds in (train_data, dev_data, test_data): - num_p(ds) -update_v(word_v, train_data, 'words') -update_v(pos_v, train_data, 'pos') -update_v(tag_v, train_data, 'tags') - -print('vocab build success {}, {}, {}'.format(len(word_v), 
len(pos_v), len(tag_v))) - -# Model -model_args['word_vocab_size'] = len(word_v) -model_args['pos_vocab_size'] = len(pos_v) -model_args['num_label'] = len(tag_v) - -model = BiaffineParser(**model_args.data) -print(model) - -word_idxp = IndexerProcessor(word_v, 'words', 'word_seq') -pos_idxp = IndexerProcessor(pos_v, 'pos', 'pos_seq') -tag_idxp = IndexerProcessor(tag_v, 'tags', 'label_true') -seq_p = SeqLenProcessor('word_seq', 'seq_lens') - -set_input_p = SetInputProcessor('word_seq', 'pos_seq', 'seq_lens', flag=True) -set_target_p = SetTargetProcessor('arc_true', 'label_true', 'seq_lens', flag=True) - -label_toword_p = Index2WordProcessor(vocab=tag_v, field_name='label_pred', new_added_field_name='label_pred_seq') - -for ds in (train_data, dev_data, test_data): - word_idxp(ds) - pos_idxp(ds) - tag_idxp(ds) - seq_p(ds) - set_input_p(ds) - set_target_p(ds) - -if train_args['use_golden_train']: - train_data.set_input('gold_heads', flag=True) -train_args.data.pop('use_golden_train') - -print(test_data[0]) -print('train len {}'.format(len(train_data))) -print('dev len {}'.format(len(dev_data))) -print('test len {}'.format(len(test_data))) - - -def train(path): - # test saving pipeline - save_pipe(path) - embed = EmbedLoader.load_with_vocab(emb_file_name, word_v) - embed = torch.tensor(embed, dtype=torch.float32) - - # embed = EmbedLoader.fast_load_embedding(emb_dim=model_args['word_emb_dim'], emb_file=emb_file_name, vocab=word_v) - # embed = torch.tensor(embed, dtype=torch.float32) - # model.word_embedding = torch.nn.Embedding.from_pretrained(embed, freeze=True) - model.word_embedding.padding_idx = word_v.padding_idx - model.word_embedding.weight.data[word_v.padding_idx].fill_(0) - model.pos_embedding.padding_idx = pos_v.padding_idx - model.pos_embedding.weight.data[pos_v.padding_idx].fill_(0) - - class MyCallback(Callback): - def on_step_end(self, optimizer): - step = self.trainer.step - # learning rate decay - if step > 0 and step % 1000 == 0: - for pg in optimizer.param_groups: - pg['lr'] *= 0.93 - print('decay lr to {}'.format([pg['lr'] for pg in optimizer.param_groups])) - - if step == 3000: - # start training embedding - print('start training embedding at {}'.format(step)) - model = self.trainer.model - for m in model.modules(): - if isinstance(m, torch.nn.Embedding): - m.weight.requires_grad = True - - # Trainer - trainer = Trainer(train_data=train_data, model=model, optimizer=fastNLP.Adam(**optim_args.data), loss=ParserLoss(), - dev_data=dev_data, metrics=ParserMetric(), metric_key='UAS', save_path=path, - callbacks=[MyCallback()]) - - # Start training - try: - trainer.train() - print("Training finished!") - finally: - # save pipeline - save_pipe(path) - print('pipe saved') - -def save_pipe(path): - pipe = Pipeline(processors=[num_p, word_idxp, pos_idxp, seq_p, set_input_p]) - pipe.add_processor(ModelProcessor(model=model, batch_size=32)) - pipe.add_processor(label_toword_p) - os.makedirs(path, exist_ok=True) - torch.save({'pipeline': pipe, - 'names':['num word_idx pos_idx seq set_input model tag_to_word'.split()], - }, os.path.join(path, 'pipe.pkl')) - - -def test(path): - # Tester - tester = Tester(**test_args.data) - - # Model - model = BiaffineParser(**model_args.data) - model.eval() - try: - ModelLoader.load_pytorch(model, path) - print('model parameter loaded!') - except Exception as _: - print("No saved model. 
Abort test.") - raise - - # Start training - print("Testing Train data") - tester.test(model, train_data) - print("Testing Dev data") - tester.test(model, dev_data) - print("Testing Test data") - tester.test(model, test_data) - - -if __name__ == "__main__": - import argparse - parser = argparse.ArgumentParser(description='Run a chinese word segmentation model') - parser.add_argument('--mode', help='set the model\'s model', choices=['train', 'test', 'infer']) - parser.add_argument('--path', type=str, default='') - # parser.add_argument('--dst', type=str, default='') - args = parser.parse_args() - if args.mode == 'train': - train(args.path) - elif args.mode == 'test': - test(args.path) - elif args.mode == 'infer': - pass - else: - print('no mode specified for model!') - parser.print_help() diff --git a/reproduction/legacy/Biaffine_parser/util.py b/reproduction/legacy/Biaffine_parser/util.py deleted file mode 100644 index aa40e4e9..00000000 --- a/reproduction/legacy/Biaffine_parser/util.py +++ /dev/null @@ -1,27 +0,0 @@ -class MyDataloader: - def load(self, data_path): - with open(data_path, "r", encoding="utf-8") as f: - lines = f.readlines() - data = self.parse(lines) - return data - - def parse(self, lines): - """ - [ - [word], [pos], [head_index], [head_tag] - ] - """ - sample = [] - data = [] - for i, line in enumerate(lines): - line = line.strip() - if len(line) == 0 or i + 1 == len(lines): - data.append(list(map(list, zip(*sample)))) - sample = [] - else: - sample.append(line.split()) - if len(sample) > 0: - data.append(list(map(list, zip(*sample)))) - return data - - diff --git a/reproduction/legacy/Chinese_word_segmentation/__init__.py b/reproduction/legacy/Chinese_word_segmentation/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/reproduction/legacy/Chinese_word_segmentation/cws.cfg b/reproduction/legacy/Chinese_word_segmentation/cws.cfg deleted file mode 100644 index d2263353..00000000 --- a/reproduction/legacy/Chinese_word_segmentation/cws.cfg +++ /dev/null @@ -1,46 +0,0 @@ -[train] -epochs = 40 -batch_size = 8 -pickle_path = "./save/" -validate = true -save_best_dev = true -model_saved_path = "./save/" -rnn_hidden_units = 100 -word_emb_dim = 100 -use_crf = true -use_cuda = true - -[test] -save_output = true -validate_in_training = true -save_dev_input = false -save_loss = true -batch_size = 640 -pickle_path = "./save/" -use_crf = true -use_cuda = true - - -[POS_test] -save_output = true -validate_in_training = true -save_dev_input = false -save_loss = true -batch_size = 640 -pickle_path = "./save/" -use_crf = true -use_cuda = true -rnn_hidden_units = 100 -word_emb_dim = 100 - -[model] -save_output = true -validate_in_training = true -save_dev_input = false -save_loss = true -batch_size = 640 -pickle_path = "./save/" -use_crf = true -use_cuda = true -rnn_hidden_units = 100 -word_emb_dim = 100 \ No newline at end of file diff --git a/reproduction/legacy/Chinese_word_segmentation/cws_io/__init__.py b/reproduction/legacy/Chinese_word_segmentation/cws_io/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/reproduction/legacy/Chinese_word_segmentation/cws_io/cws_reader.py b/reproduction/legacy/Chinese_word_segmentation/cws_io/cws_reader.py deleted file mode 100644 index b28b04f6..00000000 --- a/reproduction/legacy/Chinese_word_segmentation/cws_io/cws_reader.py +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/reproduction/legacy/Chinese_word_segmentation/models/__init__.py b/reproduction/legacy/Chinese_word_segmentation/models/__init__.py 
deleted file mode 100644 index e69de29b..00000000 diff --git a/reproduction/legacy/Chinese_word_segmentation/models/cws_model.py b/reproduction/legacy/Chinese_word_segmentation/models/cws_model.py deleted file mode 100644 index 0d10d2e5..00000000 --- a/reproduction/legacy/Chinese_word_segmentation/models/cws_model.py +++ /dev/null @@ -1,189 +0,0 @@
-
-import torch
-from torch import nn
-
-from fastNLP.models.base_model import BaseModel
-from fastNLP.modules.decoder.mlp import MLP
-from reproduction.legacy.Chinese_word_segmentation.utils import seq_lens_to_mask
-
-
-class CWSBiLSTMEncoder(BaseModel):
-    def __init__(self, vocab_num, embed_dim=100, bigram_vocab_num=None, bigram_embed_dim=100, num_bigram_per_char=None,
-                 hidden_size=200, bidirectional=True, embed_drop_p=0.2, num_layers=1):
-        super().__init__()
-
-        self.input_size = 0
-        self.num_bigram_per_char = num_bigram_per_char
-        self.bidirectional = bidirectional
-        self.num_layers = num_layers
-        self.embed_drop_p = embed_drop_p
-        if self.bidirectional:
-            self.hidden_size = hidden_size//2
-            self.num_directions = 2
-        else:
-            self.hidden_size = hidden_size
-            self.num_directions = 1
-
-        if bigram_vocab_num is not None:
-            assert num_bigram_per_char is not None, "Specify num_bigram_per_char."
-
-        if vocab_num is not None:
-            self.char_embedding = nn.Embedding(num_embeddings=vocab_num, embedding_dim=embed_dim)
-            self.input_size += embed_dim
-
-        if bigram_vocab_num is not None:
-            self.bigram_embedding = nn.Embedding(num_embeddings=bigram_vocab_num, embedding_dim=bigram_embed_dim)
-            self.input_size += self.num_bigram_per_char*bigram_embed_dim
-
-        if self.embed_drop_p is not None:
-            self.embedding_drop = nn.Dropout(p=self.embed_drop_p)
-
-        self.lstm = nn.LSTM(input_size=self.input_size, hidden_size=self.hidden_size, bidirectional=self.bidirectional,
-                            batch_first=True, num_layers=self.num_layers)
-
-        self.reset_parameters()
-
-    def reset_parameters(self):
-        for name, param in self.named_parameters():
-            if 'bias_hh' in name:
-                nn.init.constant_(param, 0)
-            elif 'bias_ih' in name:
-                nn.init.constant_(param, 1)
-            else:
-                nn.init.xavier_uniform_(param)
-
-    def init_embedding(self, embedding, embed_name):
-        if embed_name == 'bigram':
-            self.bigram_embedding.weight.data = torch.from_numpy(embedding)
-        elif embed_name == 'char':
-            self.char_embedding.weight.data = torch.from_numpy(embedding)
-
-
-    def forward(self, chars, bigrams=None, seq_lens=None):
-
-        batch_size, max_len = chars.size()
-
-        x_tensor = self.char_embedding(chars)
-
-        if hasattr(self, 'bigram_embedding'):
-            bigram_tensor = self.bigram_embedding(bigrams).view(batch_size, max_len, -1)
-            x_tensor = torch.cat([x_tensor, bigram_tensor], dim=2)
-        x_tensor = self.embedding_drop(x_tensor)
-        sorted_lens, sorted_indices = torch.sort(seq_lens, descending=True)
-        packed_x = nn.utils.rnn.pack_padded_sequence(x_tensor[sorted_indices], sorted_lens, batch_first=True)
-
-        outputs, _ = self.lstm(packed_x)
-        outputs, _ = nn.utils.rnn.pad_packed_sequence(outputs, batch_first=True)
-
-        _, desorted_indices = torch.sort(sorted_indices, descending=False)
-        outputs = outputs[desorted_indices]
-
-        return outputs
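CWSBiLSTMEncoder.forward above relies on the standard sort, pack, run-the-LSTM, unpack, unsort sequence that pack_padded_sequence requires. A minimal, self-contained sketch of the same pattern (shapes and values are illustrative only):

```python
import torch
from torch import nn

# Toy batch: 3 padded sequences with feature size 8, true lengths 5/3/4.
lstm = nn.LSTM(input_size=8, hidden_size=16, batch_first=True)
x = torch.randn(3, 5, 8)
seq_lens = torch.tensor([5, 3, 4])

# 1) sort by length (pack_padded_sequence expects descending lengths)
sorted_lens, sorted_idx = torch.sort(seq_lens, descending=True)
packed = nn.utils.rnn.pack_padded_sequence(x[sorted_idx], sorted_lens, batch_first=True)
# 2) run the RNN on the packed batch, then unpack back to a padded tensor
out, _ = lstm(packed)
out, _ = nn.utils.rnn.pad_packed_sequence(out, batch_first=True)
# 3) invert the permutation to restore the original batch order
_, desorted_idx = torch.sort(sorted_idx)
out = out[desorted_idx]
print(out.shape)  # torch.Size([3, 5, 16])
```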
-
-class CWSBiLSTMSegApp(BaseModel):
-    def __init__(self, vocab_num, embed_dim=100, bigram_vocab_num=None, bigram_embed_dim=100, num_bigram_per_char=None,
-                 hidden_size=200, bidirectional=True, embed_drop_p=None, num_layers=1, tag_size=2):
-        super(CWSBiLSTMSegApp, self).__init__()
-
-        self.tag_size = tag_size
-
-        self.encoder_model = CWSBiLSTMEncoder(vocab_num, embed_dim, bigram_vocab_num, bigram_embed_dim, num_bigram_per_char,
-                                              hidden_size, bidirectional, embed_drop_p, num_layers)
-
-        size_layer = [hidden_size, 200, tag_size]
-        self.decoder_model = MLP(size_layer)
-
-
-    def forward(self, chars, seq_lens, bigrams=None):
-        device = self.parameters().__next__().device
-        chars = chars.to(device).long()
-        if bigrams is not None:
-            bigrams = bigrams.to(device).long()
-        seq_lens = seq_lens.to(device).long()
-
-        feats = self.encoder_model(chars, bigrams, seq_lens)
-        probs = self.decoder_model(feats)
-
-        pred_dict = {}
-        pred_dict['seq_lens'] = seq_lens
-        pred_dict['pred_probs'] = probs
-
-        return pred_dict
-
-    def predict(self, chars, seq_lens, bigrams=None):
-        pred_dict = self.forward(chars, seq_lens, bigrams)
-        pred_probs = pred_dict['pred_probs']
-        _, pred_tags = pred_probs.max(dim=-1)
-        return {'pred_tags': pred_tags}
-
-
-from fastNLP.modules.decoder.crf import ConditionalRandomField
-from fastNLP.modules.decoder.crf import allowed_transitions
-
-class CWSBiLSTMCRF(BaseModel):
-    def __init__(self, vocab_num, embed_dim=100, bigram_vocab_num=None, bigram_embed_dim=100, num_bigram_per_char=None,
-                 hidden_size=200, bidirectional=True, embed_drop_p=0.2, num_layers=1, tag_size=4):
-        """
-        Uses the BMES tagging scheme by default.
-        :param vocab_num:
-        :param embed_dim:
-        :param bigram_vocab_num:
-        :param bigram_embed_dim:
-        :param num_bigram_per_char:
-        :param hidden_size:
-        :param bidirectional:
-        :param embed_drop_p:
-        :param num_layers:
-        :param tag_size:
-        """
-        super(CWSBiLSTMCRF, self).__init__()
-
-        self.tag_size = tag_size
-
-        self.encoder_model = CWSBiLSTMEncoder(vocab_num, embed_dim, bigram_vocab_num, bigram_embed_dim, num_bigram_per_char,
-                                              hidden_size, bidirectional, embed_drop_p, num_layers)
-
-        size_layer = [hidden_size, 200, tag_size]
-        self.decoder_model = MLP(size_layer)
-        allowed_trans = allowed_transitions({0:'b', 1:'m', 2:'e', 3:'s'}, encoding_type='bmes')
-        self.crf = ConditionalRandomField(num_tags=tag_size, include_start_end_trans=False,
-                                          allowed_transitions=allowed_trans)
-
-
-    def forward(self, chars, target, seq_lens, bigrams=None):
-        device = self.parameters().__next__().device
-        chars = chars.to(device).long()
-        if bigrams is not None:
-            bigrams = bigrams.to(device).long()
-        seq_lens = seq_lens.to(device).long()
-        masks = seq_lens_to_mask(seq_lens)
-        feats = self.encoder_model(chars, bigrams, seq_lens)
-        feats = self.decoder_model(feats)
-        losses = self.crf(feats, target, masks)
-
-        pred_dict = {}
-        pred_dict['seq_lens'] = seq_lens
-        pred_dict['loss'] = torch.mean(losses)
-
-        return pred_dict
-
-    def predict(self, chars, seq_lens, bigrams=None):
-        device = self.parameters().__next__().device
-        chars = chars.to(device).long()
-        if bigrams is not None:
-            bigrams = bigrams.to(device).long()
-        seq_lens = seq_lens.to(device).long()
-        masks = seq_lens_to_mask(seq_lens)
-        feats = self.encoder_model(chars, bigrams, seq_lens)
-        feats = self.decoder_model(feats)
-        paths, _ = self.crf.viterbi_decode(feats, masks)
-
-        return {'pred': paths, 'seq_lens':seq_lens}
-
 diff --git a/reproduction/legacy/Chinese_word_segmentation/models/cws_transformer.py b/reproduction/legacy/Chinese_word_segmentation/models/cws_transformer.py deleted file mode 100644 index ae8a5a7f..00000000 --- a/reproduction/legacy/Chinese_word_segmentation/models/cws_transformer.py +++ /dev/null @@ -1,199 +0,0 @@
-
-
-
-"""
-Use a Transformer as the encoder for Chinese word segmentation.
-
-"""
-
-from
reproduction.legacy.Chinese_word_segmentation.models import TransformerEncoder -from fastNLP.modules.decoder.crf import ConditionalRandomField,seq_len_to_byte_mask -from fastNLP.modules.decoder.crf import allowed_transitions - -class TransformerCWS(nn.Module): - def __init__(self, vocab_num, embed_dim=100, bigram_vocab_num=None, bigram_embed_dim=100, num_bigram_per_char=None, - hidden_size=200, embed_drop_p=0.3, num_layers=1, num_heads=8, tag_size=4): - super().__init__() - - self.embedding = nn.Embedding(vocab_num, embed_dim) - input_size = embed_dim - if bigram_vocab_num: - self.bigram_embedding = nn.Embedding(bigram_vocab_num, bigram_embed_dim) - input_size += num_bigram_per_char*bigram_embed_dim - - self.drop = nn.Dropout(embed_drop_p, inplace=True) - - self.fc1 = nn.Linear(input_size, hidden_size) - - # value_size = hidden_size//num_heads - # self.transformer = TransformerEncoder(num_layers, model_size=hidden_size, inner_size=hidden_size, - # key_size=value_size, - # value_size=value_size, num_head=num_heads) - self.transformer = TransformerEncoder(num_layers=num_layers, model_size=hidden_size, num_heads=num_heads, - hidden_size=hidden_size) - self.fc2 = nn.Linear(hidden_size, tag_size) - - allowed_trans = allowed_transitions({0:'b', 1:'m', 2:'e', 3:'s'}, encoding_type='bmes') - self.crf = ConditionalRandomField(num_tags=tag_size, include_start_end_trans=False, - allowed_transitions=allowed_trans) - - def forward(self, chars, target, seq_lens, bigrams=None): - masks = seq_len_to_byte_mask(seq_lens) - x = self.embedding(chars) - batch_size = x.size(0) - length = x.size(1) - if hasattr(self, 'bigram_embedding'): - bigrams = self.bigram_embedding(bigrams) # batch_size x seq_lens x per_char x embed_size - x = torch.cat([x, bigrams.view(batch_size, length, -1)], dim=-1) - self.drop(x) - x = self.fc1(x) - feats = self.transformer(x, masks) - feats = self.fc2(feats) - losses = self.crf(feats, target, masks.float()) - - pred_dict = {} - pred_dict['seq_lens'] = seq_lens - pred_dict['loss'] = torch.mean(losses) - - return pred_dict - - def predict(self, chars, seq_lens, bigrams=None): - masks = seq_len_to_byte_mask(seq_lens) - - x = self.embedding(chars) - batch_size = x.size(0) - length = x.size(1) - if hasattr(self, 'bigram_embedding'): - bigrams = self.bigram_embedding(bigrams) # batch_size x seq_lens x per_char x embed_size - x = torch.cat([x, bigrams.view(batch_size, length, -1)], dim=-1) - self.drop(x) - x = self.fc1(x) - feats = self.transformer(x, masks) - feats = self.fc2(feats) - - probs = self.crf.viterbi_decode(feats, masks, get_score=False) - - return {'pred': probs, 'seq_lens':seq_lens} - - -from reproduction.legacy.Chinese_word_segmentation.models import TransformerDilateEncoder - -class TransformerDilatedCWS(nn.Module): - def __init__(self, vocab_num, embed_dim=100, bigram_vocab_num=None, bigram_embed_dim=100, num_bigram_per_char=None, - embed_drop_p=0.3, hidden_size=200, kernel_size=3, dilate='none', - num_layers=1, num_heads=8, tag_size=4, - relative_pos_embed_dim=0): - super().__init__() - - self.embedding = nn.Embedding(vocab_num, embed_dim) - input_size = embed_dim - if bigram_vocab_num: - self.bigram_embedding = nn.Embedding(bigram_vocab_num, bigram_embed_dim) - input_size += num_bigram_per_char*bigram_embed_dim - - self.drop = nn.Dropout(embed_drop_p, inplace=True) - - self.fc1 = nn.Linear(input_size, hidden_size) - - # value_size = hidden_size//num_heads - # self.transformer = TransformerEncoder(num_layers, model_size=hidden_size, inner_size=hidden_size, - # 
key_size=value_size, - # value_size=value_size, num_head=num_heads) - self.transformer = TransformerDilateEncoder(num_layers=num_layers, model_size=hidden_size, num_heads=num_heads, - hidden_size=hidden_size, kernel_size=kernel_size, dilate=dilate, - relative_pos_embed_dim=relative_pos_embed_dim) - self.fc2 = nn.Linear(hidden_size, tag_size) - - allowed_trans = allowed_transitions({0:'b', 1:'m', 2:'e', 3:'s'}, encoding_type='bmes') - self.crf = ConditionalRandomField(num_tags=tag_size, include_start_end_trans=False, - allowed_transitions=allowed_trans) - - def forward(self, chars, target, seq_lens, bigrams=None): - masks = seq_len_to_byte_mask(seq_lens) - x = self.embedding(chars) - batch_size = x.size(0) - length = x.size(1) - if hasattr(self, 'bigram_embedding'): - bigrams = self.bigram_embedding(bigrams) # batch_size x seq_lens x per_char x embed_size - x = torch.cat([x, bigrams.view(batch_size, length, -1)], dim=-1) - self.drop(x) - x = self.fc1(x) - feats = self.transformer(x, masks) - feats = self.fc2(feats) - losses = self.crf(feats, target, masks.float()) - - pred_dict = {} - pred_dict['seq_lens'] = seq_lens - pred_dict['loss'] = torch.mean(losses) - - return pred_dict - - def predict(self, chars, seq_lens, bigrams=None): - masks = seq_len_to_byte_mask(seq_lens) - - x = self.embedding(chars) - batch_size = x.size(0) - length = x.size(1) - if hasattr(self, 'bigram_embedding'): - bigrams = self.bigram_embedding(bigrams) # batch_size x seq_lens x per_char x embed_size - x = torch.cat([x, bigrams.view(batch_size, length, -1)], dim=-1) - self.drop(x) - x = self.fc1(x) - feats = self.transformer(x, masks) - feats = self.fc2(feats) - - paths, _ = self.crf.viterbi_decode(feats, masks) - - return {'pred': paths, 'seq_lens':seq_lens} - - - -class NoamOpt(torch.optim.Optimizer): - "Optim wrapper that implements rate." 
-
-    def __init__(self, model_size, factor, warmup, optimizer):
-        super().__init__([torch.nn.Parameter(torch.ones(1))], {})
-
-        self.optimizer = optimizer
-        self._step = 0
-        self.warmup = warmup
-        self.factor = factor
-        self.model_size = model_size
-        self._rate = 0
-
-    def step(self, **kwargs):
-        "Update parameters and rate"
-        self._step += 1
-        rate = self.rate()
-        for p in self.optimizer.param_groups:
-            p['lr'] = rate
-        self._rate = rate
-        self.optimizer.step()
-
-    def rate(self, step=None):
-        "Compute the learning rate for the given step (the Noam schedule)"
-        if step is None:
-            step = self._step
-        return self.factor * \
-               (self.model_size ** (-0.5) *
-                min(step ** (-0.5), step * self.warmup ** (-1.5)))
-
-def TransformerCWS_test():
-    transformer = TransformerCWS(10, embed_dim=100, bigram_vocab_num=10, bigram_embed_dim=100, num_bigram_per_char=8,
-                                 hidden_size=200, embed_drop_p=0.3, num_layers=1, num_heads=8, tag_size=4)
-    chars = torch.randint(10, size=(4, 7)).long()
-    bigrams = torch.randint(10, size=(4, 56)).long()
-    seq_lens = torch.ones(4).long()*7
-    target = torch.randint(4, size=(4, 7))
-
-    print(transformer(chars, target, seq_lens, bigrams))
-
-    optimizer = torch.optim.Adam(transformer.parameters())
-
-    opt = NoamOpt(10, 1, 400, optimizer)
-
-if __name__ == '__main__':
-    TransformerCWS_test()
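NoamOpt.rate above implements the warmup-then-decay schedule from the Transformer paper: the rate rises linearly for the first warmup steps, peaks at step == warmup, then decays as 1/sqrt(step). A hand evaluation of the same formula, using the model_size=10, factor=1, warmup=400 values from the test above:

```python
# The schedule NoamOpt.rate() computes, evaluated by hand:
# rate(step) = factor * model_size^-0.5 * min(step^-0.5, step * warmup^-1.5)
def noam_rate(step, model_size=10, factor=1.0, warmup=400):
    return factor * model_size ** (-0.5) * min(step ** (-0.5), step * warmup ** (-1.5))

for step in (1, 100, 400, 1600):
    print(step, noam_rate(step))
# The rate grows linearly up to step 400, then shrinks as 1/sqrt(step).
```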
- - """ - def __init__(self, field_name, new_added_field_name): - super(CWSCharSegProcessor, self).__init__(field_name, new_added_field_name) - - def process(self, dataset): - assert isinstance(dataset, DataSet), "Only Dataset class is allowed, not {}.".format(type(dataset)) - def inner_proc(ins): - sentence = ins[self.field_name] - chars = self._split_sent_into_chars(sentence) - return chars - dataset.apply(func=inner_proc, new_field_name=self.new_added_field_name) - - return dataset - - def _split_sent_into_chars(self, sentence): - sp_tag_match_iter = re.finditer(_SPECIAL_TAG_PATTERN, sentence) - sp_spans = [match_span.span() for match_span in sp_tag_match_iter] - sp_span_idx = 0 - in_span_flag = False - chars = [] - num_spans = len(sp_spans) - for idx, char in enumerate(sentence): - if sp_span_idx', ''] + characters + ['', ''] - for idx in range(2, len(characters)-2): - cur_char = characters[idx] - pre_pre_char = characters[idx-2] - pre_char = characters[idx-1] - post_char = characters[idx+1] - post_post_char = characters[idx+2] - pre_pre_cur_bigram = pre_pre_char + cur_char - pre_cur_bigram = pre_char + cur_char - cur_post_bigram = cur_char + post_char - cur_post_post_bigram = cur_char + post_post_char - bigrams.extend([pre_pre_char, pre_char, post_char, post_post_char, - pre_pre_cur_bigram, pre_cur_bigram, - cur_post_bigram, cur_post_post_bigram]) - return bigrams - - -class VocabProcessor(Processor): - def __init__(self, field_name, min_freq=1, max_size=None): - - super(VocabProcessor, self).__init__(field_name, None) - self.vocab = Vocabulary(min_freq=min_freq, max_size=max_size) - - def process(self, *datasets): - for dataset in datasets: - assert isinstance(dataset, DataSet), "Only Dataset class is allowed, not {}.".format(type(dataset)) - dataset.apply(lambda ins: self.vocab.update(ins[self.field_name])) - - def get_vocab(self): - self.vocab.build_vocab() - return self.vocab - - def get_vocab_size(self): - return len(self.vocab) - - -class SegApp2OutputProcessor(Processor): - def __init__(self, chars_field_name='chars_list', tag_field_name='pred_tags', new_added_field_name='output'): - super(SegApp2OutputProcessor, self).__init__(None, None) - - self.chars_field_name = chars_field_name - self.tag_field_name = tag_field_name - - self.new_added_field_name = new_added_field_name - - def process(self, dataset): - assert isinstance(dataset, DataSet), "Only Dataset class is allowed, not {}.".format(type(dataset)) - for ins in dataset: - pred_tags = ins[self.tag_field_name] - chars = ins[self.chars_field_name] - words = [] - start_idx = 0 - for idx, tag in enumerate(pred_tags): - if tag==1: - # 当前没有考虑将原文替换回去 - words.append(''.join(chars[start_idx:idx+1])) - start_idx = idx + 1 - ins[self.new_added_field_name] = ' '.join(words) - - -class BMES2OutputProcessor(Processor): - """ - 按照BMES标注方式推测生成的tag。由于可能存在非法tag,比如"BS",所以需要用以下的表格做转换,cur_B意思是当前tag是B, - next_B意思是后一个tag是B。则cur_B=S,即将当前被predict是B的tag标为S;next_M=B, 即将后一个被predict是M的tag标为B - | | next_B | next_M | next_E | next_S | end | - |:-----:|:-------:|:--------:|:--------:|:-------:|:-------:| - | start | 合法 | next_M=B | next_E=S | 合法 | - | - | cur_B | cur_B=S | 合法 | 合法 | cur_B=S | cur_B=S | - | cur_M | cur_M=E | 合法 | 合法 | cur_M=E | cur_M=E | - | cur_E | 合法 | next_M=B | next_E=S | 合法 | 合法 | - | cur_S | 合法 | next_M=B | next_E=S | 合法 | 合法 | - 举例: - prediction为BSEMS,会被认为是SSSSS. 
- - """ - def __init__(self, chars_field_name='chars_list', tag_field_name='pred', new_added_field_name='output', - b_idx = 0, m_idx = 1, e_idx = 2, s_idx = 3): - """ - - :param chars_field_name: character所对应的field - :param tag_field_name: 预测对应的field - :param new_added_field_name: 转换后的内容所在field - :param b_idx: int, Begin标签所对应的tag idx. - :param m_idx: int, Middle标签所对应的tag idx. - :param e_idx: int, End标签所对应的tag idx. - :param s_idx: int, Single标签所对应的tag idx - """ - super(BMES2OutputProcessor, self).__init__(None, None) - - self.chars_field_name = chars_field_name - self.tag_field_name = tag_field_name - - self.new_added_field_name = new_added_field_name - - self.b_idx = b_idx - self.m_idx = m_idx - self.e_idx = e_idx - self.s_idx = s_idx - # 还原init处介绍的矩阵 - self._valida_matrix = { - -1: [(-1, -1), (1, self.b_idx), (1, self.s_idx), (-1, -1)], # magic start idx - self.b_idx:[(0, self.s_idx), (-1, -1), (-1, -1), (0, self.s_idx), (0, self.s_idx)], - self.m_idx:[(0, self.e_idx), (-1, -1), (-1, -1), (0, self.e_idx), (0, self.e_idx)], - self.e_idx:[(-1, -1), (1, self.b_idx), (1, self.s_idx), (-1, -1), (-1, -1)], - self.s_idx:[(-1, -1), (1, self.b_idx), (1, self.s_idx), (-1, -1), (-1, -1)], - } - - def _validate_tags(self, tags): - """ - 给定一个tag的List,返回合法tag - - :param tags: Tensor, shape: (seq_len, ) - :return: 返回修改为合法tag的list - """ - assert len(tags)!=0 - padded_tags = [-1, *tags, -1] - for idx in range(len(padded_tags)-1): - cur_tag = padded_tags[idx] - if cur_tag not in self._valida_matrix: - cur_tag = self.s_idx - if padded_tags[idx+1] not in self._valida_matrix: - padded_tags[idx+1] = self.s_idx - next_tag = padded_tags[idx+1] - shift_tag = self._valida_matrix[cur_tag][next_tag] - if shift_tag[0]!=-1: - padded_tags[idx+shift_tag[0]] = shift_tag[1] - - return padded_tags[1:-1] - - def process(self, dataset): - assert isinstance(dataset, DataSet), "Only Dataset class is allowed, not {}.".format(type(dataset)) - def inner_proc(ins): - pred_tags = ins[self.tag_field_name] - pred_tags = self._validate_tags(pred_tags) - chars = ins[self.chars_field_name] - words = [] - start_idx = 0 - for idx, tag in enumerate(pred_tags): - if tag==self.s_idx: - words.extend(chars[start_idx:idx+1]) - start_idx = idx + 1 - elif tag==self.e_idx: - words.append(''.join(chars[start_idx:idx+1])) - start_idx = idx + 1 - return ' '.join(words) - dataset.apply(func=inner_proc, new_field_name=self.new_added_field_name) - - -class InputTargetProcessor(Processor): - def __init__(self, input_fields, target_fields): - """ - 对DataSet操作,将input_fields中的field设置为input,target_fields的中field设置为target - - :param input_fields: List[str], 设置为input_field的field_name。如果为None,则不将任何field设置为target。 - :param target_fields: List[str], 设置为target_field的field_name。 如果为None,则不将任何field设置为target。 - """ - super(InputTargetProcessor, self).__init__(None, None) - - if input_fields is not None and not isinstance(input_fields, list): - raise TypeError("input_fields should be List[str], not {}.".format(type(input_fields))) - else: - self.input_fields = input_fields - if target_fields is not None and not isinstance(target_fields, list): - raise TypeError("target_fiels should be List[str], not{}.".format(type(target_fields))) - else: - self.target_fields = target_fields - - def process(self, dataset): - assert isinstance(dataset, DataSet), "Only Dataset class is allowed, not {}.".format(type(dataset)) - if self.input_fields is not None: - for field in self.input_fields: - dataset.set_input(field) - if self.target_fields is not None: - for field in 
diff --git a/reproduction/legacy/Chinese_word_segmentation/process/span_converter.py b/reproduction/legacy/Chinese_word_segmentation/process/span_converter.py deleted file mode 100644 index 2635df0e..00000000 --- a/reproduction/legacy/Chinese_word_segmentation/process/span_converter.py +++ /dev/null @@ -1,185 +0,0 @@
-
-import re
-
-
-class SpanConverter:
-    def __init__(self, replace_tag, pattern):
-        super(SpanConverter, self).__init__()
-
-        self.replace_tag = replace_tag
-        self.pattern = pattern
-
-    def find_certain_span_and_replace(self, sentence):
-        replaced_sentence = ''
-        prev_end = 0
-        for match in re.finditer(self.pattern, sentence):
-            start, end = match.span()
-            span = sentence[start:end]
-            replaced_sentence += sentence[prev_end:start] + \
-                                 self.span_to_special_tag(span)
-            prev_end = end
-        replaced_sentence += sentence[prev_end:]
-
-        return replaced_sentence
-
-    def span_to_special_tag(self, span):
-
-        return self.replace_tag
-
-    def find_certain_span(self, sentence):
-        spans = []
-        for match in re.finditer(self.pattern, sentence):
-            spans.append(match.span())
-        return spans
-
-
-class AlphaSpanConverter(SpanConverter):
-    def __init__(self):
-        replace_tag = ''
-        # Ideally this handles only purely alphabetic spans, but it must not match <[a-zA-Z]+> (those are special tags).
-        pattern = '[a-zA-Z]+(?=[\u4e00-\u9fff ,%.!<\\-"])'
-
-        super(AlphaSpanConverter, self).__init__(replace_tag, pattern)
-
-
-class DigitSpanConverter(SpanConverter):
-    def __init__(self):
-        replace_tag = ''
-        pattern = '\d[\d\\.]*(?=[\u4e00-\u9fff ,%.!<-])'
-
-        super(DigitSpanConverter, self).__init__(replace_tag, pattern)
-
-    def span_to_special_tag(self, span):
-        # return self.special_tag
-        if span[0] == '0' and len(span) > 2:
-            return ''
-        decimal_point_count = 0  # a span may contain more than one decimal point
-        for idx, char in enumerate(span):
-            if char == '.' or char == '﹒' or char == '·':
-                decimal_point_count += 1
-        if span[-1] == '.'
or span[-1] == '﹒' or span[ - -1] == '·': # last digit being decimal point means this is not a number - if decimal_point_count == 1: - return span - else: - return '' - if decimal_point_count == 1: - return '' - elif decimal_point_count > 1: - return '' - else: - return '' - - -class TimeConverter(SpanConverter): - def __init__(self): - replace_tag = '' - pattern = '\d+[::∶][\d::∶]+(?=[\u4e00-\u9fff ,%.!<-])' - - super().__init__(replace_tag, pattern) - - - -class MixNumAlphaConverter(SpanConverter): - def __init__(self): - replace_tag = '' - pattern = None - - super().__init__(replace_tag, pattern) - - def find_certain_span_and_replace(self, sentence): - replaced_sentence = '' - start = 0 - matching_flag = False - number_flag = False - alpha_flag = False - link_flag = False - slash_flag = False - bracket_flag = False - for idx in range(len(sentence)): - if re.match('[0-9a-zA-Z/\\(\\)\'′&\\-]', sentence[idx]): - if not matching_flag: - replaced_sentence += sentence[start:idx] - start = idx - if re.match('[0-9]', sentence[idx]): - number_flag = True - elif re.match('[\'′&\\-]', sentence[idx]): - link_flag = True - elif re.match('/', sentence[idx]): - slash_flag = True - elif re.match('[\\(\\)]', sentence[idx]): - bracket_flag = True - else: - alpha_flag = True - matching_flag = True - elif re.match('[\\.]', sentence[idx]): - pass - else: - if matching_flag: - if (number_flag and alpha_flag) or (link_flag and alpha_flag) \ - or (slash_flag and alpha_flag) or (link_flag and number_flag) \ - or (number_flag and bracket_flag) or (bracket_flag and alpha_flag): - span = sentence[start:idx] - start = idx - replaced_sentence += self.span_to_special_tag(span) - matching_flag = False - number_flag = False - alpha_flag = False - link_flag = False - slash_flag = False - bracket_flag = False - - replaced_sentence += sentence[start:] - return replaced_sentence - - def find_certain_span(self, sentence): - spans = [] - start = 0 - matching_flag = False - number_flag = False - alpha_flag = False - link_flag = False - slash_flag = False - bracket_flag = False - for idx in range(len(sentence)): - if re.match('[0-9a-zA-Z/\\(\\)\'′&\\-]', sentence[idx]): - if not matching_flag: - start = idx - if re.match('[0-9]', sentence[idx]): - number_flag = True - elif re.match('[\'′&\\-]', sentence[idx]): - link_flag = True - elif re.match('/', sentence[idx]): - slash_flag = True - elif re.match('[\\(\\)]', sentence[idx]): - bracket_flag = True - else: - alpha_flag = True - matching_flag = True - elif re.match('[\\.]', sentence[idx]): - pass - else: - if matching_flag: - if (number_flag and alpha_flag) or (link_flag and alpha_flag) \ - or (slash_flag and alpha_flag) or (link_flag and number_flag) \ - or (number_flag and bracket_flag) or (bracket_flag and alpha_flag): - spans.append((start, idx)) - start = idx - - matching_flag = False - number_flag = False - alpha_flag = False - link_flag = False - slash_flag = False - bracket_flag = False - - return spans - - - -class EmailConverter(SpanConverter): - def __init__(self): - replaced_tag = "" - pattern = '[0-9a-zA-Z]+[@][.﹒0-9a-zA-Z@]+(?=[\u4e00-\u9fff ,%.!<\\-"$])' - - super(EmailConverter, self).__init__(replaced_tag, pattern) \ No newline at end of file diff --git a/reproduction/legacy/Chinese_word_segmentation/utils.py b/reproduction/legacy/Chinese_word_segmentation/utils.py deleted file mode 100644 index 1dccb921..00000000 --- a/reproduction/legacy/Chinese_word_segmentation/utils.py +++ /dev/null @@ -1,151 +0,0 @@ - -import torch - - -def seq_lens_to_mask(seq_lens): - 
batch_size = seq_lens.size(0)
-    max_len = seq_lens.max()
-
-    indexes = torch.arange(max_len).view(1, -1).repeat(batch_size, 1).to(seq_lens.device)
-    masks = indexes.lt(seq_lens.unsqueeze(1))
-
-    return masks
-
-
-from itertools import chain
-
-def refine_ys_on_seq_len(ys, seq_lens):
-    refined_ys = []
-    for b_idx, length in enumerate(seq_lens):
-        refined_ys.append(list(ys[b_idx][:length]))
-
-    return refined_ys
-
-def flat_nested_list(nested_list):
-    return list(chain(*nested_list))
-
-def calculate_pre_rec_f1(model, batcher, type='segapp'):
-    true_ys, pred_ys = decode_iterator(model, batcher)
-
-    true_ys = flat_nested_list(true_ys)
-    pred_ys = flat_nested_list(pred_ys)
-
-    cor_num = 0
-    start = 0
-    if type=='segapp':
-        yp_wordnum = pred_ys.count(1)
-        yt_wordnum = true_ys.count(1)
-
-        if true_ys[0]==1 and pred_ys[0]==1:
-            cor_num += 1
-            start = 1
-
-        for i in range(1, len(true_ys)):
-            if true_ys[i] == 1:
-                flag = True
-                if true_ys[start-1] != pred_ys[start-1]:
-                    flag = False
-                else:
-                    for j in range(start, i + 1):
-                        if true_ys[j] != pred_ys[j]:
-                            flag = False
-                            break
-                if flag:
-                    cor_num += 1
-                start = i + 1
-    elif type=='bmes':
-        yp_wordnum = pred_ys.count(2) + pred_ys.count(3)
-        yt_wordnum = true_ys.count(2) + true_ys.count(3)
-        for i in range(len(true_ys)):
-            if true_ys[i] == 2 or true_ys[i] == 3:
-                flag = True
-                for j in range(start, i + 1):
-                    if true_ys[j] != pred_ys[j]:
-                        flag = False
-                        break
-                if flag:
-                    cor_num += 1
-                start = i + 1
-    P = cor_num / (float(yp_wordnum) + 1e-6)
-    R = cor_num / (float(yt_wordnum) + 1e-6)
-    F = 2 * P * R / (P + R + 1e-6)
-    # print(cor_num, yt_wordnum, yp_wordnum)
-    return P, R, F
-
-
-def decode_iterator(model, batcher):
-    true_ys = []
-    pred_ys = []
-    seq_lens = []
-    with torch.no_grad():
-        model.eval()
-        for batch_x, batch_y in batcher:
-            pred_dict = model.predict(**batch_x)
-            seq_len = batch_x['seq_lens'].cpu().numpy()
-
-            pred_y = pred_dict['pred_tags']
-            true_y = batch_y['tags']
-
-            pred_y = pred_y.cpu().numpy()
-            true_y = true_y.cpu().numpy()
-
-            true_ys.extend(true_y.tolist())
-            pred_ys.extend(pred_y.tolist())
-            seq_lens.extend(list(seq_len))
-        model.train()
-
-    true_ys = refine_ys_on_seq_len(true_ys, seq_lens)
-    pred_ys = refine_ys_on_seq_len(pred_ys, seq_lens)
-
-    return true_ys, pred_ys
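seq_lens_to_mask above builds a boolean mask by comparing a broadcast position index against each sequence length. The same three lines evaluated on a toy batch (illustrative values):

```python
import torch

# What seq_lens_to_mask produces: position i stays True while i < length.
seq_lens = torch.tensor([3, 1])
indexes = torch.arange(seq_lens.max()).view(1, -1).repeat(seq_lens.size(0), 1)
print(indexes.lt(seq_lens.unsqueeze(1)))
# tensor([[ True,  True,  True],
#         [ True, False, False]])
```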
- """ - - def __init__(self, class_num, gamma=2, size_average=True, reduce=False): - super(FocalLoss, self).__init__() - self.gamma = gamma - self.class_num = class_num - self.size_average = size_average - self.reduce = reduce - - def forward(self, inputs, targets): - N = inputs.size(0) - C = inputs.size(1) - P = F.softmax(inputs, dim=-1) - - class_mask = inputs.data.new(N, C).fill_(0) - class_mask.requires_grad = True - ids = targets.view(-1, 1) - class_mask = class_mask.scatter(1, ids.data, 1.) - - probs = (P * class_mask).sum(1).view(-1, 1) - - log_p = probs.log() - - batch_loss = - (torch.pow((1 - probs), self.gamma)) * log_p - if self.reduce: - if self.size_average: - loss = batch_loss.mean() - else: - loss = batch_loss.sum() - return loss - return batch_loss \ No newline at end of file diff --git a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/README.md b/reproduction/legacy/LSTM+self_attention_sentiment_analysis/README.md deleted file mode 100644 index dfb337ec..00000000 --- a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/README.md +++ /dev/null @@ -1,43 +0,0 @@ -# Prototype - -这是一个很旧版本的reproduction,待修改 - -## Word2Idx.py -A mapping model between words and indexes - -## embedding.py -embedding modules - -Contains a simple encapsulation for torch.nn.Embedding - -## encoder.py -encoder modules - -Contains a simple encapsulation for torch.nn.LSTM - -## aggregation.py -aggregation modules - -Contains a self-attention model, according to paper "A Structured Self-attentive Sentence Embedding", https://arxiv.org/abs/1703.03130 - -## predict.py -predict modules - -Contains a two layers perceptron for classification - -## example.py -An example showing how to use above modules to build a model - -Contains a model for sentiment analysis on Yelp dataset, and its training and testing procedures. See https://arxiv.org/abs/1703.03130 for more details. - -## prepare.py -A case of using Word2Idx to build Yelp datasets - -## dataloader.py -A dataloader for Yelp dataset - -It is an iterable object, returning a zero-padded batch every iteration. - - - - diff --git a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/Word2Idx.py b/reproduction/legacy/LSTM+self_attention_sentiment_analysis/Word2Idx.py deleted file mode 100644 index 2499aeae..00000000 --- a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/Word2Idx.py +++ /dev/null @@ -1,63 +0,0 @@ -import collections -import pickle - -class Word2Idx(): - """ - Build a word index according to word frequency. - - If "min_freq" is given, then only words with a frequncy not lesser than min_freq will be kept. - If "max_num" is given, then at most the most frequent $max_num words will be kept. - "words" should be a list [ w_1,w_2,...,w_i,...,w_n ] where each w_i is a string representing a word. - num is the size of the lookup table. - w2i is a lookup table assigning each word an index. - i2w is a vector which serves as an invert mapping of w2i. - Note that index 0 is token "" for padding - index 1 is token "" for unregistered words - e.g. 
i2w[w2i["word"]] == "word" - """ - def __init__(self): - self.__w2i = dict() - self.__i2w = [] - self.num = 0 - - def build(self, words, min_freq=0, max_num=None): - """build a model from words""" - counter = collections.Counter(words) - word_set = set(words) - if max_num is not None: - most_common = counter.most_common(min(len(word_set), max_num - 1)) - else: - most_common = counter.most_common() - self.__w2i = dict((w[0],i + 1) for i,w in enumerate(most_common) if w[1] >= min_freq) - self.__w2i[""] = 0 - self.__w2i[""] = 1 - self.__i2w = ["", ""] + [ w[0] for w in most_common if w[1] >= min_freq ] - self.num = len(self.__i2w) - - def w2i(self, word): - """word to index""" - if word in self.__w2i: - return self.__w2i[word] - return 0 - - def i2w(self, idx): - """index to word""" - if idx >= self.num: - raise Exception("out of range\n") - return self.__i2w[idx] - - def save(self, addr): - """save the model to a file with address "addr" """ - f = open(addr,"wb") - pickle.dump([self.__i2w, self.__w2i, self.num], f) - f.close() - - def load(self, addr): - """load a model from a file with address "addr" """ - f = open(addr,"rb") - paras = pickle.load(f) - self.__i2w, self.__w2i, self.num = paras[0], paras[1], paras[2] - f.close() - - - diff --git a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/config.cfg b/reproduction/legacy/LSTM+self_attention_sentiment_analysis/config.cfg deleted file mode 100644 index 2d31cd0d..00000000 --- a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/config.cfg +++ /dev/null @@ -1,13 +0,0 @@ -[train] -epochs = 30 -batch_size = 32 -pickle_path = "./save/" -validate = true -save_best_dev = true -model_saved_path = "./save/" -rnn_hidden_units = 300 -word_emb_dim = 300 -use_crf = true -use_cuda = false -loss_func = "cross_entropy" -num_classes = 5 \ No newline at end of file diff --git a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/dataloader.py b/reproduction/legacy/LSTM+self_attention_sentiment_analysis/dataloader.py deleted file mode 100644 index dd7fc4f8..00000000 --- a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/dataloader.py +++ /dev/null @@ -1,82 +0,0 @@ -import pickle -import random - -import torch -from torch.autograd import Variable - - -def float_wrapper(x, requires_grad=True, using_cuda=True): - """ - transform float type list to pytorch variable - """ - if using_cuda==True: - return Variable(torch.FloatTensor(x).cuda(), requires_grad=requires_grad) - else: - return Variable(torch.FloatTensor(x), requires_grad=requires_grad) - -def long_wrapper(x, requires_grad=True, using_cuda=True): - """ - transform long type list to pytorch variable - """ - if using_cuda==True: - return Variable(torch.LongTensor(x).cuda(), requires_grad=requires_grad) - else: - return Variable(torch.LongTensor(x), requires_grad=requires_grad) - -def pad(X, using_cuda): - """ - zero-pad sequnces to same length then pack them together - """ - maxlen = max([x.size(0) for x in X]) - Y = [] - for x in X: - padlen = maxlen - x.size(0) - if padlen > 0: - if using_cuda: - paddings = Variable(torch.zeros(padlen).long()).cuda() - else: - paddings = Variable(torch.zeros(padlen).long()) - x_ = torch.cat((x, paddings), 0) - Y.append(x_) - else: - Y.append(x) - return torch.stack(Y) - -class DataLoader(object): - """ - load data with form {"feature", "class"} - - Args: - fdir : data file address - batch_size : batch_size - shuffle : if True, shuffle dataset every epoch - using_cuda : if True, return tensors on GPU - """ - def __init__(self, fdir, 
-
-class DataLoader(object):
-    """
-    load data with the form {"feature", "class"}
-
-    Args:
-        fdir : data file address
-        batch_size : batch_size
-        shuffle : if True, shuffle the dataset every epoch
-        using_cuda : if True, return tensors on GPU
-    """
-    def __init__(self, fdir, batch_size, shuffle=True, using_cuda=True):
-        with open(fdir, "rb") as f:
-            self.data = pickle.load(f)
-        self.batch_size = batch_size
-        self.num = len(self.data)
-        self.count = 0
-        self.iters = int(self.num / batch_size)
-        self.shuffle = shuffle
-        self.using_cuda = using_cuda
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        if self.count == self.iters:
-            self.count = 0
-            if self.shuffle:
-                random.shuffle(self.data)
-            raise StopIteration()
-        else:
-            batch = self.data[self.count * self.batch_size : (self.count + 1) * self.batch_size]
-            self.count += 1
-            X = [long_wrapper(x["sent"], using_cuda=self.using_cuda, requires_grad=False) for x in batch]
-            X = pad(X, self.using_cuda)
-            y = long_wrapper([x["class"] for x in batch], using_cuda=self.using_cuda, requires_grad=False)
-            return {"feature" : X, "class" : y}
-
-
 diff --git a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/example.py b/reproduction/legacy/LSTM+self_attention_sentiment_analysis/example.py deleted file mode 100644 index 5270d673..00000000 --- a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/example.py +++ /dev/null @@ -1,154 +0,0 @@
-import time
-
-import aggregation
-import dataloader
-import embedding
-import encoder
-import predict
-import torch
-import torch.nn as nn
-import torch.optim as optim
-
-WORD_NUM = 357361
-WORD_SIZE = 100
-HIDDEN_SIZE = 300
-D_A = 350
-R = 10
-MLP_HIDDEN = 2000
-CLASSES_NUM = 5
-
-from fastNLP.models.base_model import BaseModel
-
-
-class MyNet(BaseModel):
-    def __init__(self):
-        super(MyNet, self).__init__()
-        self.embedding = embedding.Lookuptable(WORD_NUM, WORD_SIZE)
-        self.encoder = encoder.Lstm(WORD_SIZE, HIDDEN_SIZE, 1, 0.5, True)
-        self.aggregation = aggregation.Selfattention(2 * HIDDEN_SIZE, D_A, R)
-        self.predict = predict.MLP(R * HIDDEN_SIZE * 2, MLP_HIDDEN, CLASSES_NUM)
-        self.penalty = None
-
-    def encode(self, x):
-        # was `self.encode(...)`, which recurses forever; route through the encoder module
-        return self.encoder(self.embedding(x))
-
-    def aggregate(self, x):
-        # was `self.aggregate(...)`, another unbounded recursion; use the aggregation module
-        x, self.penalty = self.aggregation(x)
-        return x
-
-    def decode(self, x):
-        return [self.predict(x), self.penalty]
-
-
-class Net(nn.Module):
-    """
-    A model for sentiment analysis using an LSTM and self-attention
-    """
-    def __init__(self):
-        super(Net, self).__init__()
-        self.embedding = embedding.Lookuptable(WORD_NUM, WORD_SIZE)
-        self.encoder = encoder.Lstm(WORD_SIZE, HIDDEN_SIZE, 1, 0.5, True)
-        self.aggregation = aggregation.Selfattention(2 * HIDDEN_SIZE, D_A, R)
-        self.predict = predict.MLP(R * HIDDEN_SIZE * 2, MLP_HIDDEN, CLASSES_NUM)
-
-    def forward(self, x):
-        x = self.embedding(x)
-        x = self.encoder(x)
-        x, penalty = self.aggregation(x)
-        x = self.predict(x)
-        return x, penalty
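The train function below combines the classification loss with the self-attention penalization term, scaled by coef and averaged over the batch. A standalone sketch of just that loss arithmetic (random tensors stand in for model outputs):

```python
import torch
import torch.nn as nn

# cross-entropy + coef * mean(penalty), as in the training loop below
batch_size, coef = 4, 1.0
logits = torch.randn(batch_size, 5, requires_grad=True)   # CLASSES_NUM = 5
y = torch.randint(5, size=(batch_size,))
penalty = torch.rand(batch_size)        # stands in for the ||AA^T - I|| term
loss = nn.CrossEntropyLoss()(logits, y) + torch.sum(penalty) / batch_size * coef
loss.backward()
print(loss.item())
```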
-    if using_cuda:
-        net = Net().cuda()
-    else:
-        net = Net()
-
-    if model_dict is not None:
-        net.load_state_dict(torch.load(model_dict))
-
-    optimizer = optim.SGD(net.parameters(), lr=learning_rate, momentum=momentum)
-    criterion = nn.CrossEntropyLoss()
-    dataset = dataloader.DataLoader("train_set.pkl", batch_size, using_cuda=using_cuda)
-
-    # statistics
-    loss_count = 0
-    prepare_time = 0
-    run_time = 0
-    count = 0
-
-    for epoch in range(epochs):
-        print("epoch: %d" % (epoch))
-        for i, batch in enumerate(dataset):
-            t1 = time.time()
-            X = batch["feature"]
-            y = batch["class"]
-
-            t2 = time.time()
-            y_pred, y_penl = net(X)
-            loss = criterion(y_pred, y) + torch.sum(y_penl) / batch_size * coef
-            optimizer.zero_grad()
-            loss.backward()
-            nn.utils.clip_grad_norm(net.parameters(), 0.5)
-            optimizer.step()
-            t3 = time.time()
-
-            loss_count += torch.sum(y_penl).data[0]
-            prepare_time += (t2 - t1)
-            run_time += (t3 - t2)
-            p, idx = torch.max(y_pred.data, dim=1)
-            count += torch.sum(torch.eq(idx.cpu(), y.data.cpu()))
-
-            if (i + 1) % interval == 0:
-                print("epoch: %d, iters: %d" % (epoch, i + 1))
-                print("loss count:" + str(loss_count / (interval * batch_size)))
-                print("accuracy:" + str(count / (interval * batch_size)))
-                print("penalty:" + str(torch.sum(y_penl).data[0] / batch_size))
-                print("prepare time:" + str(prepare_time))
-                print("run time:" + str(run_time))
-                prepare_time = 0
-                run_time = 0
-                loss_count = 0
-                count = 0
-    string = time.strftime("%Y-%m-%d-%H:%M:%S", time.localtime())
-    torch.save(net.state_dict(), "model_dict_%s.dict" % (string))
-
-def test(model_dict, using_cuda=True):
-    if using_cuda:
-        net = Net().cuda()
-    else:
-        net = Net()
-    net.load_state_dict(torch.load(model_dict))
-    dataset = dataloader.DataLoader("test_set.pkl", batch_size=1, using_cuda=using_cuda)
-    count = 0
-    for i, batch in enumerate(dataset):
-        X = batch["feature"]
-        y = batch["class"]
-        y_pred, _ = net(X)
-        p, idx = torch.max(y_pred.data, dim=1)
-        count += torch.sum(torch.eq(idx.cpu(), y.data.cpu()))
-    print("accuracy: %f" % (count / dataset.num))
-
-
-if __name__ == "__main__":
-    train(using_cuda=torch.cuda.is_available())
-
diff --git a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/main.py b/reproduction/legacy/LSTM+self_attention_sentiment_analysis/main.py
deleted file mode 100644
index 05077530..00000000
--- a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/main.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# This is a very old version of the code.
-
-"""
-import torch.nn.functional as F
-
-from fastNLP.core.trainer import Trainer
-from fastNLP.core.utils import ClassPreprocess as Preprocess
-from fastNLP.io.config_io import ConfigLoader
-from fastNLP.io.config_io import ConfigSection
-from fastNLP.io.dataset_loader import DummyClassificationReader as Dataset_loader
-from fastNLP.models.base_model import BaseModel
-from fastNLP.modules.aggregator.self_attention import SelfAttention
-from fastNLP.modules.decoder.mlp import MLP
-from fastNLP.embeddings.embedding import Embedding as Embedding
-from fastNLP.modules.encoder.lstm import LSTM
-
-train_data_path = 'small_train_data.txt'
-dev_data_path = 'small_dev_data.txt'
-# emb_path = 'glove.txt'
-
-lstm_hidden_size = 300
-embeding_size = 300
-attention_unit = 350
-attention_hops = 10
-class_num = 5
-nfc = 3000
-### data load ###
-train_dataset = Dataset_loader(train_data_path)
-train_data = train_dataset.load()
-
-dev_args = Dataset_loader(dev_data_path)
-dev_data 
= dev_args.load() - -###### preprocess #### -preprocess = Preprocess() -word2index, label2index = preprocess.build_dict(train_data) -train_data, dev_data = preprocess.run(train_data, dev_data) - - - -# emb = EmbedLoader(emb_path) -# embedding = emb.load_embedding(emb_dim= embeding_size , emb_file= emb_path ,word_dict= word2index) -### construct vocab ### - -class SELF_ATTENTION_YELP_CLASSIFICATION(BaseModel): - def __init__(self, args=None): - super(SELF_ATTENTION_YELP_CLASSIFICATION,self).__init__() - self.embedding = Embedding((len(word2index) ,embeding_size)) - self.lstm = LSTM(input_size=embeding_size, hidden_size=lstm_hidden_size, bidirectional=True) - self.attention = SelfAttention(lstm_hidden_size * 2 ,dim =attention_unit ,num_vec=attention_hops) - self.mlp = MLP(size_layer=[lstm_hidden_size * 2*attention_hops ,nfc ,class_num ]) - def forward(self,x): - x_emb = self.embedding(x) - output = self.lstm(x_emb) - after_attention, penalty = self.attention(output,x) - after_attention =after_attention.view(after_attention.size(0),-1) - output = self.mlp(after_attention) - return output - - def loss(self, predict, ground_truth): - print("predict:%s; g:%s" % (str(predict.size()), str(ground_truth.size()))) - print(ground_truth) - return F.cross_entropy(predict, ground_truth) - -train_args = ConfigSection() -ConfigLoader("good path").load_config('config.cfg',{"train": train_args}) -# train_args['vocab'] = len(word2index) - - -trainer = Trainer(**train_args.data) - -# for k in train_args.__dict__.keys(): -# print(k, train_args[k]) -model = SELF_ATTENTION_YELP_CLASSIFICATION(train_args) -trainer.train() -""" diff --git a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/predict.py b/reproduction/legacy/LSTM+self_attention_sentiment_analysis/predict.py deleted file mode 100644 index 31affeb7..00000000 --- a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/predict.py +++ /dev/null @@ -1,24 +0,0 @@ -import torch.nn as nn -import torch.nn.functional as F - -class MLP(nn.Module): - """ - A two layers perceptron for classification. 
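-
-    Computes out = L2(ReLU(L1(x))). The logits are left unnormalized, so this
-    module is meant to be paired with a loss that applies (log-)softmax
-    internally, such as nn.CrossEntropyLoss.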
- - Output : Unnormalized possibility distribution - Args: - input_size : the size of input - hidden_size : the size of hidden layer - output_size : the size of output - """ - def __init__(self, input_size, hidden_size, output_size): - super(MLP,self).__init__() - self.L1 = nn.Linear(input_size, hidden_size) - self.L2 = nn.Linear(hidden_size, output_size) - - def forward(self, x): - out = self.L2(F.relu(self.L1(x))) - return out - -if __name__ == "__main__": - MLP(20, 30, 20) \ No newline at end of file diff --git a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/prepare.py b/reproduction/legacy/LSTM+self_attention_sentiment_analysis/prepare.py deleted file mode 100644 index b8f8d7b8..00000000 --- a/reproduction/legacy/LSTM+self_attention_sentiment_analysis/prepare.py +++ /dev/null @@ -1,52 +0,0 @@ -import pickle - -import Word2Idx - - -def get_sets(m, n): - """ - get a train set containing m samples and a test set containing n samples - """ - samples = pickle.load(open("tuples.pkl","rb")) - if m+n > len(samples): - print("asking for too many tuples\n") - return - train_samples = samples[ : m] - test_samples = samples[m: m+n] - return train_samples, test_samples - -def build_wordidx(): - """ - build wordidx using word2idx - """ - train, test = get_sets(500000, 2000) - words = [] - for x in train: - words += x[0] - wordidx = Word2Idx.Word2Idx() - wordidx.build(words) - print(wordidx.num) - print(wordidx.i2w(0)) - wordidx.save("wordidx.pkl") - -def build_sets(): - """ - build train set and test set, transform word to index - """ - train, test = get_sets(500000, 2000) - wordidx = Word2Idx.Word2Idx() - wordidx.load("wordidx.pkl") - train_set = [] - for x in train: - sent = [wordidx.w2i(w) for w in x[0]] - train_set.append({"sent" : sent, "class" : x[1]}) - test_set = [] - for x in test: - sent = [wordidx.w2i(w) for w in x[0]] - test_set.append({"sent" : sent, "class" : x[1]}) - pickle.dump(train_set, open("train_set.pkl", "wb")) - pickle.dump(test_set, open("test_set.pkl", "wb")) - -if __name__ == "__main__": - build_wordidx() - build_sets() diff --git a/reproduction/legacy/POS_tagging/pos_processor.py b/reproduction/legacy/POS_tagging/pos_processor.py deleted file mode 100644 index 7a1b8e01..00000000 --- a/reproduction/legacy/POS_tagging/pos_processor.py +++ /dev/null @@ -1,133 +0,0 @@ - -from collections import Counter - -from fastNLP.api.processor import Processor -from fastNLP.core.dataset import DataSet - - -class CombineWordAndPosProcessor(Processor): - def __init__(self, word_field_name, pos_field_name): - super(CombineWordAndPosProcessor, self).__init__(None, None) - - self.word_field_name = word_field_name - self.pos_field_name = pos_field_name - - def process(self, dataset): - assert isinstance(dataset, DataSet), "Only Dataset class is allowed, not {}.".format(type(dataset)) - - for ins in dataset: - chars = ins[self.word_field_name] - bmes_pos = ins[self.pos_field_name] - word_list = [] - pos_list = [] - pos_stack_cnt = Counter() - char_stack = [] - for char, p in zip(chars, bmes_pos): - parts = p.split('-') - pre = parts[0] - post = parts[1] - if pre.lower() == 's': - if len(pos_stack_cnt) != 0: - pos = pos_stack_cnt.most_common(1)[0][0] - pos_list.append(pos) - word_list.append(''.join(char_stack)) - pos_list.append(post) - word_list.append(char) - char_stack.clear() - pos_stack_cnt.clear() - elif pre.lower() == 'e': - pos_stack_cnt.update([post]) - char_stack.append(char) - pos = pos_stack_cnt.most_common(1)[0][0] - pos_list.append(pos) - 
word_list.append(''.join(char_stack)) - char_stack.clear() - pos_stack_cnt.clear() - elif pre.lower() == 'b': - if len(pos_stack_cnt) != 0: - pos = pos_stack_cnt.most_common(1)[0][0] - pos_list.append(pos) - word_list.append(''.join(char_stack)) - char_stack.clear() - pos_stack_cnt.clear() - char_stack.append(char) - pos_stack_cnt.update([post]) - else: - char_stack.append(char) - pos_stack_cnt.update([post]) - - ins['word_list'] = word_list - ins['pos_list'] = pos_list - - return dataset - - -class PosOutputStrProcessor(Processor): - def __init__(self, word_field_name, pos_field_name): - super(PosOutputStrProcessor, self).__init__(None, None) - - self.word_field_name = word_field_name - self.pos_field_name = pos_field_name - self.sep = '_' - - def process(self, dataset): - assert isinstance(dataset, DataSet), "Only Dataset class is allowed, not {}.".format(type(dataset)) - - for ins in dataset: - word_list = ins[self.word_field_name] - pos_list = ins[self.pos_field_name] - - word_pos_list = [] - for word, pos in zip(word_list, pos_list): - word_pos_list.append(word + self.sep + pos) - #TODO 应该可以定制 - ins['word_pos_output'] = ' '.join(word_pos_list) - - return dataset - - -if __name__ == '__main__': - chars = ['迈', '向', '充', '满', '希', '望', '的', '新', '世', '纪', '—', '—', '一', '九', '九', '八', '年', '新', '年', '讲', '话', '(', '附', '图', '片', '1', '张', ')'] - bmes_pos = ['B-v', 'E-v', 'B-v', 'E-v', 'B-n', 'E-n', 'S-u', 'S-a', 'B-n', 'E-n', 'B-w', 'E-w', 'B-t', 'M-t', 'M-t', 'M-t', 'E-t', 'B-t', 'E-t', 'B-n', 'E-n', 'S-w', 'S-v', 'B-n', 'E-n', 'S-m', 'S-q', 'S-w'] - - - word_list = [] - pos_list = [] - pos_stack_cnt = Counter() - char_stack = [] - for char, p in zip(''.join(chars), bmes_pos): - parts = p.split('-') - pre = parts[0] - post = parts[1] - if pre.lower() == 's': - if len(pos_stack_cnt) != 0: - pos = pos_stack_cnt.most_common(1)[0][0] - pos_list.append(pos) - word_list.append(''.join(char_stack)) - pos_list.append(post) - word_list.append(char) - char_stack.clear() - pos_stack_cnt.clear() - elif pre.lower() == 'e': - pos_stack_cnt.update([post]) - char_stack.append(char) - pos = pos_stack_cnt.most_common(1)[0][0] - pos_list.append(pos) - word_list.append(''.join(char_stack)) - char_stack.clear() - pos_stack_cnt.clear() - elif pre.lower() == 'b': - if len(pos_stack_cnt) != 0: - pos = pos_stack_cnt.most_common(1)[0][0] - pos_list.append(pos) - word_list.append(''.join(char_stack)) - char_stack.clear() - pos_stack_cnt.clear() - char_stack.append(char) - pos_stack_cnt.update([post]) - else: - char_stack.append(char) - pos_stack_cnt.update([post]) - - print(word_list) - print(pos_list) diff --git a/reproduction/legacy/POS_tagging/pos_reader.py b/reproduction/legacy/POS_tagging/pos_reader.py deleted file mode 100644 index 4ff58f4b..00000000 --- a/reproduction/legacy/POS_tagging/pos_reader.py +++ /dev/null @@ -1,29 +0,0 @@ -from fastNLP.io.dataset_loader import ZhConllPOSReader - - -def cut_long_sentence(sent, max_sample_length=200): - sent_no_space = sent.replace(' ', '') - cutted_sentence = [] - if len(sent_no_space) > max_sample_length: - parts = sent.strip().split() - new_line = '' - length = 0 - for part in parts: - length += len(part) - new_line += part + ' ' - if length > max_sample_length: - new_line = new_line[:-1] - cutted_sentence.append(new_line) - length = 0 - new_line = '' - if new_line != '': - cutted_sentence.append(new_line[:-1]) - else: - cutted_sentence.append(sent) - return cutted_sentence - - -if __name__ == '__main__': - reader = ZhConllPOSReader() - d = 
reader.load('/home/hyan/train.conllx') - print(d) \ No newline at end of file diff --git a/reproduction/legacy/POS_tagging/pos_tag.cfg b/reproduction/legacy/POS_tagging/pos_tag.cfg deleted file mode 100644 index f8224234..00000000 --- a/reproduction/legacy/POS_tagging/pos_tag.cfg +++ /dev/null @@ -1,39 +0,0 @@ -[train] -epochs = 6 -batch_size = 32 -pickle_path = "./save/" -validate = true -save_best_dev = true -model_saved_path = "./save/" -valid_step = 250 -eval_sort_key = 'accuracy' - -[model] -rnn_hidden_units = 300 -word_emb_dim = 300 -dropout = 0.5 -use_crf = true -print_every_step = 10 - -[test] -save_output = true -validate_in_training = true -save_dev_input = false -save_loss = true -batch_size = 640 -pickle_path = "./save/" -use_crf = true -use_cuda = true - - -[POS_test] -save_output = true -validate_in_training = true -save_dev_input = false -save_loss = true -batch_size = 640 -pickle_path = "./save/" -use_crf = true -use_cuda = true -rnn_hidden_units = 100 -word_emb_dim = 100 \ No newline at end of file diff --git a/reproduction/legacy/POS_tagging/train_pos_tag.py b/reproduction/legacy/POS_tagging/train_pos_tag.py deleted file mode 100644 index a71531a4..00000000 --- a/reproduction/legacy/POS_tagging/train_pos_tag.py +++ /dev/null @@ -1,163 +0,0 @@ -import argparse -import os -import pickle -import sys - -import torch - -# in order to run fastNLP without installation -sys.path.append(os.path.join(os.path.dirname(__file__), '../..')) - -from fastNLP.api.pipeline import Pipeline -from fastNLP.api.processor import SeqLenProcessor, VocabIndexerProcessor, SetInputProcessor, IndexerProcessor -from fastNLP.core.metrics import SpanFPreRecMetric -from fastNLP.core.trainer import Trainer -from fastNLP.io.config_io import ConfigLoader, ConfigSection -from fastNLP.models.sequence_labeling import AdvSeqLabel -from fastNLP.io.dataset_loader import ConllxDataLoader -from fastNLP.api.processor import ModelProcessor, Index2WordProcessor - - -cfgfile = './pos_tag.cfg' -pickle_path = "save" - - -def load_tencent_embed(embed_path, word2id): - hit = 0 - with open(embed_path, "rb") as f: - embed_dict = pickle.load(f) - embedding_tensor = torch.randn(len(word2id), 200) - for key in word2id: - if key in embed_dict: - embedding_tensor[word2id[key]] = torch.Tensor(embed_dict[key]) - hit += 1 - print("vocab_size={} hit={} hit/vocab_size={}".format(len(word2id), hit, hit / len(word2id))) - return embedding_tensor - - -def train(train_data_path, dev_data_path, checkpoint=None, save=None): - # load config - train_param = ConfigSection() - model_param = ConfigSection() - ConfigLoader().load_config(cfgfile, {"train": train_param, "model": model_param}) - print("config loaded") - - # Data Loader - print("loading training set...") - dataset = ConllxDataLoader().load(train_data_path, return_dataset=True) - print("loading dev set...") - dev_data = ConllxDataLoader().load(dev_data_path, return_dataset=True) - print(dataset) - print("================= dataset ready =====================") - - dataset.rename_field("tag", "truth") - dev_data.rename_field("tag", "truth") - - vocab_proc = VocabIndexerProcessor("words", new_added_filed_name="word_seq") - tag_proc = VocabIndexerProcessor("truth", is_input=True) - seq_len_proc = SeqLenProcessor(field_name="word_seq", new_added_field_name="word_seq_origin_len", is_input=True) - set_input_proc = SetInputProcessor("word_seq", "word_seq_origin_len") - - vocab_proc(dataset) - tag_proc(dataset) - seq_len_proc(dataset) - - # index dev set - word_vocab, tag_vocab = 
vocab_proc.vocab, tag_proc.vocab - dev_data.apply(lambda ins: [word_vocab.to_index(w) for w in ins["words"]], new_field_name="word_seq") - dev_data.apply(lambda ins: [tag_vocab.to_index(w) for w in ins["truth"]], new_field_name="truth") - dev_data.apply(lambda ins: len(ins["word_seq"]), new_field_name="word_seq_origin_len") - - # set input & target - dataset.set_input("word_seq", "word_seq_origin_len", "truth") - dev_data.set_input("word_seq", "word_seq_origin_len", "truth") - dataset.set_target("truth", "word_seq_origin_len") - dev_data.set_target("truth", "word_seq_origin_len") - - # dataset.set_is_target(tag_ids=True) - model_param["vocab_size"] = vocab_proc.get_vocab_size() - model_param["num_classes"] = tag_proc.get_vocab_size() - print("vocab_size={} num_classes={}".format(model_param["vocab_size"], model_param["num_classes"])) - - # define a model - if checkpoint is None: - # pre_trained = load_tencent_embed("/home/zyfeng/data/char_tencent_embedding.pkl", vocab_proc.vocab.word2idx) - pre_trained = None - model = AdvSeqLabel(model_param, id2words=None, emb=pre_trained) - print(model) - else: - model = torch.load(checkpoint) - - # call trainer to train - trainer = Trainer(dataset, model, loss=None, n_epochs=20, print_every=10, dev_data=dev_data, - metrics=SpanFPreRecMetric(tag_proc.vocab, pred="predict", - target="truth", - seq_lens="word_seq_origin_len"), metric_key="f", save_path=save, - use_tqdm=True) - trainer.train(load_best_model=True) - - # save model & pipeline - model_proc = ModelProcessor(model, seq_len_field_name="word_seq_origin_len") - id2tag = Index2WordProcessor(tag_proc.vocab, "predict", "tag") - - pp = Pipeline([vocab_proc, seq_len_proc, set_input_proc, model_proc, id2tag]) - save_dict = {"pipeline": pp, "model": model, "tag_vocab": tag_proc.vocab} - torch.save(save_dict, os.path.join(save, "model_pp.pkl")) - print("pipeline saved") - - -def run_test(test_path): - test_data = ConllxDataLoader().load(test_path, return_dataset=True) - - with open("model_pp_0117.pkl", "rb") as f: - save_dict = torch.load(f) - tag_vocab = save_dict["tag_vocab"] - pipeline = save_dict["pipeline"] - index_tag = IndexerProcessor(vocab=tag_vocab, field_name="tag", new_added_field_name="truth", is_input=False) - pipeline.pipeline = [index_tag] + pipeline.pipeline - - pipeline(test_data) - test_data.set_target("truth") - prediction = test_data.field_arrays["predict"].content - truth = test_data.field_arrays["truth"].content - seq_len = test_data.field_arrays["word_seq_origin_len"].content - - # padding by hand - max_length = max([len(seq) for seq in prediction]) - for idx in range(len(prediction)): - prediction[idx] = list(prediction[idx]) + ([0] * (max_length - len(prediction[idx]))) - truth[idx] = list(truth[idx]) + ([0] * (max_length - len(truth[idx]))) - evaluator = SpanFPreRecMetric(tag_vocab=tag_vocab, pred="predict", target="truth", - seq_lens="word_seq_origin_len") - evaluator({"predict": torch.Tensor(prediction), "word_seq_origin_len": torch.Tensor(seq_len)}, - {"truth": torch.Tensor(truth)}) - test_result = evaluator.get_metric() - f1 = round(test_result['f'] * 100, 2) - pre = round(test_result['pre'] * 100, 2) - rec = round(test_result['rec'] * 100, 2) - - return {"F1": f1, "precision": pre, "recall": rec} - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument("--train", type=str, help="training conll file", default="/home/zyfeng/data/sample.conllx") - parser.add_argument("--dev", type=str, help="dev conll file", 
default="/home/zyfeng/data/sample.conllx")
-    parser.add_argument("--test", type=str, help="test conll file", default=None)
-    parser.add_argument("--save", type=str, help="path to save", default=None)
-
-    parser.add_argument("-c", "--restart", action="store_true", help="whether to continue training")
-    parser.add_argument("-cp", "--checkpoint", type=str, help="checkpoint of the trained model")
-    args = parser.parse_args()
-
-    if args.test is not None:
-        print(run_test(args.test))
-    else:
-        if args.restart is True:
-            # continue training: python train_pos_tag.py -c -cp ./save/best_model.pkl
-            if args.checkpoint is None:
-                raise RuntimeError("Please provide the checkpoint. -cp ")
-            train(args.train, args.dev, args.checkpoint, save=args.save)
-        else:
-            # train from scratch: python train_pos_tag.py
-            train(args.train, args.dev, save=args.save)
diff --git a/reproduction/legacy/POS_tagging/utils.py b/reproduction/legacy/POS_tagging/utils.py
deleted file mode 100644
index bf10bf47..00000000
--- a/reproduction/legacy/POS_tagging/utils.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import pickle
-
-
-def load_embed(embed_path):
-    embed_dict = {}
-    with open(embed_path, "r", encoding="utf-8") as f:
-        for line in f:
-            tokens = line.split(" ")
-            if len(tokens) <= 5:
-                continue
-            key = tokens[0]
-            if len(key) == 1:
-                value = [float(x) for x in tokens[1:]]
-                embed_dict[key] = value
-    return embed_dict
-
-
-if __name__ == "__main__":
-    embed_dict = load_embed("/home/zyfeng/data/small.txt")
-
-    print(embed_dict.keys())
-
-    with open("./char_tencent_embedding.pkl", "wb") as f:
-        pickle.dump(embed_dict, f)
-    print("finished")
diff --git a/reproduction/matching/README.md b/reproduction/matching/README.md
deleted file mode 100644
index 52002f3b..00000000
--- a/reproduction/matching/README.md
+++ /dev/null
@@ -1,100 +0,0 @@
-# Reproduction of matching-task models
-Several well-known matching models are reproduced here with fastNLP, aiming to match the performance reported in the original papers. The evaluation metric for all of these tasks is accuracy (%). A minimal usage sketch follows the model list below.
-
-The reproduced models are (ordered by paper publication date):
-- CNTN: [model code](model/cntn.py); [training code](matching_cntn.py).
-Paper link: [Convolutional Neural Tensor Network Architecture for Community-based Question Answering](https://www.aaai.org/ocs/index.php/IJCAI/IJCAI15/paper/view/11401/10844).
-- ESIM: [model code](model/esim.py); [training code](matching_esim.py).
-Paper link: [Enhanced LSTM for Natural Language Inference](https://arxiv.org/pdf/1609.06038.pdf).
-- DIIN: model code (still in progress); training code (still in progress).
-Paper link: [Natural Language Inference over Interaction Space](https://arxiv.org/pdf/1709.04348.pdf).
-- MwAN: [model code](model/mwan.py); [training code](matching_mwan.py).
-Paper link: [Multiway Attention Networks for Modeling Sentence Pairs](https://www.ijcai.org/proceedings/2018/0613.pdf).
-- BERT: [model code](model/bert.py); [training code](matching_bert.py).
-Paper link: [BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding](https://arxiv.org/pdf/1810.04805.pdf).
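-
-For orientation, here is a minimal sketch of how these reproduction scripts drive fastNLP, condensed from `matching_esim.py` later in this diff; the epoch count and batch size here are illustrative, not the tuned values behind the reported numbers.
-
-```python
-import torch
-
-from fastNLP.core import Trainer, Tester, AccuracyMetric, Const
-from fastNLP.core.losses import CrossEntropyLoss
-from fastNLP.embeddings import StaticEmbedding
-from fastNLP.io.pipe.matching import SNLIPipe
-from fastNLP.models.snli import ESIM
-
-# load and preprocess SNLI, then build GloVe embeddings over its vocabulary
-data_bundle = SNLIPipe(lower=True, tokenizer='spacy').process_from_file()
-embedding = StaticEmbedding(data_bundle.vocabs[Const.INPUTS(0)],
-                            model_dir_or_name='en-glove-840b-300d', requires_grad=True)
-model = ESIM(embedding, num_labels=len(data_bundle.vocabs[Const.TARGET]))
-
-trainer = Trainer(train_data=data_bundle.datasets['train'], model=model,
-                  loss=CrossEntropyLoss(), metrics=AccuracyMetric(), metric_key='acc',
-                  dev_data=data_bundle.datasets['dev'], n_epochs=30, batch_size=128,
-                  device=[i for i in range(torch.cuda.device_count())])
-trainer.train(load_best_model=True)
-
-# evaluate the best checkpoint on the test split
-Tester(data=data_bundle.datasets['test'], model=model, metrics=AccuracyMetric()).test()
-```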
-
-# Summary of datasets and reproduced results
-
-Results reproduced with fastNLP vs results reported in the papers; the number before "vs" is the fastNLP reproduction.
-
-'\-' means we have not reproduced it yet, or the original paper did not report it.
-
-model name | SNLI | MNLI | RTE | QNLI | Quora
-:---: | :---: | :---: | :---: | :---: | :---:
-CNTN [code](model/cntn.py); [paper](https://www.aaai.org/ocs/index.php/IJCAI/IJCAI15/paper/view/11401/10844) | 77.79 vs - | 63.29/63.16(dev) vs - | 57.04(dev) vs - | 62.38(dev) vs - | - |
-ESIM [code](model/esim.py); [paper](https://arxiv.org/pdf/1609.06038.pdf) | 88.13(glove) vs 88.0(glove)/88.7(elmo) | 77.78/76.49 vs 72.4/72.1* | 59.21(dev) vs - | 76.97(dev) vs - | - |
-DIIN [](); [paper](https://arxiv.org/pdf/1709.04348.pdf) | - vs 88.0 | - vs 78.8/77.8 | - | - | - vs 89.06 |
-MwAN [code](model/mwan.py); [paper](https://www.ijcai.org/proceedings/2018/0613.pdf) | 87.9 vs 88.3 | 77.3/76.7(dev) vs 78.5/77.7 | - | 74.6(dev) vs - | 85.6 vs 89.12 |
-BERT (BASE version) [code](model/bert.py); [paper](https://arxiv.org/pdf/1810.04805.pdf) | 90.6 vs - | - vs 84.6/83.4 | 67.87(dev) vs 66.4 | 90.97(dev) vs 90.5 | - |
-
-*72.4/72.1 is the ESIM result reproduced by the MNLI authors; the original ESIM paper did not report results on MNLI.
-
-# Per-dataset results and comparison with other major models
-## SNLI
-[Link to SNLI leaderboard](https://nlp.stanford.edu/projects/snli/)
-
-Performance on Test set:
-
-model name | ESIM | DIIN | MwAN | [GPT1.0](https://s3-us-west-2.amazonaws.com/openai-assets/research-covers/language-unsupervised/language_understanding_paper.pdf) | [BERT-Large+SRL](https://arxiv.org/pdf/1809.02794.pdf) | [MT-DNN](https://arxiv.org/pdf/1901.11504.pdf)
-:---: | :---: | :---: | :---: | :---: | :---: | :---:
-__performance__ | 88.0 | 88.0 | 88.3 | 89.9 | 91.3 | 91.6 |
-
-### Results reproduced with fastNLP
-Performance on Test set:
-
-model name | CNTN | ESIM | DIIN | MwAN | BERT-Base | BERT-Large
-:---: | :---: | :---: | :---: | :---: | :---: | :---:
-__performance__ | 77.79 | 88.13 | - | 87.9 | 90.6 | 91.16
-
-## MNLI
-[Link to MNLI main page](https://www.nyu.edu/projects/bowman/multinli/)
-
-Performance on Test set (matched/mismatched):
-
-model name | ESIM | DIIN | MwAN | GPT1.0 | BERT-Base | MT-DNN
-:---: | :---: | :---: | :---: | :---: | :---: | :---:
-__performance__ | 72.4/72.1 | 78.8/77.8 | 78.5/77.7 | 82.1/81.4 | 84.6/83.4 | 87.9/87.4 |
-
-### Results reproduced with fastNLP
-Performance on Test set (matched/mismatched):
-
-model name | CNTN | ESIM | DIIN | MwAN | BERT-Base
-:---: | :---: | :---: | :---: | :---: | :---:
-__performance__ | 63.29/63.16(dev) | 77.78/76.49 | - | 77.3/76.7(dev) | - |
-
-
-## RTE
-
-Still in progress.
-
-## QNLI
-
-### From GLUE baselines
-[Link to GLUE leaderboard](https://gluebenchmark.com/leaderboard)
-
-Performance on Test set:
-#### LSTM-based
-model name | BiLSTM | BiLSTM + Attn | BiLSTM + ELMo | BiLSTM + Attn + ELMo
-:---: | :---: | :---: | :---: | :---:
-__performance__ | 74.6 | 74.3 | 75.5 | 79.8 |
-
-*These LSTM-based baselines were implemented and evaluated by the QNLI authors.
-
-#### Transformer-based
-model name | GPT1.0 | BERT-Base | BERT-Large | MT-DNN
-:---: | :---: | :---: | :---: | :---:
-__performance__ | 87.4 | 90.5 | 92.7 | 96.0 |
-
-### Results reproduced with fastNLP
-Performance on __Dev__ set:
-
-model name | CNTN | ESIM | DIIN | MwAN | BERT
-:---: | :---: | :---: | :---: | :---:
-__performance__ | 62.38 | 76.97 | - | 74.6 | -
-
-## Quora
-
-Still in progress.
-
diff --git a/reproduction/matching/matching_bert.py b/reproduction/matching/matching_bert.py
deleted file mode 100644
index 05377dff..00000000
--- a/reproduction/matching/matching_bert.py
+++ /dev/null
@@ -1,104 +0,0 @@
-import random
-import numpy as np
-import torch
-
-from fastNLP.core import Trainer, Tester, AccuracyMetric, Const
-from fastNLP.core.callback import WarmupCallback, EvaluateCallback
-from fastNLP.core.optimizer import AdamW
-from fastNLP.embeddings import BertEmbedding
-from fastNLP.io.pipe.matching import SNLIBertPipe, RTEBertPipe, MNLIBertPipe,\
-    QNLIBertPipe, QuoraBertPipe
-from fastNLP.models.bert import BertForSentenceMatching
-
-
-# define hyper-parameters
-class BERTConfig:
-
-    task = 'snli'
-
-    batch_size_per_gpu = 6
-    n_epochs = 6
-    lr = 2e-5
-    warm_up_rate = 0.1
-    seed = 42
-    save_path = None  # where to save the model; None means the model is not saved.
-
-    train_dataset_name = 'train'
-    dev_dataset_name = 'dev'
-    test_dataset_name = 'test'
-
-    to_lower = True  # ignore case: lowercase the input
-    tokenizer = 'spacy'  # use spacy for tokenization
-
-    bert_model_dir_or_name = 'bert-base-uncased'
-
-
-arg = BERTConfig()
-
-# set random seed
-random.seed(arg.seed)
-np.random.seed(arg.seed)
-torch.manual_seed(arg.seed)
-
-n_gpu = torch.cuda.device_count()
-if n_gpu > 0:
-    torch.cuda.manual_seed_all(arg.seed)
-
-# load data set
-if arg.task == 'snli':
-    data_bundle = SNLIBertPipe(lower=arg.to_lower, tokenizer=arg.tokenizer).process_from_file()
-elif arg.task == 'rte':
-    data_bundle = RTEBertPipe(lower=arg.to_lower, tokenizer=arg.tokenizer).process_from_file()
-elif arg.task == 'qnli':
-    data_bundle = QNLIBertPipe(lower=arg.to_lower, tokenizer=arg.tokenizer).process_from_file()
-elif arg.task == 'mnli':
-    data_bundle = MNLIBertPipe(lower=arg.to_lower, tokenizer=arg.tokenizer).process_from_file()
-elif arg.task == 'quora':
-    data_bundle = QuoraBertPipe(lower=arg.to_lower, tokenizer=arg.tokenizer).process_from_file()
-else:
-    raise RuntimeError(f'NOT support {arg.task} task yet!')
-
-print(data_bundle)  # print details in data_bundle
-
-# load embedding
-embed = BertEmbedding(data_bundle.vocabs[Const.INPUT], model_dir_or_name=arg.bert_model_dir_or_name)
-
-# define model
-model = BertForSentenceMatching(embed, num_labels=len(data_bundle.vocabs[Const.TARGET]))
-
-# define optimizer and callback
-optimizer = AdamW(lr=arg.lr, params=model.parameters())
-callbacks = [WarmupCallback(warmup=arg.warm_up_rate, schedule='linear'), ]
-
-if arg.task in ['snli']:
-    callbacks.append(EvaluateCallback(data=data_bundle.datasets[arg.test_dataset_name]))
-    # evaluate test set in every epoch if task is snli.
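-
-# A note on the warm-up callback above (an assumption about fastNLP's
-# WarmupCallback): with warmup=0.1 and schedule='linear', the learning rate
-# ramps up over roughly the first 10% of training steps and then decays
-# linearly, the usual recipe for fine-tuning BERT-style models.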
-
-# define trainer
-trainer = Trainer(train_data=data_bundle.get_dataset(arg.train_dataset_name), model=model,
-                  optimizer=optimizer,
-                  batch_size=torch.cuda.device_count() * arg.batch_size_per_gpu,
-                  n_epochs=arg.n_epochs, print_every=-1,
-                  dev_data=data_bundle.get_dataset(arg.dev_dataset_name),
-                  metrics=AccuracyMetric(), metric_key='acc',
-                  device=[i for i in range(torch.cuda.device_count())],
-                  check_code_level=-1,
-                  save_path=arg.save_path,
-                  callbacks=callbacks)
-
-# train model
-trainer.train(load_best_model=True)
-
-# define tester
-tester = Tester(
-    data=data_bundle.get_dataset(arg.test_dataset_name),
-    model=model,
-    metrics=AccuracyMetric(),
-    batch_size=torch.cuda.device_count() * arg.batch_size_per_gpu,
-    device=[i for i in range(torch.cuda.device_count())],
-)
-
-# test model
-tester.test()
-
diff --git a/reproduction/matching/matching_cntn.py b/reproduction/matching/matching_cntn.py
deleted file mode 100644
index 9be716ba..00000000
--- a/reproduction/matching/matching_cntn.py
+++ /dev/null
@@ -1,98 +0,0 @@
-import argparse
-import torch
-
-from fastNLP.core import Trainer, Tester, Adam, AccuracyMetric, Const, CrossEntropyLoss
-from fastNLP.embeddings import StaticEmbedding
-from fastNLP.io.pipe.matching import SNLIPipe, RTEPipe, MNLIPipe, QNLIPipe
-
-from reproduction.matching.model.cntn import CNTNModel
-
-# define hyper-parameters
-argument = argparse.ArgumentParser()
-argument.add_argument('--embedding', choices=['glove', 'word2vec'], default='glove')
-argument.add_argument('--batch-size-per-gpu', type=int, default=256)
-argument.add_argument('--n-epochs', type=int, default=200)
-argument.add_argument('--lr', type=float, default=1e-5)
-argument.add_argument('--save-dir', type=str, default=None)
-argument.add_argument('--cntn-depth', type=int, default=1)
-argument.add_argument('--cntn-ns', type=int, default=200)
-argument.add_argument('--cntn-k-top', type=int, default=10)
-argument.add_argument('--cntn-r', type=int, default=5)
-argument.add_argument('--dataset', choices=['qnli', 'rte', 'snli', 'mnli'], default='qnli')
-arg = argument.parse_args()
-
-# dataset dict
-dev_dict = {
-    'qnli': 'dev',
-    'rte': 'dev',
-    'snli': 'dev',
-    'mnli': 'dev_matched',
-}
-
-test_dict = {
-    'qnli': 'dev',
-    'rte': 'dev',
-    'snli': 'test',
-    'mnli': 'dev_matched',
-}
-
-# set num_labels
-if arg.dataset == 'qnli' or arg.dataset == 'rte':
-    num_labels = 2
-else:
-    num_labels = 3
-
-# load data set
-if arg.dataset == 'snli':
-    data_bundle = SNLIPipe(lower=True, tokenizer='raw').process_from_file()
-elif arg.dataset == 'rte':
-    data_bundle = RTEPipe(lower=True, tokenizer='raw').process_from_file()
-elif arg.dataset == 'qnli':
-    data_bundle = QNLIPipe(lower=True, tokenizer='raw').process_from_file()
-elif arg.dataset == 'mnli':
-    data_bundle = MNLIPipe(lower=True, tokenizer='raw').process_from_file()
-else:
-    # the CLI flag above is --dataset, so use arg.dataset here (there is no arg.task)
-    raise RuntimeError(f'NOT support {arg.dataset} dataset yet!')
-
-print(data_bundle)  # print details in data_bundle
-
-# load embedding
-if arg.embedding == 'word2vec':
-    embedding = StaticEmbedding(data_bundle.vocabs[Const.INPUTS(0)], model_dir_or_name='en-word2vec-300',
-                                requires_grad=True)
-elif arg.embedding == 'glove':
-    embedding = StaticEmbedding(data_bundle.vocabs[Const.INPUTS(0)], model_dir_or_name='en-glove-840b-300d',
-                                requires_grad=True)
-else:
-    raise ValueError('now we only support word2vec or glove embedding for cntn model!')
-
-# define model
-model = CNTNModel(embedding, ns=arg.cntn_ns, k_top=arg.cntn_k_top, num_labels=num_labels, depth=arg.cntn_depth,
-                  r=arg.cntn_r)
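-# How the CNTN-specific flags map onto model/cntn.py further down in this diff:
-# ns is the final sentence-embedding size, k_top the fixed k of the topmost
-# dynamic k-max pooling layer, depth the number of convolutional layers, and
-# r the number of slices in the neural tensor (bilinear) matching layer.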
-print(model)
-
-# define trainer
-trainer = Trainer(train_data=data_bundle.datasets['train'], model=model,
-                  optimizer=Adam(lr=arg.lr, model_params=model.parameters()),
-                  loss=CrossEntropyLoss(),
-                  batch_size=torch.cuda.device_count() * arg.batch_size_per_gpu,
-                  n_epochs=arg.n_epochs, print_every=-1,
-                  dev_data=data_bundle.datasets[dev_dict[arg.dataset]],
-                  metrics=AccuracyMetric(), metric_key='acc',
-                  device=[i for i in range(torch.cuda.device_count())],
-                  check_code_level=-1)
-
-# train model
-trainer.train(load_best_model=True)
-
-# define tester
-tester = Tester(
-    data=data_bundle.datasets[test_dict[arg.dataset]],
-    model=model,
-    metrics=AccuracyMetric(),
-    batch_size=torch.cuda.device_count() * arg.batch_size_per_gpu,
-    device=[i for i in range(torch.cuda.device_count())]
-)
-
-# test model
-tester.test()
diff --git a/reproduction/matching/matching_esim.py b/reproduction/matching/matching_esim.py
deleted file mode 100644
index 9d50c0fb..00000000
--- a/reproduction/matching/matching_esim.py
+++ /dev/null
@@ -1,119 +0,0 @@
-
-import random
-import numpy as np
-import torch
-from torch.optim import Adamax
-from torch.optim.lr_scheduler import StepLR
-
-from fastNLP.core import Trainer, Tester, AccuracyMetric, Const
-from fastNLP.core.callback import GradientClipCallback, LRScheduler, EvaluateCallback
-from fastNLP.core.losses import CrossEntropyLoss
-from fastNLP.embeddings import StaticEmbedding
-from fastNLP.embeddings import ElmoEmbedding
-from fastNLP.io.pipe.matching import SNLIPipe, RTEPipe, MNLIPipe, QNLIPipe, QuoraPipe
-from fastNLP.models.snli import ESIM
-
-
-# define hyper-parameters
-class ESIMConfig:
-
-    task = 'snli'
-
-    embedding = 'glove'
-
-    batch_size_per_gpu = 196
-    n_epochs = 30
-    lr = 2e-3
-    seed = 42
-    save_path = None  # where to save the model; None means the model is not saved.
-
-    train_dataset_name = 'train'
-    dev_dataset_name = 'dev'
-    test_dataset_name = 'test'
-
-    to_lower = True  # ignore case: lowercase the input
-    tokenizer = 'spacy'  # use spacy for tokenization
-
-
-arg = ESIMConfig()
-
-# set random seed
-random.seed(arg.seed)
-np.random.seed(arg.seed)
-torch.manual_seed(arg.seed)
-
-n_gpu = torch.cuda.device_count()
-if n_gpu > 0:
-    torch.cuda.manual_seed_all(arg.seed)
-
-# load data set
-if arg.task == 'snli':
-    data_bundle = SNLIPipe(lower=arg.to_lower, tokenizer=arg.tokenizer).process_from_file()
-elif arg.task == 'rte':
-    data_bundle = RTEPipe(lower=arg.to_lower, tokenizer=arg.tokenizer).process_from_file()
-elif arg.task == 'qnli':
-    data_bundle = QNLIPipe(lower=arg.to_lower, tokenizer=arg.tokenizer).process_from_file()
-elif arg.task == 'mnli':
-    data_bundle = MNLIPipe(lower=arg.to_lower, tokenizer=arg.tokenizer).process_from_file()
-elif arg.task == 'quora':
-    data_bundle = QuoraPipe(lower=arg.to_lower, tokenizer=arg.tokenizer).process_from_file()
-else:
-    raise RuntimeError(f'NOT support {arg.task} task yet!')
-
-print(data_bundle)  # print details in data_bundle
-
-# load embedding
-if arg.embedding == 'elmo':
-    embedding = ElmoEmbedding(data_bundle.vocabs[Const.INPUTS(0)], model_dir_or_name='en-medium',
-                              requires_grad=True)
-elif arg.embedding == 'glove':
-    embedding = StaticEmbedding(data_bundle.vocabs[Const.INPUTS(0)], model_dir_or_name='en-glove-840b-300d',
-                                requires_grad=True, normalize=False)
-else:
-    raise RuntimeError(f'NOT support {arg.embedding} embedding yet!')
-
-# define model
-model = ESIM(embedding, num_labels=len(data_bundle.vocabs[Const.TARGET]))
-
-# define optimizer and callback
-optimizer = Adamax(lr=arg.lr, params=model.parameters())
-scheduler = StepLR(optimizer, step_size=10, gamma=0.5)  # halve the learning rate every 10 epochs
-
-callbacks = [
-    GradientClipCallback(clip_value=10),  # equivalent to torch.nn.utils.clip_grad_norm_(10)
-    LRScheduler(scheduler),
-]
-
-if arg.task in ['snli']:
-    callbacks.append(EvaluateCallback(data=data_bundle.datasets[arg.test_dataset_name]))
-    # evaluate test set in every epoch if task is snli.
-
-# define trainer
-trainer = Trainer(train_data=data_bundle.datasets[arg.train_dataset_name], model=model,
-                  optimizer=optimizer,
-                  loss=CrossEntropyLoss(),
-                  batch_size=torch.cuda.device_count() * arg.batch_size_per_gpu,
-                  n_epochs=arg.n_epochs, print_every=-1,
-                  dev_data=data_bundle.datasets[arg.dev_dataset_name],
-                  metrics=AccuracyMetric(), metric_key='acc',
-                  device=[i for i in range(torch.cuda.device_count())],
-                  check_code_level=-1,
-                  save_path=arg.save_path,
-                  callbacks=callbacks)
-
-# train model
-trainer.train(load_best_model=True)
-
-# define tester
-tester = Tester(
-    data=data_bundle.datasets[arg.test_dataset_name],
-    model=model,
-    metrics=AccuracyMetric(),
-    batch_size=torch.cuda.device_count() * arg.batch_size_per_gpu,
-    device=[i for i in range(torch.cuda.device_count())],
-)
-
-# test model
-tester.test()
-
diff --git a/reproduction/matching/matching_mwan.py b/reproduction/matching/matching_mwan.py
deleted file mode 100644
index 026ea7b4..00000000
--- a/reproduction/matching/matching_mwan.py
+++ /dev/null
@@ -1,115 +0,0 @@
-import random
-
-import numpy as np
-import torch
-from torch.optim import Adadelta
-from torch.optim.lr_scheduler import StepLR
-
-from fastNLP import CrossEntropyLoss
-from fastNLP.core import Trainer, Tester, AccuracyMetric, Const
-from fastNLP.core.callback import LRScheduler, EvaluateCallback
-from fastNLP.embeddings import StaticEmbedding
-
-from fastNLP.io.pipe.matching import SNLIPipe, RTEPipe, MNLIPipe, QNLIPipe, QuoraPipe
-from reproduction.matching.model.mwan import MwanModel
-
-import fitlog
-fitlog.debug()  # debug mode: fitlog records nothing
-
-import argparse
-
-
-argument = argparse.ArgumentParser()
-# 'quora' included in the choices so the QuoraPipe branch below is reachable
-argument.add_argument('--task', choices=['snli', 'rte', 'qnli', 'mnli', 'quora'], default='snli')
-argument.add_argument('--batch-size', type=int, default=128)
-argument.add_argument('--n-epochs', type=int, default=50)
-argument.add_argument('--lr', type=float, default=1)
-argument.add_argument('--testset-name', type=str, default='test')
-argument.add_argument('--devset-name', type=str, default='dev')
-argument.add_argument('--seed', type=int, default=42)
-argument.add_argument('--hidden-size', type=int, default=150)
-argument.add_argument('--dropout', type=float, default=0.3)
-arg = argument.parse_args()
-
-random.seed(arg.seed)
-np.random.seed(arg.seed)
-torch.manual_seed(arg.seed)
-
-n_gpu = torch.cuda.device_count()
-if n_gpu > 0:
-    torch.cuda.manual_seed_all(arg.seed)
-print(n_gpu)
-
-for k in arg.__dict__:
-    print(k, arg.__dict__[k], type(arg.__dict__[k]))
-
-# load data set
-if arg.task == 'snli':
-    data_bundle = SNLIPipe(lower=True, tokenizer='spacy').process_from_file()
-elif arg.task == 'rte':
-    data_bundle = RTEPipe(lower=True, tokenizer='spacy').process_from_file()
-elif arg.task == 'qnli':
-    data_bundle = QNLIPipe(lower=True, tokenizer='spacy').process_from_file()
-elif arg.task == 'mnli':
-    data_bundle = MNLIPipe(lower=True, tokenizer='spacy').process_from_file()
-elif arg.task == 'quora':
-    data_bundle = QuoraPipe(lower=True, tokenizer='spacy').process_from_file()
-else:
-    raise RuntimeError(f'NOT support {arg.task} task yet!')
-
-print(data_bundle)
-print(len(data_bundle.vocabs[Const.INPUTS(0)]))
-
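-# MwanModel (model/mwan.py further down in this diff) implements the multiway
-# attention design: four attention functions over the same sentence pair
-# (concat, bilinear, dot-product and minus attention), whose outputs are gated
-# against the passage encoding and fused by an attention-over-views aggregator
-# before the final prediction layer.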
-model = MwanModel( - num_class = len(data_bundle.vocabs[Const.TARGET]), - EmbLayer = StaticEmbedding(data_bundle.vocabs[Const.INPUTS(0)], requires_grad=False, normalize=False), - ElmoLayer = None, - args_of_imm = { - "input_size" : 300 , - "hidden_size" : arg.hidden_size , - "dropout" : arg.dropout , - "use_allennlp" : False , - } , -) - - -optimizer = Adadelta(lr=arg.lr, params=model.parameters()) -scheduler = StepLR(optimizer, step_size=10, gamma=0.5) - -callbacks = [ - LRScheduler(scheduler), -] - -if arg.task in ['snli']: - callbacks.append(EvaluateCallback(data=data_bundle.datasets[arg.testset_name])) -elif arg.task == 'mnli': - callbacks.append(EvaluateCallback(data={'dev_matched': data_bundle.datasets['dev_matched'], - 'dev_mismatched': data_bundle.datasets['dev_mismatched']},)) - -trainer = Trainer( - train_data = data_bundle.datasets['train'], - model = model, - optimizer = optimizer, - num_workers = 0, - batch_size = arg.batch_size, - n_epochs = arg.n_epochs, - print_every = -1, - dev_data = data_bundle.datasets[arg.devset_name], - metrics = AccuracyMetric(pred = "pred" , target = "target"), - metric_key = 'acc', - device = [i for i in range(torch.cuda.device_count())], - check_code_level = -1, - callbacks = callbacks, - loss = CrossEntropyLoss(pred = "pred" , target = "target") -) -trainer.train(load_best_model=True) - -tester = Tester( - data=data_bundle.datasets[arg.testset_name], - model=model, - metrics=AccuracyMetric(), - batch_size=arg.batch_size, - device=[i for i in range(torch.cuda.device_count())], -) -tester.test() diff --git a/reproduction/matching/model/bert.py b/reproduction/matching/model/bert.py deleted file mode 100644 index 73a0c533..00000000 --- a/reproduction/matching/model/bert.py +++ /dev/null @@ -1,30 +0,0 @@ - -import torch -import torch.nn as nn - -from fastNLP.core.const import Const -from fastNLP.models.base_model import BaseModel -from fastNLP.embeddings import BertEmbedding - - -class BertForNLI(BaseModel): - - def __init__(self, bert_embed: BertEmbedding, class_num=3): - super(BertForNLI, self).__init__() - self.embed = bert_embed - self.classifier = nn.Linear(self.embed.embedding_dim, class_num) - - def forward(self, words): - """ - :param torch.Tensor words: [batch_size, seq_len] input_ids - :return: - """ - hidden = self.embed(words) - logits = self.classifier(hidden) - - return {Const.OUTPUT: logits} - - def predict(self, words): - logits = self.forward(words)[Const.OUTPUT] - return {Const.OUTPUT: logits.argmax(dim=-1)} - diff --git a/reproduction/matching/model/cntn.py b/reproduction/matching/model/cntn.py deleted file mode 100644 index cfa5e5a8..00000000 --- a/reproduction/matching/model/cntn.py +++ /dev/null @@ -1,112 +0,0 @@ -import torch -import torch.nn as nn -import torch.nn.functional as F -import numpy as np - -from fastNLP.models.base_model import BaseModel -from fastNLP.embeddings import TokenEmbedding -from fastNLP.core.const import Const - - -class DynamicKMaxPooling(nn.Module): - """ - :param k_top: Fixed number of pooling output features for the topmost convolutional layer. - :param l: Number of convolutional layers. - """ - - def __init__(self, k_top, l): - super(DynamicKMaxPooling, self).__init__() - self.k_top = k_top - self.L = l - - def forward(self, x, l): - """ - :param x: Input sequence. - :param l: Current convolutional layers. 
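-
-        The pooling width is dynamic: up to rounding,
-        k_l = max(k_top, ceil((L - l) / L * s)), where L is the total number of
-        convolutional layers and s is the current feature length, so deeper
-        layers keep progressively fewer columns.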
-        """
-        s = x.size()[3]
-        k_ll = ((self.L - l) / self.L) * s
-        k_l = int(round(max(self.k_top, np.ceil(k_ll))))
-        out = F.adaptive_max_pool2d(x, (x.size()[2], k_l))
-        return out
-
-
-class CNTNModel(BaseModel):
-    """
-    A CNN-based model for question-answer matching, from
-    'Qiu, Xipeng, and Xuanjing Huang.
-    Convolutional neural tensor network architecture for community-based question answering.
-    Twenty-Fourth International Joint Conference on Artificial Intelligence. 2015.'
-
-    :param init_embedding: Embedding.
-    :param ns: Sentence embedding size.
-    :param k_top: Fixed number of pooling output features for the topmost convolutional layer.
-    :param num_labels: Number of labels.
-    :param depth: Number of convolutional layers.
-    :param r: Number of weight tensor slices.
-    :param dropout_rate: Dropout rate.
-    """
-
-    def __init__(self, init_embedding: TokenEmbedding, ns=200, k_top=10, num_labels=2, depth=2, r=5,
-                 dropout_rate=0.3):
-        super(CNTNModel, self).__init__()
-        self.embedding = init_embedding
-        self.depth = depth
-        self.kmaxpooling = DynamicKMaxPooling(k_top, depth)
-        self.conv_q = nn.ModuleList()
-        self.conv_a = nn.ModuleList()
-        width = self.embedding.embed_size
-        for i in range(depth):
-            self.conv_q.append(nn.Sequential(
-                nn.Dropout(p=dropout_rate),
-                nn.Conv2d(
-                    in_channels=1,
-                    out_channels=width // 2,
-                    kernel_size=(width, 3),
-                    padding=(0, 2))
-            ))
-            self.conv_a.append(nn.Sequential(
-                nn.Dropout(p=dropout_rate),
-                nn.Conv2d(
-                    in_channels=1,
-                    out_channels=width // 2,
-                    kernel_size=(width, 3),
-                    padding=(0, 2))
-            ))
-            width = width // 2
-
-        self.fc_q = nn.Sequential(nn.Dropout(p=dropout_rate), nn.Linear(width * k_top, ns))
-        self.fc_a = nn.Sequential(nn.Dropout(p=dropout_rate), nn.Linear(width * k_top, ns))
-        self.weight_M = nn.Bilinear(ns, ns, r)
-        self.weight_V = nn.Linear(2 * ns, r)
-        self.weight_u = nn.Sequential(nn.Dropout(p=dropout_rate), nn.Linear(r, num_labels))
-
-    def forward(self, words1, words2, seq_len1, seq_len2):
-        """
-        :param words1: [batch, seq_len] token ids of the question (embedded inside forward).
-        :param words2: [batch, seq_len] token ids of the answer (embedded inside forward).
-        :param seq_len1: [batch]
-        :param seq_len2: [batch]
-        :return:
-        """
-        in_q = self.embedding(words1)
-        in_a = self.embedding(words2)
-        in_q = in_q.permute(0, 2, 1).unsqueeze(1)
-        in_a = in_a.permute(0, 2, 1).unsqueeze(1)
-
-        for i in range(self.depth):
-            in_q = F.relu(self.conv_q[i](in_q))
-            in_q = in_q.squeeze().unsqueeze(1)
-            in_q = self.kmaxpooling(in_q, i + 1)
-            in_a = F.relu(self.conv_a[i](in_a))
-            in_a = in_a.squeeze().unsqueeze(1)
-            in_a = self.kmaxpooling(in_a, i + 1)
-
-        in_q = self.fc_q(in_q.view(in_q.size(0), -1))
-        # was `self.fc_q(...)`, evidently a copy-paste slip: the answer branch
-        # should go through its own projection, fc_a (otherwise fc_a is never used)
-        in_a = self.fc_a(in_a.view(in_a.size(0), -1))
-        score = torch.tanh(self.weight_u(self.weight_M(in_q, in_a) + self.weight_V(torch.cat((in_q, in_a), -1))))
-
-        return {Const.OUTPUT: score}
-
-    def predict(self, words1, words2, seq_len1, seq_len2):
-        return self.forward(words1, words2, seq_len1, seq_len2)
diff --git a/reproduction/matching/model/esim.py b/reproduction/matching/model/esim.py
deleted file mode 100644
index f3f93bb6..00000000
--- a/reproduction/matching/model/esim.py
+++ /dev/null
@@ -1,189 +0,0 @@
-import torch
-import torch.nn as nn
-import torch.nn.functional as F
-
-from fastNLP.models.base_model import BaseModel
-from fastNLP.embeddings import TokenEmbedding
-from fastNLP.core.const import Const
-from fastNLP.core.utils import seq_len_to_mask
-
-
-class ESIMModel(BaseModel):
-    def __init__(self, init_embedding: TokenEmbedding, hidden_size=None, num_labels=3, dropout_rate=0.3,
-                 dropout_embed=0.1):
-        super(ESIMModel, self).__init__()
-
-        self.embedding = init_embedding
-        self.dropout_embed = EmbedDropout(p=dropout_embed)
-        if hidden_size is None:
-            hidden_size = self.embedding.embed_size
-        self.rnn = BiRNN(self.embedding.embed_size, hidden_size, dropout_rate=dropout_rate)
-        # self.rnn = LSTM(self.embedding.embed_size, hidden_size, dropout=dropout_rate, bidirectional=True)
-
-        self.interfere = nn.Sequential(nn.Dropout(p=dropout_rate),
-                                       nn.Linear(8 * hidden_size, hidden_size),
-                                       nn.ReLU())
-        nn.init.xavier_uniform_(self.interfere[1].weight.data)
-        self.bi_attention = SoftmaxAttention()
-
-        # note: the input to this layer has hidden_size features, so embed_size
-        # here only works because hidden_size defaults to embed_size above
-        self.rnn_high = BiRNN(self.embedding.embed_size, hidden_size, dropout_rate=dropout_rate)
-        # self.rnn_high = LSTM(hidden_size, hidden_size, dropout=dropout_rate, bidirectional=True,)
-
-        self.classifier = nn.Sequential(nn.Dropout(p=dropout_rate),
-                                        nn.Linear(8 * hidden_size, hidden_size),
-                                        nn.Tanh(),
-                                        nn.Dropout(p=dropout_rate),
-                                        nn.Linear(hidden_size, num_labels))
-
-        self.dropout_rnn = nn.Dropout(p=dropout_rate)
-
-        nn.init.xavier_uniform_(self.classifier[1].weight.data)
-        nn.init.xavier_uniform_(self.classifier[4].weight.data)
-
-    def forward(self, words1, words2, seq_len1, seq_len2):
-        """
-        :param words1: [batch, seq_len]
-        :param words2: [batch, seq_len]
-        :param seq_len1: [batch]
-        :param seq_len2: [batch]
-        :return:
-        """
-        mask1 = seq_len_to_mask(seq_len1, words1.size(1))
-        mask2 = seq_len_to_mask(seq_len2, words2.size(1))
-        a0 = self.embedding(words1)  # B * len * emb_dim
-        b0 = self.embedding(words2)
-        a0, b0 = self.dropout_embed(a0), self.dropout_embed(b0)
-        a = self.rnn(a0, mask1.byte())  # a: [B, PL, 2 * H]
-        b = self.rnn(b0, mask2.byte())
-        # a = self.dropout_rnn(self.rnn(a0, seq_len1)[0])  # a: [B, PL, 2 * H]
-        # b = self.dropout_rnn(self.rnn(b0, seq_len2)[0])
-
-        ai, bi = self.bi_attention(a, mask1, b, mask2)
-
-        a_ = torch.cat((a, ai, a - ai, a * ai), dim=2)  # ma: [B, PL, 8 * H]
-        b_ = torch.cat((b, bi, b - bi, b * bi), dim=2)
-        a_f = self.interfere(a_)
-        b_f = self.interfere(b_)
-
-        a_h = self.rnn_high(a_f, mask1.byte())  # ma: [B, PL, 2 * H]
-        b_h 
= self.rnn_high(b_f, mask2.byte()) - # a_h = self.dropout_rnn(self.rnn_high(a_f, seq_len1)[0]) # ma: [B, PL, 2 * H] - # b_h = self.dropout_rnn(self.rnn_high(b_f, seq_len2)[0]) - - a_avg = self.mean_pooling(a_h, mask1, dim=1) - a_max, _ = self.max_pooling(a_h, mask1, dim=1) - b_avg = self.mean_pooling(b_h, mask2, dim=1) - b_max, _ = self.max_pooling(b_h, mask2, dim=1) - - out = torch.cat((a_avg, a_max, b_avg, b_max), dim=1) # v: [B, 8 * H] - logits = torch.tanh(self.classifier(out)) - # logits = self.classifier(out) - - return {Const.OUTPUT: logits} - - def predict(self, words1, words2, seq_len1, seq_len2): - pred = self.forward(words1, words2, seq_len1, seq_len2)[Const.OUTPUT].argmax(-1) - return {Const.OUTPUT: pred} - - # input [batch_size, len , hidden] - # mask [batch_size, len] (111...00) - @staticmethod - def mean_pooling(input, mask, dim=1): - masks = mask.view(mask.size(0), mask.size(1), -1).float() - return torch.sum(input * masks, dim=dim) / torch.sum(masks, dim=1) - - @staticmethod - def max_pooling(input, mask, dim=1): - my_inf = 10e12 - masks = mask.view(mask.size(0), mask.size(1), -1) - masks = masks.expand(-1, -1, input.size(2)).float() - return torch.max(input + masks.le(0.5).float() * -my_inf, dim=dim) - - -class EmbedDropout(nn.Dropout): - - def forward(self, sequences_batch): - ones = sequences_batch.data.new_ones(sequences_batch.shape[0], sequences_batch.shape[-1]) - dropout_mask = nn.functional.dropout(ones, self.p, self.training, inplace=False) - return dropout_mask.unsqueeze(1) * sequences_batch - - -class BiRNN(nn.Module): - def __init__(self, input_size, hidden_size, dropout_rate=0.3): - super(BiRNN, self).__init__() - self.dropout_rate = dropout_rate - self.rnn = nn.LSTM(input_size, hidden_size, - num_layers=1, - bidirectional=True, - batch_first=True) - - def forward(self, x, x_mask): - # Sort x - lengths = x_mask.data.eq(True).long().sum(1) - _, idx_sort = torch.sort(lengths, dim=0, descending=True) - _, idx_unsort = torch.sort(idx_sort, dim=0) - lengths = list(lengths[idx_sort]) - - x = x.index_select(0, idx_sort) - # Pack it up - rnn_input = nn.utils.rnn.pack_padded_sequence(x, lengths, batch_first=True) - # Apply dropout to input - if self.dropout_rate > 0: - dropout_input = F.dropout(rnn_input.data, p=self.dropout_rate, training=self.training) - rnn_input = nn.utils.rnn.PackedSequence(dropout_input, rnn_input.batch_sizes) - output = self.rnn(rnn_input)[0] - # Unpack everything - output = nn.utils.rnn.pad_packed_sequence(output, batch_first=True)[0] - output = output.index_select(0, idx_unsort) - if output.size(1) != x_mask.size(1): - padding = torch.zeros(output.size(0), - x_mask.size(1) - output.size(1), - output.size(2)).type(output.data.type()) - output = torch.cat([output, padding], 1) - return output - - -def masked_softmax(tensor, mask): - tensor_shape = tensor.size() - reshaped_tensor = tensor.view(-1, tensor_shape[-1]) - - # Reshape the mask so it matches the size of the input tensor. - while mask.dim() < tensor.dim(): - mask = mask.unsqueeze(1) - mask = mask.expand_as(tensor).contiguous().float() - reshaped_mask = mask.view(-1, mask.size()[-1]) - result = F.softmax(reshaped_tensor * reshaped_mask, dim=-1) - result = result * reshaped_mask - # 1e-13 is added to avoid divisions by zero. 
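-    # The scheme here is: scale the logits by the mask, softmax, re-mask, then
-    # renormalize, so padded positions receive exactly zero weight while the
-    # remaining weights still sum to one in each row.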
- result = result / (result.sum(dim=-1, keepdim=True) + 1e-13) - return result.view(*tensor_shape) - - -def weighted_sum(tensor, weights, mask): - w_sum = weights.bmm(tensor) - while mask.dim() < w_sum.dim(): - mask = mask.unsqueeze(1) - mask = mask.transpose(-1, -2) - mask = mask.expand_as(w_sum).contiguous().float() - return w_sum * mask - - -class SoftmaxAttention(nn.Module): - - def forward(self, premise_batch, premise_mask, hypothesis_batch, hypothesis_mask): - similarity_matrix = premise_batch.bmm(hypothesis_batch.transpose(2, 1) - .contiguous()) - - prem_hyp_attn = masked_softmax(similarity_matrix, hypothesis_mask) - hyp_prem_attn = masked_softmax(similarity_matrix.transpose(1, 2) - .contiguous(), - premise_mask) - - attended_premises = weighted_sum(hypothesis_batch, - prem_hyp_attn, - premise_mask) - attended_hypotheses = weighted_sum(premise_batch, - hyp_prem_attn, - hypothesis_mask) - - return attended_premises, attended_hypotheses \ No newline at end of file diff --git a/reproduction/matching/model/mwan.py b/reproduction/matching/model/mwan.py deleted file mode 100644 index 9af1e134..00000000 --- a/reproduction/matching/model/mwan.py +++ /dev/null @@ -1,455 +0,0 @@ -import torch as tc -import torch.nn as nn -import torch.nn.functional as F -import sys -import os -import math -from fastNLP.core.const import Const - -class RNNModel(nn.Module): - def __init__(self, input_size, hidden_size, num_layers, bidrect, dropout): - super(RNNModel, self).__init__() - - if num_layers <= 1: - dropout = 0.0 - - self.rnn = nn.GRU(input_size=input_size, hidden_size=hidden_size, num_layers=num_layers, - batch_first=True, dropout=dropout, bidirectional=bidrect) - - self.number = (2 if bidrect else 1) * num_layers - - def forward(self, x, mask): - ''' - mask: (batch_size, seq_len) - x: (batch_size, seq_len, input_size) - ''' - lens = (mask).long().sum(dim=1) - lens, idx_sort = tc.sort(lens, descending=True) - _, idx_unsort = tc.sort(idx_sort) - - x = x[idx_sort] - - x = nn.utils.rnn.pack_padded_sequence(x, lens, batch_first=True) - self.rnn.flatten_parameters() - y, h = self.rnn(x) - y, lens = nn.utils.rnn.pad_packed_sequence(y, batch_first=True) - - h = h.transpose(0,1).contiguous() #make batch size first - - y = y[idx_unsort] #(batch_size, seq_len, bid * hid_size) - h = h[idx_unsort] #(batch_size, number, hid_size) - - return y, h - -class Contexualizer(nn.Module): - def __init__(self, input_size, hidden_size, num_layers=1, dropout=0.3): - super(Contexualizer, self).__init__() - - self.rnn = RNNModel(input_size, hidden_size, num_layers, True, dropout) - self.output_size = hidden_size * 2 - - self.reset_parameters() - - def reset_parameters(self): - weights = self.rnn.rnn.all_weights - for w1 in weights: - for w2 in w1: - if len(list(w2.size())) <= 1: - w2.data.fill_(0) - else: nn.init.xavier_normal_(w2.data, gain=1.414) - - def forward(self, s, mask): - y = self.rnn(s, mask)[0] # (batch_size, seq_len, 2 * hidden_size) - - return y - -class ConcatAttention_Param(nn.Module): - def __init__(self, input_size, hidden_size, dropout=0.2): - super(ConcatAttention_Param, self).__init__() - self.ln = nn.Linear(input_size + hidden_size, hidden_size) - self.v = nn.Linear(hidden_size, 1, bias=False) - self.vq = nn.Parameter(tc.rand(hidden_size)) - self.drop = nn.Dropout(dropout) - - self.output_size = input_size - - self.reset_parameters() - - def reset_parameters(self): - - nn.init.xavier_uniform_(self.v.weight.data) - nn.init.xavier_uniform_(self.ln.weight.data) - self.ln.bias.data.fill_(0) - - def 
forward(self, h, mask): - ''' - h: (batch_size, len, input_size) - mask: (batch_size, len) - ''' - - vq = self.vq.view(1,1,-1).expand(h.size(0), h.size(1), self.vq.size(0)) - - s = self.v(tc.tanh(self.ln(tc.cat([h,vq],-1)))).squeeze(-1) # (batch_size, len) - - s = s - ((mask.eq(False)).float() * 10000) - a = tc.softmax(s, dim=1) - - r = a.unsqueeze(-1) * h # (batch_size, len, input_size) - r = tc.sum(r, dim=1) # (batch_size, input_size) - - return self.drop(r) - - -def get_2dmask(mask_hq, mask_hp, siz=None): - - if siz is None: - siz = (mask_hq.size(0), mask_hq.size(1), mask_hp.size(1)) - - mask_mat = 1 - if mask_hq is not None: - mask_mat = mask_mat * mask_hq.unsqueeze(2).expand(siz) - if mask_hp is not None: - mask_mat = mask_mat * mask_hp.unsqueeze(1).expand(siz) - return mask_mat - -def Attention(hq, hp, mask_hq, mask_hp, my_method): - standard_size = (hq.size(0), hq.size(1), hp.size(1), hq.size(-1)) - mask_mat = get_2dmask(mask_hq, mask_hp, standard_size[:-1]) - - hq_mat = hq.unsqueeze(2).expand(standard_size) - hp_mat = hp.unsqueeze(1).expand(standard_size) - - s = my_method(hq_mat, hp_mat) # (batch_size, len_q, len_p) - - s = s - ((mask_mat.eq(False)).float() * 10000) - a = tc.softmax(s, dim=1) - - q = a.unsqueeze(-1) * hq_mat #(batch_size, len_q, len_p, input_size) - q = tc.sum(q, dim=1) #(batch_size, len_p, input_size) - - return q - -class ConcatAttention(nn.Module): - def __init__(self, input_size, hidden_size, dropout=0.2, input_size_2=-1): - super(ConcatAttention, self).__init__() - - if input_size_2 < 0: - input_size_2 = input_size - self.ln = nn.Linear(input_size + input_size_2, hidden_size) - self.v = nn.Linear(hidden_size, 1, bias=False) - self.drop = nn.Dropout(dropout) - - self.output_size = input_size - - - self.reset_parameters() - - def reset_parameters(self): - - nn.init.xavier_uniform_(self.v.weight.data) - nn.init.xavier_uniform_(self.ln.weight.data) - self.ln.bias.data.fill_(0) - - def my_method(self, hq_mat, hp_mat): - s = tc.cat([hq_mat, hp_mat], dim=-1) - s = self.v(tc.tanh(self.ln(s))).squeeze(-1) #(batch_size, len_q, len_p) - return s - - def forward(self, hq, hp, mask_hq=None, mask_hp=None): - ''' - hq: (batch_size, len_q, input_size) - mask_hq: (batch_size, len_q) - ''' - return self.drop(Attention(hq, hp, mask_hq, mask_hp, self.my_method)) - -class MinusAttention(nn.Module): - def __init__(self, input_size, hidden_size, dropout=0.2): - super(MinusAttention, self).__init__() - self.ln = nn.Linear(input_size, hidden_size) - self.v = nn.Linear(hidden_size, 1, bias=False) - - self.drop = nn.Dropout(dropout) - self.output_size = input_size - self.reset_parameters() - - def reset_parameters(self): - - nn.init.xavier_uniform_(self.v.weight.data) - nn.init.xavier_uniform_(self.ln.weight.data) - self.ln.bias.data.fill_(0) - - def my_method(self, hq_mat, hp_mat): - s = hq_mat - hp_mat - s = self.v(tc.tanh(self.ln(s))).squeeze(-1) #(batch_size, len_q, len_p) s[j,t] - return s - - def forward(self, hq, hp, mask_hq=None, mask_hp=None): - return self.drop(Attention(hq, hp, mask_hq, mask_hp, self.my_method)) - -class DotProductAttention(nn.Module): - def __init__(self, input_size, hidden_size, dropout=0.2): - super(DotProductAttention, self).__init__() - self.ln = nn.Linear(input_size, hidden_size) - self.v = nn.Linear(hidden_size, 1, bias=False) - - self.drop = nn.Dropout(dropout) - self.output_size = input_size - self.reset_parameters() - - def reset_parameters(self): - - nn.init.xavier_uniform_(self.v.weight.data) - nn.init.xavier_uniform_(self.ln.weight.data) - 
self.ln.bias.data.fill_(0) - - def my_method(self, hq_mat, hp_mat): - s = hq_mat * hp_mat - s = self.v(tc.tanh(self.ln(s))).squeeze(-1) #(batch_size, len_q, len_p) s[j,t] - return s - - def forward(self, hq, hp, mask_hq=None, mask_hp=None): - return self.drop(Attention(hq, hp, mask_hq, mask_hp, self.my_method)) - -class BiLinearAttention(nn.Module): - def __init__(self, input_size, hidden_size, dropout=0.2, input_size_2=-1): - super(BiLinearAttention, self).__init__() - - input_size_2 = input_size if input_size_2 < 0 else input_size_2 - - self.ln = nn.Linear(input_size_2, input_size) - self.drop = nn.Dropout(dropout) - self.output_size = input_size - - self.reset_parameters() - - def reset_parameters(self): - - nn.init.xavier_uniform_(self.ln.weight.data) - self.ln.bias.data.fill_(0) - - def my_method(self, hq, hp, mask_p): - # (bs, len, input_size) - - hp = self.ln(hp) - hp = hp * mask_p.unsqueeze(-1) - s = tc.matmul(hq, hp.transpose(-1,-2)) - - return s - - def forward(self, hq, hp, mask_hq=None, mask_hp=None): - standard_size = (hq.size(0), hq.size(1), hp.size(1), hq.size(-1)) - mask_mat = get_2dmask(mask_hq, mask_hp, standard_size[:-1]) - - s = self.my_method(hq, hp, mask_hp) # (batch_size, len_q, len_p) - - s = s - ((mask_mat.eq(False)).float() * 10000) - a = tc.softmax(s, dim=1) - - hq_mat = hq.unsqueeze(2).expand(standard_size) - q = a.unsqueeze(-1) * hq_mat #(batch_size, len_q, len_p, input_size) - q = tc.sum(q, dim=1) #(batch_size, len_p, input_size) - - return self.drop(q) - - -class AggAttention(nn.Module): - def __init__(self, input_size, hidden_size, dropout=0.2): - super(AggAttention, self).__init__() - self.ln = nn.Linear(input_size + hidden_size, hidden_size) - self.v = nn.Linear(hidden_size, 1, bias=False) - self.vq = nn.Parameter(tc.rand(hidden_size, 1)) - self.drop = nn.Dropout(dropout) - - self.output_size = input_size - - self.reset_parameters() - - def reset_parameters(self): - - nn.init.xavier_uniform_(self.vq.data) - nn.init.xavier_uniform_(self.v.weight.data) - nn.init.xavier_uniform_(self.ln.weight.data) - self.ln.bias.data.fill_(0) - self.vq.data = self.vq.data[:,0] - - - def forward(self, hs, mask): - ''' - hs: [(batch_size, len_q, input_size), ...] - mask: (batch_size, len_q) - ''' - - hs = tc.cat([h.unsqueeze(0) for h in hs], dim=0)# (4, batch_size, len_q, input_size) - - vq = self.vq.view(1,1,1,-1).expand(hs.size(0), hs.size(1), hs.size(2), self.vq.size(0)) - - s = self.v(tc.tanh(self.ln(tc.cat([hs,vq],-1)))).squeeze(-1)# (4, batch_size, len_q) - - s = s - ((mask.unsqueeze(0).eq(False)).float() * 10000) - a = tc.softmax(s, dim=0) - - x = a.unsqueeze(-1) * hs - x = tc.sum(x, dim=0)#(batch_size, len_q, input_size) - - return self.drop(x) - -class Aggragator(nn.Module): - def __init__(self, input_size, hidden_size, dropout=0.3): - super(Aggragator, self).__init__() - - now_size = input_size - self.ln = nn.Linear(2 * input_size, 2 * input_size) - - now_size = 2 * input_size - self.rnn = Contexualizer(now_size, hidden_size, 2, dropout) - - now_size = self.rnn.output_size - self.agg_att = AggAttention(now_size, now_size, dropout) - - now_size = self.agg_att.output_size - self.agg_rnn = Contexualizer(now_size, hidden_size, 2, dropout) - - self.drop = nn.Dropout(dropout) - - self.output_size = self.agg_rnn.output_size - - def forward(self, qs, hp, mask): - ''' - qs: [ (batch_size, len_p, input_size), ...] 
- hp: (batch_size, len_p, input_size) - mask if the same of hp's mask - ''' - - hs = [0 for _ in range(len(qs))] - - for i in range(len(qs)): - q = qs[i] - x = tc.cat([q, hp], dim=-1) - g = tc.sigmoid(self.ln(x)) - x_star = x * g - h = self.rnn(x_star, mask) - - hs[i] = h - - x = self.agg_att(hs, mask) #(batch_size, len_p, output_size) - h = self.agg_rnn(x, mask) #(batch_size, len_p, output_size) - return self.drop(h) - - -class Mwan_Imm(nn.Module): - def __init__(self, input_size, hidden_size, num_class=3, dropout=0.2, use_allennlp=False): - super(Mwan_Imm, self).__init__() - - now_size = input_size - self.enc_s1 = Contexualizer(now_size, hidden_size, 2, dropout) - self.enc_s2 = Contexualizer(now_size, hidden_size, 2, dropout) - - now_size = self.enc_s1.output_size - self.att_c = ConcatAttention(now_size, hidden_size, dropout) - self.att_b = BiLinearAttention(now_size, hidden_size, dropout) - self.att_d = DotProductAttention(now_size, hidden_size, dropout) - self.att_m = MinusAttention(now_size, hidden_size, dropout) - - now_size = self.att_c.output_size - self.agg = Aggragator(now_size, hidden_size, dropout) - - now_size = self.enc_s1.output_size - self.pred_1 = ConcatAttention_Param(now_size, hidden_size, dropout) - now_size = self.agg.output_size - self.pred_2 = ConcatAttention(now_size, hidden_size, dropout, - input_size_2=self.pred_1.output_size) - - now_size = self.pred_2.output_size - self.ln1 = nn.Linear(now_size, hidden_size) - self.ln2 = nn.Linear(hidden_size, num_class) - - self.reset_parameters() - - def reset_parameters(self): - nn.init.xavier_uniform_(self.ln1.weight.data) - nn.init.xavier_uniform_(self.ln2.weight.data) - self.ln1.bias.data.fill_(0) - self.ln2.bias.data.fill_(0) - - def forward(self, s1, s2, mas_s1, mas_s2): - hq = self.enc_s1(s1, mas_s1) #(batch_size, len_q, output_size) - hp = self.enc_s1(s2, mas_s2) - - mas_s1 = mas_s1[:,:hq.size(1)] - mas_s2 = mas_s2[:,:hp.size(1)] - mas_q, mas_p = mas_s1, mas_s2 - - qc = self.att_c(hq, hp, mas_s1, mas_s2) #(batch_size, len_p, output_size) - qb = self.att_b(hq, hp, mas_s1, mas_s2) - qd = self.att_d(hq, hp, mas_s1, mas_s2) - qm = self.att_m(hq, hp, mas_s1, mas_s2) - - ho = self.agg([qc,qb,qd,qm], hp, mas_s2) #(batch_size, len_p, output_size) - - rq = self.pred_1(hq, mas_q) #(batch_size, output_size) - rp = self.pred_2(ho, rq.unsqueeze(1), mas_p)#(batch_size, 1, output_size) - rp = rp.squeeze(1) #(batch_size, output_size) - - rp = F.relu(self.ln1(rp)) - rp = self.ln2(rp) - - return rp - -class MwanModel(nn.Module): - def __init__(self, num_class, EmbLayer, args_of_imm={}, ElmoLayer=None): - super(MwanModel, self).__init__() - - self.emb = EmbLayer - - if ElmoLayer is not None: - self.elmo = ElmoLayer - self.elmo_preln = nn.Linear(3 * self.elmo.emb_size, self.elmo.emb_size) - self.elmo_ln = nn.Linear(args_of_imm["input_size"] + - self.elmo.emb_size, args_of_imm["input_size"]) - - else: - self.elmo = None - - - self.imm = Mwan_Imm(num_class=num_class, **args_of_imm) - self.drop = nn.Dropout(args_of_imm["dropout"]) - - - def forward(self, words1, words2, str_s1=None, str_s2=None, *pargs, **kwargs): - ''' - str_s is for elmo use , however we don't use elmo - str_s: (batch_size, seq_len, word_len) - ''' - - s1, s2 = words1, words2 - - mas_s1 = (s1 != 0).float() # mas: (batch_size, seq_len) - mas_s2 = (s2 != 0).float() # mas: (batch_size, seq_len) - - mas_s1.requires_grad = False - mas_s2.requires_grad = False - - s1_emb = self.emb(s1) - s2_emb = self.emb(s2) - - if self.elmo is not None: - s1_elmo = self.elmo(str_s1) - s2_elmo 
= self.elmo(str_s2) - - s1_elmo = tc.tanh(self.elmo_preln(tc.cat(s1_elmo, dim=-1))) - s2_elmo = tc.tanh(self.elmo_preln(tc.cat(s2_elmo, dim=-1))) - - s1_emb = tc.cat([s1_emb, s1_elmo], dim=-1) - s2_emb = tc.cat([s2_emb, s2_elmo], dim=-1) - - s1_emb = tc.tanh(self.elmo_ln(s1_emb)) - s2_emb = tc.tanh(self.elmo_ln(s2_emb)) - - s1_emb = self.drop(s1_emb) - s2_emb = self.drop(s2_emb) - - y = self.imm(s1_emb, s2_emb, mas_s1, mas_s2) - - return { - Const.OUTPUT: y, - } diff --git a/reproduction/multi-criteria-cws/README.md b/reproduction/multi-criteria-cws/README.md deleted file mode 100644 index 0f4ab8d8..00000000 --- a/reproduction/multi-criteria-cws/README.md +++ /dev/null @@ -1,61 +0,0 @@ - - -# Multi-Criteria-CWS - -An implementation of [Multi-Criteria Chinese Word Segmentation with Transformer](http://arxiv.org/abs/1906.12035) with fastNLP. - -## Dataset -### Overview -We use the same datasets listed in the paper. - - sighan2005 - - pku - - msr - - as - - cityu -- sighan2008 - - ctb - - ckip - - cityu (combined with data in sighan2005) - - ncc - - sxu - -### Preprocess -First, install OpenCC to convert between Traditional Chinese and Simplified Chinese. -``` shell -pip install opencc-python-reimplemented -``` -Then, set a path to save the processed data, and run the shell script to process the data. -```shell -export DATA_DIR=path/to/processed-data -bash make_data.sh path/to/sighan2005 path/to/sighan2008 -``` -The process takes a few minutes. - -## Model -We build the model with a Transformer, as described in the paper. - -## Train -Finally, run the shell script to train the model. -`train.sh` takes one argument, the GPU IDs to use, for example: -``` shell -bash train.sh 0,1 -``` -This command uses GPUs 0 and 1. - -Note: please refer to the paper for hyper-parameter details, and modify the settings in `train.sh` to match your experiment environment. - -Type -``` shell -python main.py --help -``` -to list all arguments that can be specified for training. - -## Performance - -Results on the test sets of eight CWS datasets with multi-criteria learning. - -| Dataset | MSRA | AS | PKU | CTB | CKIP | CITYU | NCC | SXU | Avg. | -| -------------- | ----- | ----- | ----- | ----- | ----- | ----- | ----- | ----- | ----- | -| Original paper | 98.05 | 96.44 | 96.41 | 96.99 | 96.51 | 96.91 | 96.04 | 97.61 | 96.87 | -| Ours | 96.92 | 95.71 | 95.65 | 95.96 | 96.00 | 96.09 | 94.61 | 96.64 | 95.95 | - diff --git a/reproduction/multi-criteria-cws/data-prepare.py b/reproduction/multi-criteria-cws/data-prepare.py deleted file mode 100644 index 2c28e3b6..00000000 --- a/reproduction/multi-criteria-cws/data-prepare.py +++ /dev/null @@ -1,262 +0,0 @@ -import os -import re -import argparse -from opencc import OpenCC - -cc = OpenCC("t2s") - -from utils import make_sure_path_exists, append_tags - -sighan05_root = "" -sighan08_root = "" -data_path = "" - -E_pun = u",.!?[]()<>\"\"''," -C_pun = u",。!?【】()《》“”‘’、" -Table = {ord(f): ord(t) for f, t in zip(C_pun, E_pun)} -Table[12288] = 32 # full-width space (U+3000) maps to ASCII space - - -def C_trans_to_E(string): - return string.translate(Table) - - -def normalize(ustring): - """Convert full-width characters to half-width.""" - rstring = "" - for uchar in ustring: - inside_code = ord(uchar) - if inside_code == 12288: # full-width space converts directly - inside_code = 32 - elif 65281 <= inside_code <= 65374: # other full-width chars (except space) shift by a fixed offset - inside_code -= 65248 - - rstring += chr(inside_code) - return rstring - - -def preprocess(text): - rNUM = u"(-|\+)?\d+((\.|·)\d+)?%?" 
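# rNUM matches signed integers and decimals (with "." or "·") plus an optional "%",
# and rENG (next line) matches Latin-alphabet tokens; preprocess() rewrites every
# number to "0" and every such token to "X", e.g. "+3.5%" -> "0" and "NBA" -> "X",
# so the segmenter trains on normalized placeholders rather than raw digits and
# foreign words.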
- rENG = u"[A-Za-z_]+.*" - sent = normalize(C_trans_to_E(text.strip())).split() - new_sent = [] - for word in sent: - word = re.sub(u"\s+", "", word, flags=re.U) - word = re.sub(rNUM, u"0", word, flags=re.U) - word = re.sub(rENG, u"X", word) - new_sent.append(word) - return new_sent - - -def to_sentence_list(text, split_long_sentence=False): - text = preprocess(text) - delimiter = set() - delimiter.update("。!?:;…、,(),;!?、.\"'") - delimiter.add("……") - sent_list = [] - sent = [] - sent_len = 0 - for word in text: - sent.append(word) - sent_len += len(word) - if word in delimiter or (split_long_sentence and sent_len >= 50): - sent_list.append(sent) - sent = [] - sent_len = 0 - - if len(sent) > 0: - sent_list.append(sent) - - return sent_list - - -def is_traditional(dataset): - return dataset in ["as", "cityu", "ckip"] - - -def convert_file( - src, des, need_cc=False, split_long_sentence=False, encode="utf-8-sig" -): - with open(src, encoding=encode) as src, open(des, "w", encoding="utf-8") as des: - for line in src: - for sent in to_sentence_list(line, split_long_sentence): - line = " ".join(sent) + "\n" - if need_cc: - line = cc.convert(line) - des.write(line) - # if len(''.join(sent)) > 200: - # print(' '.join(sent)) - - -def split_train_dev(dataset): - root = data_path + "/" + dataset + "/raw/" - with open(root + "train-all.txt", encoding="UTF-8") as src, open( - root + "train.txt", "w", encoding="UTF-8" - ) as train, open(root + "dev.txt", "w", encoding="UTF-8") as dev: - lines = src.readlines() - idx = int(len(lines) * 0.9) - for line in lines[:idx]: - train.write(line) - for line in lines[idx:]: - dev.write(line) - - -def combine_files(one, two, out): - if os.path.exists(out): - os.remove(out) - with open(one, encoding="utf-8") as one, open(two, encoding="utf-8") as two, open( - out, "a", encoding="utf-8" - ) as out: - for line in one: - out.write(line) - for line in two: - out.write(line) - - -def bmes_tag(input_file, output_file): - with open(input_file, encoding="utf-8") as input_data, open( - output_file, "w", encoding="utf-8" - ) as output_data: - for line in input_data: - word_list = line.strip().split() - for word in word_list: - if len(word) == 1 or ( - len(word) > 2 and word[0] == "<" and word[-1] == ">" - ): - output_data.write(word + "\tS\n") - else: - output_data.write(word[0] + "\tB\n") - for w in word[1 : len(word) - 1]: - output_data.write(w + "\tM\n") - output_data.write(word[len(word) - 1] + "\tE\n") - output_data.write("\n") - - -def make_bmes(dataset="pku"): - path = data_path + "/" + dataset + "/" - make_sure_path_exists(path + "bmes") - bmes_tag(path + "raw/train.txt", path + "bmes/train.txt") - bmes_tag(path + "raw/train-all.txt", path + "bmes/train-all.txt") - bmes_tag(path + "raw/dev.txt", path + "bmes/dev.txt") - bmes_tag(path + "raw/test.txt", path + "bmes/test.txt") - - -def convert_sighan2005_dataset(dataset): - global sighan05_root - root = os.path.join(data_path, dataset) - make_sure_path_exists(root) - make_sure_path_exists(root + "/raw") - file_path = "{}/{}_training.utf8".format(sighan05_root, dataset) - convert_file( - file_path, "{}/raw/train-all.txt".format(root), is_traditional(dataset), True - ) - if dataset == "as": - file_path = "{}/{}_testing_gold.utf8".format(sighan05_root, dataset) - else: - file_path = "{}/{}_test_gold.utf8".format(sighan05_root, dataset) - convert_file( - file_path, "{}/raw/test.txt".format(root), is_traditional(dataset), False - ) - split_train_dev(dataset) - - -def convert_sighan2008_dataset(dataset, utf=16): - global 
sighan08_root - root = os.path.join(data_path, dataset) - make_sure_path_exists(root) - make_sure_path_exists(root + "/raw") - convert_file( - "{}/{}_train_utf{}.seg".format(sighan08_root, dataset, utf), - "{}/raw/train-all.txt".format(root), - is_traditional(dataset), - True, - "utf-{}".format(utf), - ) - convert_file( - "{}/{}_seg_truth&resource/{}_truth_utf{}.seg".format( - sighan08_root, dataset, dataset, utf - ), - "{}/raw/test.txt".format(root), - is_traditional(dataset), - False, - "utf-{}".format(utf), - ) - split_train_dev(dataset) - - -def extract_conll(src, out): - words = [] - with open(src, encoding="utf-8") as src, open(out, "w", encoding="utf-8") as out: - for line in src: - line = line.strip() - if len(line) == 0: - out.write(" ".join(words) + "\n") - words = [] - continue - cells = line.split() - words.append(cells[1]) - - -def make_joint_corpus(datasets, joint): - parts = ["dev", "test", "train", "train-all"] - for part in parts: - old_file = "{}/{}/raw/{}.txt".format(data_path, joint, part) - if os.path.exists(old_file): - os.remove(old_file) - elif not os.path.exists(os.path.dirname(old_file)): - os.makedirs(os.path.dirname(old_file)) - for name in datasets: - append_tags( - os.path.join(data_path, name, "raw"), - os.path.dirname(old_file), - name, - part, - encode="utf-8", - ) - - -def convert_all_sighan2005(datasets): - for dataset in datasets: - print(("Converting sighan bakeoff 2005 corpus: {}".format(dataset))) - convert_sighan2005_dataset(dataset) - make_bmes(dataset) - - -def convert_all_sighan2008(datasets): - for dataset in datasets: - print(("Converting sighan bakeoff 2008 corpus: {}".format(dataset))) - convert_sighan2008_dataset(dataset, 16) - make_bmes(dataset) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - # fmt: off - parser.add_argument("--sighan05", required=True, type=str, help="path to sighan2005 dataset") - parser.add_argument("--sighan08", required=True, type=str, help="path to sighan2008 dataset") - parser.add_argument("--data_path", required=True, type=str, help="path to save dataset") - # fmt: on - - args, _ = parser.parse_known_args() - sighan05_root = args.sighan05 - sighan08_root = args.sighan08 - data_path = args.data_path - - print("Converting sighan2005 Simplified Chinese corpus") - datasets = "pku", "msr", "as", "cityu" - convert_all_sighan2005(datasets) - - print("Combining sighan2005 corpus to one joint Simplified Chinese corpus") - datasets = "pku", "msr", "as", "cityu" - make_joint_corpus(datasets, "joint-sighan2005") - make_bmes("joint-sighan2005") - - # For researchers who have access to sighan2008 corpus, use official corpora please. 
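# The joint corpus mixes all criteria in one file: append_tags (see utils.py)
# prefixes every sentence with a dataset token such as "<pku>", and data-process.py
# later splits that first token off as the "task" field, so a single model can
# condition on the segmentation criterion it is asked to produce.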
- print("Converting sighan2008 Simplified Chinese corpus") - datasets = "ctb", "ckip", "cityu", "ncc", "sxu" - convert_all_sighan2008(datasets) - print("Combining those 8 sighan corpora to one joint corpus") - datasets = "pku", "msr", "as", "ctb", "ckip", "cityu", "ncc", "sxu" - make_joint_corpus(datasets, "joint-sighan2008") - make_bmes("joint-sighan2008") - diff --git a/reproduction/multi-criteria-cws/data-process.py b/reproduction/multi-criteria-cws/data-process.py deleted file mode 100644 index 829580ef..00000000 --- a/reproduction/multi-criteria-cws/data-process.py +++ /dev/null @@ -1,166 +0,0 @@ -import os -import sys - -import codecs -import argparse -from _pickle import load, dump -import collections -from utils import get_processing_word, is_dataset_tag, make_sure_path_exists, get_bmes -from fastNLP import Instance, DataSet, Vocabulary, Const - -max_len = 0 - - -def expand(x): - sent = [""] + x[1:] + [""] - return [x + y for x, y in zip(sent[:-1], sent[1:])] - - -def read_file(filename, processing_word=get_processing_word(lowercase=False)): - dataset = DataSet() - niter = 0 - with codecs.open(filename, "r", "utf-8-sig") as f: - words, tags = [], [] - for line in f: - line = line.strip() - if len(line) == 0 or line.startswith("-DOCSTART-"): - if len(words) != 0: - assert len(words) > 2 - if niter == 1: - print(words, tags) - niter += 1 - dataset.append(Instance(ori_words=words[:-1], ori_tags=tags[:-1])) - words, tags = [], [] - else: - word, tag = line.split() - word = processing_word(word) - words.append(word) - tags.append(tag.lower()) - - dataset.apply_field(lambda x: [x[0]], field_name="ori_words", new_field_name="task") - dataset.apply_field( - lambda x: len(x), field_name="ori_tags", new_field_name="seq_len" - ) - dataset.apply_field( - lambda x: expand(x), field_name="ori_words", new_field_name="bi1" - ) - return dataset - - -def main(): - parser = argparse.ArgumentParser() - # fmt: off - parser.add_argument("--data_path", required=True, type=str, help="all of datasets pkl paths") - # fmt: on - - options, _ = parser.parse_known_args() - - train_set, test_set = DataSet(), DataSet() - - input_dir = os.path.join(options.data_path, "joint-sighan2008/bmes") - options.output = os.path.join(options.data_path, "total_dataset.pkl") - print(input_dir, options.output) - - for fn in os.listdir(input_dir): - if fn not in ["test.txt", "train-all.txt"]: - continue - print(fn) - abs_fn = os.path.join(input_dir, fn) - ds = read_file(abs_fn) - if "test.txt" == fn: - test_set = ds - else: - train_set = ds - - print( - "num samples of total train, test: {}, {}".format(len(train_set), len(test_set)) - ) - - uni_vocab = Vocabulary(min_freq=None).from_dataset( - train_set, test_set, field_name="ori_words" - ) - # bi_vocab = Vocabulary(min_freq=3, max_size=50000).from_dataset(train_set,test_set, field_name="bi1") - bi_vocab = Vocabulary(min_freq=3, max_size=None).from_dataset( - train_set, field_name="bi1", no_create_entry_dataset=[test_set] - ) - tag_vocab = Vocabulary(min_freq=None, padding="s", unknown=None).from_dataset( - train_set, field_name="ori_tags" - ) - task_vocab = Vocabulary(min_freq=None, padding=None, unknown=None).from_dataset( - train_set, field_name="task" - ) - - def to_index(dataset): - uni_vocab.index_dataset(dataset, field_name="ori_words", new_field_name="uni") - tag_vocab.index_dataset(dataset, field_name="ori_tags", new_field_name="tags") - task_vocab.index_dataset(dataset, field_name="task", new_field_name="task") - - dataset.apply_field(lambda x: x[1:], field_name="bi1", 
new_field_name="bi2") - dataset.apply_field(lambda x: x[:-1], field_name="bi1", new_field_name="bi1") - bi_vocab.index_dataset(dataset, field_name="bi1", new_field_name="bi1") - bi_vocab.index_dataset(dataset, field_name="bi2", new_field_name="bi2") - - dataset.set_input("task", "uni", "bi1", "bi2", "seq_len") - dataset.set_target("tags") - return dataset - - train_set = to_index(train_set) - test_set = to_index(test_set) - - output = {} - output["train_set"] = train_set - output["test_set"] = test_set - output["uni_vocab"] = uni_vocab - output["bi_vocab"] = bi_vocab - output["tag_vocab"] = tag_vocab - output["task_vocab"] = task_vocab - - print(tag_vocab.word2idx) - print(task_vocab.word2idx) - - make_sure_path_exists(os.path.dirname(options.output)) - - print("Saving dataset to {}".format(os.path.abspath(options.output))) - with open(options.output, "wb") as outfile: - dump(output, outfile) - - print(len(task_vocab), len(tag_vocab), len(uni_vocab), len(bi_vocab)) - dic = {} - tokens = {} - - def process(words): - name = words[0][1:-1] - if name not in dic: - dic[name] = set() - tokens[name] = 0 - tokens[name] += len(words[1:]) - dic[name].update(words[1:]) - - train_set.apply_field(process, "ori_words", None) - for name in dic.keys(): - print(name, len(dic[name]), tokens[name]) - - with open(os.path.join(os.path.dirname(options.output), "oovdict.pkl"), "wb") as f: - dump(dic, f) - - def get_max_len(ds): - global max_len - max_len = 0 - - def find_max_len(words): - global max_len - if max_len < len(words): - max_len = len(words) - - ds.apply_field(find_max_len, "ori_words", None) - return max_len - - print( - "train max len: {}, test max len: {}".format( - get_max_len(train_set), get_max_len(test_set) - ) - ) - - -if __name__ == "__main__": - main() diff --git a/reproduction/multi-criteria-cws/main.py b/reproduction/multi-criteria-cws/main.py deleted file mode 100644 index 8ee1f81e..00000000 --- a/reproduction/multi-criteria-cws/main.py +++ /dev/null @@ -1,496 +0,0 @@ -import _pickle as pickle -import argparse -import collections -import logging -import math -import os -import pickle -import random -import sys -import time -from sys import maxsize - -import fastNLP -import fastNLP.embeddings -import numpy as np -import torch -import torch.distributed as dist -import torch.nn as nn -from fastNLP import BucketSampler, DataSetIter, SequentialSampler, logger -from torch.nn.parallel import DistributedDataParallel -from torch.utils.data.distributed import DistributedSampler - -import models -import optm -import utils - -NONE_TAG = "" -START_TAG = "" -END_TAG = "" - -DEFAULT_WORD_EMBEDDING_SIZE = 100 -DEBUG_SCALE = 200 - -# ===-----------------------------------------------------------------------=== -# Argument parsing -# ===-----------------------------------------------------------------------=== -# fmt: off -parser = argparse.ArgumentParser() -parser.add_argument("--dataset", required=True, dest="dataset", help="processed data dir") -parser.add_argument("--word-embeddings", dest="word_embeddings", help="File from which to read in pretrained embeds") -parser.add_argument("--bigram-embeddings", dest="bigram_embeddings", help="File from which to read in pretrained embeds") -parser.add_argument("--crf", dest="crf", action="store_true", help="crf") -# parser.add_argument("--devi", default="0", dest="devi", help="gpu") -parser.add_argument("--step", default=0, dest="step", type=int,help="step") -parser.add_argument("--num-epochs", default=100, dest="num_epochs", type=int, - help="Number of full 
passes through training set") -parser.add_argument("--batch-size", default=128, dest="batch_size", type=int, - help="Minibatch size of training set") -parser.add_argument("--d_model", default=256, dest="d_model", type=int, help="d_model") -parser.add_argument("--d_ff", default=1024, dest="d_ff", type=int, help="d_ff") -parser.add_argument("--N", default=6, dest="N", type=int, help="N") -parser.add_argument("--h", default=4, dest="h", type=int, help="h") -parser.add_argument("--factor", default=2, dest="factor", type=float, help="Initial learning rate") -parser.add_argument("--dropout", default=0.2, dest="dropout", type=float, - help="Amount of dropout(not keep rate, but drop rate) to apply to embeddings part of graph") -parser.add_argument("--log-dir", default="result", dest="log_dir", - help="Directory where to write logs / serialized models") -parser.add_argument("--task-name", default=time.strftime("%Y-%m-%d-%H-%M-%S"), dest="task_name", - help="Name for this task, use a comprehensive one") -parser.add_argument("--no-model", dest="no_model", action="store_true", help="Don't serialize model") -parser.add_argument("--always-model", dest="always_model", action="store_true", - help="Always serialize model after every epoch") -parser.add_argument("--old-model", dest="old_model", help="Path to old model for incremental training") -parser.add_argument("--skip-dev", dest="skip_dev", action="store_true", help="Skip dev set, would save some time") -parser.add_argument("--freeze", dest="freeze", action="store_true", help="freeze pretrained embedding") -parser.add_argument("--only-task", dest="only_task", action="store_true", help="only train task embedding") -parser.add_argument("--subset", dest="subset", help="Only train and test on a subset of the whole dataset") -parser.add_argument("--seclude", dest="seclude", help="train and test except a subset") -parser.add_argument("--instances", default=None, dest="instances", type=int,help="num of instances of subset") - -parser.add_argument("--seed", dest="python_seed", type=int, default=random.randrange(maxsize), - help="Random seed of Python and NumPy") -parser.add_argument("--debug", dest="debug", default=False, action="store_true", help="Debug mode") -parser.add_argument("--test", dest="test", action="store_true", help="Test mode") -parser.add_argument('--local_rank', type=int, default=None) -parser.add_argument('--init_method', type=str, default='env://') -# fmt: on - -options, _ = parser.parse_known_args() -print("unknown args", _) -task_name = options.task_name -root_dir = "{}/{}".format(options.log_dir, task_name) -utils.make_sure_path_exists(root_dir) - -if options.local_rank is not None: - torch.cuda.set_device(options.local_rank) - dist.init_process_group("nccl", init_method=options.init_method) - - -def init_logger(): - if not os.path.exists(root_dir): - os.mkdir(root_dir) - log_formatter = logging.Formatter("%(asctime)s - %(message)s") - logger = logging.getLogger() - file_handler = logging.FileHandler("{0}/info.log".format(root_dir), mode="w") - file_handler.setFormatter(log_formatter) - logger.addHandler(file_handler) - console_handler = logging.StreamHandler() - console_handler.setFormatter(log_formatter) - logger.addHandler(console_handler) - if options.local_rank is None or options.local_rank == 0: - logger.setLevel(logging.INFO) - else: - logger.setLevel(logging.WARNING) - return logger - - -# ===-----------------------------------------------------------------------=== -# Set up logging -# 
===-----------------------------------------------------------------------=== -# logger = init_logger() -logger.add_file("{}/info.log".format(root_dir), "INFO") -logger.setLevel(logging.INFO if dist.get_rank() == 0 else logging.WARNING) - -# ===-----------------------------------------------------------------------=== -# Log some stuff about this run -# ===-----------------------------------------------------------------------=== -logger.info(" ".join(sys.argv)) -logger.info("") -logger.info(options) - -if options.debug: - logger.info("DEBUG MODE") - options.num_epochs = 2 - options.batch_size = 20 - -random.seed(options.python_seed) -np.random.seed(options.python_seed % (2 ** 32 - 1)) -torch.cuda.manual_seed_all(options.python_seed) -logger.info("Python random seed: {}".format(options.python_seed)) - -# ===-----------------------------------------------------------------------=== -# Read in dataset -# ===-----------------------------------------------------------------------=== -dataset = pickle.load(open(options.dataset + "/total_dataset.pkl", "rb")) -train_set = dataset["train_set"] -test_set = dataset["test_set"] -uni_vocab = dataset["uni_vocab"] -bi_vocab = dataset["bi_vocab"] -task_vocab = dataset["task_vocab"] -tag_vocab = dataset["tag_vocab"] -for v in (bi_vocab, uni_vocab, tag_vocab, task_vocab): - if hasattr(v, "_word2idx"): - v.word2idx = v._word2idx -for ds in (train_set, test_set): - ds.rename_field("ori_words", "words") - -logger.info("{} {}".format(bi_vocab.to_word(0), tag_vocab.word2idx)) -logger.info(task_vocab.word2idx) -if options.skip_dev: - dev_set = test_set -else: - train_set, dev_set = train_set.split(0.1) - -logger.info("{} {} {}".format(len(train_set), len(dev_set), len(test_set))) - -if options.debug: - train_set = train_set[0:DEBUG_SCALE] - dev_set = dev_set[0:DEBUG_SCALE] - test_set = test_set[0:DEBUG_SCALE] - -# ===-----------------------------------------------------------------------=== -# Build model and trainer -# ===-----------------------------------------------------------------------=== - -# =============================== -if dist.get_rank() != 0: - dist.barrier() - -if options.word_embeddings is None: - init_embedding = None -else: - # logger.info("Load: {}".format(options.word_embeddings)) - # init_embedding = utils.embedding_load_with_cache(options.word_embeddings, options.cache_dir, uni_vocab, normalize=False) - init_embedding = fastNLP.embeddings.StaticEmbedding( - uni_vocab, options.word_embeddings, word_drop=0.01 - ) - -bigram_embedding = None -if options.bigram_embeddings: - # logger.info("Load: {}".format(options.bigram_embeddings)) - # bigram_embedding = utils.embedding_load_with_cache(options.bigram_embeddings, options.cache_dir, bi_vocab, normalize=False) - bigram_embedding = fastNLP.embeddings.StaticEmbedding( - bi_vocab, options.bigram_embeddings - ) - -if dist.get_rank() == 0: - dist.barrier() -# =============================== - -# select subset training -if options.seclude is not None: - setname = "<{}>".format(options.seclude) - logger.info("seclude {}".format(setname)) - train_set.drop(lambda x: x["words"][0] == setname, inplace=True) - test_set.drop(lambda x: x["words"][0] == setname, inplace=True) - dev_set.drop(lambda x: x["words"][0] == setname, inplace=True) - -if options.subset is not None: - setname = "<{}>".format(options.subset) - logger.info("select {}".format(setname)) - train_set.drop(lambda x: x["words"][0] != setname, inplace=True) - test_set.drop(lambda x: x["words"][0] != setname, inplace=True) - dev_set.drop(lambda 
x: x["words"][0] != setname, inplace=True) - -# build model and optimizer -i2t = None -if options.crf: - # i2t=utils.to_id_list(tag_vocab.word2idx) - i2t = {} - for x, y in tag_vocab.word2idx.items(): - i2t[y] = x - logger.info(i2t) - -freeze = True if options.freeze else False -model = models.make_CWS( - d_model=options.d_model, - N=options.N, - h=options.h, - d_ff=options.d_ff, - dropout=options.dropout, - word_embedding=init_embedding, - bigram_embedding=bigram_embedding, - tag_size=len(tag_vocab), - task_size=len(task_vocab), - crf=i2t, - freeze=freeze, -) - -device = "cpu" - -if torch.cuda.device_count() > 0: - if options.local_rank is not None: - device = "cuda:{}".format(options.local_rank) - # model=nn.DataParallel(model) - model = model.to(device) - model = torch.nn.parallel.DistributedDataParallel( - model, device_ids=[options.local_rank], output_device=options.local_rank - ) - else: - device = "cuda:0" - model.to(device) - - -if options.only_task and options.old_model is not None: - logger.info("fix para except task embedding") - for name, para in model.named_parameters(): - if name.find("task_embed") == -1: - para.requires_grad = False - else: - para.requires_grad = True - logger.info(name) - -optimizer = optm.NoamOpt( - options.d_model, - options.factor, - 4000, - torch.optim.Adam(model.parameters(), lr=0, betas=(0.9, 0.98), eps=1e-9), -) - -optimizer._step = options.step - -best_model_file_name = "{}/model.bin".format(root_dir) - -if options.local_rank is None: - train_sampler = BucketSampler( - batch_size=options.batch_size, seq_len_field_name="seq_len" - ) -else: - train_sampler = DistributedSampler( - train_set, dist.get_world_size(), dist.get_rank() - ) -dev_sampler = SequentialSampler() - -i2t = utils.to_id_list(tag_vocab.word2idx) -i2task = utils.to_id_list(task_vocab.word2idx) -dev_set.set_input("words") -test_set.set_input("words") -test_batch = DataSetIter(test_set, options.batch_size, num_workers=2) - -word_dic = pickle.load(open(options.dataset + "/oovdict.pkl", "rb")) - - -def batch_to_device(batch, device): - for k, v in batch.items(): - if torch.is_tensor(v): - batch[k] = v.to(device) - return batch - - -def tester(model, test_batch, write_out=False): - res = [] - prf = utils.CWSEvaluator(i2t) - prf_dataset = {} - oov_dataset = {} - - logger.info("start evaluation") - # import ipdb; ipdb.set_trace() - with torch.no_grad(): - for batch_x, batch_y in test_batch: - batch_to_device(batch_x, device) - # batch_to_device(batch_y, device) - if bigram_embedding is not None: - out = model( - batch_x["task"], - batch_x["uni"], - batch_x["seq_len"], - batch_x["bi1"], - batch_x["bi2"], - ) - else: - out = model(batch_x["task"], batch_x["uni"], batch_x["seq_len"]) - out = out["pred"] - # print(out) - num = out.size(0) - out = out.detach().cpu().numpy() - for i in range(num): - length = int(batch_x["seq_len"][i]) - - out_tags = out[i, 1:length].tolist() - sentence = batch_x["words"][i] - gold_tags = batch_y["tags"][i][1:length].numpy().tolist() - dataset_name = sentence[0] - sentence = sentence[1:] - # print(out_tags,gold_tags) - assert utils.is_dataset_tag(dataset_name), dataset_name - assert len(gold_tags) == len(out_tags) and len(gold_tags) == len( - sentence - ) - - if dataset_name not in prf_dataset: - prf_dataset[dataset_name] = utils.CWSEvaluator(i2t) - oov_dataset[dataset_name] = utils.CWS_OOV( - word_dic[dataset_name[1:-1]] - ) - - prf_dataset[dataset_name].add_instance(gold_tags, out_tags) - prf.add_instance(gold_tags, out_tags) - - if write_out: - gold_strings = 
utils.to_tag_strings(i2t, gold_tags) - obs_strings = utils.to_tag_strings(i2t, out_tags) - - word_list = utils.bmes_to_words(sentence, obs_strings) - oov_dataset[dataset_name].update( - utils.bmes_to_words(sentence, gold_strings), word_list - ) - - raw_string = " ".join(word_list) - res.append(dataset_name + " " + raw_string + " " + dataset_name) - - Ap = 0.0 - Ar = 0.0 - Af = 0.0 - Aoov = 0.0 - tot = 0 - nw = 0.0 - for dataset_name, performance in sorted(prf_dataset.items()): - p = performance.result() - if write_out: - nw = oov_dataset[dataset_name].oov() - # nw = 0 - logger.info( - "{}\t{:04.2f}\t{:04.2f}\t{:04.2f}\t{:04.2f}".format( - dataset_name, p[0], p[1], p[2], nw - ) - ) - else: - logger.info( - "{}\t{:04.2f}\t{:04.2f}\t{:04.2f}".format( - dataset_name, p[0], p[1], p[2] - ) - ) - Ap += p[0] - Ar += p[1] - Af += p[2] - Aoov += nw - tot += 1 - - prf = prf.result() - logger.info( - "{}\t{:04.2f}\t{:04.2f}\t{:04.2f}".format("TOT", prf[0], prf[1], prf[2]) - ) - if not write_out: - logger.info( - "{}\t{:04.2f}\t{:04.2f}\t{:04.2f}".format( - "AVG", Ap / tot, Ar / tot, Af / tot - ) - ) - else: - logger.info( - "{}\t{:04.2f}\t{:04.2f}\t{:04.2f}\t{:04.2f}".format( - "AVG", Ap / tot, Ar / tot, Af / tot, Aoov / tot - ) - ) - return prf[-1], res - - -# start training -if not options.test: - if options.old_model: - # incremental training - logger.info("Incremental training from old model: {}".format(options.old_model)) - model.load_state_dict(torch.load(options.old_model, map_location="cuda:0")) - - logger.info("Number training instances: {}".format(len(train_set))) - logger.info("Number dev instances: {}".format(len(dev_set))) - - train_batch = DataSetIter(dataset=train_set, batch_size=options.batch_size, sampler=train_sampler, num_workers=4) - dev_batch = DataSetIter(dataset=dev_set, batch_size=options.batch_size, sampler=dev_sampler, num_workers=4) - - best_f1 = 0.0 - for epoch in range(int(options.num_epochs)): - logger.info("Epoch {} out of {}".format(epoch + 1, options.num_epochs)) - train_loss = 0.0 - model.train() - tot = 0 - t1 = time.time() - for batch_x, batch_y in train_batch: - model.zero_grad() - if bigram_embedding is not None: - out = model( - batch_x["task"], - batch_x["uni"], - batch_x["seq_len"], - batch_x["bi1"], - batch_x["bi2"], - batch_y["tags"], - ) - else: - out = model( - batch_x["task"], batch_x["uni"], batch_x["seq_len"], batch_y["tags"] - ) - loss = out["loss"] - train_loss += loss.item() - tot += 1 - loss.backward() - # nn.utils.clip_grad_value_(model.parameters(), 1) - optimizer.step() - - t2 = time.time() - train_loss = train_loss / tot - logger.info( - "time: {} loss: {} step: {}".format(t2 - t1, train_loss, optimizer._step) - ) - # Evaluate dev data - if options.skip_dev and dist.get_rank() == 0: - logger.info("Saving model to {}".format(best_model_file_name)) - torch.save(model.module.state_dict(), best_model_file_name) - continue - - model.eval() - if dist.get_rank() == 0: - f1, _ = tester(model.module, dev_batch) - if f1 > best_f1: - best_f1 = f1 - logger.info("- new best score!") - if not options.no_model: - logger.info("Saving model to {}".format(best_model_file_name)) - torch.save(model.module.state_dict(), best_model_file_name) - - elif options.always_model: - logger.info("Saving model to {}".format(best_model_file_name)) - torch.save(model.module.state_dict(), best_model_file_name) - dist.barrier() - -# Evaluate test data (once) -logger.info("\nNumber test instances: {}".format(len(test_set))) - - -if not options.skip_dev: - if options.test: - 
model.module.load_state_dict( - torch.load(options.old_model, map_location="cuda:0") - ) - else: - model.module.load_state_dict( - torch.load(best_model_file_name, map_location="cuda:0") - ) - -if dist.get_rank() == 0: - for name, para in model.named_parameters(): - if name.find("task_embed") != -1: - tm = para.detach().cpu().numpy() - logger.info(tm.shape) - np.save("{}/task.npy".format(root_dir), tm) - break - -_, res = tester(model.module, test_batch, True) - -if dist.get_rank() == 0: - with open("{}/testout.txt".format(root_dir), "w", encoding="utf-8") as raw_writer: - for sent in res: - raw_writer.write(sent) - raw_writer.write("\n") - diff --git a/reproduction/multi-criteria-cws/make_data.sh b/reproduction/multi-criteria-cws/make_data.sh deleted file mode 100644 index 9c2b09d8..00000000 --- a/reproduction/multi-criteria-cws/make_data.sh +++ /dev/null @@ -1,14 +0,0 @@ -if [ -z "$DATA_DIR" ] -then - DATA_DIR="./data" -fi - -mkdir -vp $DATA_DIR - -cmd="python -u ./data-prepare.py --sighan05 $1 --sighan08 $2 --data_path $DATA_DIR" -echo $cmd -eval $cmd - -cmd="python -u ./data-process.py --data_path $DATA_DIR" -echo $cmd -eval $cmd diff --git a/reproduction/multi-criteria-cws/model.py b/reproduction/multi-criteria-cws/model.py deleted file mode 100644 index e69de29b..00000000 diff --git a/reproduction/multi-criteria-cws/models.py b/reproduction/multi-criteria-cws/models.py deleted file mode 100644 index 92c93175..00000000 --- a/reproduction/multi-criteria-cws/models.py +++ /dev/null @@ -1,198 +0,0 @@ -import fastNLP -import torch -import math -from fastNLP.modules.decoder.crf import ConditionalRandomField -from fastNLP import Const -import copy -import numpy as np -from torch.autograd import Variable -import torch.autograd as autograd -import torch.nn as nn -import torch.nn.functional as F -import transformer - - -class PositionalEncoding(nn.Module): - "Implement the PE function." - - def __init__(self, d_model, dropout, max_len=512): - super(PositionalEncoding, self).__init__() - self.dropout = nn.Dropout(p=dropout) - - # Compute the positional encodings once in log space. 
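# Sinusoidal encodings from "Attention Is All You Need": PE(pos, 2i) = sin(pos / 10000^(2i/d)),
# PE(pos, 2i+1) = cos(pos / 10000^(2i/d)); div_term below computes exp(-(2i/d) * ln(10000)),
# which equals 1 / 10000^(2i/d) but is numerically safer in log space.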
- pe = torch.zeros(max_len, d_model).float() - position = torch.arange(0, max_len).unsqueeze(1).float() - div_term = torch.exp( - torch.arange(0, d_model, 2).float() * -(math.log(10000.0) / d_model) - ) - pe[:, 0::2] = torch.sin(position * div_term) - pe[:, 1::2] = torch.cos(position * div_term) - pe = pe.unsqueeze(0) - self.register_buffer("pe", pe) - - def forward(self, x): - x = x + Variable(self.pe[:, : x.size(1)], requires_grad=False) - return self.dropout(x) - - -class Embedding(nn.Module): - def __init__( - self, - task_size, - d_model, - word_embedding=None, - bi_embedding=None, - word_size=None, - freeze=True, - ): - super(Embedding, self).__init__() - self.task_size = task_size - self.embed_dim = 0 - - self.task_embed = nn.Embedding(task_size, d_model) - if word_embedding is not None: - # self.uni_embed = nn.Embedding.from_pretrained(torch.FloatTensor(word_embedding), freeze=freeze) - # self.embed_dim+=word_embedding.shape[1] - self.uni_embed = word_embedding - self.embed_dim += word_embedding.embedding_dim - else: - if bi_embedding is not None: - self.embed_dim += bi_embedding.shape[1] - else: - self.embed_dim = d_model - assert word_size is not None - self.uni_embed = nn.Embedding(word_size, self.embed_dim) - - if bi_embedding is not None: - # self.bi_embed = nn.Embedding.from_pretrained(torch.FloatTensor(bi_embedding), freeze=freeze) - # self.embed_dim += bi_embedding.shape[1]*2 - self.bi_embed = bi_embedding - self.embed_dim += bi_embedding.embedding_dim * 2 - - print("Trans Freeze", freeze, self.embed_dim) - - if d_model != self.embed_dim: - self.F = nn.Linear(self.embed_dim, d_model) - else: - self.F = None - - self.d_model = d_model - - def forward(self, task, uni, bi1=None, bi2=None): - y_task = self.task_embed(task[:, 0:1]) - y = self.uni_embed(uni[:, 1:]) - if bi1 is not None: - assert self.bi_embed is not None - - y = torch.cat([y, self.bi_embed(bi1), self.bi_embed(bi2)], dim=-1) - # y2=self.bi_embed(bi) - # y=torch.cat([y,y2[:,:-1,:],y2[:,1:,:]],dim=-1) - - # y=torch.cat([y_task,y],dim=1) - if self.F is not None: - y = self.F(y) - y = torch.cat([y_task, y], dim=1) - return y * math.sqrt(self.d_model) - - -def seq_len_to_mask(seq_len, max_len=None): - if isinstance(seq_len, np.ndarray): - assert ( - len(np.shape(seq_len)) == 1 - ), f"seq_len can only have one dimension, got {len(np.shape(seq_len))}." - if max_len is None: - max_len = int(seq_len.max()) - broad_cast_seq_len = np.tile(np.arange(max_len), (len(seq_len), 1)) - mask = broad_cast_seq_len < seq_len.reshape(-1, 1) - - elif isinstance(seq_len, torch.Tensor): - assert ( - seq_len.dim() == 1 - ), f"seq_len can only have one dimension, got {seq_len.dim()}." 
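# Broadcasting builds the mask in one shot: arange(max_len), viewed as one row per
# batch element, is compared against seq_len reshaped to (batch, 1), yielding a
# (batch, max_len) boolean mask, e.g. seq_len=[2, 3] and max_len=4 give
# [[1,1,0,0], [1,1,1,0]].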
- batch_size = seq_len.size(0) - if max_len is None: - max_len = seq_len.max().long() - broad_cast_seq_len = torch.arange(max_len).expand(batch_size, -1).to(seq_len) - mask = broad_cast_seq_len.lt(seq_len.unsqueeze(1)) - else: - raise TypeError("Only support 1-d numpy.ndarray or 1-d torch.Tensor.") - - return mask - - -class CWSModel(nn.Module): - def __init__(self, encoder, src_embed, position, d_model, tag_size, crf=None): - super(CWSModel, self).__init__() - self.encoder = encoder - self.src_embed = src_embed - self.pos = copy.deepcopy(position) - self.proj = nn.Linear(d_model, tag_size) - self.tag_size = tag_size - if crf is None: - self.crf = None - self.loss_f = nn.CrossEntropyLoss(reduction="mean", ignore_index=-100) - else: - print("crf") - trans = fastNLP.modules.decoder.crf.allowed_transitions( - crf, encoding_type="bmes" - ) - self.crf = ConditionalRandomField(tag_size, allowed_transitions=trans) - # self.norm=nn.LayerNorm(d_model) - - def forward(self, task, uni, seq_len, bi1=None, bi2=None, tags=None): - # mask=fastNLP.core.utils.seq_len_to_mask(seq_len,uni.size(1)) # for dev 0.5.1 - mask = seq_len_to_mask(seq_len, uni.size(1)) - out = self.src_embed(task, uni, bi1, bi2) - out = self.pos(out) - # out=self.norm(out) - out = self.proj(self.encoder(out, mask.float())) - - if self.crf is not None: - if tags is not None: - out = self.crf(out, tags, mask) - return {"loss": out} - else: - out, _ = self.crf.viterbi_decode(out, mask) - return {"pred": out} - else: - if tags is not None: - out = out.contiguous().view(-1, self.tag_size) - tags = tags.data.masked_fill_(mask.eq(False), -100).view(-1) - loss = self.loss_f(out, tags) - return {"loss": loss} - else: - out = torch.argmax(out, dim=-1) - return {"pred": out} - - -def make_CWS( - N=6, - d_model=256, - d_ff=1024, - h=4, - dropout=0.2, - tag_size=4, - task_size=8, - bigram_embedding=None, - word_embedding=None, - word_size=None, - crf=None, - freeze=True, -): - c = copy.deepcopy - encoder = transformer.make_encoder( - N=N, d_model=d_model, h=h, dropout=dropout, d_ff=d_ff - ) - - position = PositionalEncoding(d_model, dropout) - - embed = Embedding( - task_size, d_model, word_embedding, bigram_embedding, word_size, freeze - ) - model = CWSModel(encoder, embed, position, d_model, tag_size, crf=crf) - - for p in model.parameters(): - if p.dim() > 1 and p.requires_grad: - nn.init.xavier_uniform_(p) - - return model diff --git a/reproduction/multi-criteria-cws/optm.py b/reproduction/multi-criteria-cws/optm.py deleted file mode 100644 index a2b68de5..00000000 --- a/reproduction/multi-criteria-cws/optm.py +++ /dev/null @@ -1,49 +0,0 @@ -import torch -import torch.optim as optim - - -class NoamOpt: - "Optim wrapper that implements rate." 
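# The schedule implemented below is lr(step) = factor * d_model^-0.5 *
# min(step^-0.5, step * warmup^-1.5): linear warmup for the first `warmup` steps,
# then inverse-square-root decay, with the peak learning rate reached exactly at
# step == warmup (where the two terms of the min coincide).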
- - def __init__(self, model_size, factor, warmup, optimizer): - self.optimizer = optimizer - self._step = 0 - self.warmup = warmup - self.factor = factor - self.model_size = model_size - self._rate = 0 - - def step(self): - "Update parameters and rate" - self._step += 1 - rate = self.rate() - for p in self.optimizer.param_groups: - p["lr"] = rate - self._rate = rate - self.optimizer.step() - - def rate(self, step=None): - "Implement `lrate` above" - if step is None: - step = self._step - lr = self.factor * ( - self.model_size ** (-0.5) - * min(step ** (-0.5), step * self.warmup ** (-1.5)) - ) - # if step>self.warmup: lr = max(1e-4,lr) - return lr - - -def get_std_opt(model): - return NoamOpt( - model.src_embed[0].d_model, - 2, - 4000, - torch.optim.Adam( - filter(lambda p: p.requires_grad, model.parameters()), - lr=0, - betas=(0.9, 0.98), - eps=1e-9, - ), - ) - diff --git a/reproduction/multi-criteria-cws/train.py b/reproduction/multi-criteria-cws/train.py deleted file mode 100644 index fce914a1..00000000 --- a/reproduction/multi-criteria-cws/train.py +++ /dev/null @@ -1,138 +0,0 @@ -from fastNLP import (Trainer, Tester, Callback, GradientClipCallback, LRScheduler, SpanFPreRecMetric) -import torch -import torch.cuda -from torch.optim import Adam, SGD -from argparse import ArgumentParser -import logging -from .utils import set_seed - - -class LoggingCallback(Callback): - def __init__(self, filepath=None): - super().__init__() - # create file handler and set level to debug - if filepath is not None: - file_handler = logging.FileHandler(filepath, "a") - else: - file_handler = logging.StreamHandler() - - file_handler.setLevel(logging.DEBUG) - file_handler.setFormatter( - logging.Formatter(fmt='%(asctime)s - %(levelname)s - %(name)s - %(message)s', - datefmt='%m/%d/%Y %H:%M:%S')) - - # create logger and set level to debug - logger = logging.getLogger() - logger.handlers = [] - logger.setLevel(logging.DEBUG) - logger.propagate = False - logger.addHandler(file_handler) - self.log_writer = logger - - def on_backward_begin(self, loss): - if self.step % self.trainer.print_every == 0: - self.log_writer.info( - 'Step/Epoch {}/{}: Loss {}'.format(self.step, self.epoch, loss.item())) - - def on_valid_end(self, eval_result, metric_key, optimizer, is_better_eval): - self.log_writer.info( - 'Step/Epoch {}/{}: Eval result {}'.format(self.step, self.epoch, eval_result)) - - def on_backward_end(self): - pass - - -def main(): - parser = ArgumentParser() - register_args(parser) - args = parser.parse_known_args()[0] - - set_seed(args.seed) - if args.train: - train(args) - if args.eval: - evaluate(args) - -def get_optim(args): - name = args.optim.strip().split(' ')[0].lower() - p = args.optim.strip() - l = p.find('(') - r = p.find(')') - optim_args = eval('dict({})'.format(p[[l+1,r]])) - if name == 'sgd': - return SGD(**optim_args) - elif name == 'adam': - return Adam(**optim_args) - else: - raise ValueError(args.optim) - -def load_model_from_path(args): - pass - -def train(args): - data = get_data(args) - train_data = data['train'] - dev_data = data['dev'] - model = get_model(args) - optimizer = get_optim(args) - device = 'cuda' if torch.cuda.is_available() else 'cpu' - callbacks = [] - trainer = Trainer( - train_data=train_data, - model=model, - optimizer=optimizer, - loss=None, - batch_size=args.batch_size, - n_epochs=args.epochs, - num_workers=4, - metrics=SpanFPreRecMetric( - tag_vocab=data['tag_vocab'], encoding_type=data['encoding_type'], - ignore_labels=data['ignore_labels']), - metric_key='f1', - 
dev_data=dev_data, - save_path=args.save_path, - device=device, - callbacks=callbacks, - check_code_level=-1, - ) - - print(trainer.train()) - - - -def evaluate(args): - data = get_data(args) - test_data = data['test'] - model = load_model_from_path(args) - device = 'cuda' if torch.cuda.is_available() else 'cpu' - - tester = Tester( - data=test_data, model=model, batch_size=args.batch_size, - num_workers=2, device=device, - metrics=SpanFPreRecMetric( - tag_vocab=data['tag_vocab'], encoding_type=data['encoding_type'], - ignore_labels=data['ignore_labels']), - ) - print(tester.test()) - -def register_args(parser): - parser.add_argument('--optim', type=str, default='adam (lr=2e-3, weight_decay=0.0)') - parser.add_argument('--batch_size', type=int, default=128) - parser.add_argument('--epochs', type=int, default=10) - parser.add_argument('--save_path', type=str, default=None) - parser.add_argument('--data_path', type=str, required=True) - parser.add_argument('--log_path', type=str, default=None) - parser.add_argument('--model_config', type=str, required=True) - parser.add_argument('--load_path', type=str, default=None) - parser.add_argument('--train', action='store_true', default=False) - parser.add_argument('--eval', action='store_true', default=False) - parser.add_argument('--seed', type=int, default=42, help='rng seed') - -def get_model(args): - pass - -def get_data(args): - return torch.load(args.data_path) - -if __name__ == '__main__': - main() diff --git a/reproduction/multi-criteria-cws/train.sh b/reproduction/multi-criteria-cws/train.sh deleted file mode 100644 index aa47b8af..00000000 --- a/reproduction/multi-criteria-cws/train.sh +++ /dev/null @@ -1,26 +0,0 @@ -export EXP_NAME=release04 -export NGPU=2 -export PORT=9988 -export CUDA_DEVICE_ORDER=PCI_BUS_ID -export CUDA_VISIBLE_DEVICES=$1 - -if [ -z "$DATA_DIR" ] -then - DATA_DIR="./data" -fi - -echo $CUDA_VISIBLE_DEVICES -cmd=" -python -m torch.distributed.launch --nproc_per_node=$NGPU --master_port $PORT\ - main.py \ - --word-embeddings cn-char-fastnlp-100d \ - --bigram-embeddings cn-bi-fastnlp-100d \ - --num-epochs 100 \ - --batch-size 256 \ - --seed 1234 \ - --task-name $EXP_NAME \ - --dataset $DATA_DIR \ - --freeze \ -" -echo $cmd -eval $cmd diff --git a/reproduction/multi-criteria-cws/transformer.py b/reproduction/multi-criteria-cws/transformer.py deleted file mode 100644 index 33821f3b..00000000 --- a/reproduction/multi-criteria-cws/transformer.py +++ /dev/null @@ -1,152 +0,0 @@ -import numpy as np -import torch -import torch.autograd as autograd -import torch.nn as nn -import torch.nn.functional as F -import math, copy, time -from torch.autograd import Variable - -# import matplotlib.pyplot as plt - - -def clones(module, N): - "Produce N identical layers." - return nn.ModuleList([copy.deepcopy(module) for _ in range(N)]) - - -def subsequent_mask(size): - "Mask out subsequent positions." 
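# np.triu(..., k=1) marks everything strictly above the diagonal, and .eq(False)
# inverts it, so position i may attend only to positions j <= i; for size=3 the
# 3x3 core of the returned mask is [[1,0,0], [1,1,0], [1,1,1]].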
- attn_shape = (1, size, size) - subsequent_mask = np.triu(np.ones(attn_shape), k=1).astype("uint8") - return torch.from_numpy(subsequent_mask).eq(False) - - -def attention(query, key, value, mask=None, dropout=None): - "Compute 'Scaled Dot Product Attention'" - d_k = query.size(-1) - scores = torch.matmul(query, key.transpose(-2, -1)) / math.sqrt(d_k) - if mask is not None: - # print(scores.size(),mask.size()) # [bsz,1,1,len] - scores = scores.masked_fill(mask.eq(False), -1e9) - p_attn = F.softmax(scores, dim=-1) - if dropout is not None: - p_attn = dropout(p_attn) - return torch.matmul(p_attn, value), p_attn - - -class MultiHeadedAttention(nn.Module): - def __init__(self, h, d_model, dropout=0.1): - "Take in model size and number of heads." - super(MultiHeadedAttention, self).__init__() - assert d_model % h == 0 - # We assume d_v always equals d_k - self.d_k = d_model // h - self.h = h - self.linears = clones(nn.Linear(d_model, d_model), 4) - self.attn = None - self.dropout = nn.Dropout(p=dropout) - - def forward(self, query, key, value, mask=None): - "Implements Figure 2" - if mask is not None: - # Same mask applied to all h heads. - mask = mask.unsqueeze(1) - - nbatches = query.size(0) - - # 1) Do all the linear projections in batch from d_model => h x d_k - query, key, value = [ - l(x).view(nbatches, -1, self.h, self.d_k).transpose(1, 2) - for l, x in zip(self.linears, (query, key, value)) - ] - - # 2) Apply attention on all the projected vectors in batch. - x, self.attn = attention(query, key, value, mask=mask, dropout=self.dropout) - - # 3) "Concat" using a view and apply a final linear. - x = x.transpose(1, 2).contiguous().view(nbatches, -1, self.h * self.d_k) - return self.linears[-1](x) - - -class LayerNorm(nn.Module): - "Construct a layernorm module (See citation for details)." - - def __init__(self, features, eps=1e-6): - super(LayerNorm, self).__init__() - self.a_2 = nn.Parameter(torch.ones(features)) - self.b_2 = nn.Parameter(torch.zeros(features)) - self.eps = eps - - def forward(self, x): - mean = x.mean(-1, keepdim=True) - std = x.std(-1, keepdim=True) - return self.a_2 * (x - mean) / (std + self.eps) + self.b_2 - - -class PositionwiseFeedForward(nn.Module): - "Implements FFN equation." - - def __init__(self, d_model, d_ff, dropout=0.1): - super(PositionwiseFeedForward, self).__init__() - self.w_1 = nn.Linear(d_model, d_ff) - self.w_2 = nn.Linear(d_ff, d_model) - self.dropout = nn.Dropout(dropout) - - def forward(self, x): - return self.w_2(self.dropout(F.relu(self.w_1(x)))) - - -class SublayerConnection(nn.Module): - """ - A residual connection followed by a layer norm. - Note for code simplicity the norm is first as opposed to last. - """ - - def __init__(self, size, dropout): - super(SublayerConnection, self).__init__() - self.norm = LayerNorm(size) - self.dropout = nn.Dropout(dropout) - - def forward(self, x, sublayer): - "Apply residual connection to any sublayer with the same size." - return x + self.dropout(sublayer(self.norm(x))) - - -class EncoderLayer(nn.Module): - "Encoder is made up of self-attn and feed forward (defined below)" - - def __init__(self, size, self_attn, feed_forward, dropout): - super(EncoderLayer, self).__init__() - self.self_attn = self_attn - self.feed_forward = feed_forward - self.sublayer = clones(SublayerConnection(size, dropout), 2) - self.size = size - - def forward(self, x, mask): - "Follow Figure 1 (left) for connections." 
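# This is the pre-norm variant: SublayerConnection (above) computes
# x + dropout(sublayer(LayerNorm(x))), i.e. normalization precedes each sublayer
# rather than following it as in the original post-norm Transformer; the final
# Encoder.norm then normalizes the residual stream once at the top of the stack.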
- x = self.sublayer[0](x, lambda x: self.self_attn(x, x, x, mask)) - return self.sublayer[1](x, self.feed_forward) - - -class Encoder(nn.Module): - "Core encoder is a stack of N layers" - - def __init__(self, layer, N): - super(Encoder, self).__init__() - self.layers = clones(layer, N) - self.norm = LayerNorm(layer.size) - - def forward(self, x, mask): - # print(x.size(),mask.size()) - "Pass the input (and mask) through each layer in turn." - mask = mask.byte().unsqueeze(-2) - for layer in self.layers: - x = layer(x, mask) - return self.norm(x) - - -def make_encoder(N=6, d_model=512, d_ff=2048, h=8, dropout=0.1): - c = copy.deepcopy - attn = MultiHeadedAttention(h, d_model) - ff = PositionwiseFeedForward(d_model, d_ff, dropout) - return Encoder(EncoderLayer(d_model, c(attn), c(ff), dropout), N) diff --git a/reproduction/multi-criteria-cws/utils.py b/reproduction/multi-criteria-cws/utils.py deleted file mode 100644 index aeb7e43c..00000000 --- a/reproduction/multi-criteria-cws/utils.py +++ /dev/null @@ -1,308 +0,0 @@ -import numpy as np -import torch -import torch.cuda -import random -import os -import sys -import errno -import time -import codecs -import hashlib -import _pickle as pickle -import warnings -from fastNLP.io import EmbedLoader - -UNK_TAG = "" - - -def set_seed(seed): - random.seed(seed) - np.random.seed(seed) - torch.manual_seed(seed) - torch.cuda.manual_seed_all(seed) - - -def bmes_to_words(chars, tags): - result = [] - if len(chars) == 0: - return result - word = chars[0] - - for c, t in zip(chars[1:], tags[1:]): - if t.upper() == "B" or t.upper() == "S": - result.append(word) - word = "" - word += c - if len(word) != 0: - result.append(word) - - return result - - -def bmes_to_index(tags): - result = [] - if len(tags) == 0: - return result - word = (0, 0) - - for i, t in enumerate(tags): - if i == 0: - word = (0, 0) - elif t.upper() == "B" or t.upper() == "S": - result.append(word) - word = (i, 0) - word = (word[0], word[1] + 1) - if word[1] != 0: - result.append(word) - return result - - -def get_bmes(sent): - x = [] - y = [] - for word in sent: - length = len(word) - tag = ["m"] * length if length > 1 else ["s"] * length - if length > 1: - tag[0] = "b" - tag[-1] = "e" - x += list(word) - y += tag - return x, y - - -class CWSEvaluator: - def __init__(self, i2t): - self.correct_preds = 0.0 - self.total_preds = 0.0 - self.total_correct = 0.0 - self.i2t = i2t - - def add_instance(self, pred_tags, gold_tags): - pred_tags = [self.i2t[i] for i in pred_tags] - gold_tags = [self.i2t[i] for i in gold_tags] - # Evaluate PRF - lab_gold_chunks = set(bmes_to_index(gold_tags)) - lab_pred_chunks = set(bmes_to_index(pred_tags)) - self.correct_preds += len(lab_gold_chunks & lab_pred_chunks) - self.total_preds += len(lab_pred_chunks) - self.total_correct += len(lab_gold_chunks) - - def result(self, percentage=True): - p = self.correct_preds / self.total_preds if self.correct_preds > 0 else 0 - r = self.correct_preds / self.total_correct if self.correct_preds > 0 else 0 - f1 = 2 * p * r / (p + r) if p + r > 0 else 0 - if percentage: - p *= 100 - r *= 100 - f1 *= 100 - return p, r, f1 - - -class CWS_OOV: - def __init__(self, dic): - self.dic = dic - self.recall = 0 - self.tot = 0 - - def update(self, gold_sent, pred_sent): - i = 0 - j = 0 - id = 0 - for w in gold_sent: - if w not in self.dic: - self.tot += 1 - while i + len(pred_sent[id]) <= j: - i += len(pred_sent[id]) - id += 1 - if ( - i == j - and len(pred_sent[id]) == len(w) - and w.find(pred_sent[id]) != -1 - ): - self.recall += 1 - j += 
len(w) - # print(gold_sent,pred_sent,self.tot) - - def oov(self, percentage=True): - ins = 1.0 * self.recall / self.tot - if percentage: - ins *= 100 - return ins - - -def get_processing_word( - vocab_words=None, vocab_chars=None, lowercase=False, chars=False -): - def f(word): - # 0. get chars of words - if vocab_chars is not None and chars: - char_ids = [] - for char in word: - # ignore chars out of vocabulary - if char in vocab_chars: - char_ids += [vocab_chars[char]] - - # 1. preprocess word - if lowercase: - word = word.lower() - if word.isdigit(): - word = "0" - - # 2. get id of word - if vocab_words is not None: - if word in vocab_words: - word = vocab_words[word] - else: - word = vocab_words[UNK_TAG] - - # 3. return tuple char ids, word id - if vocab_chars is not None and chars: - return char_ids, word - else: - return word - - return f - - -def append_tags(src, des, name, part, encode="utf-16"): - with open("{}/{}.txt".format(src, part), encoding=encode) as input, open( - "{}/{}.txt".format(des, part), "a", encoding=encode - ) as output: - for line in input: - line = line.strip() - if len(line) > 0: - output.write("<{}> {} ".format(name, line, name)) - output.write("\n") - - -def is_dataset_tag(word): - return len(word) > 2 and word[0] == "<" and word[-1] == ">" - - -def to_tag_strings(i2ts, tag_mapping, pos_separate_col=True): - senlen = len(tag_mapping) - key_value_strs = [] - - for j in range(senlen): - val = i2ts[tag_mapping[j]] - pos_str = val - key_value_strs.append(pos_str) - return key_value_strs - - -def to_id_list(w2i): - i2w = [None] * len(w2i) - for w, i in w2i.items(): - i2w[i] = w - return i2w - - -def make_sure_path_exists(path): - try: - os.makedirs(path) - except OSError as exception: - if exception.errno != errno.EEXIST: - raise - - -def md5_for_file(fn): - md5 = hashlib.md5() - with open(fn, "rb") as f: - for chunk in iter(lambda: f.read(128 * md5.block_size), b""): - md5.update(chunk) - return md5.hexdigest() - - -def embedding_match_vocab( - vocab, - emb, - ori_vocab, - dtype=np.float32, - padding="", - unknown="", - normalize=True, - error="ignore", - init_method=None, -): - dim = emb.shape[-1] - matrix = np.random.randn(len(vocab), dim).astype(dtype) - hit_flags = np.zeros(len(vocab), dtype=bool) - - if init_method: - matrix = init_method(matrix) - for word, idx in ori_vocab.word2idx.items(): - try: - if word == padding and vocab.padding is not None: - word = vocab.padding - elif word == unknown and vocab.unknown is not None: - word = vocab.unknown - if word in vocab: - index = vocab.to_index(word) - matrix[index] = emb[idx] - hit_flags[index] = True - except Exception as e: - if error == "ignore": - warnings.warn("Error occurred at the {} line.".format(idx)) - else: - print("Error occurred at the {} line.".format(idx)) - raise e - - total_hits = np.sum(hit_flags) - print( - "Found {} out of {} words in the pre-training embedding.".format( - total_hits, len(vocab) - ) - ) - if init_method is None: - found_vectors = matrix[hit_flags] - if len(found_vectors) != 0: - mean = np.mean(found_vectors, axis=0, keepdims=True) - std = np.std(found_vectors, axis=0, keepdims=True) - unfound_vec_num = len(vocab) - total_hits - r_vecs = np.random.randn(unfound_vec_num, dim).astype(dtype) * std + mean - matrix[hit_flags == False] = r_vecs - - if normalize: - matrix /= np.linalg.norm(matrix, axis=1, keepdims=True) - - return matrix - - -def embedding_load_with_cache(emb_file, cache_dir, vocab, **kwargs): - def match_cache(file, cache_dir): - md5 = md5_for_file(file) - 
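-        # The MD5 of the raw embedding file is the cache key: cached pickles are
-        # named "<embedding-basename>-<md5>.pkl", so replacing or editing the
-        # embedding file automatically invalidates any previously written cache.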
-        cache_files = os.listdir(cache_dir)
-        for fn in cache_files:
-            if md5 in fn.split("-")[-1]:
-                return os.path.join(cache_dir, fn), True
-        return (
-            "{}-{}.pkl".format(os.path.join(cache_dir, os.path.basename(file)), md5),
-            False,
-        )
-
-    def get_cache(file):
-        if not os.path.exists(file):
-            return None
-        with open(file, "rb") as f:
-            emb = pickle.load(f)
-        return emb
-
-    os.makedirs(cache_dir, exist_ok=True)
-    cache_fn, match = match_cache(emb_file, cache_dir)
-    if not match:
-        print("cache missed, re-generating cache at {}".format(cache_fn))
-        emb, ori_vocab = EmbedLoader.load_without_vocab(
-            emb_file, padding=None, unknown=None, normalize=False
-        )
-        with open(cache_fn, "wb") as f:
-            pickle.dump((emb, ori_vocab), f)
-    else:
-        print("cache matched at {}".format(cache_fn))
-
-    # use cache
-    print("loading embeddings ...")
-    emb = get_cache(cache_fn)
-    assert emb is not None
-    return embedding_match_vocab(vocab, emb[0], emb[1], **kwargs)
diff --git a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/README.md b/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/README.md
deleted file mode 100644
index 55c1bdee..00000000
--- a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/README.md
+++ /dev/null
@@ -1,65 +0,0 @@
-# Batch-Parallel LatticeLSTM
-+ Paper: https://arxiv.org/abs/1805.02023
-+ At batch size 10, this implementation is already clearly faster than the [original code](https://github.com/jiesutd/LatticeLSTM).
-+ To run, set the paths of the three embedding files and of the corresponding dataset in main.py.
-+ This code has been merged into fastNLP.
-
-## Environment:
-+ python >= 3.7.3
-+ fastNLP >= dev.0.5.0
-+ pytorch >= 1.1.0
-+ numpy >= 1.16.4
-+ fitlog >= 0.2.0
-
-## Supported datasets:
-+ Resume, downloadable from [here](https://github.com/jiesutd/LatticeLSTM)
-+ Ontonote
-+ [Weibo](https://github.com/hltcoe/golden-horse)
-
-Datasets not listed above can be supported by writing a loader whose output format matches that of *load_ontonotes4ner* in load_data.py; a minimal sketch is given after the performance table below.
-
-## Performance:
-|Dataset|F1 of this code (test)|F1 in the paper (test)|
-|:----:|:----:|:----:|
-|Weibo|58.66|58.79|
-|Resume|95.18|94.46|
-|Ontonote|73.62|73.88|
-
-PS: The Weibo dataset used here is V2, i.e. the revised version; according to an issue in Dr. Jie Yang's LatticeLSTM repository on GitHub, it should be consistent with the version used in the paper.
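-As a concrete reference for adding a new dataset, here is a minimal, hypothetical sketch (the function name `load_my_dataset` and the `train/dev/test.char.bmes` layout are assumptions, mirroring the other loaders in load_data.py) that returns the same `(datasets, vocabs, embeddings)` triple as *load_ontonotes4ner*; bigram handling is omitted for brevity:
-
-```python
-from fastNLP import Vocabulary
-from fastNLP.io.loader import ConllLoader
-
-def load_my_dataset(path):
-    # each split is a conll-style file with a character column and a BMES tag column
-    loader = ConllLoader(['chars', 'target'])
-    datasets = {
-        name: loader.load('{}/{}.char.bmes'.format(path, name)).datasets['train']
-        for name in ('train', 'dev', 'test')
-    }
-    for ds in datasets.values():
-        ds.add_seq_len('chars')
-    # build vocabularies on train; dev/test words are added without creating entries
-    char_vocab = Vocabulary()
-    label_vocab = Vocabulary(padding=None, unknown=None)
-    char_vocab.from_dataset(datasets['train'], field_name='chars',
-                            no_create_entry_dataset=[datasets['dev'], datasets['test']])
-    label_vocab.from_dataset(datasets['train'], field_name='target')
-    vocabs = {'char': char_vocab, 'label': label_vocab}
-    embeddings = {}  # fill with StaticEmbedding objects when embedding paths are given
-    return datasets, vocabs, embeddings
-```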
-## If any confusion, please contact: -+ lixiaonan_xdu@outlook.com diff --git a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/load_data.py b/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/load_data.py deleted file mode 100644 index fcba17db..00000000 --- a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/load_data.py +++ /dev/null @@ -1,854 +0,0 @@ -from fastNLP.io import CSVLoader -from fastNLP import Vocabulary -from fastNLP import Const -import numpy as np -import fitlog -import pickle -import os -from fastNLP.embeddings import StaticEmbedding -from fastNLP import cache_results - - -@cache_results(_cache_fp='mtl16', _refresh=False) -def load_16_task(dict_path): - ''' - - :param dict_path: /remote-home/txsun/fnlp/MTL-LT/data - :return: - ''' - task_path = os.path.join(dict_path,'data.pkl') - embedding_path = os.path.join(dict_path,'word_embedding.npy') - - embedding = np.load(embedding_path).astype(np.float32) - - task_list = pickle.load(open(task_path, 'rb'))['task_lst'] - - for t in task_list: - t.train_set.rename_field('words_idx', 'words') - t.dev_set.rename_field('words_idx', 'words') - t.test_set.rename_field('words_idx', 'words') - - t.train_set.rename_field('label', 'target') - t.dev_set.rename_field('label', 'target') - t.test_set.rename_field('label', 'target') - - t.train_set.add_seq_len('words') - t.dev_set.add_seq_len('words') - t.test_set.add_seq_len('words') - - t.train_set.set_input(Const.INPUT, Const.INPUT_LEN) - t.dev_set.set_input(Const.INPUT, Const.INPUT_LEN) - t.test_set.set_input(Const.INPUT, Const.INPUT_LEN) - - return task_list,embedding - - -@cache_results(_cache_fp='SST2', _refresh=False) -def load_sst2(dict_path,embedding_path=None): - ''' - - :param dict_path: /remote-home/xnli/data/corpus/text_classification/SST-2/ - :param embedding_path: glove 300d txt - :return: - ''' - train_path = os.path.join(dict_path,'train.tsv') - dev_path = os.path.join(dict_path,'dev.tsv') - - loader = CSVLoader(headers=('words', 'target'), sep='\t') - train_data = loader.load(train_path).datasets['train'] - dev_data = loader.load(dev_path).datasets['train'] - - train_data.apply_field(lambda x: x.split(), field_name='words', new_field_name='words') - dev_data.apply_field(lambda x: x.split(), field_name='words', new_field_name='words') - - train_data.apply_field(lambda x: len(x), field_name='words', new_field_name='seq_len') - dev_data.apply_field(lambda x: len(x), field_name='words', new_field_name='seq_len') - - vocab = Vocabulary(min_freq=2) - vocab.from_dataset(train_data, field_name='words') - vocab.from_dataset(dev_data, field_name='words') - - # pretrained_embedding = load_word_emb(embedding_path, 300, vocab) - - label_vocab = Vocabulary(padding=None, unknown=None).from_dataset(train_data, field_name='target') - - label_vocab.index_dataset(train_data, field_name='target') - label_vocab.index_dataset(dev_data, field_name='target') - - vocab.index_dataset(train_data, field_name='words', new_field_name='words') - vocab.index_dataset(dev_data, field_name='words', new_field_name='words') - - train_data.set_input(Const.INPUT, Const.INPUT_LEN) - train_data.set_target(Const.TARGET) - - dev_data.set_input(Const.INPUT, Const.INPUT_LEN) - dev_data.set_target(Const.TARGET) - - if embedding_path is not None: - pretrained_embedding = load_word_emb(embedding_path, 300, vocab) - return (train_data,dev_data),(vocab,label_vocab),pretrained_embedding - - else: - return (train_data,dev_data),(vocab,label_vocab) - -@cache_results(_cache_fp='OntonotesPOS', _refresh=False) 
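-# fastNLP's cache_results pickles the decorated function's return value at
-# _cache_fp and replays it on later calls; passing _refresh=True forces the
-# loader to re-read from disk and regenerate the cache.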
-def load_conllized_ontonote_POS(path,embedding_path=None): - from fastNLP.io.data_loader import ConllLoader - header2index = {'words':3,'POS':4,'NER':10} - headers = ['words','POS'] - - if 'NER' in headers: - print('警告!通过 load_conllized_ontonote 函数读出来的NER标签不是BIOS,是纯粹的conll格式,是错误的!') - indexes = list(map(lambda x:header2index[x],headers)) - - loader = ConllLoader(headers,indexes) - - bundle = loader.load(path) - - # print(bundle.datasets) - - train_set = bundle.datasets['train'] - dev_set = bundle.datasets['dev'] - test_set = bundle.datasets['test'] - - - - - # train_set = loader.load(os.path.join(path,'train.txt')) - # dev_set = loader.load(os.path.join(path, 'dev.txt')) - # test_set = loader.load(os.path.join(path, 'test.txt')) - - # print(len(train_set)) - - train_set.add_seq_len('words','seq_len') - dev_set.add_seq_len('words','seq_len') - test_set.add_seq_len('words','seq_len') - - - - # print(dataset['POS']) - - vocab = Vocabulary(min_freq=1) - vocab.from_dataset(train_set,field_name='words') - vocab.from_dataset(dev_set, field_name='words') - vocab.from_dataset(test_set, field_name='words') - - vocab.index_dataset(train_set,field_name='words') - vocab.index_dataset(dev_set, field_name='words') - vocab.index_dataset(test_set, field_name='words') - - - - - label_vocab_dict = {} - - for i,h in enumerate(headers): - if h == 'words': - continue - label_vocab_dict[h] = Vocabulary(min_freq=1,padding=None,unknown=None) - label_vocab_dict[h].from_dataset(train_set,field_name=h) - - label_vocab_dict[h].index_dataset(train_set,field_name=h) - label_vocab_dict[h].index_dataset(dev_set,field_name=h) - label_vocab_dict[h].index_dataset(test_set,field_name=h) - - train_set.set_input(Const.INPUT, Const.INPUT_LEN) - train_set.set_target(headers[1]) - - dev_set.set_input(Const.INPUT, Const.INPUT_LEN) - dev_set.set_target(headers[1]) - - test_set.set_input(Const.INPUT, Const.INPUT_LEN) - test_set.set_target(headers[1]) - - if len(headers) > 2: - print('警告:由于任务数量大于1,所以需要每次手动设置target!') - - - print('train:',len(train_set),'dev:',len(dev_set),'test:',len(test_set)) - - if embedding_path is not None: - pretrained_embedding = load_word_emb(embedding_path, 300, vocab) - return (train_set,dev_set,test_set),(vocab,label_vocab_dict),pretrained_embedding - else: - return (train_set, dev_set, test_set), (vocab, label_vocab_dict) - - -@cache_results(_cache_fp='OntonotesNER', _refresh=False) -def load_conllized_ontonote_NER(path,embedding_path=None): - from fastNLP.io.pipe.conll import OntoNotesNERPipe - ontoNotesNERPipe = OntoNotesNERPipe(lower=True,target_pad_val=-100) - bundle_NER = ontoNotesNERPipe.process_from_file(path) - - train_set_NER = bundle_NER.datasets['train'] - dev_set_NER = bundle_NER.datasets['dev'] - test_set_NER = bundle_NER.datasets['test'] - - train_set_NER.add_seq_len('words','seq_len') - dev_set_NER.add_seq_len('words','seq_len') - test_set_NER.add_seq_len('words','seq_len') - - - NER_vocab = bundle_NER.get_vocab('target') - word_vocab = bundle_NER.get_vocab('words') - - if embedding_path is not None: - - embed = StaticEmbedding(vocab=word_vocab, model_dir_or_name=embedding_path, word_dropout=0.01, - dropout=0.5,lower=True) - - - # pretrained_embedding = load_word_emb(embedding_path, 300, word_vocab) - return (train_set_NER,dev_set_NER,test_set_NER),\ - (word_vocab,NER_vocab),embed - else: - return (train_set_NER, dev_set_NER, test_set_NER), (NER_vocab, word_vocab) - -@cache_results(_cache_fp='OntonotesPOSNER', _refresh=False) - -def load_conllized_ontonote_NER_POS(path,embedding_path=None): 
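-    # Joint loader: reads NER targets via OntoNotesNERPipe, attaches the POS
-    # column produced by load_conllized_ontonote_POS on the same splits, and
-    # renames the fields to 'nerid'/'posid' for multi-task training.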
- from fastNLP.io.pipe.conll import OntoNotesNERPipe - ontoNotesNERPipe = OntoNotesNERPipe(lower=True) - bundle_NER = ontoNotesNERPipe.process_from_file(path) - - train_set_NER = bundle_NER.datasets['train'] - dev_set_NER = bundle_NER.datasets['dev'] - test_set_NER = bundle_NER.datasets['test'] - - NER_vocab = bundle_NER.get_vocab('target') - word_vocab = bundle_NER.get_vocab('words') - - (train_set_POS,dev_set_POS,test_set_POS),(_,POS_vocab) = load_conllized_ontonote_POS(path) - POS_vocab = POS_vocab['POS'] - - train_set_NER.add_field('pos',train_set_POS['POS'],is_target=True) - dev_set_NER.add_field('pos', dev_set_POS['POS'], is_target=True) - test_set_NER.add_field('pos', test_set_POS['POS'], is_target=True) - - if train_set_NER.has_field('target'): - train_set_NER.rename_field('target','ner') - - if dev_set_NER.has_field('target'): - dev_set_NER.rename_field('target','ner') - - if test_set_NER.has_field('target'): - test_set_NER.rename_field('target','ner') - - - - if train_set_NER.has_field('pos'): - train_set_NER.rename_field('pos','posid') - if dev_set_NER.has_field('pos'): - dev_set_NER.rename_field('pos','posid') - if test_set_NER.has_field('pos'): - test_set_NER.rename_field('pos','posid') - - if train_set_NER.has_field('ner'): - train_set_NER.rename_field('ner','nerid') - if dev_set_NER.has_field('ner'): - dev_set_NER.rename_field('ner','nerid') - if test_set_NER.has_field('ner'): - test_set_NER.rename_field('ner','nerid') - - if embedding_path is not None: - - embed = StaticEmbedding(vocab=word_vocab, model_dir_or_name=embedding_path, word_dropout=0.01, - dropout=0.5,lower=True) - - return (train_set_NER,dev_set_NER,test_set_NER),\ - (word_vocab,POS_vocab,NER_vocab),embed - else: - return (train_set_NER, dev_set_NER, test_set_NER), (NER_vocab, word_vocab) - -@cache_results(_cache_fp='Ontonotes3', _refresh=True) -def load_conllized_ontonote_pkl(path,embedding_path=None): - - data_bundle = pickle.load(open(path,'rb')) - train_set = data_bundle.datasets['train'] - dev_set = data_bundle.datasets['dev'] - test_set = data_bundle.datasets['test'] - - train_set.rename_field('pos','posid') - train_set.rename_field('ner','nerid') - train_set.rename_field('chunk','chunkid') - - dev_set.rename_field('pos','posid') - dev_set.rename_field('ner','nerid') - dev_set.rename_field('chunk','chunkid') - - test_set.rename_field('pos','posid') - test_set.rename_field('ner','nerid') - test_set.rename_field('chunk','chunkid') - - - word_vocab = data_bundle.vocabs['words'] - pos_vocab = data_bundle.vocabs['pos'] - ner_vocab = data_bundle.vocabs['ner'] - chunk_vocab = data_bundle.vocabs['chunk'] - - - if embedding_path is not None: - - embed = StaticEmbedding(vocab=word_vocab, model_dir_or_name=embedding_path, word_dropout=0.01, - dropout=0.5,lower=True) - - return (train_set,dev_set,test_set),\ - (word_vocab,pos_vocab,ner_vocab,chunk_vocab),embed - else: - return (train_set, dev_set, test_set), (word_vocab,ner_vocab) - # print(data_bundle) - - - - - - - - - - -# @cache_results(_cache_fp='Conll2003', _refresh=False) -# def load_conll_2003(path,embedding_path=None): -# f = open(path, 'rb') -# data_pkl = pickle.load(f) -# -# task_lst = data_pkl['task_lst'] -# vocabs = data_pkl['vocabs'] -# # word_vocab = vocabs['words'] -# # pos_vocab = vocabs['pos'] -# # chunk_vocab = vocabs['chunk'] -# # ner_vocab = vocabs['ner'] -# -# if embedding_path is not None: -# embed = StaticEmbedding(vocab=vocabs['words'], model_dir_or_name=embedding_path, word_dropout=0.01, -# dropout=0.5) -# return task_lst,vocabs,embed -# 
else: -# return task_lst,vocabs - -# @cache_results(_cache_fp='Conll2003_mine', _refresh=False) -@cache_results(_cache_fp='Conll2003_mine_embed_100', _refresh=True) -def load_conll_2003_mine(path,embedding_path=None,pad_val=-100): - f = open(path, 'rb') - - data_pkl = pickle.load(f) - # print(data_pkl) - # print(data_pkl) - train_set = data_pkl[0]['train'] - dev_set = data_pkl[0]['dev'] - test_set = data_pkl[0]['test'] - - train_set.set_pad_val('posid',pad_val) - train_set.set_pad_val('nerid', pad_val) - train_set.set_pad_val('chunkid', pad_val) - - dev_set.set_pad_val('posid',pad_val) - dev_set.set_pad_val('nerid', pad_val) - dev_set.set_pad_val('chunkid', pad_val) - - test_set.set_pad_val('posid',pad_val) - test_set.set_pad_val('nerid', pad_val) - test_set.set_pad_val('chunkid', pad_val) - - if train_set.has_field('task_id'): - - train_set.delete_field('task_id') - - if dev_set.has_field('task_id'): - dev_set.delete_field('task_id') - - if test_set.has_field('task_id'): - test_set.delete_field('task_id') - - if train_set.has_field('words_idx'): - train_set.rename_field('words_idx','words') - - if dev_set.has_field('words_idx'): - dev_set.rename_field('words_idx','words') - - if test_set.has_field('words_idx'): - test_set.rename_field('words_idx','words') - - - - word_vocab = data_pkl[1]['words'] - pos_vocab = data_pkl[1]['pos'] - ner_vocab = data_pkl[1]['ner'] - chunk_vocab = data_pkl[1]['chunk'] - - if embedding_path is not None: - embed = StaticEmbedding(vocab=word_vocab, model_dir_or_name=embedding_path, word_dropout=0.01, - dropout=0.5,lower=True) - return (train_set,dev_set,test_set),(word_vocab,pos_vocab,ner_vocab,chunk_vocab),embed - else: - return (train_set,dev_set,test_set),(word_vocab,pos_vocab,ner_vocab,chunk_vocab) - - -def load_conllized_ontonote_pkl_yf(path): - def init_task(task): - task_name = task.task_name - for ds in [task.train_set, task.dev_set, task.test_set]: - if ds.has_field('words'): - ds.rename_field('words', 'x') - else: - ds.rename_field('words_idx', 'x') - if ds.has_field('label'): - ds.rename_field('label', 'y') - else: - ds.rename_field(task_name, 'y') - ds.set_input('x', 'y', 'task_id') - ds.set_target('y') - - if task_name in ['ner', 'chunk'] or 'pos' in task_name: - ds.set_input('seq_len') - ds.set_target('seq_len') - return task - #/remote-home/yfshao/workdir/datasets/conll03/data.pkl - def pload(fn): - with open(fn, 'rb') as f: - return pickle.load(f) - - DB = pload(path) - task_lst = DB['task_lst'] - vocabs = DB['vocabs'] - task_lst = [init_task(task) for task in task_lst] - - return task_lst, vocabs - - -@cache_results(_cache_fp='weiboNER old uni+bi', _refresh=False) -def load_weibo_ner_old(path,unigram_embedding_path=None,bigram_embedding_path=None,index_token=True, - normlize={'char':True,'bigram':True,'word':False}): - from fastNLP.io.data_loader import ConllLoader - from utils import get_bigrams - - loader = ConllLoader(['chars','target']) - # from fastNLP.io.file_reader import _read_conll - # from fastNLP.core import Instance,DataSet - # def _load(path): - # ds = DataSet() - # for idx, data in _read_conll(path, indexes=loader.indexes, dropna=loader.dropna, - # encoding='ISO-8859-1'): - # ins = {h: data[i] for i, h in enumerate(loader.headers)} - # ds.append(Instance(**ins)) - # return ds - # from fastNLP.io.utils import check_loader_paths - # paths = check_loader_paths(path) - # datasets = {name: _load(path) for name, path in paths.items()} - datasets = {} - train_path = os.path.join(path,'train.all.bmes') - dev_path = 
os.path.join(path,'dev.all.bmes') - test_path = os.path.join(path,'test.all.bmes') - datasets['train'] = loader.load(train_path).datasets['train'] - datasets['dev'] = loader.load(dev_path).datasets['train'] - datasets['test'] = loader.load(test_path).datasets['train'] - - for k,v in datasets.items(): - print('{}:{}'.format(k,len(v))) - - vocabs = {} - word_vocab = Vocabulary() - bigram_vocab = Vocabulary() - label_vocab = Vocabulary(padding=None,unknown=None) - - for k,v in datasets.items(): - # ignore the word segmentation tag - v.apply_field(lambda x: [w[0] for w in x],'chars','chars') - v.apply_field(get_bigrams,'chars','bigrams') - - - word_vocab.from_dataset(datasets['train'],field_name='chars',no_create_entry_dataset=[datasets['dev'],datasets['test']]) - label_vocab.from_dataset(datasets['train'],field_name='target') - print('label_vocab:{}\n{}'.format(len(label_vocab),label_vocab.idx2word)) - - - for k,v in datasets.items(): - # v.set_pad_val('target',-100) - v.add_seq_len('chars',new_field_name='seq_len') - - - vocabs['char'] = word_vocab - vocabs['label'] = label_vocab - - - bigram_vocab.from_dataset(datasets['train'],field_name='bigrams',no_create_entry_dataset=[datasets['dev'],datasets['test']]) - if index_token: - word_vocab.index_dataset(*list(datasets.values()), field_name='raw_words', new_field_name='words') - bigram_vocab.index_dataset(*list(datasets.values()),field_name='raw_bigrams',new_field_name='bigrams') - label_vocab.index_dataset(*list(datasets.values()), field_name='raw_target', new_field_name='target') - - # for k,v in datasets.items(): - # v.set_input('chars','bigrams','seq_len','target') - # v.set_target('target','seq_len') - - vocabs['bigram'] = bigram_vocab - - embeddings = {} - - if unigram_embedding_path is not None: - unigram_embedding = StaticEmbedding(word_vocab, model_dir_or_name=unigram_embedding_path, - word_dropout=0.01,normalize=normlize['char']) - embeddings['char'] = unigram_embedding - - if bigram_embedding_path is not None: - bigram_embedding = StaticEmbedding(bigram_vocab, model_dir_or_name=bigram_embedding_path, - word_dropout=0.01,normalize=normlize['bigram']) - embeddings['bigram'] = bigram_embedding - - return datasets, vocabs, embeddings - - -@cache_results(_cache_fp='weiboNER uni+bi', _refresh=False) -def load_weibo_ner(path,unigram_embedding_path=None,bigram_embedding_path=None,index_token=True, - normlize={'char':True,'bigram':True,'word':False}): - from fastNLP.io.loader import ConllLoader - from utils import get_bigrams - - loader = ConllLoader(['chars','target']) - bundle = loader.load(path) - - datasets = bundle.datasets - for k,v in datasets.items(): - print('{}:{}'.format(k,len(v))) - # print(*list(datasets.keys())) - vocabs = {} - word_vocab = Vocabulary() - bigram_vocab = Vocabulary() - label_vocab = Vocabulary(padding=None,unknown=None) - - for k,v in datasets.items(): - # ignore the word segmentation tag - v.apply_field(lambda x: [w[0] for w in x],'chars','chars') - v.apply_field(get_bigrams,'chars','bigrams') - - - word_vocab.from_dataset(datasets['train'],field_name='chars',no_create_entry_dataset=[datasets['dev'],datasets['test']]) - label_vocab.from_dataset(datasets['train'],field_name='target') - print('label_vocab:{}\n{}'.format(len(label_vocab),label_vocab.idx2word)) - - - for k,v in datasets.items(): - # v.set_pad_val('target',-100) - v.add_seq_len('chars',new_field_name='seq_len') - - - vocabs['char'] = word_vocab - vocabs['label'] = label_vocab - - - 
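-    # Words that only occur in dev/test are registered via no_create_entry_dataset:
-    # they receive an index but are flagged as having no entry of their own, so a
-    # StaticEmbedding built on this vocabulary can fall back to the pre-trained
-    # vector (or UNK) for them instead of learning a fresh row from training data.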
bigram_vocab.from_dataset(datasets['train'],field_name='bigrams',no_create_entry_dataset=[datasets['dev'],datasets['test']]) - if index_token: - word_vocab.index_dataset(*list(datasets.values()), field_name='raw_words', new_field_name='words') - bigram_vocab.index_dataset(*list(datasets.values()),field_name='raw_bigrams',new_field_name='bigrams') - label_vocab.index_dataset(*list(datasets.values()), field_name='raw_target', new_field_name='target') - - # for k,v in datasets.items(): - # v.set_input('chars','bigrams','seq_len','target') - # v.set_target('target','seq_len') - - vocabs['bigram'] = bigram_vocab - - embeddings = {} - - if unigram_embedding_path is not None: - unigram_embedding = StaticEmbedding(word_vocab, model_dir_or_name=unigram_embedding_path, - word_dropout=0.01,normalize=normlize['char']) - embeddings['char'] = unigram_embedding - - if bigram_embedding_path is not None: - bigram_embedding = StaticEmbedding(bigram_vocab, model_dir_or_name=bigram_embedding_path, - word_dropout=0.01,normalize=normlize['bigram']) - embeddings['bigram'] = bigram_embedding - - return datasets, vocabs, embeddings - - - -# datasets,vocabs = load_weibo_ner('/remote-home/xnli/data/corpus/sequence_labelling/ner_weibo') -# -# print(datasets['train'][:5]) -# print(vocabs['word'].idx2word) -# print(vocabs['target'].idx2word) - - -@cache_results(_cache_fp='cache/ontonotes4ner',_refresh=False) -def load_ontonotes4ner(path,char_embedding_path=None,bigram_embedding_path=None,index_token=True, - normalize={'char':True,'bigram':True,'word':False}): - from fastNLP.io.loader import ConllLoader - from utils import get_bigrams - - train_path = os.path.join(path,'train.char.bmes') - dev_path = os.path.join(path,'dev.char.bmes') - test_path = os.path.join(path,'test.char.bmes') - - loader = ConllLoader(['chars','target']) - train_bundle = loader.load(train_path) - dev_bundle = loader.load(dev_path) - test_bundle = loader.load(test_path) - - - datasets = dict() - datasets['train'] = train_bundle.datasets['train'] - datasets['dev'] = dev_bundle.datasets['train'] - datasets['test'] = test_bundle.datasets['train'] - - - datasets['train'].apply_field(get_bigrams,field_name='chars',new_field_name='bigrams') - datasets['dev'].apply_field(get_bigrams, field_name='chars', new_field_name='bigrams') - datasets['test'].apply_field(get_bigrams, field_name='chars', new_field_name='bigrams') - - datasets['train'].add_seq_len('chars') - datasets['dev'].add_seq_len('chars') - datasets['test'].add_seq_len('chars') - - - - char_vocab = Vocabulary() - bigram_vocab = Vocabulary() - label_vocab = Vocabulary(padding=None,unknown=None) - print(datasets.keys()) - print(len(datasets['dev'])) - print(len(datasets['test'])) - print(len(datasets['train'])) - char_vocab.from_dataset(datasets['train'],field_name='chars', - no_create_entry_dataset=[datasets['dev'],datasets['test']] ) - bigram_vocab.from_dataset(datasets['train'],field_name='bigrams', - no_create_entry_dataset=[datasets['dev'],datasets['test']]) - label_vocab.from_dataset(datasets['train'],field_name='target') - if index_token: - char_vocab.index_dataset(datasets['train'],datasets['dev'],datasets['test'], - field_name='chars',new_field_name='chars') - bigram_vocab.index_dataset(datasets['train'],datasets['dev'],datasets['test'], - field_name='bigrams',new_field_name='bigrams') - label_vocab.index_dataset(datasets['train'],datasets['dev'],datasets['test'], - field_name='target',new_field_name='target') - - vocabs = {} - vocabs['char'] = char_vocab - vocabs['label'] = label_vocab - 
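-    # vocabs gathers the char/bigram/label vocabularies; main.py later sizes the
-    # model from them (e.g. label_size=len(vocabs['label'])).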
vocabs['bigram'] = bigram_vocab - vocabs['label'] = label_vocab - - embeddings = {} - if char_embedding_path is not None: - char_embedding = StaticEmbedding(char_vocab,char_embedding_path,word_dropout=0.01, - normalize=normalize['char']) - embeddings['char'] = char_embedding - - if bigram_embedding_path is not None: - bigram_embedding = StaticEmbedding(bigram_vocab,bigram_embedding_path,word_dropout=0.01, - normalize=normalize['bigram']) - embeddings['bigram'] = bigram_embedding - - return datasets,vocabs,embeddings - - - -@cache_results(_cache_fp='cache/resume_ner',_refresh=False) -def load_resume_ner(path,char_embedding_path=None,bigram_embedding_path=None,index_token=True, - normalize={'char':True,'bigram':True,'word':False}): - from fastNLP.io.data_loader import ConllLoader - from utils import get_bigrams - - train_path = os.path.join(path,'train.char.bmes') - dev_path = os.path.join(path,'dev.char.bmes') - test_path = os.path.join(path,'test.char.bmes') - - loader = ConllLoader(['chars','target']) - train_bundle = loader.load(train_path) - dev_bundle = loader.load(dev_path) - test_bundle = loader.load(test_path) - - - datasets = dict() - datasets['train'] = train_bundle.datasets['train'] - datasets['dev'] = dev_bundle.datasets['train'] - datasets['test'] = test_bundle.datasets['train'] - - - datasets['train'].apply_field(get_bigrams,field_name='chars',new_field_name='bigrams') - datasets['dev'].apply_field(get_bigrams, field_name='chars', new_field_name='bigrams') - datasets['test'].apply_field(get_bigrams, field_name='chars', new_field_name='bigrams') - - datasets['train'].add_seq_len('chars') - datasets['dev'].add_seq_len('chars') - datasets['test'].add_seq_len('chars') - - - - char_vocab = Vocabulary() - bigram_vocab = Vocabulary() - label_vocab = Vocabulary(padding=None,unknown=None) - print(datasets.keys()) - print(len(datasets['dev'])) - print(len(datasets['test'])) - print(len(datasets['train'])) - char_vocab.from_dataset(datasets['train'],field_name='chars', - no_create_entry_dataset=[datasets['dev'],datasets['test']] ) - bigram_vocab.from_dataset(datasets['train'],field_name='bigrams', - no_create_entry_dataset=[datasets['dev'],datasets['test']]) - label_vocab.from_dataset(datasets['train'],field_name='target') - if index_token: - char_vocab.index_dataset(datasets['train'],datasets['dev'],datasets['test'], - field_name='chars',new_field_name='chars') - bigram_vocab.index_dataset(datasets['train'],datasets['dev'],datasets['test'], - field_name='bigrams',new_field_name='bigrams') - label_vocab.index_dataset(datasets['train'],datasets['dev'],datasets['test'], - field_name='target',new_field_name='target') - - vocabs = {} - vocabs['char'] = char_vocab - vocabs['label'] = label_vocab - vocabs['bigram'] = bigram_vocab - - embeddings = {} - if char_embedding_path is not None: - char_embedding = StaticEmbedding(char_vocab,char_embedding_path,word_dropout=0.01,normalize=normalize['char']) - embeddings['char'] = char_embedding - - if bigram_embedding_path is not None: - bigram_embedding = StaticEmbedding(bigram_vocab,bigram_embedding_path,word_dropout=0.01,normalize=normalize['bigram']) - embeddings['bigram'] = bigram_embedding - - return datasets,vocabs,embeddings - - -@cache_results(_cache_fp='need_to_defined_fp',_refresh=False) -def equip_chinese_ner_with_skip(datasets,vocabs,embeddings,w_list,word_embedding_path=None, - normalize={'char':True,'bigram':True,'word':False}): - from utils_ import Trie,get_skip_path - from functools import partial - w_trie = Trie() - for w in w_list: - 
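-        # Insert every lexicon word into a trie so that get_skip_path can match
-        # all dictionary words anchored at each character position; these matches
-        # become the "skip" edges of the lattice.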
w_trie.insert(w) - - # for k,v in datasets.items(): - # v.apply_field(partial(get_skip_path,w_trie=w_trie),'chars','skips') - - def skips2skips_l2r(chars,w_trie): - ''' - - :param lexicons: list[[int,int,str]] - :return: skips_l2r - ''' - # print(lexicons) - # print('******') - - lexicons = get_skip_path(chars,w_trie=w_trie) - - - # max_len = max(list(map(lambda x:max(x[:2]),lexicons)))+1 if len(lexicons) != 0 else 0 - - result = [[] for _ in range(len(chars))] - - for lex in lexicons: - s = lex[0] - e = lex[1] - w = lex[2] - - result[e].append([s,w]) - - return result - - def skips2skips_r2l(chars,w_trie): - ''' - - :param lexicons: list[[int,int,str]] - :return: skips_l2r - ''' - # print(lexicons) - # print('******') - - lexicons = get_skip_path(chars,w_trie=w_trie) - - - # max_len = max(list(map(lambda x:max(x[:2]),lexicons)))+1 if len(lexicons) != 0 else 0 - - result = [[] for _ in range(len(chars))] - - for lex in lexicons: - s = lex[0] - e = lex[1] - w = lex[2] - - result[s].append([e,w]) - - return result - - for k,v in datasets.items(): - v.apply_field(partial(skips2skips_l2r,w_trie=w_trie),'chars','skips_l2r') - - for k,v in datasets.items(): - v.apply_field(partial(skips2skips_r2l,w_trie=w_trie),'chars','skips_r2l') - - # print(v['skips_l2r'][0]) - word_vocab = Vocabulary() - word_vocab.add_word_lst(w_list) - vocabs['word'] = word_vocab - for k,v in datasets.items(): - v.apply_field(lambda x:[ list(map(lambda x:x[0],p)) for p in x],'skips_l2r','skips_l2r_source') - v.apply_field(lambda x:[ list(map(lambda x:x[1],p)) for p in x], 'skips_l2r', 'skips_l2r_word') - - for k,v in datasets.items(): - v.apply_field(lambda x:[ list(map(lambda x:x[0],p)) for p in x],'skips_r2l','skips_r2l_source') - v.apply_field(lambda x:[ list(map(lambda x:x[1],p)) for p in x], 'skips_r2l', 'skips_r2l_word') - - for k,v in datasets.items(): - v.apply_field(lambda x:list(map(len,x)), 'skips_l2r_word', 'lexicon_count') - v.apply_field(lambda x: - list(map(lambda y: - list(map(lambda z:word_vocab.to_index(z),y)),x)), - 'skips_l2r_word',new_field_name='skips_l2r_word') - - v.apply_field(lambda x:list(map(len,x)), 'skips_r2l_word', 'lexicon_count_back') - - v.apply_field(lambda x: - list(map(lambda y: - list(map(lambda z:word_vocab.to_index(z),y)),x)), - 'skips_r2l_word',new_field_name='skips_r2l_word') - - - - - - if word_embedding_path is not None: - word_embedding = StaticEmbedding(word_vocab,word_embedding_path,word_dropout=0,normalize=normalize['word']) - embeddings['word'] = word_embedding - - vocabs['char'].index_dataset(datasets['train'], datasets['dev'], datasets['test'], - field_name='chars', new_field_name='chars') - vocabs['bigram'].index_dataset(datasets['train'], datasets['dev'], datasets['test'], - field_name='bigrams', new_field_name='bigrams') - vocabs['label'].index_dataset(datasets['train'], datasets['dev'], datasets['test'], - field_name='target', new_field_name='target') - - return datasets,vocabs,embeddings - - - -@cache_results(_cache_fp='cache/load_yangjie_rich_pretrain_word_list',_refresh=False) -def load_yangjie_rich_pretrain_word_list(embedding_path,drop_characters=True): - f = open(embedding_path,'r') - lines = f.readlines() - w_list = [] - for line in lines: - splited = line.strip().split(' ') - w = splited[0] - w_list.append(w) - - if drop_characters: - w_list = list(filter(lambda x:len(x) != 1, w_list)) - - return w_list - - - -# from pathes import * -# -# datasets,vocabs,embeddings = load_ontonotes4ner(ontonote4ner_cn_path, -# 
yangjie_rich_pretrain_unigram_path,yangjie_rich_pretrain_bigram_path) -# print(datasets.keys()) -# print(vocabs.keys()) -# print(embeddings) -# yangjie_rich_pretrain_word_path -# datasets['train'].set_pad_val \ No newline at end of file diff --git a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/main.py b/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/main.py deleted file mode 100644 index a2df5a91..00000000 --- a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/main.py +++ /dev/null @@ -1,205 +0,0 @@ -import torch.nn as nn -# print(1111111111) -# from pathes import * -from load_data import load_ontonotes4ner,equip_chinese_ner_with_skip,load_yangjie_rich_pretrain_word_list,\ - load_resume_ner,load_weibo_ner,load_weibo_ner_old -from fastNLP.embeddings import StaticEmbedding -from models import LatticeLSTM_SeqLabel,LSTM_SeqLabel,LatticeLSTM_SeqLabel_V1 -from fastNLP import CrossEntropyLoss,SpanFPreRecMetric,Trainer,AccuracyMetric,LossInForward -import torch.optim as optim -import argparse -import torch -import sys -from utils_ import LatticeLexiconPadder,SpanFPreRecMetric_YJ -from fastNLP import Tester -import fitlog -from fastNLP.core.callback import FitlogCallback -from utils import set_seed -import os -from fastNLP import LRScheduler -from torch.optim.lr_scheduler import LambdaLR - -parser = argparse.ArgumentParser() -parser.add_argument('--device',default='cuda:1') -parser.add_argument('--debug',default=False) - -parser.add_argument('--norm_embed',default=False) -parser.add_argument('--batch',default=1) -parser.add_argument('--test_batch',default=1024) -parser.add_argument('--optim',default='sgd',help='adam|sgd') -parser.add_argument('--lr',default=0.045) -parser.add_argument('--model',default='lattice',help='lattice|lstm') -parser.add_argument('--skip_before_head',default=False)#in paper it's false -parser.add_argument('--hidden',default=113) -parser.add_argument('--momentum',default=0) -parser.add_argument('--bi',default=True) -parser.add_argument('--dataset',default='weibo',help='resume|ontonote|weibo|msra') -parser.add_argument('--use_bigram',default=True) - -parser.add_argument('--embed_dropout',default=0.5) -parser.add_argument('--gaz_dropout',default=-1) -parser.add_argument('--output_dropout',default=0.5) -parser.add_argument('--epoch',default=100) -parser.add_argument('--seed',default=100) - -args = parser.parse_args() - -set_seed(args.seed) - -fit_msg_list = [args.model,'bi' if args.bi else 'uni',str(args.batch)] -if args.model == 'lattice': - fit_msg_list.append(str(args.skip_before_head)) -fit_msg = ' '.join(fit_msg_list) -fitlog.commit(__file__,fit_msg=fit_msg) - -device = torch.device(args.device) -for k,v in args.__dict__.items(): - print(k,v) - -refresh_data = False - - -from pathes import * -# ontonote4ner_cn_path = 0 -# yangjie_rich_pretrain_unigram_path = 0 -# yangjie_rich_pretrain_bigram_path = 0 -# resume_ner_path = 0 -# weibo_ner_path = 0 - -if args.dataset == 'ontonote': - datasets,vocabs,embeddings = load_ontonotes4ner(ontonote4ner_cn_path,yangjie_rich_pretrain_unigram_path,yangjie_rich_pretrain_bigram_path, - _refresh=refresh_data,index_token=False, - ) -elif args.dataset == 'resume': - datasets,vocabs,embeddings = load_resume_ner(resume_ner_path,yangjie_rich_pretrain_unigram_path,yangjie_rich_pretrain_bigram_path, - _refresh=refresh_data,index_token=False, - ) -elif args.dataset == 'weibo': - datasets,vocabs,embeddings = load_weibo_ner(weibo_ner_path,yangjie_rich_pretrain_unigram_path,yangjie_rich_pretrain_bigram_path, - 
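-                                            # index_token=False defers indexing:
-                                            # equip_chinese_ner_with_skip indexes
-                                            # chars/bigrams/target after adding
-                                            # the lattice fields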
_refresh=refresh_data,index_token=False, - ) - -elif args.dataset == 'weibo_old': - datasets,vocabs,embeddings = load_weibo_ner_old(weibo_ner_old_path,yangjie_rich_pretrain_unigram_path,yangjie_rich_pretrain_bigram_path, - _refresh=refresh_data,index_token=False, - ) -if args.dataset == 'ontonote': - args.batch = 10 - args.lr = 0.045 -elif args.dataset == 'resume': - args.batch = 1 - args.lr = 0.015 -elif args.dataset == 'weibo': - args.batch = 10 - args.gaz_dropout = 0.1 - args.embed_dropout = 0.1 - args.output_dropout = 0.1 -elif args.dataset == 'weibo_old': - args.embed_dropout = 0.1 - args.output_dropout = 0.1 - -if args.gaz_dropout < 0: - args.gaz_dropout = args.embed_dropout - -fitlog.add_hyper(args) -w_list = load_yangjie_rich_pretrain_word_list(yangjie_rich_pretrain_word_path, - _refresh=refresh_data) - -cache_name = os.path.join('cache',args.dataset+'_lattice') -datasets,vocabs,embeddings = equip_chinese_ner_with_skip(datasets,vocabs,embeddings,w_list,yangjie_rich_pretrain_word_path, - _refresh=refresh_data,_cache_fp=cache_name) - -print(datasets['train'][0]) -print('vocab info:') -for k,v in vocabs.items(): - print('{}:{}'.format(k,len(v))) - -for k,v in datasets.items(): - if args.model == 'lattice': - v.set_ignore_type('skips_l2r_word','skips_l2r_source','skips_r2l_word', 'skips_r2l_source') - if args.skip_before_head: - v.set_padder('skips_l2r_word',LatticeLexiconPadder()) - v.set_padder('skips_l2r_source',LatticeLexiconPadder()) - v.set_padder('skips_r2l_word',LatticeLexiconPadder()) - v.set_padder('skips_r2l_source',LatticeLexiconPadder(pad_val_dynamic=True)) - else: - v.set_padder('skips_l2r_word',LatticeLexiconPadder()) - v.set_padder('skips_r2l_word', LatticeLexiconPadder()) - v.set_padder('skips_l2r_source', LatticeLexiconPadder(-1)) - v.set_padder('skips_r2l_source', LatticeLexiconPadder(pad_val_dynamic=True,dynamic_offset=1)) - if args.bi: - v.set_input('chars','bigrams','seq_len', - 'skips_l2r_word','skips_l2r_source','lexicon_count', - 'skips_r2l_word', 'skips_r2l_source','lexicon_count_back', - 'target', - use_1st_ins_infer_dim_type=True) - else: - v.set_input('chars','bigrams','seq_len', - 'skips_l2r_word','skips_l2r_source','lexicon_count', - 'target', - use_1st_ins_infer_dim_type=True) - v.set_target('target','seq_len') - - v['target'].set_pad_val(0) - elif args.model == 'lstm': - v.set_ignore_type('skips_l2r_word','skips_l2r_source') - v.set_padder('skips_l2r_word',LatticeLexiconPadder()) - v.set_padder('skips_l2r_source',LatticeLexiconPadder()) - v.set_input('chars','bigrams','seq_len','target', - use_1st_ins_infer_dim_type=True) - v.set_target('target','seq_len') - - v['target'].set_pad_val(0) - -print(datasets['dev']['skips_l2r_word'][100]) - - -if args.model =='lattice': - model = LatticeLSTM_SeqLabel_V1(embeddings['char'],embeddings['bigram'],embeddings['word'], - hidden_size=args.hidden,label_size=len(vocabs['label']),device=args.device, - embed_dropout=args.embed_dropout,output_dropout=args.output_dropout, - skip_batch_first=True,bidirectional=args.bi,debug=args.debug, - skip_before_head=args.skip_before_head,use_bigram=args.use_bigram, - gaz_dropout=args.gaz_dropout - ) -elif args.model == 'lstm': - model = LSTM_SeqLabel(embeddings['char'],embeddings['bigram'],embeddings['word'], - hidden_size=args.hidden,label_size=len(vocabs['label']),device=args.device, - bidirectional=args.bi, - embed_dropout=args.embed_dropout,output_dropout=args.output_dropout, - use_bigram=args.use_bigram) - - -loss = LossInForward() -encoding_type = 'bmeso' -if args.dataset == 
'weibo': - encoding_type = 'bio' -f1_metric = SpanFPreRecMetric(vocabs['label'],pred='pred',target='target',seq_len='seq_len',encoding_type=encoding_type) -acc_metric = AccuracyMetric(pred='pred',target='target',seq_len='seq_len') -metrics = [f1_metric,acc_metric] - -if args.optim == 'adam': - optimizer = optim.Adam(model.parameters(),lr=args.lr) -elif args.optim == 'sgd': - optimizer = optim.SGD(model.parameters(),lr=args.lr,momentum=args.momentum) - - - - -callbacks = [ - FitlogCallback({'test':datasets['test'],'train':datasets['train']}), - LRScheduler(lr_scheduler=LambdaLR(optimizer, lambda ep: 1 / (1 + 0.03)**ep)) -] -print('label_vocab:{}\n{}'.format(len(vocabs['label']),vocabs['label'].idx2word)) -trainer = Trainer(datasets['train'],model, - optimizer=optimizer, - loss=loss, - metrics=metrics, - dev_data=datasets['dev'], - device=device, - batch_size=args.batch, - n_epochs=args.epoch, - dev_batch_size=args.test_batch, - callbacks=callbacks) - -trainer.train() \ No newline at end of file diff --git a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/models.py b/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/models.py deleted file mode 100644 index 0b419015..00000000 --- a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/models.py +++ /dev/null @@ -1,310 +0,0 @@ -import torch.nn as nn -from fastNLP.embeddings import StaticEmbedding -from fastNLP.modules import LSTM, ConditionalRandomField -import torch -from fastNLP import seq_len_to_mask -from utils import better_init_rnn,print_info - - -class LatticeLSTM_SeqLabel(nn.Module): - def __init__(self, char_embed, bigram_embed, word_embed, hidden_size, label_size, bias=True, bidirectional=False, - device=None, embed_dropout=0, output_dropout=0, skip_batch_first=True,debug=False, - skip_before_head=False,use_bigram=True,vocabs=None): - if device is None: - self.device = torch.device('cpu') - else: - self.device = torch.device(device) - from modules import LatticeLSTMLayer_sup_back_V0 - super().__init__() - self.debug = debug - self.skip_batch_first = skip_batch_first - self.char_embed_size = char_embed.embedding.weight.size(1) - self.bigram_embed_size = bigram_embed.embedding.weight.size(1) - self.word_embed_size = word_embed.embedding.weight.size(1) - self.hidden_size = hidden_size - self.label_size = label_size - self.bidirectional = bidirectional - self.use_bigram = use_bigram - self.vocabs = vocabs - - if self.use_bigram: - self.input_size = self.char_embed_size + self.bigram_embed_size - else: - self.input_size = self.char_embed_size - - self.char_embed = char_embed - self.bigram_embed = bigram_embed - self.word_embed = word_embed - self.encoder = LatticeLSTMLayer_sup_back_V0(self.input_size,self.word_embed_size, - self.hidden_size, - left2right=True, - bias=bias, - device=self.device, - debug=self.debug, - skip_before_head=skip_before_head) - if self.bidirectional: - self.encoder_back = LatticeLSTMLayer_sup_back_V0(self.input_size, - self.word_embed_size, self.hidden_size, - left2right=False, - bias=bias, - device=self.device, - debug=self.debug, - skip_before_head=skip_before_head) - - self.output = nn.Linear(self.hidden_size * (2 if self.bidirectional else 1), self.label_size) - self.crf = ConditionalRandomField(label_size, True) - - self.crf.trans_m = nn.Parameter(torch.zeros(size=[label_size, label_size],requires_grad=True)) - if self.crf.include_start_end_trans: - self.crf.start_scores = nn.Parameter(torch.zeros(size=[label_size],requires_grad=True)) - self.crf.end_scores = 
nn.Parameter(torch.zeros(size=[label_size],requires_grad=True)) - - self.loss_func = nn.CrossEntropyLoss() - self.embed_dropout = nn.Dropout(embed_dropout) - self.output_dropout = nn.Dropout(output_dropout) - - def forward(self, chars, bigrams, seq_len, target, - skips_l2r_source, skips_l2r_word, lexicon_count, - skips_r2l_source=None, skips_r2l_word=None, lexicon_count_back=None): - # print('skips_l2r_word_id:{}'.format(skips_l2r_word.size())) - batch = chars.size(0) - max_seq_len = chars.size(1) - # max_lexicon_count = skips_l2r_word.size(2) - - - embed_char = self.char_embed(chars) - if self.use_bigram: - - embed_bigram = self.bigram_embed(bigrams) - - embedding = torch.cat([embed_char, embed_bigram], dim=-1) - else: - - embedding = embed_char - - - embed_nonword = self.embed_dropout(embedding) - - # skips_l2r_word = torch.reshape(skips_l2r_word,shape=[batch,-1]) - embed_word = self.word_embed(skips_l2r_word) - embed_word = self.embed_dropout(embed_word) - # embed_word = torch.reshape(embed_word,shape=[batch,max_seq_len,max_lexicon_count,-1]) - - - encoded_h, encoded_c = self.encoder(embed_nonword, seq_len, skips_l2r_source, embed_word, lexicon_count) - - if self.bidirectional: - embed_word_back = self.word_embed(skips_r2l_word) - embed_word_back = self.embed_dropout(embed_word_back) - encoded_h_back, encoded_c_back = self.encoder_back(embed_nonword, seq_len, skips_r2l_source, - embed_word_back, lexicon_count_back) - encoded_h = torch.cat([encoded_h, encoded_h_back], dim=-1) - - encoded_h = self.output_dropout(encoded_h) - - pred = self.output(encoded_h) - - mask = seq_len_to_mask(seq_len) - - if self.training: - loss = self.crf(pred, target, mask) - return {'loss': loss} - else: - pred, path = self.crf.viterbi_decode(pred, mask) - return {'pred': pred} - - # batch_size, sent_len = pred.shape[0], pred.shape[1] - # loss = self.loss_func(pred.reshape(batch_size * sent_len, -1), target.reshape(batch_size * sent_len)) - # return {'pred':pred,'loss':loss} - -class LatticeLSTM_SeqLabel_V1(nn.Module): - def __init__(self, char_embed, bigram_embed, word_embed, hidden_size, label_size, bias=True, bidirectional=False, - device=None, embed_dropout=0, output_dropout=0, skip_batch_first=True,debug=False, - skip_before_head=False,use_bigram=True,vocabs=None,gaz_dropout=0): - if device is None: - self.device = torch.device('cpu') - else: - self.device = torch.device(device) - from modules import LatticeLSTMLayer_sup_back_V1 - super().__init__() - self.count = 0 - self.debug = debug - self.skip_batch_first = skip_batch_first - self.char_embed_size = char_embed.embedding.weight.size(1) - self.bigram_embed_size = bigram_embed.embedding.weight.size(1) - self.word_embed_size = word_embed.embedding.weight.size(1) - self.hidden_size = hidden_size - self.label_size = label_size - self.bidirectional = bidirectional - self.use_bigram = use_bigram - self.vocabs = vocabs - - if self.use_bigram: - self.input_size = self.char_embed_size + self.bigram_embed_size - else: - self.input_size = self.char_embed_size - - self.char_embed = char_embed - self.bigram_embed = bigram_embed - self.word_embed = word_embed - self.encoder = LatticeLSTMLayer_sup_back_V1(self.input_size,self.word_embed_size, - self.hidden_size, - left2right=True, - bias=bias, - device=self.device, - debug=self.debug, - skip_before_head=skip_before_head) - if self.bidirectional: - self.encoder_back = LatticeLSTMLayer_sup_back_V1(self.input_size, - self.word_embed_size, self.hidden_size, - left2right=False, - bias=bias, - device=self.device, - 
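-                                                         # the backward encoder mirrors the
-                                                         # forward one but consumes the
-                                                         # right-to-left skip edges, making
-                                                         # the lattice bidirectional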
debug=self.debug, - skip_before_head=skip_before_head) - - self.output = nn.Linear(self.hidden_size * (2 if self.bidirectional else 1), self.label_size) - self.crf = ConditionalRandomField(label_size, True) - - self.crf.trans_m = nn.Parameter(torch.zeros(size=[label_size, label_size],requires_grad=True)) - if self.crf.include_start_end_trans: - self.crf.start_scores = nn.Parameter(torch.zeros(size=[label_size],requires_grad=True)) - self.crf.end_scores = nn.Parameter(torch.zeros(size=[label_size],requires_grad=True)) - - self.loss_func = nn.CrossEntropyLoss() - self.embed_dropout = nn.Dropout(embed_dropout) - self.gaz_dropout = nn.Dropout(gaz_dropout) - self.output_dropout = nn.Dropout(output_dropout) - - def forward(self, chars, bigrams, seq_len, target, - skips_l2r_source, skips_l2r_word, lexicon_count, - skips_r2l_source=None, skips_r2l_word=None, lexicon_count_back=None): - - batch = chars.size(0) - max_seq_len = chars.size(1) - - - - embed_char = self.char_embed(chars) - if self.use_bigram: - - embed_bigram = self.bigram_embed(bigrams) - - embedding = torch.cat([embed_char, embed_bigram], dim=-1) - else: - - embedding = embed_char - - - embed_nonword = self.embed_dropout(embedding) - - # skips_l2r_word = torch.reshape(skips_l2r_word,shape=[batch,-1]) - embed_word = self.word_embed(skips_l2r_word) - embed_word = self.embed_dropout(embed_word) - - - - encoded_h, encoded_c = self.encoder(embed_nonword, seq_len, skips_l2r_source, embed_word, lexicon_count) - - if self.bidirectional: - embed_word_back = self.word_embed(skips_r2l_word) - embed_word_back = self.embed_dropout(embed_word_back) - encoded_h_back, encoded_c_back = self.encoder_back(embed_nonword, seq_len, skips_r2l_source, - embed_word_back, lexicon_count_back) - encoded_h = torch.cat([encoded_h, encoded_h_back], dim=-1) - - encoded_h = self.output_dropout(encoded_h) - - pred = self.output(encoded_h) - - mask = seq_len_to_mask(seq_len) - - if self.training: - loss = self.crf(pred, target, mask) - return {'loss': loss} - else: - pred, path = self.crf.viterbi_decode(pred, mask) - return {'pred': pred} - - -class LSTM_SeqLabel(nn.Module): - def __init__(self, char_embed, bigram_embed, word_embed, hidden_size, label_size, bias=True, - bidirectional=False, device=None, embed_dropout=0, output_dropout=0,use_bigram=True): - - if device is None: - self.device = torch.device('cpu') - else: - self.device = torch.device(device) - super().__init__() - self.char_embed_size = char_embed.embedding.weight.size(1) - self.bigram_embed_size = bigram_embed.embedding.weight.size(1) - self.word_embed_size = word_embed.embedding.weight.size(1) - self.hidden_size = hidden_size - self.label_size = label_size - self.bidirectional = bidirectional - self.use_bigram = use_bigram - - self.char_embed = char_embed - self.bigram_embed = bigram_embed - self.word_embed = word_embed - - if self.use_bigram: - self.input_size = self.char_embed_size + self.bigram_embed_size - else: - self.input_size = self.char_embed_size - - self.encoder = LSTM(self.input_size, self.hidden_size, - bidirectional=self.bidirectional) - - better_init_rnn(self.encoder.lstm) - - - self.output = nn.Linear(self.hidden_size * (2 if self.bidirectional else 1), self.label_size) - - self.debug = True - self.loss_func = nn.CrossEntropyLoss() - self.embed_dropout = nn.Dropout(embed_dropout) - self.output_dropout = nn.Dropout(output_dropout) - self.crf = ConditionalRandomField(label_size, True) - - def forward(self, chars, bigrams, seq_len, target): - if self.debug: - - 
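-            # debug mode only prints the input shapes and then aborts via
-            # exit(1208) below; it never trains.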
print_info('chars:{}'.format(chars.size())) - print_info('bigrams:{}'.format(bigrams.size())) - print_info('seq_len:{}'.format(seq_len.size())) - print_info('target:{}'.format(target.size())) - embed_char = self.char_embed(chars) - - if self.use_bigram: - - embed_bigram = self.bigram_embed(bigrams) - - embedding = torch.cat([embed_char, embed_bigram], dim=-1) - else: - - embedding = embed_char - - embedding = self.embed_dropout(embedding) - - encoded_h, encoded_c = self.encoder(embedding, seq_len) - - encoded_h = self.output_dropout(encoded_h) - - pred = self.output(encoded_h) - - mask = seq_len_to_mask(seq_len) - - # pred = self.crf(pred) - - # batch_size, sent_len = pred.shape[0], pred.shape[1] - # loss = self.loss_func(pred.reshape(batch_size * sent_len, -1), target.reshape(batch_size * sent_len)) - if self.debug: - print('debug mode:finish') - exit(1208) - if self.training: - loss = self.crf(pred, target, mask) - return {'loss': loss} - else: - pred, path = self.crf.viterbi_decode(pred, mask) - return {'pred': pred} diff --git a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/modules.py b/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/modules.py deleted file mode 100644 index 70182250..00000000 --- a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/modules.py +++ /dev/null @@ -1,638 +0,0 @@ -import torch.nn as nn -import torch -from fastNLP.core.utils import seq_len_to_mask -from utils import better_init_rnn -import numpy as np - - -class WordLSTMCell_yangjie(nn.Module): - - """A basic LSTM cell.""" - - def __init__(self, input_size, hidden_size, use_bias=True,debug=False, left2right=True): - """ - Most parts are copied from torch.nn.LSTMCell. - """ - - super().__init__() - self.left2right = left2right - self.debug = debug - self.input_size = input_size - self.hidden_size = hidden_size - self.use_bias = use_bias - self.weight_ih = nn.Parameter( - torch.FloatTensor(input_size, 3 * hidden_size)) - self.weight_hh = nn.Parameter( - torch.FloatTensor(hidden_size, 3 * hidden_size)) - if use_bias: - self.bias = nn.Parameter(torch.FloatTensor(3 * hidden_size)) - else: - self.register_parameter('bias', None) - self.reset_parameters() - - def reset_parameters(self): - """ - Initialize parameters following the way proposed in the paper. - """ - nn.init.orthogonal(self.weight_ih.data) - weight_hh_data = torch.eye(self.hidden_size) - weight_hh_data = weight_hh_data.repeat(1, 3) - with torch.no_grad(): - self.weight_hh.set_(weight_hh_data) - # The bias is just set to zero vectors. - if self.use_bias: - nn.init.constant(self.bias.data, val=0) - - def forward(self, input_, hx): - """ - Args: - input_: A (batch, input_size) tensor containing input - features. - hx: A tuple (h_0, c_0), which contains the initial hidden - and cell state, where the size of both states is - (batch, hidden_size). - Returns: - h_1, c_1: Tensors containing the next hidden and cell state. 
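-            (Note: this variant actually returns only c_1; the word cell
-            contributes a cell state to the lattice and keeps no hidden state.)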
- """ - - h_0, c_0 = hx - - - - batch_size = h_0.size(0) - bias_batch = (self.bias.unsqueeze(0).expand(batch_size, *self.bias.size())) - wh_b = torch.addmm(bias_batch, h_0, self.weight_hh) - wi = torch.mm(input_, self.weight_ih) - f, i, g = torch.split(wh_b + wi, split_size_or_sections=self.hidden_size, dim=1) - c_1 = torch.sigmoid(f)*c_0 + torch.sigmoid(i)*torch.tanh(g) - - return c_1 - - def __repr__(self): - s = '{name}({input_size}, {hidden_size})' - return s.format(name=self.__class__.__name__, **self.__dict__) - - -class MultiInputLSTMCell_V0(nn.Module): - def __init__(self, char_input_size, hidden_size, use_bias=True,debug=False): - super().__init__() - self.char_input_size = char_input_size - self.hidden_size = hidden_size - self.use_bias = use_bias - - self.weight_ih = nn.Parameter( - torch.FloatTensor(char_input_size, 3 * hidden_size) - ) - - self.weight_hh = nn.Parameter( - torch.FloatTensor(hidden_size, 3 * hidden_size) - ) - - self.alpha_weight_ih = nn.Parameter( - torch.FloatTensor(char_input_size, hidden_size) - ) - - self.alpha_weight_hh = nn.Parameter( - torch.FloatTensor(hidden_size, hidden_size) - ) - - if self.use_bias: - self.bias = nn.Parameter(torch.FloatTensor(3 * hidden_size)) - self.alpha_bias = nn.Parameter(torch.FloatTensor(hidden_size)) - else: - self.register_parameter('bias', None) - self.register_parameter('alpha_bias', None) - - self.debug = debug - self.reset_parameters() - - def reset_parameters(self): - """ - Initialize parameters following the way proposed in the paper. - """ - nn.init.orthogonal(self.weight_ih.data) - nn.init.orthogonal(self.alpha_weight_ih.data) - - weight_hh_data = torch.eye(self.hidden_size) - weight_hh_data = weight_hh_data.repeat(1, 3) - with torch.no_grad(): - self.weight_hh.set_(weight_hh_data) - - alpha_weight_hh_data = torch.eye(self.hidden_size) - alpha_weight_hh_data = alpha_weight_hh_data.repeat(1, 1) - with torch.no_grad(): - self.alpha_weight_hh.set_(alpha_weight_hh_data) - - # The bias is just set to zero vectors. 
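-        # weight_hh starts as a tiled identity, so the recurrence initially passes
-        # the previous state through unchanged; input projections are orthogonal
-        # and biases zero, following the paper's scheme noted in the docstring.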
- if self.use_bias: - nn.init.constant_(self.bias.data, val=0) - nn.init.constant_(self.alpha_bias.data, val=0) - - def forward(self, inp, skip_c, skip_count, hx): - ''' - - :param inp: chars B * hidden - :param skip_c: 由跳边得到的c, B * X * hidden - :param skip_count: 这个batch中每个example中当前位置的跳边的数量,用于mask - :param hx: - :return: - ''' - max_skip_count = torch.max(skip_count).item() - - - - if True: - h_0, c_0 = hx - batch_size = h_0.size(0) - - bias_batch = (self.bias.unsqueeze(0).expand(batch_size, *self.bias.size())) - - wi = torch.matmul(inp, self.weight_ih) - wh = torch.matmul(h_0, self.weight_hh) - - - - i, o, g = torch.split(wh + wi + bias_batch, split_size_or_sections=self.hidden_size, dim=1) - - i = torch.sigmoid(i).unsqueeze(1) - o = torch.sigmoid(o).unsqueeze(1) - g = torch.tanh(g).unsqueeze(1) - - - - alpha_wi = torch.matmul(inp, self.alpha_weight_ih) - alpha_wi.unsqueeze_(1) - - # alpha_wi = alpha_wi.expand(1,skip_count,self.hidden_size) - alpha_wh = torch.matmul(skip_c, self.alpha_weight_hh) - - alpha_bias_batch = self.alpha_bias.unsqueeze(0) - - alpha = torch.sigmoid(alpha_wi + alpha_wh + alpha_bias_batch) - - skip_mask = seq_len_to_mask(skip_count,max_len=skip_c.size()[1]) - - skip_mask = 1 - skip_mask - - - skip_mask = skip_mask.unsqueeze(-1).expand(*skip_mask.size(), self.hidden_size) - - skip_mask = (skip_mask).float()*1e20 - - alpha = alpha - skip_mask - - alpha = torch.exp(torch.cat([i, alpha], dim=1)) - - - - alpha_sum = torch.sum(alpha, dim=1, keepdim=True) - - alpha = torch.div(alpha, alpha_sum) - - merge_i_c = torch.cat([g, skip_c], dim=1) - - c_1 = merge_i_c * alpha - - c_1 = c_1.sum(1, keepdim=True) - # h_1 = o * c_1 - h_1 = o * torch.tanh(c_1) - - return h_1.squeeze(1), c_1.squeeze(1) - - else: - - h_0, c_0 = hx - batch_size = h_0.size(0) - - bias_batch = (self.bias.unsqueeze(0).expand(batch_size, *self.bias.size())) - - wi = torch.matmul(inp, self.weight_ih) - wh = torch.matmul(h_0, self.weight_hh) - - i, o, g = torch.split(wh + wi + bias_batch, split_size_or_sections=self.hidden_size, dim=1) - - i = torch.sigmoid(i).unsqueeze(1) - o = torch.sigmoid(o).unsqueeze(1) - g = torch.tanh(g).unsqueeze(1) - - c_1 = g - h_1 = o * c_1 - - return h_1,c_1 - -class MultiInputLSTMCell_V1(nn.Module): - def __init__(self, char_input_size, hidden_size, use_bias=True,debug=False): - super().__init__() - self.char_input_size = char_input_size - self.hidden_size = hidden_size - self.use_bias = use_bias - - self.weight_ih = nn.Parameter( - torch.FloatTensor(char_input_size, 3 * hidden_size) - ) - - self.weight_hh = nn.Parameter( - torch.FloatTensor(hidden_size, 3 * hidden_size) - ) - - self.alpha_weight_ih = nn.Parameter( - torch.FloatTensor(char_input_size, hidden_size) - ) - - self.alpha_weight_hh = nn.Parameter( - torch.FloatTensor(hidden_size, hidden_size) - ) - - if self.use_bias: - self.bias = nn.Parameter(torch.FloatTensor(3 * hidden_size)) - self.alpha_bias = nn.Parameter(torch.FloatTensor(hidden_size)) - else: - self.register_parameter('bias', None) - self.register_parameter('alpha_bias', None) - - self.debug = debug - self.reset_parameters() - - def reset_parameters(self): - """ - Initialize parameters following the way proposed in the paper. 
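The `exp`/`sum` block in the forward above is a hand-rolled softmax over the input gate plus every matched lexicon cell, where padded lexicon slots are pushed to near-zero weight by subtracting `1e20` from their logits. An equivalent formulation with `masked_fill` and `torch.softmax` (function and argument names are illustrative):

```python
import torch

def lexicon_softmax(i_gate, alpha_logits, skip_count):
    # i_gate: (B, 1, H) input-gate logits; alpha_logits: (B, X, H) per-lexicon-cell logits
    # skip_count: (B,) number of real lexicon matches per example
    valid = (torch.arange(alpha_logits.size(1), device=skip_count.device)
             .unsqueeze(0) < skip_count.unsqueeze(1))                      # (B, X)
    alpha_logits = alpha_logits.masked_fill(~valid.unsqueeze(-1), -1e20)   # padded slots -> weight ~ 0
    return torch.softmax(torch.cat([i_gate, alpha_logits], dim=1), dim=1)  # (B, 1+X, H), sums to 1 on dim 1
```

The resulting weights play the role of `alpha` above: the convex combination of `g` with the lexicon cell states then yields `c_1`.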
- """ - nn.init.orthogonal(self.weight_ih.data) - nn.init.orthogonal(self.alpha_weight_ih.data) - - weight_hh_data = torch.eye(self.hidden_size) - weight_hh_data = weight_hh_data.repeat(1, 3) - with torch.no_grad(): - self.weight_hh.set_(weight_hh_data) - - alpha_weight_hh_data = torch.eye(self.hidden_size) - alpha_weight_hh_data = alpha_weight_hh_data.repeat(1, 1) - with torch.no_grad(): - self.alpha_weight_hh.set_(alpha_weight_hh_data) - - # The bias is just set to zero vectors. - if self.use_bias: - nn.init.constant_(self.bias.data, val=0) - nn.init.constant_(self.alpha_bias.data, val=0) - - def forward(self, inp, skip_c, skip_count, hx): - ''' - - :param inp: chars B * hidden - :param skip_c: 由跳边得到的c, B * X * hidden - :param skip_count: 这个batch中每个example中当前位置的跳边的数量,用于mask - :param hx: - :return: - ''' - max_skip_count = torch.max(skip_count).item() - - - - if True: - h_0, c_0 = hx - batch_size = h_0.size(0) - - bias_batch = (self.bias.unsqueeze(0).expand(batch_size, *self.bias.size())) - - wi = torch.matmul(inp, self.weight_ih) - wh = torch.matmul(h_0, self.weight_hh) - - - i, o, g = torch.split(wh + wi + bias_batch, split_size_or_sections=self.hidden_size, dim=1) - - i = torch.sigmoid(i).unsqueeze(1) - o = torch.sigmoid(o).unsqueeze(1) - g = torch.tanh(g).unsqueeze(1) - - - - ##basic lstm start - - f = 1 - i - c_1_basic = f*c_0.unsqueeze(1) + i*g - c_1_basic = c_1_basic.squeeze(1) - - - - - - alpha_wi = torch.matmul(inp, self.alpha_weight_ih) - alpha_wi.unsqueeze_(1) - - - alpha_wh = torch.matmul(skip_c, self.alpha_weight_hh) - - alpha_bias_batch = self.alpha_bias.unsqueeze(0) - - alpha = torch.sigmoid(alpha_wi + alpha_wh + alpha_bias_batch) - - skip_mask = seq_len_to_mask(skip_count,max_len=skip_c.size()[1]).float() - - skip_mask = 1 - skip_mask - - - skip_mask = skip_mask.unsqueeze(-1).expand(*skip_mask.size(), self.hidden_size) - - skip_mask = (skip_mask).float()*1e20 - - alpha = alpha - skip_mask - - alpha = torch.exp(torch.cat([i, alpha], dim=1)) - - - - alpha_sum = torch.sum(alpha, dim=1, keepdim=True) - - alpha = torch.div(alpha, alpha_sum) - - merge_i_c = torch.cat([g, skip_c], dim=1) - - c_1 = merge_i_c * alpha - - c_1 = c_1.sum(1, keepdim=True) - # h_1 = o * c_1 - c_1 = c_1.squeeze(1) - count_select = (skip_count != 0).float().unsqueeze(-1) - - - - - c_1 = c_1*count_select + c_1_basic*(1-count_select) - - - o = o.squeeze(1) - h_1 = o * torch.tanh(c_1) - - return h_1, c_1 - -class LatticeLSTMLayer_sup_back_V0(nn.Module): - def __init__(self, char_input_size, word_input_size, hidden_size, left2right, - bias=True,device=None,debug=False,skip_before_head=False): - super().__init__() - - self.skip_before_head = skip_before_head - - self.hidden_size = hidden_size - - self.char_cell = MultiInputLSTMCell_V0(char_input_size, hidden_size, bias,debug) - - self.word_cell = WordLSTMCell_yangjie(word_input_size,hidden_size,bias,debug=self.debug) - - self.word_input_size = word_input_size - self.left2right = left2right - self.bias = bias - self.device = device - self.debug = debug - - def forward(self, inp, seq_len, skip_sources, skip_words, skip_count, init_state=None): - ''' - - :param inp: batch * seq_len * embedding, chars - :param seq_len: batch, length of chars - :param skip_sources: batch * seq_len * X, 跳边的起点 - :param skip_words: batch * seq_len * X * embedding, 跳边的词 - :param lexicon_count: batch * seq_len, count of lexicon per example per position - :param init_state: the hx of rnn - :return: - ''' - - - if self.left2right: - - max_seq_len = max(seq_len) - batch_size = inp.size(0) 
- c_ = torch.zeros(size=[batch_size, 1, self.hidden_size], requires_grad=True).to(self.device) - h_ = torch.zeros(size=[batch_size, 1, self.hidden_size], requires_grad=True).to(self.device) - - for i in range(max_seq_len): - max_lexicon_count = max(torch.max(skip_count[:, i]).item(), 1) - h_0, c_0 = h_[:, i, :], c_[:, i, :] - - skip_word_flat = skip_words[:, i, :max_lexicon_count].contiguous() - - skip_word_flat = skip_word_flat.view(batch_size*max_lexicon_count,self.word_input_size) - skip_source_flat = skip_sources[:, i, :max_lexicon_count].contiguous().view(batch_size, max_lexicon_count) - - - index_0 = torch.tensor(range(batch_size)).unsqueeze(1).expand(batch_size,max_lexicon_count) - index_1 = skip_source_flat - - if not self.skip_before_head: - c_x = c_[[index_0, index_1+1]] - h_x = h_[[index_0, index_1+1]] - else: - c_x = c_[[index_0,index_1]] - h_x = h_[[index_0,index_1]] - - c_x_flat = c_x.view(batch_size*max_lexicon_count,self.hidden_size) - h_x_flat = h_x.view(batch_size*max_lexicon_count,self.hidden_size) - - - - - c_1_flat = self.word_cell(skip_word_flat,(h_x_flat,c_x_flat)) - - c_1_skip = c_1_flat.view(batch_size,max_lexicon_count,self.hidden_size) - - h_1,c_1 = self.char_cell(inp[:,i,:],c_1_skip,skip_count[:,i],(h_0,c_0)) - - - h_ = torch.cat([h_,h_1.unsqueeze(1)],dim=1) - c_ = torch.cat([c_, c_1.unsqueeze(1)], dim=1) - - return h_[:,1:],c_[:,1:] - else: - mask_for_seq_len = seq_len_to_mask(seq_len) - - max_seq_len = max(seq_len) - batch_size = inp.size(0) - c_ = torch.zeros(size=[batch_size, 1, self.hidden_size], requires_grad=True).to(self.device) - h_ = torch.zeros(size=[batch_size, 1, self.hidden_size], requires_grad=True).to(self.device) - - for i in reversed(range(max_seq_len)): - max_lexicon_count = max(torch.max(skip_count[:, i]).item(), 1) - - - - h_0, c_0 = h_[:, 0, :], c_[:, 0, :] - - skip_word_flat = skip_words[:, i, :max_lexicon_count].contiguous() - - skip_word_flat = skip_word_flat.view(batch_size*max_lexicon_count,self.word_input_size) - skip_source_flat = skip_sources[:, i, :max_lexicon_count].contiguous().view(batch_size, max_lexicon_count) - - - index_0 = torch.tensor(range(batch_size)).unsqueeze(1).expand(batch_size,max_lexicon_count) - index_1 = skip_source_flat-i - - if not self.skip_before_head: - c_x = c_[[index_0, index_1-1]] - h_x = h_[[index_0, index_1-1]] - else: - c_x = c_[[index_0,index_1]] - h_x = h_[[index_0,index_1]] - - c_x_flat = c_x.view(batch_size*max_lexicon_count,self.hidden_size) - h_x_flat = h_x.view(batch_size*max_lexicon_count,self.hidden_size) - - - - - c_1_flat = self.word_cell(skip_word_flat,(h_x_flat,c_x_flat)) - - c_1_skip = c_1_flat.view(batch_size,max_lexicon_count,self.hidden_size) - - h_1,c_1 = self.char_cell(inp[:,i,:],c_1_skip,skip_count[:,i],(h_0,c_0)) - - - h_1_mask = h_1.masked_fill(1-mask_for_seq_len[:,i].unsqueeze(-1),0) - c_1_mask = c_1.masked_fill(1 - mask_for_seq_len[:, i].unsqueeze(-1), 0) - - - h_ = torch.cat([h_1_mask.unsqueeze(1),h_],dim=1) - c_ = torch.cat([c_1_mask.unsqueeze(1),c_], dim=1) - - return h_[:,:-1],c_[:,:-1] - -class LatticeLSTMLayer_sup_back_V1(nn.Module): - # V1与V0的不同在于,V1在当前位置完全无lexicon匹配时,会采用普通的lstm计算公式, - # 普通的lstm计算公式与杨杰实现的lattice lstm在lexicon数量为0时不同 - def __init__(self, char_input_size, word_input_size, hidden_size, left2right, - bias=True,device=None,debug=False,skip_before_head=False): - super().__init__() - - self.debug = debug - - self.skip_before_head = skip_before_head - - self.hidden_size = hidden_size - - self.char_cell = MultiInputLSTMCell_V1(char_input_size, hidden_size, 
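The `c_[[index_0, index_1]]` expressions above are advanced indexing: two `(B, X)` integer tensors select, for every skip edge, the predecessor cell state out of the running `(B, T, H)` history. A self-contained sketch (`torch.arange` is the idiomatic replacement for `torch.tensor(range(batch_size))`):

```python
import torch

B, T, H, X = 2, 5, 4, 3                            # batch, stored steps, hidden, max matches
c_hist = torch.randn(B, T, H)                      # cell-state history, one slot per processed position
rows = torch.arange(B).unsqueeze(1).expand(B, X)   # (B, X): batch index of each skip edge
cols = torch.randint(0, T, (B, X))                 # (B, X): predecessor position of each skip edge
c_x = c_hist[rows, cols]                           # (B, X, H): gathered predecessor states
assert c_x.shape == (B, X, H)
```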
bias,debug) - - self.word_cell = WordLSTMCell_yangjie(word_input_size,hidden_size,bias,debug=self.debug) - - self.word_input_size = word_input_size - self.left2right = left2right - self.bias = bias - self.device = device - - def forward(self, inp, seq_len, skip_sources, skip_words, skip_count, init_state=None): - ''' - - :param inp: batch * seq_len * embedding, chars - :param seq_len: batch, length of chars - :param skip_sources: batch * seq_len * X, 跳边的起点 - :param skip_words: batch * seq_len * X * embedding_size, 跳边的词 - :param lexicon_count: batch * seq_len, - lexicon_count[i,j]为第i个例子以第j个位子为结尾匹配到的词的数量 - :param init_state: the hx of rnn - :return: - ''' - - - if self.left2right: - - max_seq_len = max(seq_len) - batch_size = inp.size(0) - c_ = torch.zeros(size=[batch_size, 1, self.hidden_size], requires_grad=True).to(self.device) - h_ = torch.zeros(size=[batch_size, 1, self.hidden_size], requires_grad=True).to(self.device) - - for i in range(max_seq_len): - max_lexicon_count = max(torch.max(skip_count[:, i]).item(), 1) - h_0, c_0 = h_[:, i, :], c_[:, i, :] - - #为了使rnn能够计算B*lexicon_count*embedding_size的张量,需要将其reshape成二维张量 - #为了匹配pytorch的[]取址方式,需要将reshape成二维张量 - - skip_word_flat = skip_words[:, i, :max_lexicon_count].contiguous() - - skip_word_flat = skip_word_flat.view(batch_size*max_lexicon_count,self.word_input_size) - skip_source_flat = skip_sources[:, i, :max_lexicon_count].contiguous().view(batch_size, max_lexicon_count) - - - index_0 = torch.tensor(range(batch_size)).unsqueeze(1).expand(batch_size,max_lexicon_count) - index_1 = skip_source_flat - - - if not self.skip_before_head: - c_x = c_[[index_0, index_1+1]] - h_x = h_[[index_0, index_1+1]] - else: - c_x = c_[[index_0,index_1]] - h_x = h_[[index_0,index_1]] - - c_x_flat = c_x.view(batch_size*max_lexicon_count,self.hidden_size) - h_x_flat = h_x.view(batch_size*max_lexicon_count,self.hidden_size) - - - - c_1_flat = self.word_cell(skip_word_flat,(h_x_flat,c_x_flat)) - - c_1_skip = c_1_flat.view(batch_size,max_lexicon_count,self.hidden_size) - - h_1,c_1 = self.char_cell(inp[:,i,:],c_1_skip,skip_count[:,i],(h_0,c_0)) - - - h_ = torch.cat([h_,h_1.unsqueeze(1)],dim=1) - c_ = torch.cat([c_, c_1.unsqueeze(1)], dim=1) - - return h_[:,1:],c_[:,1:] - else: - mask_for_seq_len = seq_len_to_mask(seq_len) - - max_seq_len = max(seq_len) - batch_size = inp.size(0) - c_ = torch.zeros(size=[batch_size, 1, self.hidden_size], requires_grad=True).to(self.device) - h_ = torch.zeros(size=[batch_size, 1, self.hidden_size], requires_grad=True).to(self.device) - - for i in reversed(range(max_seq_len)): - max_lexicon_count = max(torch.max(skip_count[:, i]).item(), 1) - - - h_0, c_0 = h_[:, 0, :], c_[:, 0, :] - - skip_word_flat = skip_words[:, i, :max_lexicon_count].contiguous() - - skip_word_flat = skip_word_flat.view(batch_size*max_lexicon_count,self.word_input_size) - skip_source_flat = skip_sources[:, i, :max_lexicon_count].contiguous().view(batch_size, max_lexicon_count) - - - index_0 = torch.tensor(range(batch_size)).unsqueeze(1).expand(batch_size,max_lexicon_count) - index_1 = skip_source_flat-i - - if not self.skip_before_head: - c_x = c_[[index_0, index_1-1]] - h_x = h_[[index_0, index_1-1]] - else: - c_x = c_[[index_0,index_1]] - h_x = h_[[index_0,index_1]] - - c_x_flat = c_x.view(batch_size*max_lexicon_count,self.hidden_size) - h_x_flat = h_x.view(batch_size*max_lexicon_count,self.hidden_size) - - - - - c_1_flat = self.word_cell(skip_word_flat,(h_x_flat,c_x_flat)) - - - - c_1_skip = c_1_flat.view(batch_size,max_lexicon_count,self.hidden_size) - - 
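One portability note before the loop finishes below: `LatticeLSTMLayer_sup_back_V0` inverts the sequence mask with `1 - mask`, which recent PyTorch versions reject for bool tensors, while the V1 code that follows uses `~`. The portable idiom for zeroing padded time steps:

```python
import torch

h_1 = torch.randn(3, 4)
valid = torch.tensor([True, True, False])          # the last example is padding at this step
h_1 = h_1.masked_fill(~valid.unsqueeze(-1), 0.0)   # rows of padded examples become zero
```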
h_1,c_1 = self.char_cell(inp[:,i,:],c_1_skip,skip_count[:,i],(h_0,c_0)) - - - h_1_mask = h_1.masked_fill(~ mask_for_seq_len[:,i].unsqueeze(-1),0) - c_1_mask = c_1.masked_fill(~ mask_for_seq_len[:, i].unsqueeze(-1), 0) - - - h_ = torch.cat([h_1_mask.unsqueeze(1),h_],dim=1) - c_ = torch.cat([c_1_mask.unsqueeze(1),c_], dim=1) - - - - return h_[:,:-1],c_[:,:-1] - - - - diff --git a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/pathes.py b/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/pathes.py deleted file mode 100644 index fe3f6162..00000000 --- a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/pathes.py +++ /dev/null @@ -1,24 +0,0 @@ - - -glove_100_path = 'en-glove-6b-100d' -glove_50_path = 'en-glove-6b-50d' -glove_200_path = '' -glove_300_path = 'en-glove-840b-300' -fasttext_path = 'en-fasttext' #300 -tencent_chinese_word_path = 'cn' # tencent 200 -fasttext_cn_path = 'cn-fasttext' # 300 -yangjie_rich_pretrain_unigram_path = '/remote-home/xnli/data/pretrain/chinese/gigaword_chn.all.a2b.uni.ite50.vec' -yangjie_rich_pretrain_bigram_path = '/remote-home/xnli/data/pretrain/chinese/gigaword_chn.all.a2b.bi.ite50.vec' -yangjie_rich_pretrain_word_path = '/remote-home/xnli/data/pretrain/chinese/ctb.50d.vec' - - -conll_2003_path = '/remote-home/xnli/data/corpus/multi_task/conll_2013/data_mine.pkl' -conllized_ontonote_path = '/remote-home/txsun/data/OntoNotes-5.0-NER-master/v12/english' -conllized_ontonote_pkl_path = '/remote-home/txsun/data/ontonotes5.pkl' -sst2_path = '/remote-home/xnli/data/corpus/text_classification/SST-2/' -# weibo_ner_path = '/remote-home/xnli/data/corpus/sequence_labelling/ner_weibo' -ontonote4ner_cn_path = '/remote-home/xnli/data/corpus/sequence_labelling/chinese_ner/OntoNote4NER' -msra_ner_cn_path = '/remote-home/xnli/data/corpus/sequence_labelling/chinese_ner/MSRANER' -resume_ner_path = '/remote-home/xnli/data/corpus/sequence_labelling/chinese_ner/ResumeNER' -weibo_ner_path = '/remote-home/xnli/data/corpus/sequence_labelling/chinese_ner/WeiboNER' -weibo_ner_old_path = '/remote-home/xnli/data/corpus/sequence_labelling/chinese_ner/WeiboNER_old' \ No newline at end of file diff --git a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/small.py b/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/small.py deleted file mode 100644 index c877d96f..00000000 --- a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/small.py +++ /dev/null @@ -1,126 +0,0 @@ -from utils_ import get_skip_path_trivial, Trie, get_skip_path -from load_data import load_yangjie_rich_pretrain_word_list, load_ontonotes4ner, equip_chinese_ner_with_skip -from pathes import * -from functools import partial -from fastNLP import cache_results -from fastNLP.embeddings.static_embedding import StaticEmbedding -import torch -import torch.nn as nn -import torch.nn.functional as F -from fastNLP.core.metrics import _bmes_tag_to_spans,_bmeso_tag_to_spans -from load_data import load_resume_ner - - -# embed = StaticEmbedding(None,embedding_dim=2) -# datasets,vocabs,embeddings = load_ontonotes4ner(ontonote4ner_cn_path,yangjie_rich_pretrain_unigram_path,yangjie_rich_pretrain_bigram_path, -# _refresh=True,index_token=False) -# -# w_list = load_yangjie_rich_pretrain_word_list(yangjie_rich_pretrain_word_path, -# _refresh=False) -# -# datasets,vocabs,embeddings = equip_chinese_ner_with_skip(datasets,vocabs,embeddings,w_list,yangjie_rich_pretrain_word_path, -# _refresh=True) -# - -def reverse_style(input_string): - target_position = input_string.index('[') - input_len = len(input_string) - 
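`small.py` re-implements Yang Jie's tag-to-span conversion: `get_yangjie_bmeso` below first builds bracketed strings such as `per[0,1]` and then rewrites them into fastNLP-style `(type, (start, end))` spans with an exclusive end. For well-formed BMES(O) sequences the whole round trip reduces to a direct decoder like this (a simplified sketch; the original is more lenient with malformed tags):

```python
def bmeso_to_spans(tags):
    # ['B-PER', 'E-PER', 'O', 'S-LOC'] -> [('per', (0, 2)), ('loc', (3, 4))]
    spans, start, label = [], None, None
    for i, tag in enumerate(tags):
        head, _, lab = tag.lower().partition('-')
        if head == 'b':
            start, label = i, lab
        elif head == 'm' and start is not None and lab == label:
            continue                                # still inside the current span
        elif head == 'e' and start is not None and lab == label:
            spans.append((label, (start, i + 1)))   # exclusive end, like transform_YJ_to_fastNLP
            start, label = None, None
        elif head == 's':
            spans.append((lab, (i, i + 1)))
            start, label = None, None
        else:                                       # 'O' or an inconsistent tag: drop any open span
            start, label = None, None
    return spans
```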
output_string = input_string[target_position:input_len] + input_string[0:target_position] - # print('in:{}.out:{}'.format(input_string, output_string)) - return output_string - - - - - -def get_yangjie_bmeso(label_list): - def get_ner_BMESO_yj(label_list): - # list_len = len(word_list) - # assert(list_len == len(label_list)), "word list size unmatch with label list" - list_len = len(label_list) - begin_label = 'b-' - end_label = 'e-' - single_label = 's-' - whole_tag = '' - index_tag = '' - tag_list = [] - stand_matrix = [] - for i in range(0, list_len): - # wordlabel = word_list[i] - current_label = label_list[i].lower() - if begin_label in current_label: - if index_tag != '': - tag_list.append(whole_tag + ',' + str(i - 1)) - whole_tag = current_label.replace(begin_label, "", 1) + '[' + str(i) - index_tag = current_label.replace(begin_label, "", 1) - - elif single_label in current_label: - if index_tag != '': - tag_list.append(whole_tag + ',' + str(i - 1)) - whole_tag = current_label.replace(single_label, "", 1) + '[' + str(i) - tag_list.append(whole_tag) - whole_tag = "" - index_tag = "" - elif end_label in current_label: - if index_tag != '': - tag_list.append(whole_tag + ',' + str(i)) - whole_tag = '' - index_tag = '' - else: - continue - if (whole_tag != '') & (index_tag != ''): - tag_list.append(whole_tag) - tag_list_len = len(tag_list) - - for i in range(0, tag_list_len): - if len(tag_list[i]) > 0: - tag_list[i] = tag_list[i] + ']' - insert_list = reverse_style(tag_list[i]) - stand_matrix.append(insert_list) - # print stand_matrix - return stand_matrix - - def transform_YJ_to_fastNLP(span): - span = span[1:] - span_split = span.split(']') - # print('span_list:{}'.format(span_split)) - span_type = span_split[1] - # print('span_split[0].split(','):{}'.format(span_split[0].split(','))) - if ',' in span_split[0]: - b, e = span_split[0].split(',') - else: - b = span_split[0] - e = b - - b = int(b) - e = int(e) - - e += 1 - - return (span_type, (b, e)) - yj_form = get_ner_BMESO_yj(label_list) - # print('label_list:{}'.format(label_list)) - # print('yj_from:{}'.format(yj_form)) - fastNLP_form = list(map(transform_YJ_to_fastNLP,yj_form)) - return fastNLP_form - - -# tag_list = ['O', 'B-singer', 'M-singer', 'E-singer', 'O', 'O'] -# span_list = get_ner_BMES(tag_list) -# print(span_list) -# yangjie_label_list = ['B-NAME', 'E-NAME', 'O', 'B-CONT', 'M-CONT', 'E-CONT', 'B-RACE', 'E-RACE', 'B-TITLE', 'M-TITLE', 'E-TITLE', 'B-EDU', 'M-EDU', 'E-EDU', 'B-ORG', 'M-ORG', 'E-ORG', 'M-NAME', 'B-PRO', 'M-PRO', 'E-PRO', 'S-RACE', 'S-NAME', 'B-LOC', 'M-LOC', 'E-LOC', 'M-RACE', 'S-ORG'] -# my_label_list = ['O', 'M-ORG', 'M-TITLE', 'B-TITLE', 'E-TITLE', 'B-ORG', 'E-ORG', 'M-EDU', 'B-NAME', 'E-NAME', 'B-EDU', 'E-EDU', 'M-NAME', 'M-PRO', 'M-CONT', 'B-PRO', 'E-PRO', 'B-CONT', 'E-CONT', 'M-LOC', 'B-RACE', 'E-RACE', 'S-NAME', 'B-LOC', 'E-LOC', 'M-RACE', 'S-RACE', 'S-ORG'] -# yangjie_label = set(yangjie_label_list) -# my_label = set(my_label_list) - -a = torch.tensor([0,2,0,3]) -b = (a==0) -print(b) -print(b.float()) -from fastNLP import RandomSampler - -# f = open('/remote-home/xnli/weight_debug/lattice_yangjie.pkl','rb') -# weight_dict = torch.load(f) -# print(weight_dict.keys()) -# for k,v in weight_dict.items(): -# print("{}:{}".format(k,v.size())) \ No newline at end of file diff --git a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/utils.py b/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/utils.py deleted file mode 100644 index 8c64c43c..00000000 --- 
a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/utils.py +++ /dev/null @@ -1,361 +0,0 @@ -import torch.nn.functional as F -import torch -import random -import numpy as np -from fastNLP import Const -from fastNLP import CrossEntropyLoss -from fastNLP import AccuracyMetric -from fastNLP import Tester -import os -from fastNLP import logger -def should_mask(name, t=''): - if 'bias' in name: - return False - if 'embedding' in name: - splited = name.split('.') - if splited[-1]!='weight': - return False - if 'embedding' in splited[-2]: - return False - if 'c0' in name: - return False - if 'h0' in name: - return False - - if 'output' in name and t not in name: - return False - - return True -def get_init_mask(model): - init_masks = {} - for name, param in model.named_parameters(): - if should_mask(name): - init_masks[name+'.mask'] = torch.ones_like(param) - # logger.info(init_masks[name+'.mask'].requires_grad) - - return init_masks - -def set_seed(seed): - random.seed(seed) - np.random.seed(seed+100) - torch.manual_seed(seed+200) - torch.cuda.manual_seed_all(seed+300) - -def get_parameters_size(model): - result = {} - for name,p in model.state_dict().items(): - result[name] = p.size() - - return result - -def prune_by_proportion_model(model,proportion,task): - # print('this time prune to ',proportion*100,'%') - for name, p in model.named_parameters(): - # print(name) - if not should_mask(name,task): - continue - - tensor = p.data.cpu().numpy() - index = np.nonzero(model.mask[task][name+'.mask'].data.cpu().numpy()) - # print(name,'alive count',len(index[0])) - alive = tensor[index] - # print('p and mask size:',p.size(),print(model.mask[task][name+'.mask'].size())) - percentile_value = np.percentile(abs(alive), (1 - proportion) * 100) - # tensor = p - # index = torch.nonzero(model.mask[task][name+'.mask']) - # # print('nonzero len',index) - # alive = tensor[index] - # print('alive size:',alive.shape) - # prune_by_proportion_model() - - # percentile_value = torch.topk(abs(alive), int((1-proportion)*len(index[0]))).values - # print('the',(1-proportion)*len(index[0]),'th big') - # print('threshold:',percentile_value) - - prune_by_threshold_parameter(p, model.mask[task][name+'.mask'],percentile_value) - # for - -def prune_by_proportion_model_global(model,proportion,task): - # print('this time prune to ',proportion*100,'%') - alive = None - for name, p in model.named_parameters(): - # print(name) - if not should_mask(name,task): - continue - - tensor = p.data.cpu().numpy() - index = np.nonzero(model.mask[task][name+'.mask'].data.cpu().numpy()) - # print(name,'alive count',len(index[0])) - if alive is None: - alive = tensor[index] - else: - alive = np.concatenate([alive,tensor[index]],axis=0) - - percentile_value = np.percentile(abs(alive), (1 - proportion) * 100) - - for name, p in model.named_parameters(): - if should_mask(name,task): - prune_by_threshold_parameter(p, model.mask[task][name+'.mask'],percentile_value) - - -def prune_by_threshold_parameter(p, mask, threshold): - p_abs = torch.abs(p) - - new_mask = (p_abs > threshold).float() - # print(mask) - mask[:]*=new_mask - - -def one_time_train_and_prune_single_task(trainer,PRUNE_PER, - optimizer_init_state_dict=None, - model_init_state_dict=None, - is_global=None, - ): - - - from fastNLP import Trainer - - - trainer.optimizer.load_state_dict(optimizer_init_state_dict) - trainer.model.load_state_dict(model_init_state_dict) - # print('metrics:',metrics.__dict__) - # print('loss:',loss.__dict__) - # print('trainer 
input:',task.train_set.get_input_name()) - # trainer = Trainer(model=model, train_data=task.train_set, dev_data=task.dev_set, loss=loss, metrics=metrics, - # optimizer=optimizer, n_epochs=EPOCH, batch_size=BATCH, device=device,callbacks=callbacks) - - - trainer.train(load_best_model=True) - # tester = Tester(task.train_set, model, metrics, BATCH, device=device, verbose=1,use_tqdm=False) - # print('FOR DEBUG: test train_set:',tester.test()) - # print('**'*20) - # if task.test_set: - # tester = Tester(task.test_set, model, metrics, BATCH, device=device, verbose=1) - # tester.test() - if is_global: - - prune_by_proportion_model_global(trainer.model, PRUNE_PER, trainer.model.now_task) - - else: - prune_by_proportion_model(trainer.model, PRUNE_PER, trainer.model.now_task) - - - -# def iterative_train_and_prune_single_task(get_trainer,ITER,PRUNE,is_global=False,save_path=None): -def iterative_train_and_prune_single_task(get_trainer,args,model,train_set,dev_set,test_set,device,save_path=None): - - ''' - - :param trainer: - :param ITER: - :param PRUNE: - :param is_global: - :param save_path: should be a dictionary which will be filled with mask and state dict - :return: - ''' - - - - from fastNLP import Trainer - import torch - import math - import copy - PRUNE = args.prune - ITER = args.iter - trainer = get_trainer(args,model,train_set,dev_set,test_set,device) - optimizer_init_state_dict = copy.deepcopy(trainer.optimizer.state_dict()) - model_init_state_dict = copy.deepcopy(trainer.model.state_dict()) - if save_path is not None: - if not os.path.exists(save_path): - os.makedirs(save_path) - # if not os.path.exists(os.path.join(save_path, 'model_init.pkl')): - # f = open(os.path.join(save_path, 'model_init.pkl'), 'wb') - # torch.save(trainer.model.state_dict(),f) - - - mask_count = 0 - model = trainer.model - task = trainer.model.now_task - for name, p in model.mask[task].items(): - mask_count += torch.sum(p).item() - init_mask_count = mask_count - logger.info('init mask count:{}'.format(mask_count)) - # logger.info('{}th traning mask count: {} / {} = {}%'.format(i, mask_count, init_mask_count, - # mask_count / init_mask_count * 100)) - - prune_per_iter = math.pow(PRUNE, 1 / ITER) - - - for i in range(ITER): - trainer = get_trainer(args,model,train_set,dev_set,test_set,device) - one_time_train_and_prune_single_task(trainer,prune_per_iter,optimizer_init_state_dict,model_init_state_dict) - if save_path is not None: - f = open(os.path.join(save_path,task+'_mask_'+str(i)+'.pkl'),'wb') - torch.save(model.mask[task],f) - - mask_count = 0 - for name, p in model.mask[task].items(): - mask_count += torch.sum(p).item() - logger.info('{}th traning mask count: {} / {} = {}%'.format(i,mask_count,init_mask_count,mask_count/init_mask_count*100)) - - -def get_appropriate_cuda(task_scale='s'): - if task_scale not in {'s','m','l'}: - logger.info('task scale wrong!') - exit(2) - import pynvml - pynvml.nvmlInit() - total_cuda_num = pynvml.nvmlDeviceGetCount() - for i in range(total_cuda_num): - logger.info(i) - handle = pynvml.nvmlDeviceGetHandleByIndex(i) # 这里的0是GPU id - memInfo = pynvml.nvmlDeviceGetMemoryInfo(handle) - utilizationInfo = pynvml.nvmlDeviceGetUtilizationRates(handle) - logger.info(i, 'mem:', memInfo.used / memInfo.total, 'util:',utilizationInfo.gpu) - if memInfo.used / memInfo.total < 0.15 and utilizationInfo.gpu <0.2: - logger.info(i,memInfo.used / memInfo.total) - return 'cuda:'+str(i) - - if task_scale=='s': - max_memory=2000 - elif task_scale=='m': - max_memory=6000 - else: - max_memory = 9000 - 
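The pruning utilities above implement iterative magnitude pruning: keep a 0/1 mask per weight tensor, compute a percentile threshold over the weights that are still alive, and shrink the masks. A compact sketch of one global step, mirroring `prune_by_proportion_model_global` (names are illustrative):

```python
import numpy as np
import torch

def global_magnitude_prune(named_params, masks, keep_ratio):
    # named_params / masks: dicts of same-shaped tensors; keep `keep_ratio` of alive weights
    alive = np.concatenate([
        p.detach().cpu().numpy()[np.nonzero(masks[name].cpu().numpy())]
        for name, p in named_params.items()
    ])
    threshold = np.percentile(np.abs(alive), (1.0 - keep_ratio) * 100.0)
    for name, p in named_params.items():
        masks[name] *= (p.detach().abs() > threshold).float().to(masks[name].device)
```

Calling this once per round with `keep_ratio = PRUNE ** (1 / ITER)`, after retraining from the saved initial weights, reproduces the schedule in `iterative_train_and_prune_single_task`.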
- max_id = -1 - for i in range(total_cuda_num): - handle = pynvml.nvmlDeviceGetHandleByIndex(0) # 这里的0是GPU id - memInfo = pynvml.nvmlDeviceGetMemoryInfo(handle) - utilizationInfo = pynvml.nvmlDeviceGetUtilizationRates(handle) - if max_memory < memInfo.free: - max_memory = memInfo.free - max_id = i - - if id == -1: - logger.info('no appropriate gpu, wait!') - exit(2) - - return 'cuda:'+str(max_id) - - # if memInfo.used / memInfo.total < 0.5: - # return - -def print_mask(mask_dict): - def seq_mul(*X): - res = 1 - for x in X: - res*=x - return res - - for name,p in mask_dict.items(): - total_size = seq_mul(*p.size()) - unmasked_size = len(np.nonzero(p)) - - print(name,':',unmasked_size,'/',total_size,'=',unmasked_size/total_size*100,'%') - - - print() - - -def check_words_same(dataset_1,dataset_2,field_1,field_2): - if len(dataset_1[field_1]) != len(dataset_2[field_2]): - logger.info('CHECK: example num not same!') - return False - - for i, words in enumerate(dataset_1[field_1]): - if len(dataset_1[field_1][i]) != len(dataset_2[field_2][i]): - logger.info('CHECK {} th example length not same'.format(i)) - logger.info('1:{}'.format(dataset_1[field_1][i])) - logger.info('2:'.format(dataset_2[field_2][i])) - return False - - # for j,w in enumerate(words): - # if dataset_1[field_1][i][j] != dataset_2[field_2][i][j]: - # print('CHECK', i, 'th example has words different!') - # print('1:',dataset_1[field_1][i]) - # print('2:',dataset_2[field_2][i]) - # return False - - logger.info('CHECK: totally same!') - - return True - -def get_now_time(): - import time - from datetime import datetime, timezone, timedelta - dt = datetime.utcnow() - # print(dt) - tzutc_8 = timezone(timedelta(hours=8)) - local_dt = dt.astimezone(tzutc_8) - result = ("_{}_{}_{}__{}_{}_{}".format(local_dt.year, local_dt.month, local_dt.day, local_dt.hour, local_dt.minute, - local_dt.second)) - - return result - - -def get_bigrams(words): - result = [] - for i,w in enumerate(words): - if i!=len(words)-1: - result.append(words[i]+words[i+1]) - else: - result.append(words[i]+'') - - return result - -def print_info(*inp,islog=False,sep=' '): - from fastNLP import logger - if islog: - print(*inp,sep=sep) - else: - inp = sep.join(map(str,inp)) - logger.info(inp) - -def better_init_rnn(rnn,coupled=False): - import torch.nn as nn - if coupled: - repeat_size = 3 - else: - repeat_size = 4 - # print(list(rnn.named_parameters())) - if hasattr(rnn,'num_layers'): - for i in range(rnn.num_layers): - nn.init.orthogonal(getattr(rnn,'weight_ih_l'+str(i)).data) - weight_hh_data = torch.eye(rnn.hidden_size) - weight_hh_data = weight_hh_data.repeat(1, repeat_size) - with torch.no_grad(): - getattr(rnn,'weight_hh_l'+str(i)).set_(weight_hh_data) - nn.init.constant(getattr(rnn,'bias_ih_l'+str(i)).data, val=0) - nn.init.constant(getattr(rnn,'bias_hh_l'+str(i)).data, val=0) - - if rnn.bidirectional: - for i in range(rnn.num_layers): - nn.init.orthogonal(getattr(rnn, 'weight_ih_l' + str(i)+'_reverse').data) - weight_hh_data = torch.eye(rnn.hidden_size) - weight_hh_data = weight_hh_data.repeat(1, repeat_size) - with torch.no_grad(): - getattr(rnn, 'weight_hh_l' + str(i)+'_reverse').set_(weight_hh_data) - nn.init.constant(getattr(rnn, 'bias_ih_l' + str(i)+'_reverse').data, val=0) - nn.init.constant(getattr(rnn, 'bias_hh_l' + str(i)+'_reverse').data, val=0) - - - else: - nn.init.orthogonal(rnn.weight_ih.data) - weight_hh_data = torch.eye(rnn.hidden_size) - weight_hh_data = weight_hh_data.repeat(repeat_size,1) - with torch.no_grad(): - 
rnn.weight_hh.set_(weight_hh_data) - # The bias is just set to zero vectors. - print('rnn param size:{},{}'.format(rnn.weight_hh.size(),type(rnn))) - if rnn.bias: - nn.init.constant(rnn.bias_ih.data, val=0) - nn.init.constant(rnn.bias_hh.data, val=0) - - # print(list(rnn.named_parameters())) - - - - - - diff --git a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/utils_.py b/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/utils_.py deleted file mode 100644 index dfc05486..00000000 --- a/reproduction/sequence_labelling/chinese_ner/LatticeLSTM/utils_.py +++ /dev/null @@ -1,405 +0,0 @@ -import collections -from fastNLP import cache_results -def get_skip_path(chars,w_trie): - sentence = ''.join(chars) - result = w_trie.get_lexicon(sentence) - - return result - -# @cache_results(_cache_fp='cache/get_skip_path_trivial',_refresh=True) -def get_skip_path_trivial(chars,w_list): - chars = ''.join(chars) - w_set = set(w_list) - result = [] - # for i in range(len(chars)): - # result.append([]) - for i in range(len(chars)-1): - for j in range(i+2,len(chars)+1): - if chars[i:j] in w_set: - result.append([i,j-1,chars[i:j]]) - - return result - - -class TrieNode: - def __init__(self): - self.children = collections.defaultdict(TrieNode) - self.is_w = False - -class Trie: - def __init__(self): - self.root = TrieNode() - - def insert(self,w): - - current = self.root - for c in w: - current = current.children[c] - - current.is_w = True - - def search(self,w): - ''' - - :param w: - :return: - -1:not w route - 0:subroute but not word - 1:subroute and word - ''' - current = self.root - - for c in w: - current = current.children.get(c) - - if current is None: - return -1 - - if current.is_w: - return 1 - else: - return 0 - - def get_lexicon(self,sentence): - result = [] - for i in range(len(sentence)): - current = self.root - for j in range(i, len(sentence)): - current = current.children.get(sentence[j]) - if current is None: - break - - if current.is_w: - result.append([i,j,sentence[i:j+1]]) - - return result - -from fastNLP.core.field import Padder -import numpy as np -import torch -from collections import defaultdict -class LatticeLexiconPadder(Padder): - - def __init__(self, pad_val=0, pad_val_dynamic=False,dynamic_offset=0, **kwargs): - ''' - - :param pad_val: - :param pad_val_dynamic: if True, pad_val is the seq_len - :param kwargs: - ''' - self.pad_val = pad_val - self.pad_val_dynamic = pad_val_dynamic - self.dynamic_offset = dynamic_offset - - def __call__(self, contents, field_name, field_ele_dtype, dim: int): - # 与autoPadder中 dim=2 的情况一样 - max_len = max(map(len, contents)) - - max_len = max(max_len,1)#avoid 0 size dim which causes cuda wrong - - max_word_len = max([max([len(content_ii) for content_ii in content_i]) for - content_i in contents]) - - max_word_len = max(max_word_len,1) - if self.pad_val_dynamic: - # print('pad_val_dynamic:{}'.format(max_len-1)) - - array = np.full((len(contents), max_len, max_word_len), max_len-1+self.dynamic_offset, - dtype=field_ele_dtype) - - else: - array = np.full((len(contents), max_len, max_word_len), self.pad_val, dtype=field_ele_dtype) - for i, content_i in enumerate(contents): - for j, content_ii in enumerate(content_i): - array[i, j, :len(content_ii)] = content_ii - array = torch.tensor(array) - - return array - -from fastNLP.core.metrics import MetricBase - -def get_yangjie_bmeso(label_list,ignore_labels=None): - def get_ner_BMESO_yj(label_list): - def reverse_style(input_string): - target_position = input_string.index('[') - input_len = 
len(input_string) - output_string = input_string[target_position:input_len] + input_string[0:target_position] - # print('in:{}.out:{}'.format(input_string, output_string)) - return output_string - - # list_len = len(word_list) - # assert(list_len == len(label_list)), "word list size unmatch with label list" - list_len = len(label_list) - begin_label = 'b-' - end_label = 'e-' - single_label = 's-' - whole_tag = '' - index_tag = '' - tag_list = [] - stand_matrix = [] - for i in range(0, list_len): - # wordlabel = word_list[i] - current_label = label_list[i].lower() - if begin_label in current_label: - if index_tag != '': - tag_list.append(whole_tag + ',' + str(i - 1)) - whole_tag = current_label.replace(begin_label, "", 1) + '[' + str(i) - index_tag = current_label.replace(begin_label, "", 1) - - elif single_label in current_label: - if index_tag != '': - tag_list.append(whole_tag + ',' + str(i - 1)) - whole_tag = current_label.replace(single_label, "", 1) + '[' + str(i) - tag_list.append(whole_tag) - whole_tag = "" - index_tag = "" - elif end_label in current_label: - if index_tag != '': - tag_list.append(whole_tag + ',' + str(i)) - whole_tag = '' - index_tag = '' - else: - continue - if (whole_tag != '') & (index_tag != ''): - tag_list.append(whole_tag) - tag_list_len = len(tag_list) - - for i in range(0, tag_list_len): - if len(tag_list[i]) > 0: - tag_list[i] = tag_list[i] + ']' - insert_list = reverse_style(tag_list[i]) - stand_matrix.append(insert_list) - # print stand_matrix - return stand_matrix - - def transform_YJ_to_fastNLP(span): - span = span[1:] - span_split = span.split(']') - # print('span_list:{}'.format(span_split)) - span_type = span_split[1] - # print('span_split[0].split(','):{}'.format(span_split[0].split(','))) - if ',' in span_split[0]: - b, e = span_split[0].split(',') - else: - b = span_split[0] - e = b - - b = int(b) - e = int(e) - - e += 1 - - return (span_type, (b, e)) - yj_form = get_ner_BMESO_yj(label_list) - # print('label_list:{}'.format(label_list)) - # print('yj_from:{}'.format(yj_form)) - fastNLP_form = list(map(transform_YJ_to_fastNLP,yj_form)) - return fastNLP_form -class SpanFPreRecMetric_YJ(MetricBase): - r""" - 别名::class:`fastNLP.SpanFPreRecMetric` :class:`fastNLP.core.metrics.SpanFPreRecMetric` - - 在序列标注问题中,以span的方式计算F, pre, rec. - 比如中文Part of speech中,会以character的方式进行标注,句子 `中国在亚洲` 对应的POS可能为(以BMES为例) - ['B-NN', 'E-NN', 'S-DET', 'B-NN', 'E-NN']。该metric就是为类似情况下的F1计算。 - 最后得到的metric结果为:: - - { - 'f': xxx, # 这里使用f考虑以后可以计算f_beta值 - 'pre': xxx, - 'rec':xxx - } - - 若only_gross=False, 即还会返回各个label的metric统计值:: - - { - 'f': xxx, - 'pre': xxx, - 'rec':xxx, - 'f-label': xxx, - 'pre-label': xxx, - 'rec-label':xxx, - ... - } - - :param tag_vocab: 标签的 :class:`~fastNLP.Vocabulary` 。支持的标签为"B"(没有label);或"B-xxx"(xxx为某种label,比如POS中的NN), - 在解码时,会将相同xxx的认为是同一个label,比如['B-NN', 'E-NN']会被合并为一个'NN'. - :param str pred: 用该key在evaluate()时从传入dict中取出prediction数据。 为None,则使用 `pred` 取数据 - :param str target: 用该key在evaluate()时从传入dict中取出target数据。 为None,则使用 `target` 取数据 - :param str seq_len: 用该key在evaluate()时从传入dict中取出sequence length数据。为None,则使用 `seq_len` 取数据。 - :param str encoding_type: 目前支持bio, bmes, bmeso, bioes - :param list ignore_labels: str 组成的list. 这个list中的class不会被用于计算。例如在POS tagging时传入['NN'],则不会计算'NN'这 - 个label - :param bool only_gross: 是否只计算总的f1, precision, recall的值;如果为False,不仅返回总的f1, pre, rec, 还会返回每个 - label的f1, pre, rec - :param str f_type: `micro` 或 `macro` . 
`micro` :通过先计算总体的TP,FN和FP的数量,再计算f, precision, recall; `macro` : - 分布计算每个类别的f, precision, recall,然后做平均(各类别f的权重相同) - :param float beta: f_beta分数, :math:`f_{beta} = \frac{(1 + {beta}^{2})*(pre*rec)}{({beta}^{2}*pre + rec)}` . - 常用为beta=0.5, 1, 2. 若为0.5则精确率的权重高于召回率;若为1,则两者平等;若为2,则召回率权重高于精确率。 - """ - def __init__(self, tag_vocab, pred=None, target=None, seq_len=None, encoding_type='bio', ignore_labels=None, - only_gross=True, f_type='micro', beta=1): - from fastNLP.core import Vocabulary - from fastNLP.core.metrics import _bmes_tag_to_spans,_bio_tag_to_spans,\ - _bioes_tag_to_spans,_bmeso_tag_to_spans - from collections import defaultdict - - encoding_type = encoding_type.lower() - - if not isinstance(tag_vocab, Vocabulary): - raise TypeError("tag_vocab can only be fastNLP.Vocabulary, not {}.".format(type(tag_vocab))) - if f_type not in ('micro', 'macro'): - raise ValueError("f_type only supports `micro` or `macro`', got {}.".format(f_type)) - - self.encoding_type = encoding_type - # print('encoding_type:{}'self.encoding_type) - if self.encoding_type == 'bmes': - self.tag_to_span_func = _bmes_tag_to_spans - elif self.encoding_type == 'bio': - self.tag_to_span_func = _bio_tag_to_spans - elif self.encoding_type == 'bmeso': - self.tag_to_span_func = _bmeso_tag_to_spans - elif self.encoding_type == 'bioes': - self.tag_to_span_func = _bioes_tag_to_spans - elif self.encoding_type == 'bmesoyj': - self.tag_to_span_func = get_yangjie_bmeso - # self.tag_to_span_func = - else: - raise ValueError("Only support 'bio', 'bmes', 'bmeso' type.") - - self.ignore_labels = ignore_labels - self.f_type = f_type - self.beta = beta - self.beta_square = self.beta ** 2 - self.only_gross = only_gross - - super().__init__() - self._init_param_map(pred=pred, target=target, seq_len=seq_len) - - self.tag_vocab = tag_vocab - - self._true_positives = defaultdict(int) - self._false_positives = defaultdict(int) - self._false_negatives = defaultdict(int) - - def evaluate(self, pred, target, seq_len): - from fastNLP.core.utils import _get_func_signature - """evaluate函数将针对一个批次的预测结果做评价指标的累计 - - :param pred: [batch, seq_len] 或者 [batch, seq_len, len(tag_vocab)], 预测的结果 - :param target: [batch, seq_len], 真实值 - :param seq_len: [batch] 文本长度标记 - :return: - """ - if not isinstance(pred, torch.Tensor): - raise TypeError(f"`pred` in {_get_func_signature(self.evaluate)} must be torch.Tensor," - f"got {type(pred)}.") - if not isinstance(target, torch.Tensor): - raise TypeError(f"`target` in {_get_func_signature(self.evaluate)} must be torch.Tensor," - f"got {type(target)}.") - - if not isinstance(seq_len, torch.Tensor): - raise TypeError(f"`seq_lens` in {_get_func_signature(self.evaluate)} must be torch.Tensor," - f"got {type(seq_len)}.") - - if pred.size() == target.size() and len(target.size()) == 2: - pass - elif len(pred.size()) == len(target.size()) + 1 and len(target.size()) == 2: - num_classes = pred.size(-1) - pred = pred.argmax(dim=-1) - if (target >= num_classes).any(): - raise ValueError("A gold label passed to SpanBasedF1Metric contains an " - "id >= {}, the number of classes.".format(num_classes)) - else: - raise RuntimeError(f"In {_get_func_signature(self.evaluate)}, when pred have " - f"size:{pred.size()}, target should have size: {pred.size()} or " - f"{pred.size()[:-1]}, got {target.size()}.") - - batch_size = pred.size(0) - pred = pred.tolist() - target = target.tolist() - for i in range(batch_size): - pred_tags = pred[i][:int(seq_len[i])] - gold_tags = target[i][:int(seq_len[i])] - - pred_str_tags = 
[self.tag_vocab.to_word(tag) for tag in pred_tags] - gold_str_tags = [self.tag_vocab.to_word(tag) for tag in gold_tags] - - pred_spans = self.tag_to_span_func(pred_str_tags, ignore_labels=self.ignore_labels) - gold_spans = self.tag_to_span_func(gold_str_tags, ignore_labels=self.ignore_labels) - - for span in pred_spans: - if span in gold_spans: - self._true_positives[span[0]] += 1 - gold_spans.remove(span) - else: - self._false_positives[span[0]] += 1 - for span in gold_spans: - self._false_negatives[span[0]] += 1 - - def get_metric(self, reset=True): - """get_metric函数将根据evaluate函数累计的评价指标统计量来计算最终的评价结果.""" - evaluate_result = {} - if not self.only_gross or self.f_type == 'macro': - tags = set(self._false_negatives.keys()) - tags.update(set(self._false_positives.keys())) - tags.update(set(self._true_positives.keys())) - f_sum = 0 - pre_sum = 0 - rec_sum = 0 - for tag in tags: - tp = self._true_positives[tag] - fn = self._false_negatives[tag] - fp = self._false_positives[tag] - f, pre, rec = self._compute_f_pre_rec(tp, fn, fp) - f_sum += f - pre_sum += pre - rec_sum += rec - if not self.only_gross and tag != '': # tag!=''防止无tag的情况 - f_key = 'f-{}'.format(tag) - pre_key = 'pre-{}'.format(tag) - rec_key = 'rec-{}'.format(tag) - evaluate_result[f_key] = f - evaluate_result[pre_key] = pre - evaluate_result[rec_key] = rec - - if self.f_type == 'macro': - evaluate_result['f'] = f_sum / len(tags) - evaluate_result['pre'] = pre_sum / len(tags) - evaluate_result['rec'] = rec_sum / len(tags) - - if self.f_type == 'micro': - f, pre, rec = self._compute_f_pre_rec(sum(self._true_positives.values()), - sum(self._false_negatives.values()), - sum(self._false_positives.values())) - evaluate_result['f'] = f - evaluate_result['pre'] = pre - evaluate_result['rec'] = rec - - if reset: - self._true_positives = defaultdict(int) - self._false_positives = defaultdict(int) - self._false_negatives = defaultdict(int) - - for key, value in evaluate_result.items(): - evaluate_result[key] = round(value, 6) - - return evaluate_result - - def _compute_f_pre_rec(self, tp, fn, fp): - """ - - :param tp: int, true positive - :param fn: int, false negative - :param fp: int, false positive - :return: (f, pre, rec) - """ - pre = tp / (fp + tp + 1e-13) - rec = tp / (fn + tp + 1e-13) - f = (1 + self.beta_square) * pre * rec / (self.beta_square * pre + rec + 1e-13) - - return f, pre, rec - - - - diff --git a/reproduction/sequence_labelling/chinese_ner/readme.md b/reproduction/sequence_labelling/chinese_ner/readme.md deleted file mode 100644 index 3a9d37d8..00000000 --- a/reproduction/sequence_labelling/chinese_ner/readme.md +++ /dev/null @@ -1,30 +0,0 @@ -使用以下中文NERPipe自动下载的统计数据 - -| MsraNERPipe | # of sents | # of tokens | -| ----------- | ---------- | ----------- | -| train | 41747 | 1954374 | -| dev | 4617 | 215505 | -| test | 4365 | 172601 | -| total | 50729 | 2342480 | -这里报道的统计数据,与[https://arxiv.org/pdf/1805.02023.pdf]()报道的一致 - - - -| WeiboNERPipe | # of sents | # of tokens | -| ------------ | ---------- | ----------- | -| train | 1350 | 73778 | -| dev | 270 | 14509 | -| test | 270 | 14842 | -| total | 1890 | 1890 | -这里报道的统计数据与[https://www.cs.cmu.edu/~ark/EMNLP-2015/proceedings/EMNLP/pdf/EMNLP064.pdf]()一致 - - - - -| PeopleDailyPipe | # of sents | # of tokens | -| --------------- | ---------- | ----------- | -| train | 50658 | 2169879 | -| dev | 4631 | 172601 | -| test | 68 | 2270 | -| total | 55357 | 2344750 | -这里使用的数据与[https://arxiv.org/pdf/1906.08101.pdf]()的数据是一致的 diff --git 
a/reproduction/sequence_labelling/chinese_ner/train_bert.py b/reproduction/sequence_labelling/chinese_ner/train_bert.py deleted file mode 100644 index b12c8f75..00000000 --- a/reproduction/sequence_labelling/chinese_ner/train_bert.py +++ /dev/null @@ -1,81 +0,0 @@ - - -""" -使用Bert进行中文命名实体识别 - -""" - -import sys - -sys.path.append('../../../') - -from torch import nn - -from fastNLP.embeddings import BertEmbedding, Embedding -from fastNLP import Trainer, Const -from fastNLP import BucketSampler, SpanFPreRecMetric, GradientClipCallback -from fastNLP.modules import MLP -from fastNLP.core.callback import WarmupCallback -from fastNLP import CrossEntropyLoss -from fastNLP.core.optimizer import AdamW -from fastNLP.io import MsraNERPipe, MsraNERLoader, WeiboNERPipe - -from fastNLP import cache_results - -encoding_type = 'bio' - -@cache_results('caches/weibo.pkl', _refresh=False) -def get_data(): - # data_dir = MsraNERLoader().download(dev_ratio=0) - # data = MsraNERPipe(encoding_type=encoding_type, target_pad_val=-100).process_from_file(data_dir) - data = WeiboNERPipe(encoding_type=encoding_type).process_from_file() - return data -data = get_data() -print(data) - -class BertCNNER(nn.Module): - def __init__(self, embed, tag_size): - super().__init__() - self.embedding = embed - self.tag_size = tag_size - self.mlp = MLP(size_layer=[self.embedding.embedding_dim, tag_size]) - - def forward(self, chars): - # batch_size, max_len = words.size() - chars = self.embedding(chars) - outputs = self.mlp(chars) - - return {Const.OUTPUT: outputs} - - def predict(self, chars): - # batch_size, max_len = words.size() - chars = self.embedding(chars) - outputs = self.mlp(chars) - - return {Const.OUTPUT: outputs} - -embed = BertEmbedding(data.get_vocab(Const.CHAR_INPUT), model_dir_or_name='cn-wwm-ext', - pool_method='first', requires_grad=True, layers='11', include_cls_sep=False, dropout=0.5) - -callbacks = [ - GradientClipCallback(clip_type='norm', clip_value=1), - WarmupCallback(warmup=0.1, schedule='linear') - ] - -model = BertCNNER(embed, len(data.vocabs[Const.TARGET])) -optimizer = AdamW(model.parameters(), lr=3e-5) - -for name, dataset in data.datasets.items(): - original_len = len(dataset) - dataset.drop(lambda x:x['seq_len']>256, inplace=True) - clipped_len = len(dataset) - print("Delete {} instances in {}.".format(original_len-clipped_len, name)) - -trainer = Trainer(train_data=data.datasets['train'], model=model, optimizer=optimizer, sampler=BucketSampler(), - device=0, dev_data=data.datasets['test'], batch_size=6, - metrics=SpanFPreRecMetric(tag_vocab=data.vocabs[Const.TARGET], encoding_type=encoding_type), - loss=CrossEntropyLoss(reduction='sum'), - callbacks=callbacks, num_workers=2, n_epochs=5, - check_code_level=0, update_every=3) -trainer.train() - diff --git a/reproduction/sequence_labelling/chinese_ner/train_cn_ner.py b/reproduction/sequence_labelling/chinese_ner/train_cn_ner.py deleted file mode 100644 index 58b32265..00000000 --- a/reproduction/sequence_labelling/chinese_ner/train_cn_ner.py +++ /dev/null @@ -1,136 +0,0 @@ -import sys -sys.path.append('../../..') - -from fastNLP.embeddings import StaticEmbedding - -from torch import nn -import torch -from fastNLP.embeddings.utils import get_embeddings -from fastNLP.modules import LSTM -from fastNLP.modules import ConditionalRandomField -from fastNLP.modules import allowed_transitions -import torch.nn.functional as F -from fastNLP import seq_len_to_mask -from fastNLP.core.const import Const as C -from fastNLP import SpanFPreRecMetric, Trainer -from 
fastNLP import cache_results, Vocabulary -from fastNLP.io.pipe.utils import _add_chars_field, _indexize - -from fastNLP.io.pipe import Pipe -from fastNLP.core.utils import iob2bioes, iob2 -from fastNLP.io import MsraNERLoader, WeiboNERLoader - -class ChineseNERPipe(Pipe): - def __init__(self, encoding_type: str = 'bio', target_pad_val=0, bigram=False): - if encoding_type == 'bio': - self.convert_tag = iob2 - else: - self.convert_tag = lambda words: iob2bioes(iob2(words)) - self.target_pad_val = int(target_pad_val) - self.bigram = bigram - - def process(self, data_bundle): - data_bundle.copy_field(C.RAW_CHAR, C.CHAR_INPUT) - input_fields = [C.TARGET, C.CHAR_INPUT, C.INPUT_LEN] - target_fields = [C.TARGET, C.INPUT_LEN] - if self.bigram: - for dataset in data_bundle.datasets.values(): - dataset.apply_field(lambda chars:[c1+c2 for c1, c2 in zip(chars, chars[1:]+[''])], - field_name=C.CHAR_INPUT, new_field_name='bigrams') - bigram_vocab = Vocabulary() - bigram_vocab.from_dataset(data_bundle.get_dataset('train'),field_name='bigrams', - no_create_entry_dataset=[ds for name, ds in data_bundle.datasets.items() if name!='train']) - bigram_vocab.index_dataset(*data_bundle.datasets.values(), field_name='bigrams') - data_bundle.set_vocab(bigram_vocab, field_name='bigrams') - input_fields.append('bigrams') - - _add_chars_field(data_bundle, lower=False) - - # index - _indexize(data_bundle, input_field_names=C.CHAR_INPUT, target_field_names=C.TARGET) - - for name, dataset in data_bundle.datasets.items(): - dataset.set_pad_val(C.TARGET, self.target_pad_val) - dataset.add_seq_len(C.CHAR_INPUT) - - data_bundle.set_input(*input_fields) - data_bundle.set_target(*target_fields) - - return data_bundle - - -class CNBiLSTMCRFNER(nn.Module): - def __init__(self, char_embed, num_classes, bigram_embed=None, trigram_embed=None, num_layers=1, hidden_size=100, - dropout=0.5, target_vocab=None, encoding_type=None): - super().__init__() - - self.char_embed = get_embeddings(char_embed) - embed_size = self.char_embed.embedding_dim - if bigram_embed: - self.bigram_embed = get_embeddings(bigram_embed) - embed_size += self.bigram_embed.embedding_dim - if trigram_embed: - self.trigram_ebmbed = get_embeddings(trigram_embed) - embed_size += self.bigram_embed.embedding_dim - - if num_layers>1: - self.lstm = LSTM(embed_size, num_layers=num_layers, hidden_size=hidden_size//2, bidirectional=True, - batch_first=True, dropout=dropout) - else: - self.lstm = LSTM(embed_size, num_layers=num_layers, hidden_size=hidden_size//2, bidirectional=True, - batch_first=True) - - self.dropout = nn.Dropout(dropout) - self.fc = nn.Linear(hidden_size, num_classes) - - trans = None - if target_vocab is not None and encoding_type is not None: - trans = allowed_transitions(target_vocab.idx2word, encoding_type=encoding_type, include_start_end=True) - - self.crf = ConditionalRandomField(num_classes, include_start_end_trans=True, allowed_transitions=trans) - - def _forward(self, chars, bigrams=None, trigrams=None, seq_len=None, target=None): - chars = self.char_embed(chars) - if hasattr(self, 'bigram_embed'): - bigrams = self.bigram_embed(bigrams) - chars = torch.cat((chars, bigrams), dim=-1) - if hasattr(self, 'trigram_embed'): - trigrams = self.trigram_embed(trigrams) - chars = torch.cat((chars, trigrams), dim=-1) - feats, _ = self.lstm(chars, seq_len=seq_len) - feats = self.fc(feats) - feats = self.dropout(feats) - logits = F.log_softmax(feats, dim=-1) - mask = seq_len_to_mask(seq_len) - if target is None: - pred, _ = self.crf.viterbi_decode(logits, mask) 
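The bigram feature the pipe builds pairs each character with its successor and pads the last position with an empty string, so the bigram sequence stays aligned one-to-one with the characters:

```python
chars = ['复', '旦', '大', '学']
bigrams = [c1 + c2 for c1, c2 in zip(chars, chars[1:] + [''])]
# ['复旦', '旦大', '大学', '学'] — one bigram per character position
```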
- return {C.OUTPUT: pred} - else: - loss = self.crf(logits, target, mask).mean() - return {C.LOSS:loss} - - def forward(self, chars, target, bigrams=None, trigrams=None, seq_len=None): - return self._forward(chars, bigrams, trigrams, seq_len, target) - - def predict(self, chars, seq_len=None, bigrams=None, trigrams=None): - return self._forward(chars, bigrams, trigrams, seq_len) - -# data_bundle = pickle.load(open('caches/msra.pkl', 'rb')) -@cache_results('caches/weibo-lstm.pkl', _refresh=False) -def get_data(): - data_bundle = WeiboNERLoader().load() - data_bundle = ChineseNERPipe(encoding_type='bioes', bigram=True).process(data_bundle) - char_embed = StaticEmbedding(data_bundle.get_vocab(C.CHAR_INPUT), model_dir_or_name='cn-fasttext') - bigram_embed = StaticEmbedding(data_bundle.get_vocab('bigrams'), embedding_dim=100, min_freq=3) - return data_bundle, char_embed, bigram_embed -data_bundle, char_embed, bigram_embed = get_data() -# data_bundle = get_data() -print(data_bundle) - -# exit(0) -model = CNBiLSTMCRFNER(char_embed, num_classes=len(data_bundle.vocabs['target']), bigram_embed=bigram_embed) - -Trainer(data_bundle.datasets['train'], model, batch_size=20, - metrics=SpanFPreRecMetric(data_bundle.vocabs['target'], encoding_type='bioes'), - num_workers=2, dev_data=data_bundle. datasets['dev'], device=0).train() - diff --git a/reproduction/sequence_labelling/cws/data/cws_shift_pipe.py b/reproduction/sequence_labelling/cws/data/cws_shift_pipe.py deleted file mode 100644 index 0ae4064d..00000000 --- a/reproduction/sequence_labelling/cws/data/cws_shift_pipe.py +++ /dev/null @@ -1,202 +0,0 @@ -from fastNLP.io.pipe import Pipe -from fastNLP.io import DataBundle -from fastNLP.io.loader import CWSLoader -from fastNLP import Const -from itertools import chain -from fastNLP.io.pipe.utils import _indexize -from functools import partial -from fastNLP.io.pipe.cws import _find_and_replace_alpha_spans, _find_and_replace_digit_spans - - -def _word_lens_to_relay(word_lens): - """ - [1, 2, 3, ..] 转换为[0, 1, 0, 2, 1, 0,](start指示seg有多长); - :param word_lens: - :return: - """ - tags = [] - for word_len in word_lens: - tags.extend([idx for idx in range(word_len - 1, -1, -1)]) - return tags - -def _word_lens_to_end_seg_mask(word_lens): - """ - [1, 2, 3, ..] 转换为[0, 1, 0, 2, 1, 0,](start指示seg有多长); - :param word_lens: - :return: - """ - end_seg_mask = [] - for word_len in word_lens: - end_seg_mask.extend([0] * (word_len - 1) + [1]) - return end_seg_mask - -def _word_lens_to_start_seg_mask(word_lens): - """ - [1, 2, 3, ..] 转换为[0, 1, 0, 2, 1, 0,](start指示seg有多长); - :param word_lens: - :return: - """ - start_seg_mask = [] - for word_len in word_lens: - start_seg_mask.extend([1] + [0] * (word_len - 1)) - return start_seg_mask - - -class CWSShiftRelayPipe(Pipe): - """ - - :param str,None dataset_name: 支持'pku', 'msra', 'cityu', 'as', None - :param int L: ShiftRelay模型的超参数 - :param bool replace_num_alpha: 是否将数字和字母用特殊字符替换。 - :param bool bigrams: 是否增加一列bigram. bigram的构成是['复', '旦', '大', '学', ...]->["复旦", "旦大", ...] - :param bool trigrams: 是否增加一列trigram. trigram的构成是 ['复', '旦', '大', '学', ...]->["复旦大", "旦大学", ...] - """ - def __init__(self, dataset_name=None, L=5, replace_num_alpha=True, bigrams=True): - self.dataset_name = dataset_name - self.bigrams = bigrams - self.replace_num_alpha = replace_num_alpha - self.L = L - - def _tokenize(self, data_bundle): - """ - 将data_bundle中的'chars'列切分成一个一个的word. 
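The three `_word_lens_to_*` helpers above turn per-word lengths into per-character supervision for the shift-relay model. For word lengths `[1, 2, 3]` the relay target counts down the characters remaining in each word:

```python
def word_lens_to_relay(word_lens):
    tags = []
    for n in word_lens:
        tags.extend(range(n - 1, -1, -1))   # a word of length n contributes n-1, ..., 1, 0
    return tags

word_lens_to_relay([1, 2, 3])               # [0, 1, 0, 2, 1, 0]
# start_seg_mask for the same input: [1, 1, 0, 1, 0, 0]; end_seg_mask: [1, 0, 1, 0, 0, 1]
```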
- 例如输入是"共同 创造 美好.."->[[共, 同], [创, 造], [...], ] - - :param data_bundle: - :return: - """ - def split_word_into_chars(raw_chars): - words = raw_chars.split() - chars = [] - for word in words: - char = [] - subchar = [] - for c in word: - if c=='<': - subchar.append(c) - continue - if c=='>' and subchar[0]=='<': - char.append(''.join(subchar)) - subchar = [] - if subchar: - subchar.append(c) - else: - char.append(c) - char.extend(subchar) - chars.append(char) - return chars - - for name, dataset in data_bundle.datasets.items(): - dataset.apply_field(split_word_into_chars, field_name=Const.CHAR_INPUT, - new_field_name=Const.CHAR_INPUT) - return data_bundle - - def process(self, data_bundle: DataBundle) -> DataBundle: - """ - 可以处理的DataSet需要包含raw_words列 - - .. csv-table:: - :header: "raw_words" - - "上海 浦东 开发 与 法制 建设 同步" - "新华社 上海 二月 十日 电 ( 记者 谢金虎 、 张持坚 )" - "..." - - :param data_bundle: - :return: - """ - data_bundle.copy_field(Const.RAW_WORD, Const.CHAR_INPUT) - - if self.replace_num_alpha: - data_bundle.apply_field(_find_and_replace_alpha_spans, Const.CHAR_INPUT, Const.CHAR_INPUT) - data_bundle.apply_field(_find_and_replace_digit_spans, Const.CHAR_INPUT, Const.CHAR_INPUT) - - self._tokenize(data_bundle) - input_field_names = [Const.CHAR_INPUT] - target_field_names = [] - - for name, dataset in data_bundle.datasets.items(): - dataset.apply_field(lambda chars:_word_lens_to_relay(map(len, chars)), field_name=Const.CHAR_INPUT, - new_field_name=Const.TARGET) - dataset.apply_field(lambda chars:_word_lens_to_start_seg_mask(map(len, chars)), field_name=Const.CHAR_INPUT, - new_field_name='start_seg_mask') - dataset.apply_field(lambda chars:_word_lens_to_end_seg_mask(map(len, chars)), field_name=Const.CHAR_INPUT, - new_field_name='end_seg_mask') - dataset.apply_field(lambda chars:list(chain(*chars)), field_name=Const.CHAR_INPUT, - new_field_name=Const.CHAR_INPUT) - target_field_names.append('start_seg_mask') - input_field_names.append('end_seg_mask') - if self.bigrams: - for name, dataset in data_bundle.datasets.items(): - dataset.apply_field(lambda chars: [c1+c2 for c1, c2 in zip(chars, chars[1:]+[''])], - field_name=Const.CHAR_INPUT, new_field_name='bigrams') - input_field_names.append('bigrams') - - _indexize(data_bundle, ['chars', 'bigrams'], []) - - func = partial(_clip_target, L=self.L) - for name, dataset in data_bundle.datasets.items(): - res = dataset.apply_field(func, field_name='target') - relay_target = [res_i[0] for res_i in res] - relay_mask = [res_i[1] for res_i in res] - dataset.add_field('relay_target', relay_target, is_input=True, is_target=False, ignore_type=False) - dataset.add_field('relay_mask', relay_mask, is_input=True, is_target=False, ignore_type=False) - input_field_names.append('relay_target') - input_field_names.append('relay_mask') - - input_fields = [Const.TARGET, Const.INPUT_LEN] + input_field_names - target_fields = [Const.TARGET, Const.INPUT_LEN] + target_field_names - for name, dataset in data_bundle.datasets.items(): - dataset.add_seq_len(Const.CHAR_INPUT) - - data_bundle.set_input(*input_fields) - data_bundle.set_target(*target_fields) - - return data_bundle - - def process_from_file(self, paths=None) -> DataBundle: - """ - - :param str paths: - :return: - """ - if self.dataset_name is None and paths is None: - raise RuntimeError("You have to set `paths` when calling process_from_file() or `dataset_name `when initialization.") - if self.dataset_name is not None and paths is not None: - raise RuntimeError("You cannot specify `paths` and `dataset_name` simultaneously") 
diff --git a/reproduction/sequence_labelling/cws/model/bilstm_crf_cws.py b/reproduction/sequence_labelling/cws/model/bilstm_crf_cws.py deleted file mode 100644 index 4f87a81c..00000000
--- a/reproduction/sequence_labelling/cws/model/bilstm_crf_cws.py
+++ /dev/null
@@ -1,60 +0,0 @@
-
-import torch
-from fastNLP.modules import LSTM
-from fastNLP.modules import allowed_transitions, ConditionalRandomField
-from fastNLP import seq_len_to_mask
-from torch import nn
-from fastNLP import Const
-import torch.nn.functional as F
-
-class BiLSTMCRF(nn.Module):
-    def __init__(self, char_embed, hidden_size, num_layers, target_vocab=None, bigram_embed=None, trigram_embed=None,
-                 dropout=0.5):
-        super().__init__()
-
-        embed_size = char_embed.embed_size
-        self.char_embed = char_embed
-        if bigram_embed:
-            embed_size += bigram_embed.embed_size
-        self.bigram_embed = bigram_embed
-        if trigram_embed:
-            embed_size += trigram_embed.embed_size
-        self.trigram_embed = trigram_embed
-
-        self.lstm = LSTM(embed_size, hidden_size=hidden_size//2, bidirectional=True, batch_first=True,
-                         num_layers=num_layers)
-        self.dropout = nn.Dropout(p=dropout)
-        self.fc = nn.Linear(hidden_size, len(target_vocab))
-
-        transitions = None
-        if target_vocab:
-            transitions = allowed_transitions(target_vocab, include_start_end=True, encoding_type='bmes')
-
-        self.crf = ConditionalRandomField(num_tags=len(target_vocab), allowed_transitions=transitions)
-
-    def _forward(self, chars, bigrams, trigrams, seq_len, target=None):
-        chars = self.char_embed(chars)
-        if bigrams is not None:
-            bigrams = self.bigram_embed(bigrams)
-            chars = torch.cat([chars, bigrams], dim=-1)
-        if trigrams is not None:
-            trigrams = self.trigram_embed(trigrams)
-            chars = torch.cat([chars, trigrams], dim=-1)
-
-        output, _ = self.lstm(chars, seq_len)
-        output = self.dropout(output)
-        output = self.fc(output)
-        output = F.log_softmax(output, dim=-1)
-        mask = seq_len_to_mask(seq_len)
-        if target is None:
-            pred, _ = self.crf.viterbi_decode(output, mask)
-            return {Const.OUTPUT: pred}
-        else:
-            loss = self.crf(output, tags=target, mask=mask)
-            return {Const.LOSS: loss}
-
-    def forward(self, chars, seq_len, target, bigrams=None, trigrams=None):
-        return self._forward(chars, bigrams, trigrams, seq_len, target)
-
-    def predict(self, chars, seq_len, bigrams=None, trigrams=None):
-        return self._forward(chars, bigrams, trigrams, seq_len)
\ No newline at end of file
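A hedged smoke test for the deleted BiLSTMCRF above. The `StandInEmbed` class is an assumption: the real training scripts pass fastNLP `StaticEmbedding` objects, and the only extra attribute the model reads from them is `embed_size`.

```python
import torch
from torch import nn
from fastNLP import Vocabulary

class StandInEmbed(nn.Embedding):
    # minimal stand-in (assumption) exposing the .embed_size attribute
    # that BiLSTMCRF reads from fastNLP embedding objects
    @property
    def embed_size(self):
        return self.embedding_dim

target_vocab = Vocabulary(unknown=None, padding=None)
target_vocab.add_word_lst(['b', 'm', 'e', 's'])  # BMES segmentation tags

model = BiLSTMCRF(StandInEmbed(100, 50), hidden_size=200, num_layers=1,
                  target_vocab=target_vocab, dropout=0.3)
chars = torch.randint(0, 100, (2, 7))   # batch of 2 sentences, 7 chars each
seq_len = torch.tensor([7, 5])
pred = model.predict(chars, seq_len)['pred']
print(pred.shape)                        # expected: torch.Size([2, 7]) of tag ids
```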
diff --git a/reproduction/sequence_labelling/cws/model/bilstm_shift_relay.py b/reproduction/sequence_labelling/cws/model/bilstm_shift_relay.py deleted file mode 100644 index efba5c41..00000000
--- a/reproduction/sequence_labelling/cws/model/bilstm_shift_relay.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from torch import nn
-import torch
-from reproduction.sequence_labelling.cws.model.module import FeatureFunMax, SemiCRFShiftRelay
-from fastNLP.modules import LSTM
-
-class ShiftRelayCWSModel(nn.Module):
-    """
-    A shift-relay model for Chinese word segmentation. It exposes two methods:
-        forward(chars, bigrams, seq_len) -> {'loss': batch_size,}
-        predict(chars, bigrams) -> {'pred': batch_size x max_len, 'pred_mask': batch_size x max_len}
-    pred holds the predicted (length-1) of the segment starting at each position; pred_mask is 1 only where a segment start is predicted.
-
-    :param char_embed: a pretrained Embedding, or the shape of the embedding
-    :param bigram_embed: a pretrained Embedding, or the shape of the embedding
-    :param hidden_size: hidden size of the LSTM
-    :param num_layers: number of LSTM layers
-    :param L: maximum segment length handled by SemiCRFShiftRelay
-    :param drop_p: dropout probability
-    """
-    def __init__(self, char_embed, bigram_embed, hidden_size:int=400, num_layers:int=1, L:int=6, drop_p:float=0.2):
-        super().__init__()
-        self.char_embedding = char_embed
-        self.bigram_embedding = bigram_embed
-        self.lstm = LSTM(char_embed.embed_size+bigram_embed.embed_size, hidden_size // 2, num_layers=num_layers,
-                         bidirectional=True,
-                         batch_first=True)
-        self.feature_fn = FeatureFunMax(hidden_size, L)
-        self.semi_crf_relay = SemiCRFShiftRelay(L)
-        self.feat_drop = nn.Dropout(drop_p)
-        self.reset_param()
-
-    def reset_param(self):
-        for name, param in self.named_parameters():
-            if 'embedding' in name:
-                continue
-            if 'bias_hh' in name:
-                nn.init.constant_(param, 0)
-            elif 'bias_ih' in name:
-                nn.init.constant_(param, 1)
-            elif len(param.size()) < 2:
-                nn.init.uniform_(param, -0.1, 0.1)
-            else:
-                nn.init.xavier_uniform_(param)
-
-    def get_feats(self, chars, bigrams, seq_len):
-        chars = self.char_embedding(chars)
-        bigrams = self.bigram_embedding(bigrams)
-        chars = torch.cat([chars, bigrams], dim=-1)
-        feats, _ = self.lstm(chars, seq_len)
-        feats = self.feat_drop(feats)
-        logits, relay_logits = self.feature_fn(feats)
-
-        return logits, relay_logits
-
-    def forward(self, chars, bigrams, relay_target, relay_mask, end_seg_mask, seq_len):
-        logits, relay_logits = self.get_feats(chars, bigrams, seq_len)
-        loss = self.semi_crf_relay(logits, relay_logits, relay_target, relay_mask, end_seg_mask, seq_len)
-        return {'loss':loss}
-
-    def predict(self, chars, bigrams, seq_len):
-        logits, relay_logits = self.get_feats(chars, bigrams, seq_len)
-        pred, pred_mask = self.semi_crf_relay.predict(logits, relay_logits, seq_len)
-        return {'pred': pred, 'pred_mask': pred_mask}
-
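The `reset_param` scheme above is compact but deliberate; here is a hedged reproduction of it on a bare LSTM, assuming the usual reading that ones in `bias_ih` keep the gates relatively open early in training:

```python
import torch
from torch import nn

# Apply ShiftRelayCWSModel.reset_param's rule to a standalone LSTM:
# recurrent biases -> 0, input biases -> 1, vectors -> U(-0.1, 0.1),
# weight matrices -> Xavier uniform.
lstm = nn.LSTM(10, 20, batch_first=True, bidirectional=True)
for name, param in lstm.named_parameters():
    if 'bias_hh' in name:
        nn.init.constant_(param, 0)
    elif 'bias_ih' in name:
        nn.init.constant_(param, 1)
    elif param.dim() < 2:
        nn.init.uniform_(param, -0.1, 0.1)
    else:
        nn.init.xavier_uniform_(param)

print(lstm.bias_ih_l0.unique())  # tensor([1.])
```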
diff --git a/reproduction/sequence_labelling/cws/model/metric.py b/reproduction/sequence_labelling/cws/model/metric.py deleted file mode 100644 index d68e3473..00000000
--- a/reproduction/sequence_labelling/cws/model/metric.py
+++ /dev/null
@@ -1,44 +0,0 @@
-
-from fastNLP.core.metrics import MetricBase
-
-
-class RelayMetric(MetricBase):
-    def __init__(self, pred=None, pred_mask=None, target=None, start_seg_mask=None):
-        super().__init__()
-        self._init_param_map(pred=pred, pred_mask=pred_mask, target=target, start_seg_mask=start_seg_mask)
-        self.tp = 0
-        self.rec = 0
-        self.pre = 0
-
-    def evaluate(self, pred, pred_mask, target, start_seg_mask):
-        """
-        Accumulate the statistics for one batch.
-
-        :param pred: predictions; the (length-1) of the segment starting at the current position
-        :param pred_mask: 1 where a segment is predicted to start at the current position
-        :param target: the gold (length-1) of the segment starting at the current position
-        :param start_seg_mask: 1 where a gold segment starts
-        :return:
-        """
-        self.tp += ((pred.long().eq(target.long())).__and__(pred_mask.byte().__and__(start_seg_mask.byte()))).sum().item()
-        self.rec += start_seg_mask.sum().item()
-        self.pre += pred_mask.sum().item()
-
-    def get_metric(self, reset=True):
-        """
-        Compute the final performance once all batches have been accumulated.
-
-        :param reset:
-        :return:
-        """
-        pre = self.tp/(self.pre + 1e-12)
-        rec = self.tp/(self.rec + 1e-12)
-        f = 2*pre*rec/(1e-12 + pre + rec)
-
-        if reset:
-            self.tp = 0
-            self.rec = 0
-            self.pre = 0
-
-        return {'f': round(f, 6), 'pre': round(pre, 6), 'rec': round(rec, 6)}
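The metric counts a true positive only where a predicted start, a gold start, and the predicted length all coincide. A hedged numeric check of the precision/recall/F arithmetic in `get_metric`, with made-up counts:

```python
# Made-up counts: 7 exact segment matches, 8 predicted starts, 10 gold starts.
tp, n_pred, n_gold = 7, 8, 10
pre = tp / (n_pred + 1e-12)             # 0.875
rec = tp / (n_gold + 1e-12)             # 0.7
f = 2 * pre * rec / (1e-12 + pre + rec)
print(round(f, 6))                       # 0.777778
```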
diff --git a/reproduction/sequence_labelling/cws/model/module.py b/reproduction/sequence_labelling/cws/model/module.py deleted file mode 100644 index 9a141c34..00000000
--- a/reproduction/sequence_labelling/cws/model/module.py
+++ /dev/null
@@ -1,197 +0,0 @@
-from torch import nn
-import torch
-import numpy as np
-
-class SemiCRFShiftRelay(nn.Module):
-    """
-    This module is a decoder; decoding with tags is not supported yet.
-
-    """
-    def __init__(self, L):
-        """
-
-        :param L: maximum segment length, excluding the relay node
-        """
-        if L<2:
-            raise RuntimeError("L must be at least 2.")
-        super().__init__()
-        self.L = L
-
-    def forward(self, logits, relay_logits, relay_target, relay_mask, end_seg_mask, seq_len):
-        """
-        A relay node means none of the next L characters ends its segment; the relay state slides forward one position at a time.
-
-        :param logits: batch_size x max_len x L, scores of the L segments ending at the current position; index 0 of the last dim is the length-1 segment (the position itself)
-        :param relay_logits: batch_size x max_len, score that none of the next L-1 positions is a segment end
-        :param relay_target: batch_size x max_len, for each position, where its segment starts; clipped at L-1 once the distance exceeds L. E.g. for a word of length 5 and L=3: [0, 1, 2, 2, 2]
-        :param relay_mask: batch_size x max_len, 1 wherever a relay is needed; for a word of length 5 and L=3: [1, 1, 1, 0, 0]
-        :param end_seg_mask: batch_size x max_len, 1 where a segment ends.
-        :param seq_len: batch_size, sentence lengths
-        :return: loss: batch_size,
-        """
-        batch_size, max_len, L = logits.size()
-
-        # score of being a relay node at the current step
-        relay_scores = logits.new_zeros(batch_size, max_len)
-        # score of a segment ending at the current step
-        scores = logits.new_zeros(batch_size, max_len+1)
-        # score of the gold path
-        gold_scores = relay_logits[:, 0].masked_fill(relay_mask[:, 0].eq(False), 0) + \
-                      logits[:, 0, 0].masked_fill(end_seg_mask[:, 0].eq(False), 0)
-        # initialization
-        scores[:, 1] = logits[:, 0, 0]
-        batch_i = torch.arange(batch_size).to(logits.device).long()
-        relay_scores[:, 0] = relay_logits[:, 0]
-        last_relay_index = max_len - self.L
-        for t in range(1, max_len):
-            real_L = min(t+1, L)
-            flip_logits_t = logits[:, t, :real_L].flip(dims=[1])  # after the flip, index 0 is the segment with (length-1)=real_L-1
-            # update relay_scores
-            if t<last_relay_index:
-                # (1) keep relaying from t-1, or (2) start relaying right after a finished segment
-                tmp = torch.stack([relay_scores[:, t-1], scores[:, t]], dim=-1)
-                relay_scores[:, t] = torch.logsumexp(tmp, dim=-1) + relay_logits[:, t]
-            # update scores
-            # (1) a segment of length 1..real_L ends at t
-            tmp1 = scores[:, t-real_L+1:t+1] + flip_logits_t  # batch_size x real_L
-            if t>self.L-1:
-                # (2) coming from a relay node
-                tmp2 = relay_scores[:, t-self.L]  # batch_size
-                tmp2 = tmp2 + flip_logits_t[:, 0]  # batch_size
-                tmp1 = torch.cat([tmp1, tmp2.unsqueeze(-1)], dim=-1)
-            scores[:, t+1] = torch.logsumexp(tmp1, dim=-1)  # update the score at the current step
-
-            # accumulate the gold score
-            seg_i = relay_target[:, t]  # batch_size
-            gold_segment_scores = logits[:, t][(batch_i, seg_i)].masked_fill(end_seg_mask[:, t].eq(False), 0)  # batch_size, score of the gold segment ending at t
-            relay_score = relay_logits[:, t].masked_fill(relay_mask[:, t].eq(False), 0)
-            gold_scores = gold_scores + relay_score + gold_segment_scores
-        all_scores = scores.gather(dim=1, index=seq_len.unsqueeze(1)).squeeze(1)  # batch_size
-        return all_scores - gold_scores
-
-    def predict(self, logits, relay_logits, seq_len):
-        """
-        A relay node means none of the next L characters ends its segment; the relay state slides forward one position at a time.
-
-        :param logits: batch_size x max_len x L, scores of the L segments ending at the current position; index 0 of the last dim is the length-1 segment (the position itself)
-        :param relay_logits: batch_size x max_len, score that none of the next L-1 positions is a segment end
-        :param seq_len: batch_size, sentence lengths
-        :return: pred: batch_size x max_len, the (length-1) of the segment starting at each position; pred_mask is 1 where a segment is predicted to start
-        """
-        batch_size, max_len, L = logits.size()
-        # best score of being a relay node at the current step
-        max_relay_scores = logits.new_zeros(batch_size, max_len)
-        relay_bt = seq_len.new_zeros(batch_size, max_len)  # whether the best relay state continued from a relay at t-1
-        # best score of a segment ending at the current step
-        max_scores = logits.new_zeros(batch_size, max_len+1)
-        bt = seq_len.new_zeros(batch_size, max_len)
-        # initialization
-        max_scores[:, 1] = logits[:, 0, 0]
-        max_relay_scores[:, 0] = relay_logits[:, 0]
-        last_relay_index = max_len - self.L
-        for t in range(1, max_len):
-            real_L = min(t+1, L)
-            flip_logits_t = logits[:, t, :real_L].flip(dims=[1])  # after the flip, index 0 is the segment with (length-1)=real_L-1
-            # update relay scores: max instead of logsumexp, keeping a backpointer
-            if t<last_relay_index:
-                tmp = torch.stack([max_scores[:, t], max_relay_scores[:, t-1]], dim=-1)
-                best, relay_bt[:, t] = tmp.max(dim=-1)  # backpointer 0: relay starts here; 1: relay continues
-                max_relay_scores[:, t] = best + relay_logits[:, t]
-            # update segment scores
-            tmp1 = max_scores[:, t-real_L+1:t+1] + flip_logits_t  # batch_size x real_L
-            if t>self.L-1:
-                # coming from a relay node
-                tmp2 = max_relay_scores[:, t-self.L] + flip_logits_t[:, 0]
-                tmp1 = torch.cat([tmp1, tmp2.unsqueeze(-1)], dim=-1)
-            max_scores[:, t+1], bt_idx = tmp1.max(dim=-1)
-            # store the (length-1) of the winning segment; the value self.L marks "came from a relay"
-            bt[:, t] = (real_L - 1 - bt_idx).masked_fill(bt_idx.eq(real_L), self.L)
-        # backtrace each sentence from its last position
-        pred = np.zeros((batch_size, max_len), dtype=int)
-        pred_mask = np.zeros((batch_size, max_len), dtype=int)
-        bt = bt.tolist()
-        relay_bt = relay_bt.tolist()
-        for b in range(batch_size):
-            bt_i = bt[b]
-            relay_bt_i = relay_bt[b]
-            j = seq_len[b].item() - 1
-            while j>-1:
-                if bt_i[j]==self.L:
-                    seg_start_pos = j
-                    j = j-self.L
-                    while relay_bt_i[j]!=0 and j>-1:
-                        j = j - 1
-                    pred[b, j] = seg_start_pos - j
-                    pred_mask[b, j] = 1
-                else:
-                    length = bt_i[j]
-                    j = j - bt_i[j]
-                    pred_mask[b, j] = 1
-                    pred[b, j] = length
-                j = j - 1
-
-        return torch.LongTensor(pred).to(logits.device), torch.LongTensor(pred_mask).to(logits.device)
-
-
-
-class FeatureFunMax(nn.Module):
-    def __init__(self, hidden_size:int, L:int):
-        """
-        Computes the semi-CRF features. Given input of shape batch_size x max_len x hidden_size, outputs segment scores
-        of shape batch_size x max_len x L, plus relay scores of shape batch_size x max_len. See the paper for the
-        difference between the two. TODO: add the reference
-
-        :param hidden_size: dimension of the input features
-        :param L: maximum segment length, excluding the relay node
-        """
-        super().__init__()
-
-        self.end_fc = nn.Linear(hidden_size, 1, bias=False)
-        self.whole_w = nn.Parameter(torch.randn(L, hidden_size))
-        self.relay_fc = nn.Linear(hidden_size, 1)
-        self.length_bias = nn.Parameter(torch.randn(L))
-        self.L = L
-
-    def forward(self, logits):
-        """
-
-        :param logits: batch_size x max_len x hidden_size
-        :return: batch_size x max_len x L   # scores of the segments ending here; index 0 is the length-1 segment
-                 batch_size x max_len       # score that none of the next L-1 positions is a segment end
-
-        """
-        batch_size, max_len, hidden_size = logits.size()
-        # start_scores = self.start_fc(logits)  # batch_size x max_len x 1, score of each position as a segment start
-        tmp = logits.new_zeros(batch_size, max_len+self.L-1, hidden_size)
-        tmp[:, -max_len:] = logits
-        # batch_size x max_len x hidden_size x self.L -> batch_size x max_len x self.L x hidden_size
-        start_logits = tmp.unfold(dimension=1, size=self.L, step=1).transpose(2, 3).flip(dims=[2])
-        end_scores = self.end_fc(logits)  # batch_size x max_len x 1
-        # relay features
-        relay_tmp = logits.new_zeros(batch_size, max_len, hidden_size)
-        relay_tmp[:, :-self.L] = logits[:, self.L:]
-        # batch_size x max_len x hidden_size
-        relay_logits_max = torch.max(relay_tmp, logits)  # end - start
-        logits_max = torch.max(logits.unsqueeze(2), start_logits)  # batch_size x max_len x L x hidden_size
-        whole_scores = (logits_max*self.whole_w).sum(dim=-1)  # batch_size x max_len x self.L
-        # whole_scores = self.whole_fc().squeeze(-1)  # bz x max_len x self.L
-        # batch_size x max_len
-        relay_scores = self.relay_fc(relay_logits_max).squeeze(-1)
-        return whole_scores+end_scores+self.length_bias.view(1, 1, -1), relay_scores
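The trickiest line in `FeatureFunMax` is the `unfold` that gathers, for every position, the hidden states of the L candidate segment starts ending there. A hedged shape check with toy dimensions:

```python
import torch

batch_size, max_len, hidden_size, L = 2, 5, 8, 3
logits = torch.randn(batch_size, max_len, hidden_size)

# Left-pad with L-1 zero frames, slide a window of L frames per position,
# then flip so slot k holds the hidden state at the start of a segment of
# length k+1 ending at that position (slot 0 = the position itself).
tmp = logits.new_zeros(batch_size, max_len + L - 1, hidden_size)
tmp[:, -max_len:] = logits
start_logits = tmp.unfold(dimension=1, size=L, step=1).transpose(2, 3).flip(dims=[2])
print(start_logits.shape)  # torch.Size([2, 5, 3, 8])
assert torch.equal(start_logits[:, 2, 0], logits[:, 2])  # length-1 segment
assert torch.equal(start_logits[:, 2, 2], logits[:, 0])  # segment starting 2 positions back
```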
diff --git a/reproduction/sequence_labelling/cws/readme.md b/reproduction/sequence_labelling/cws/readme.md deleted file mode 100644 index a25bb0ed..00000000
--- a/reproduction/sequence_labelling/cws/readme.md
+++ /dev/null
@@ -1,32 +0,0 @@
-Statistics of the four datasets; the original data can be downloaded from [http://sighan.cs.uchicago.edu/bakeoff2005/]().
-
-| pku   | # of sents | # of tokens |
-| ----- | ---------- | ----------- |
-| train | 17173      | 1650222     |
-| dev   | 1881       | 176226      |
-| test  | 1944       | 172733      |
-| total | 20998      | 1999181     |
-
-
-| cityu | # of sents | # of tokens |
-| ----- | ---------- | ----------- |
-| train | 47696      | 2164907     |
-| dev   | 5323       | 238447      |
-| test  | 1492       | 67690       |
-| total | 54511      | 2471044     |
-
-
-| msra  | # of sents | # of tokens |
-| ----- | ---------- | ----------- |
-| train | 78242      | 3644550     |
-| dev   | 8676       | 405919      |
-| test  | 3985       | 184355      |
-| total | 90903      | 4234824     |
-
-
-| as    | # of sents | # of tokens |
-| ----- | ---------- | ----------- |
-| train | 638273     | 7536586     |
-| dev   | 70680      | 831464      |
-| test  | 14429      | 197681      |
-| total | 723382     | 8565731     |
diff --git a/reproduction/sequence_labelling/cws/train_bilstm_crf.py b/reproduction/sequence_labelling/cws/train_bilstm_crf.py deleted file mode 100644 index 30760d8f..00000000
--- a/reproduction/sequence_labelling/cws/train_bilstm_crf.py
+++ /dev/null
@@ -1,52 +0,0 @@
-import sys
-sys.path.append('../../..')
-
-from fastNLP.io.pipe.cws import CWSPipe
-from reproduction.sequence_labelling.cws.model.bilstm_crf_cws import BiLSTMCRF
-from fastNLP import Trainer, cache_results
-from fastNLP.embeddings import StaticEmbedding
-from fastNLP import EvaluateCallback, BucketSampler, SpanFPreRecMetric, GradientClipCallback
-from torch.optim import Adagrad
-
-###########hyper
-dataname = 'pku'
-hidden_size = 400
-num_layers = 1
-lr = 0.05
-###########hyper
-
-
-@cache_results('{}.pkl'.format(dataname), _refresh=False)
-def get_data():
-    data_bundle = CWSPipe(dataset_name=dataname, bigrams=True, trigrams=False).process_from_file()
-    char_embed = StaticEmbedding(data_bundle.get_vocab('chars'), dropout=0.33, word_dropout=0.01,
-                                 model_dir_or_name='~/exps/CWS/pretrain/vectors/1grams_t3_m50_corpus.txt')
-    bigram_embed = StaticEmbedding(data_bundle.get_vocab('bigrams'), dropout=0.33, min_freq=3, word_dropout=0.01,
-                                   model_dir_or_name='~/exps/CWS/pretrain/vectors/2grams_t3_m50_corpus.txt')
-    return data_bundle, char_embed, bigram_embed
-
-data_bundle, char_embed, bigram_embed = get_data()
-print(data_bundle)
-
-model = BiLSTMCRF(char_embed, hidden_size, num_layers, target_vocab=data_bundle.get_vocab('target'), bigram_embed=bigram_embed,
-                  trigram_embed=None, dropout=0.3)
-model.cuda()
-
-callbacks = []
-callbacks.append(EvaluateCallback(data_bundle.get_dataset('test')))
-callbacks.append(GradientClipCallback(clip_type='value', clip_value=5))
-optimizer = Adagrad(model.parameters(), lr=lr)
-
-metrics = []
-metric1 = SpanFPreRecMetric(tag_vocab=data_bundle.get_vocab('target'), encoding_type='bmes')
-metrics.append(metric1)
-
-trainer = Trainer(data_bundle.get_dataset('train'), model, optimizer=optimizer, loss=None,
-                  batch_size=128, sampler=BucketSampler(), update_every=1,
-                  num_workers=1, n_epochs=10, print_every=5,
-                  dev_data=data_bundle.get_dataset('dev'),
-                  metrics=metrics,
-                  metric_key=None,
-                  validate_every=-1, save_path=None, use_tqdm=True, device=0,
-                  callbacks=callbacks, check_code_level=0, dev_batch_size=128)
-trainer.train()
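Both training scripts lean on fastNLP's `@cache_results` decorator shown above; a hedged, minimal demonstration of the caching pattern, with a toy function standing in for the real data preparation:

```python
from fastNLP import cache_results

# First call runs the function and pickles its return value to demo_cache.pkl;
# subsequent runs load the pickle instead. _refresh=True forces recomputation,
# which is why the scripts flip that flag per experiment.
@cache_results('demo_cache.pkl', _refresh=False)
def prepare_toy_data():
    return {'vocab_size': 4000}  # toy stand-in for the real pipeline

print(prepare_toy_data())
```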
diff --git a/reproduction/sequence_labelling/cws/train_shift_relay.py b/reproduction/sequence_labelling/cws/train_shift_relay.py deleted file mode 100644 index 1a519028..00000000
--- a/reproduction/sequence_labelling/cws/train_shift_relay.py
+++ /dev/null
@@ -1,53 +0,0 @@
-
-import sys
-sys.path.append('../../..')
-
-from fastNLP import cache_results
-from reproduction.sequence_labelling.cws.data.cws_shift_pipe import CWSShiftRelayPipe
-from reproduction.sequence_labelling.cws.model.bilstm_shift_relay import ShiftRelayCWSModel
-from fastNLP import Trainer
-from torch.optim import Adam
-from fastNLP import BucketSampler
-from fastNLP import GradientClipCallback
-from reproduction.sequence_labelling.cws.model.metric import RelayMetric
-from fastNLP.embeddings import StaticEmbedding
-from fastNLP import EvaluateCallback
-
-#########hyper
-L = 4
-hidden_size = 200
-num_layers = 1
-drop_p = 0.2
-lr = 0.008
-data_name = 'pku'
-#########hyper
-device = 0
-
-cache_fp = 'caches/{}.pkl'.format(data_name)
-@cache_results(_cache_fp=cache_fp, _refresh=True)  # cache the result to cache_fp so the next run reads it back instead of recomputing
-def prepare_data():
-    data_bundle = CWSShiftRelayPipe(dataset_name=data_name, L=L).process_from_file()
-    # pretrained character and bigram embeddings
-    char_embed = StaticEmbedding(data_bundle.get_vocab('chars'), dropout=0.5, word_dropout=0.01,
-                                 model_dir_or_name='~/exps/CWS/pretrain/vectors/1grams_t3_m50_corpus.txt')
-    bigram_embed = StaticEmbedding(data_bundle.get_vocab('bigrams'), dropout=0.5, min_freq=3, word_dropout=0.01,
-                                   model_dir_or_name='~/exps/CWS/pretrain/vectors/2grams_t3_m50_corpus.txt')
-
-    return data_bundle, char_embed, bigram_embed
-
-data, char_embed, bigram_embed = prepare_data()
-
-model = ShiftRelayCWSModel(char_embed=char_embed, bigram_embed=bigram_embed,
-                           hidden_size=hidden_size, num_layers=num_layers, drop_p=drop_p, L=L)
-
-sampler = BucketSampler()
-optimizer = Adam(model.parameters(), lr=lr)
-clipper = GradientClipCallback(clip_value=5, clip_type='value')  # clip overly large gradients
-evaluator = EvaluateCallback(data.get_dataset('test'))  # additionally evaluate on the test set
-callbacks = [clipper, evaluator]
-
-trainer = Trainer(data.get_dataset('train'), model, optimizer=optimizer, loss=None, batch_size=128, sampler=sampler,
-                  update_every=1, n_epochs=10, print_every=5, dev_data=data.get_dataset('dev'), metrics=RelayMetric(),
-                  metric_key='f', validate_every=-1, save_path=None, use_tqdm=True, device=device, callbacks=callbacks,
-                  check_code_level=0, num_workers=1)
-trainer.train()
\ No newline at end of file
diff --git a/reproduction/sequence_labelling/ner/README.md b/reproduction/sequence_labelling/ner/README.md deleted file mode 100644 index d42046b0..00000000
--- a/reproduction/sequence_labelling/ner/README.md
+++ /dev/null
@@ -1,13 +0,0 @@
-# Reproducing NER models
-Here fastNLP is used to reproduce the classic BiLSTM-CNN NER model, aiming to match the performance reported in the paper.
-
-Paper: [Named Entity Recognition with Bidirectional LSTM-CNNs](https://arxiv.org/pdf/1511.08308.pdf)
-
-# Datasets and reproduced results
-
-Results reproduced with fastNLP vs. results reported in the paper (fastNLP implementation before the slash, paper result after it)
-
-model name | Conll2003 | Ontonotes
-:---: | :---: | :---:
-BiLSTM-CNN | 91.17/90.91 | 86.47/86.35 |
-
diff --git a/reproduction/sequence_labelling/ner/__init__.py b/reproduction/sequence_labelling/ner/__init__.py deleted file mode 100644 index e69de29b..00000000
diff --git a/reproduction/sequence_labelling/ner/model/bert_crf.py b/reproduction/sequence_labelling/ner/model/bert_crf.py deleted file mode 100644 index 8061d116..00000000
--- a/reproduction/sequence_labelling/ner/model/bert_crf.py
+++ /dev/null
@@ -1,31 +0,0 @@
-
-
-from torch import nn
-from fastNLP.modules import ConditionalRandomField, allowed_transitions
-import torch.nn.functional as F
-
-class BertCRF(nn.Module):
-    def __init__(self, embed, tag_vocab, encoding_type='bio'):
-        super().__init__()
-        self.embed = embed
-        self.fc = nn.Linear(self.embed.embed_size, len(tag_vocab))
-        trans = allowed_transitions(tag_vocab, encoding_type=encoding_type, include_start_end=True)
-        self.crf = ConditionalRandomField(len(tag_vocab), include_start_end_trans=True, allowed_transitions=trans)
-
-    def _forward(self, words, target):
-        mask = words.ne(0)
-        words = self.embed(words)
-        words = self.fc(words)
-        logits = F.log_softmax(words, dim=-1)
-        if target is not None:
-            loss = self.crf(logits, target, mask)
-            return {'loss': loss}
-        else:
-            paths, _ = self.crf.viterbi_decode(logits, mask)
-            return {'pred': paths}
-
-    def forward(self, words, target):
-        return self._forward(words, target)
-
-    def predict(self, words):
-        return self._forward(words, None)
diff --git a/reproduction/sequence_labelling/ner/model/dilated_cnn.py 
b/reproduction/sequence_labelling/ner/model/dilated_cnn.py deleted file mode 100644 index 990d9a35..00000000 --- a/reproduction/sequence_labelling/ner/model/dilated_cnn.py +++ /dev/null @@ -1,134 +0,0 @@ -import torch -import torch.nn as nn -import torch.nn.functional as F -from fastNLP.modules.decoder import ConditionalRandomField -from fastNLP.embeddings import Embedding -from fastNLP.core.utils import seq_len_to_mask -from fastNLP.core.const import Const as C - - -class IDCNN(nn.Module): - def __init__(self, - init_embed, - char_embed, - num_cls, - repeats, num_layers, num_filters, kernel_size, - use_crf=False, use_projection=False, block_loss=False, - input_dropout=0.3, hidden_dropout=0.2, inner_dropout=0.0): - super(IDCNN, self).__init__() - self.word_embeddings = Embedding(init_embed) - - if char_embed is None: - self.char_embeddings = None - embedding_size = self.word_embeddings.embedding_dim - else: - self.char_embeddings = Embedding(char_embed) - embedding_size = self.word_embeddings.embedding_dim + \ - self.char_embeddings.embedding_dim - - self.conv0 = nn.Sequential( - nn.Conv1d(in_channels=embedding_size, - out_channels=num_filters, - kernel_size=kernel_size, - stride=1, dilation=1, - padding=kernel_size//2, - bias=True), - nn.ReLU(), - ) - - block = [] - for layer_i in range(num_layers): - dilated = 2 ** layer_i if layer_i+1 < num_layers else 1 - block.append(nn.Conv1d( - in_channels=num_filters, - out_channels=num_filters, - kernel_size=kernel_size, - stride=1, dilation=dilated, - padding=(kernel_size//2) * dilated, - bias=True)) - block.append(nn.ReLU()) - self.block = nn.Sequential(*block) - - if use_projection: - self.projection = nn.Sequential( - nn.Conv1d( - in_channels=num_filters, - out_channels=num_filters//2, - kernel_size=1, - bias=True), - nn.ReLU(),) - encode_dim = num_filters // 2 - else: - self.projection = None - encode_dim = num_filters - - self.input_drop = nn.Dropout(input_dropout) - self.hidden_drop = nn.Dropout(hidden_dropout) - self.inner_drop = nn.Dropout(inner_dropout) - self.repeats = repeats - self.out_fc = nn.Conv1d( - in_channels=encode_dim, - out_channels=num_cls, - kernel_size=1, - bias=True) - self.crf = ConditionalRandomField( - num_tags=num_cls) if use_crf else None - self.block_loss = block_loss - self.reset_parameters() - - def reset_parameters(self): - for m in self.modules(): - if isinstance(m, (nn.Conv1d, nn.Conv2d, nn.Linear)): - nn.init.xavier_normal_(m.weight, gain=1) - if m.bias is not None: - nn.init.normal_(m.bias, mean=0, std=0.01) - - def forward(self, words, seq_len, target=None, chars=None): - if self.char_embeddings is None: - x = self.word_embeddings(words) - else: - if chars is None: - raise ValueError('must provide chars for model with char embedding') - e1 = self.word_embeddings(words) - e2 = self.char_embeddings(chars) - x = torch.cat((e1, e2), dim=-1) # b,l,h - mask = seq_len_to_mask(seq_len) - - x = x.transpose(1, 2) # b,h,l - last_output = self.conv0(x) - output = [] - for repeat in range(self.repeats): - last_output = self.block(last_output) - hidden = self.projection(last_output) if self.projection is not None else last_output - output.append(self.out_fc(hidden)) - - def compute_loss(y, t, mask): - if self.crf is not None and target is not None: - loss = self.crf(y.transpose(1, 2), t, mask) - else: - y.masked_fill_((mask.eq(False))[:,None,:], -100) - # f_mask = mask.float() - # t = f_mask * t + (1-f_mask) * -100 - loss = F.cross_entropy(y, t, ignore_index=-100) - return loss - - if target is not None: - if 
self.block_loss: - losses = [compute_loss(o, target, mask) for o in output] - loss = sum(losses) - else: - loss = compute_loss(output[-1], target, mask) - else: - loss = None - - scores = output[-1] - if self.crf is not None: - pred, _ = self.crf.viterbi_decode(scores.transpose(1, 2), mask) - else: - pred = scores.max(1)[1] * mask.long() - - return { - C.LOSS: loss, - C.OUTPUT: pred, - } - diff --git a/reproduction/sequence_labelling/ner/model/lstm_cnn_crf.py b/reproduction/sequence_labelling/ner/model/lstm_cnn_crf.py deleted file mode 100644 index 1d51ab79..00000000 --- a/reproduction/sequence_labelling/ner/model/lstm_cnn_crf.py +++ /dev/null @@ -1,50 +0,0 @@ - -from torch import nn -from fastNLP import seq_len_to_mask -from fastNLP.modules import LSTM -from fastNLP.modules import ConditionalRandomField, allowed_transitions -import torch.nn.functional as F -from fastNLP import Const - -class CNNBiLSTMCRF(nn.Module): - def __init__(self, embed, hidden_size, num_layers, tag_vocab, dropout=0.5, encoding_type='bioes'): - super().__init__() - self.embedding = embed - self.lstm = LSTM(input_size=self.embedding.embedding_dim, - hidden_size=hidden_size//2, num_layers=num_layers, - bidirectional=True, batch_first=True) - self.fc = nn.Linear(hidden_size, len(tag_vocab)) - - transitions = allowed_transitions(tag_vocab.idx2word, encoding_type=encoding_type, include_start_end=True) - self.crf = ConditionalRandomField(len(tag_vocab), include_start_end_trans=True, allowed_transitions=transitions) - - self.dropout = nn.Dropout(dropout, inplace=True) - - for name, param in self.named_parameters(): - if 'fc' in name: - if param.data.dim()>1: - nn.init.xavier_uniform_(param) - else: - nn.init.constant_(param, 0) - if 'crf' in name: - nn.init.zeros_(param) - - def _forward(self, words, seq_len, target=None): - words = self.embedding(words) - outputs, _ = self.lstm(words, seq_len) - self.dropout(outputs) - - logits = F.log_softmax(self.fc(outputs), dim=-1) - - if target is not None: - loss = self.crf(logits, target, seq_len_to_mask(seq_len, max_len=logits.size(1))).mean() - return {Const.LOSS: loss} - else: - pred, _ = self.crf.viterbi_decode(logits, seq_len_to_mask(seq_len, max_len=logits.size(1))) - return {Const.OUTPUT: pred} - - def forward(self, words, seq_len, target): - return self._forward(words, seq_len, target) - - def predict(self, words, seq_len): - return self._forward(words, seq_len, None) diff --git a/reproduction/sequence_labelling/ner/train_bert.py b/reproduction/sequence_labelling/ner/train_bert.py deleted file mode 100644 index a90e9998..00000000 --- a/reproduction/sequence_labelling/ner/train_bert.py +++ /dev/null @@ -1,52 +0,0 @@ - - -""" -使用Bert进行英文命名实体识别 - -""" - -import sys - -sys.path.append('../../../') - -from reproduction.sequence_labelling.ner.model.bert_crf import BertCRF -from fastNLP.embeddings import BertEmbedding -from fastNLP import Trainer, Const -from fastNLP import BucketSampler, SpanFPreRecMetric, GradientClipCallback -from fastNLP.core.callback import WarmupCallback -from fastNLP.core.optimizer import AdamW -from fastNLP.io import Conll2003NERPipe - -from fastNLP import cache_results, EvaluateCallback - -encoding_type = 'bioes' - -@cache_results('caches/conll2003.pkl', _refresh=False) -def load_data(): - # 替换路径 - paths = 'data/conll2003' - data = Conll2003NERPipe(encoding_type=encoding_type).process_from_file(paths) - return data -data = load_data() -print(data) - -embed = BertEmbedding(data.get_vocab(Const.INPUT), model_dir_or_name='en-base-cased', - pool_method='max', 
requires_grad=True, layers='11', include_cls_sep=False, dropout=0.5, - word_dropout=0.01) - -callbacks = [ - GradientClipCallback(clip_type='norm', clip_value=1), - WarmupCallback(warmup=0.1, schedule='linear'), - EvaluateCallback(data.get_dataset('test')) - ] - -model = BertCRF(embed, tag_vocab=data.get_vocab('target'), encoding_type=encoding_type) -optimizer = AdamW(model.parameters(), lr=2e-5) - -trainer = Trainer(train_data=data.datasets['train'], model=model, optimizer=optimizer, sampler=BucketSampler(), - device=0, dev_data=data.datasets['dev'], batch_size=6, - metrics=SpanFPreRecMetric(tag_vocab=data.vocabs[Const.TARGET], encoding_type=encoding_type), - loss=None, callbacks=callbacks, num_workers=2, n_epochs=5, - check_code_level=0, update_every=3, test_use_tqdm=False) -trainer.train() - diff --git a/reproduction/sequence_labelling/ner/train_cnn_lstm_crf_conll2003.py b/reproduction/sequence_labelling/ner/train_cnn_lstm_crf_conll2003.py deleted file mode 100644 index d74963ab..00000000 --- a/reproduction/sequence_labelling/ner/train_cnn_lstm_crf_conll2003.py +++ /dev/null @@ -1,54 +0,0 @@ -import sys -sys.path.append('../../..') - -from fastNLP.embeddings import CNNCharEmbedding, StaticEmbedding, StackEmbedding - -from reproduction.sequence_labelling.ner.model.lstm_cnn_crf import CNNBiLSTMCRF -from fastNLP import Trainer -from fastNLP import SpanFPreRecMetric -from fastNLP import BucketSampler -from fastNLP import Const -from torch.optim import SGD -from fastNLP import GradientClipCallback -from fastNLP.core.callback import EvaluateCallback, LRScheduler -from torch.optim.lr_scheduler import LambdaLR -from fastNLP import cache_results - -from fastNLP.io.pipe.conll import Conll2003NERPipe -encoding_type = 'bioes' -@cache_results('caches/conll2003_new.pkl', _refresh=True) -def load_data(): - # 替换路径 - paths = {'test':"NER/corpus/CoNLL-2003/eng.testb", - 'train':"NER/corpus/CoNLL-2003/eng.train", - 'dev':"NER/corpus/CoNLL-2003/eng.testa"} - data = Conll2003NERPipe(encoding_type=encoding_type).process_from_file(paths) - return data -data = load_data() -print(data) - -char_embed = CNNCharEmbedding(vocab=data.get_vocab('words'), embed_size=30, char_emb_size=30, filter_nums=[30], - kernel_sizes=[3], word_dropout=0, dropout=0.5) -word_embed = StaticEmbedding(vocab=data.get_vocab('words'), - model_dir_or_name='en-glove-6b-100d', - requires_grad=True, lower=True, word_dropout=0.01, dropout=0.5) -word_embed.embedding.weight.data = word_embed.embedding.weight.data/word_embed.embedding.weight.data.std() -embed = StackEmbedding([word_embed, char_embed]) - -model = CNNBiLSTMCRF(embed, hidden_size=200, num_layers=1, tag_vocab=data.vocabs[Const.TARGET], - encoding_type=encoding_type) - -callbacks = [ - GradientClipCallback(clip_type='value', clip_value=5), - EvaluateCallback(data=data.get_dataset('test')) # 额外对test上的数据进行性能评测 - ] - -optimizer = SGD(model.parameters(), lr=0.008, momentum=0.9) -scheduler = LRScheduler(LambdaLR(optimizer, lr_lambda=lambda epoch: 1 / (1 + 0.05 * epoch))) -callbacks.append(scheduler) - -trainer = Trainer(train_data=data.get_dataset('train'), model=model, optimizer=optimizer, sampler=BucketSampler(), - device=0, dev_data=data.get_dataset('dev'), batch_size=20, - metrics=SpanFPreRecMetric(tag_vocab=data.vocabs[Const.TARGET], encoding_type=encoding_type), - callbacks=callbacks, num_workers=2, n_epochs=100, dev_batch_size=512) -trainer.train() \ No newline at end of file diff --git a/reproduction/sequence_labelling/ner/train_idcnn.py 
b/reproduction/sequence_labelling/ner/train_idcnn.py deleted file mode 100644 index 7f4e43af..00000000 --- a/reproduction/sequence_labelling/ner/train_idcnn.py +++ /dev/null @@ -1,138 +0,0 @@ -from fastNLP.io import OntoNotesNERPipe -from fastNLP.core.callback import LRScheduler -from fastNLP import GradientClipCallback -from torch.optim.lr_scheduler import LambdaLR -from torch.optim import Adam -from fastNLP import Const -from fastNLP import BucketSampler -from fastNLP import SpanFPreRecMetric -from fastNLP import Trainer, Tester -from fastNLP.core.metrics import MetricBase -from reproduction.sequence_labelling.ner.model.dilated_cnn import IDCNN -from fastNLP.core.utils import Option -from fastNLP.embeddings import StaticEmbedding -from fastNLP.core.utils import cache_results -import torch.cuda -import os - -encoding_type = 'bioes' - - -def get_path(path): - return os.path.join(os.environ['HOME'], path) - - -ops = Option( - batch_size=128, - num_epochs=100, - lr=3e-4, - repeats=3, - num_layers=3, - num_filters=400, - use_crf=False, - gradient_clip=5, -) - -@cache_results('ontonotes-case-cache') -def load_data(): - print('loading data') - data = OntoNotesNERPipe(encoding_type=encoding_type).process_from_file( - paths = get_path('workdir/datasets/ontonotes-v4')) - print('loading embedding') - word_embed = StaticEmbedding(vocab=data.vocabs[Const.INPUT], - model_dir_or_name='en-glove-840b-300', - requires_grad=True) - return data, [word_embed] - -data, embeds = load_data() -print(data) -print(data.datasets['train'][0]) -print(list(data.vocabs.keys())) - -# for ds in data.datasets.values(): -# ds.rename_field('cap_words', 'chars') -# ds.set_input('chars') - -word_embed = embeds[0] -word_embed.embedding.weight.data /= word_embed.embedding.weight.data.std() - -# char_embed = CNNCharEmbedding(data.vocabs['cap_words']) -char_embed = None -# for ds in data.datasets: -# ds.rename_field('') - -print(data.vocabs[Const.TARGET].word2idx) - -model = IDCNN(init_embed=word_embed, - char_embed=char_embed, - num_cls=len(data.vocabs[Const.TARGET]), - repeats=ops.repeats, - num_layers=ops.num_layers, - num_filters=ops.num_filters, - kernel_size=3, - use_crf=ops.use_crf, use_projection=True, - block_loss=True, - input_dropout=0.5, hidden_dropout=0.2, inner_dropout=0.2) - -print(model) - -callbacks = [GradientClipCallback(clip_value=ops.gradient_clip, clip_type='value'),] -metrics = [] -metrics.append( - SpanFPreRecMetric( - tag_vocab=data.vocabs[Const.TARGET], encoding_type=encoding_type, - pred=Const.OUTPUT, target=Const.TARGET, seq_len=Const.INPUT_LEN, - ) -) - -class LossMetric(MetricBase): - def __init__(self, loss=None): - super(LossMetric, self).__init__() - self._init_param_map(loss=loss) - self.total_loss = 0.0 - self.steps = 0 - - def evaluate(self, loss): - self.total_loss += float(loss) - self.steps += 1 - - def get_metric(self, reset=True): - result = {'loss': self.total_loss / (self.steps + 1e-12)} - if reset: - self.total_loss = 0.0 - self.steps = 0 - return result - -metrics.append( - LossMetric(loss=Const.LOSS) -) - -optimizer = Adam(model.parameters(), lr=ops.lr, weight_decay=0) -scheduler = LRScheduler(LambdaLR(optimizer, lr_lambda=lambda epoch: 1 / (1 + 0.05 * epoch))) -callbacks.append(scheduler) -# callbacks.append(LRScheduler(CosineAnnealingLR(optimizer, 15))) -# optimizer = SWATS(model.parameters(), verbose=True) -# optimizer = Adam(model.parameters(), lr=0.005) - -device = 'cuda:0' if torch.cuda.is_available() else 'cpu' - -trainer = Trainer(train_data=data.datasets['train'], 
model=model, optimizer=optimizer, - sampler=BucketSampler(num_buckets=50, batch_size=ops.batch_size), - device=device, dev_data=data.datasets['dev'], batch_size=ops.batch_size, - metrics=metrics, - check_code_level=-1, - callbacks=callbacks, num_workers=2, n_epochs=ops.num_epochs) -trainer.train() - -torch.save(model, 'idcnn.pt') - -tester = Tester( - data=data.datasets['test'], - model=model, - metrics=metrics, - batch_size=ops.batch_size, - num_workers=2, - device=device -) -tester.test() - diff --git a/reproduction/sequence_labelling/ner/train_ontonote.py b/reproduction/sequence_labelling/ner/train_ontonote.py deleted file mode 100644 index a0484ec3..00000000 --- a/reproduction/sequence_labelling/ner/train_ontonote.py +++ /dev/null @@ -1,66 +0,0 @@ -import sys - -sys.path.append('../../..') - -from fastNLP.embeddings import CNNCharEmbedding, StaticEmbedding, StackEmbedding - -from reproduction.sequence_labelling.ner.model.lstm_cnn_crf import CNNBiLSTMCRF -from fastNLP import Trainer -from fastNLP import SpanFPreRecMetric -from fastNLP import Const -from torch.optim import SGD -from torch.optim.lr_scheduler import LambdaLR -from fastNLP import GradientClipCallback -from fastNLP import BucketSampler -from fastNLP.core.callback import EvaluateCallback, LRScheduler -from fastNLP import cache_results -from fastNLP.io.pipe.conll import OntoNotesNERPipe - -#######hyper -normalize = False -lr = 0.01 -dropout = 0.5 -batch_size = 32 -data_name = 'ontonote' -#######hyper - - -encoding_type = 'bioes' - -@cache_results('caches/ontonotes.pkl', _refresh=True) -def cache(): - data = OntoNotesNERPipe(encoding_type=encoding_type).process_from_file('../../../../others/data/v4/english') - char_embed = CNNCharEmbedding(vocab=data.vocabs['words'], embed_size=30, char_emb_size=30, filter_nums=[30], - kernel_sizes=[3], dropout=dropout) - word_embed = StaticEmbedding(vocab=data.vocabs[Const.INPUT], - model_dir_or_name='en-glove-6b-100d', - requires_grad=True, - normalize=normalize, - word_dropout=0.01, - dropout=dropout, - lower=True, - min_freq=1) - return data, char_embed, word_embed -data, char_embed, word_embed = cache() - -print(data) - -embed = StackEmbedding([word_embed, char_embed]) -model = CNNBiLSTMCRF(embed, hidden_size=1200, num_layers=1, tag_vocab=data.vocabs[Const.TARGET], - encoding_type=encoding_type, dropout=dropout) - -callbacks = [ - GradientClipCallback(clip_value=5, clip_type='value'), - EvaluateCallback(data.datasets['test']) - ] - -optimizer = SGD(model.parameters(), lr=lr, momentum=0.9) -scheduler = LRScheduler(LambdaLR(optimizer, lr_lambda=lambda epoch: 1 / (1 + 0.05 * epoch))) -callbacks.append(scheduler) - - -trainer = Trainer(train_data=data.get_dataset('train'), model=model, optimizer=optimizer, sampler=BucketSampler(num_buckets=100), - device=0, dev_data=data.get_dataset('dev'), batch_size=batch_size, - metrics=SpanFPreRecMetric(tag_vocab=data.vocabs[Const.TARGET], encoding_type=encoding_type), - callbacks=callbacks, num_workers=1, n_epochs=100, dev_batch_size=256) -trainer.train() \ No newline at end of file diff --git a/reproduction/text_classification/README.md b/reproduction/text_classification/README.md deleted file mode 100644 index 5767d9e8..00000000 --- a/reproduction/text_classification/README.md +++ /dev/null @@ -1,40 +0,0 @@ -# text_classification任务模型复现 -这里使用fastNLP复现以下模型: - -char_cnn :论文链接[Character-level Convolutional Networks for Text Classification](https://arxiv.org/pdf/1509.01626v3.pdf) - -dpcnn:论文链接[Deep Pyramid Convolutional Neural Networks for 
TextCategorization](https://ai.tencent.com/ailab/media/publications/ACL3-Brady.pdf)
-
-HAN: paper [Hierarchical Attention Networks for Document Classification](https://www.cs.cmu.edu/~diyiy/docs/naacl16.pdf)
-
-LSTM+self_attention: paper [A Structured Self-attentive Sentence Embedding](https://arxiv.org/pdf/1703.03130.pdf)
-
-AWD-LSTM: paper [Regularizing and Optimizing LSTM Language Models](https://arxiv.org/pdf/1708.02182.pdf)
-
-# Data sources
-IMDB: http://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz
-SST-2: https://firebasestorage.googleapis.com/v0/b/mtl-sentence-representations.appspot.com/o/data%2FSST-2.zip?alt=media&token=aabc5f6b-e466-44a2-b9b4-cf6337f84ac8
-SST: https://nlp.stanford.edu/sentiment/trainDevTestTrees_PTB.zip
-yelp_full: https://drive.google.com/drive/folders/0Bz8a_Dbh9Qhbfll6bVpmNUtUcFdjYmF2SEpmZUZUcVNiMUw1TWN6RDV3a0JHT3kxLVhVR2M
-yelp_polarity: https://drive.google.com/drive/folders/0Bz8a_Dbh9Qhbfll6bVpmNUtUcFdjYmF2SEpmZUZUcVNiMUw1TWN6RDV3a0JHT3kxLVhVR2M
-
-dataset | classes | train samples | dev samples | test samples | refer |
-:---: | :---: | :---: | :---: | :---: | :---: |
-yelp_polarity | 2 | 560k | - | 38k | [char_cnn](https://arxiv.org/pdf/1509.01626v3.pdf) |
-yelp_full | 5 | 650k | - | 50k | [char_cnn](https://arxiv.org/pdf/1509.01626v3.pdf) |
-IMDB | 2 | 25k | - | 25k | [IMDB](https://ai.stanford.edu/~ang/papers/acl11-WordVectorsSentimentAnalysis.pdf) |
-sst-2 | 2 | 67k | 872 | 1.8k | [GLUE](https://arxiv.org/pdf/1804.07461.pdf) |
-
-# Datasets and reproduced results
-
-Results reproduced with fastNLP vs. results reported in the papers (fastNLP implementation before the slash, paper result after it; "-" means the paper did not report a result on that dataset)
-
-model name | yelp_p | yelp_f | sst-2 | IMDB
-:---: | :---: | :---: | :---: | :---:
-char_cnn | 93.80/95.12 | - | - | -
-dpcnn | 95.50/97.36 | - | - | -
-HAN | - | - | - | -
-LSTM | 95.74/- | 64.16/- | - | 88.52/-
-AWD-LSTM | 95.96/- | 64.74/- | - | 88.91/-
-LSTM+self_attention | 96.34/- | 65.78/- | - | 89.53/-
-
diff --git a/reproduction/text_classification/data/IMDBLoader.py b/reproduction/text_classification/data/IMDBLoader.py deleted file mode 100644 index 1585fe44..00000000
--- a/reproduction/text_classification/data/IMDBLoader.py
+++ /dev/null
@@ -1,114 +0,0 @@
-from fastNLP.io.embed_loader import EmbeddingOption, EmbedLoader
-from fastNLP.core.vocabulary import VocabularyOption
-from fastNLP.io.data_bundle import DataSetLoader, DataBundle
-from typing import Union, Dict, List, Iterator
-from fastNLP import DataSet
-from fastNLP import Instance
-from fastNLP import Vocabulary
-from fastNLP import Const
-# from reproduction.utils import check_dataloader_paths
-from functools import partial
-from reproduction.utils import check_dataloader_paths, get_tokenizer
-
-class IMDBLoader(DataSetLoader):
-    """
-    Loads the IMDB dataset. The resulting DataSet contains the following fields:
-
-    words: list(str), the text to be classified
-    target: str, the label of the text
-
-    """
-
-    def __init__(self):
-        super(IMDBLoader, self).__init__()
-        self.tokenizer = get_tokenizer()
-
-    def _load(self, path):
-        dataset = DataSet()
-        with open(path, 'r', encoding="utf-8") as f:
-            for line in f:
-                line = line.strip()
-                if not line:
-                    continue
-                parts = line.split('\t')
-                target = parts[0]
-                words = self.tokenizer(parts[1].lower())
-                dataset.append(Instance(words=words, target=target))
-
-        if len(dataset)==0:
-            raise RuntimeError(f"{path} has no valid data.")
-
-        return dataset
-
-    def process(self,
-                paths: Union[str, Dict[str, str]],
-                src_vocab_opt: VocabularyOption = None,
-                tgt_vocab_opt: VocabularyOption = None,
-                src_embed_opt: EmbeddingOption = None,
-                char_level_op=False):
-
-        datasets = {}
-        info = DataBundle()
-        paths = check_dataloader_paths(paths)
-        for name, path in paths.items():
dataset = self.load(path) - datasets[name] = dataset - - def wordtochar(words): - chars = [] - for word in words: - word = word.lower() - for char in word: - chars.append(char) - chars.append('') - chars.pop() - return chars - - if char_level_op: - for dataset in datasets.values(): - dataset.apply_field(wordtochar, field_name="words", new_field_name='chars') - - datasets["train"], datasets["dev"] = datasets["train"].split(0.1, shuffle=False) - - src_vocab = Vocabulary() if src_vocab_opt is None else Vocabulary(**src_vocab_opt) - src_vocab.from_dataset(datasets['train'], field_name='words') - - src_vocab.index_dataset(*datasets.values(), field_name='words') - - tgt_vocab = Vocabulary(unknown=None, padding=None) \ - if tgt_vocab_opt is None else Vocabulary(**tgt_vocab_opt) - tgt_vocab.from_dataset(datasets['train'], field_name='target') - tgt_vocab.index_dataset(*datasets.values(), field_name='target') - - info.vocabs = { - "words": src_vocab, - "target": tgt_vocab - } - - info.datasets = datasets - - if src_embed_opt is not None: - embed = EmbedLoader.load_with_vocab(**src_embed_opt, vocab=src_vocab) - info.embeddings['words'] = embed - - for name, dataset in info.datasets.items(): - dataset.set_input("words") - dataset.set_target("target") - - return info - - - -if __name__=="__main__": - datapath = {"train": "/remote-home/ygwang/IMDB_data/train.csv", - "test": "/remote-home/ygwang/IMDB_data/test.csv"} - datainfo=IMDBLoader().process(datapath,char_level_op=True) - #print(datainfo.datasets["train"]) - len_count = 0 - for instance in datainfo.datasets["train"]: - len_count += len(instance["chars"]) - - ave_len = len_count / len(datainfo.datasets["train"]) - print(ave_len) - diff --git a/reproduction/text_classification/data/MTL16Loader.py b/reproduction/text_classification/data/MTL16Loader.py deleted file mode 100644 index 225fffe6..00000000 --- a/reproduction/text_classification/data/MTL16Loader.py +++ /dev/null @@ -1,79 +0,0 @@ -from fastNLP.io.embed_loader import EmbeddingOption, EmbedLoader -from fastNLP.core.vocabulary import VocabularyOption -from fastNLP.io.data_bundle import DataSetLoader, DataBundle -from typing import Union, Dict, List, Iterator -from fastNLP import DataSet -from fastNLP import Instance -from fastNLP import Vocabulary -from fastNLP import Const -from reproduction.utils import check_dataloader_paths -from functools import partial - -class MTL16Loader(DataSetLoader): - """ - 读取MTL16数据集,DataSet包含以下fields: - - words: list(str), 需要分类的文本 - target: str, 文本的标签 - - 数据来源:https://pan.baidu.com/s/1c2L6vdA - - """ - - def __init__(self): - super(MTL16Loader, self).__init__() - - def _load(self, path): - dataset = DataSet() - with open(path, 'r', encoding="utf-8") as f: - for line in f: - line = line.strip() - if not line: - continue - parts = line.split('\t') - target = parts[0] - words = parts[1].lower().split() - dataset.append(Instance(words=words, target=target)) - if len(dataset)==0: - raise RuntimeError(f"{path} has no valid data.") - - return dataset - - def process(self, - paths: Union[str, Dict[str, str]], - src_vocab_opt: VocabularyOption = None, - tgt_vocab_opt: VocabularyOption = None, - src_embed_opt: EmbeddingOption = None): - - paths = check_dataloader_paths(paths) - datasets = {} - info = DataBundle() - for name, path in paths.items(): - dataset = self.load(path) - datasets[name] = dataset - - src_vocab = Vocabulary() if src_vocab_opt is None else Vocabulary(**src_vocab_opt) - src_vocab.from_dataset(datasets['train'], field_name='words') - 
src_vocab.index_dataset(*datasets.values(), field_name='words') - - tgt_vocab = Vocabulary(unknown=None, padding=None) \ - if tgt_vocab_opt is None else Vocabulary(**tgt_vocab_opt) - tgt_vocab.from_dataset(datasets['train'], field_name='target') - tgt_vocab.index_dataset(*datasets.values(), field_name='target') - - info.vocabs = { - "words": src_vocab, - "target": tgt_vocab - } - - info.datasets = datasets - - if src_embed_opt is not None: - embed = EmbedLoader.load_with_vocab(**src_embed_opt, vocab=src_vocab) - info.embeddings['words'] = embed - - for name, dataset in info.datasets.items(): - dataset.set_input("words") - dataset.set_target("target") - - return info diff --git a/reproduction/text_classification/data/TODO.txt b/reproduction/text_classification/data/TODO.txt deleted file mode 100644 index e69de29b..00000000 diff --git a/reproduction/text_classification/data/sstloader.py b/reproduction/text_classification/data/sstloader.py deleted file mode 100644 index 4e860279..00000000 --- a/reproduction/text_classification/data/sstloader.py +++ /dev/null @@ -1,198 +0,0 @@ -from typing import Iterable -from nltk import Tree -from fastNLP.io.data_bundle import DataBundle, DataSetLoader -from fastNLP.core.vocabulary import VocabularyOption, Vocabulary -from fastNLP import DataSet -from fastNLP import Instance -from fastNLP.io.embed_loader import EmbeddingOption, EmbedLoader -import csv -from typing import Union, Dict -from reproduction.utils import check_dataloader_paths, get_tokenizer - - -class SSTLoader(DataSetLoader): - """ - 读取SST数据集, DataSet包含fields:: - words: list(str) 需要分类的文本 - target: str 文本的标签 - 数据来源: https://nlp.stanford.edu/sentiment/trainDevTestTrees_PTB.zip - :param subtree: 是否将数据展开为子树,扩充数据量. Default: ``False`` - :param fine_grained: 是否使用SST-5标准,若 ``False`` , 使用SST-2。Default: ``False`` - """ - - URL = 'https://nlp.stanford.edu/sentiment/trainDevTestTrees_PTB.zip' - DATA_DIR = 'sst/' - - def __init__(self, subtree=False, fine_grained=False): - self.subtree = subtree - tag_v = {'0': 'very negative', '1': 'negative', '2': 'neutral', - '3': 'positive', '4': 'very positive'} - if not fine_grained: - tag_v['0'] = tag_v['1'] - tag_v['4'] = tag_v['3'] - self.tag_v = tag_v - - def _load(self, path): - """ - :param str path: 存储数据的路径 - :return: 一个 :class:`~fastNLP.DataSet` 类型的对象 - """ - datalist = [] - with open(path, 'r', encoding='utf-8') as f: - datas = [] - for l in f: - datas.extend([(s, self.tag_v[t]) - for s, t in self._get_one(l, self.subtree)]) - ds = DataSet() - for words, tag in datas: - ds.append(Instance(words=words, target=tag)) - return ds - - - @staticmethod - def _get_one(data, subtree): - tree = Tree.fromstring(data) - if subtree: - return [(t.leaves(), t.label()) for t in tree.subtrees()] - return [(tree.leaves(), tree.label())] - - - def process(self, - paths, - train_ds: Iterable[str] = None, - src_vocab_op: VocabularyOption = None, - tgt_vocab_op: VocabularyOption = None, - src_embed_op: EmbeddingOption = None): - input_name, target_name = 'words', 'target' - src_vocab = Vocabulary() if src_vocab_op is None else Vocabulary(**src_vocab_op) - tgt_vocab = Vocabulary(unknown=None, padding=None) \ - if tgt_vocab_op is None else Vocabulary(**tgt_vocab_op) - - info = DataBundle(datasets=self.load(paths)) - _train_ds = [info.datasets[name] - for name in train_ds] if train_ds else info.datasets.values() - src_vocab.from_dataset(*_train_ds, field_name=input_name) - tgt_vocab.from_dataset(*_train_ds, field_name=target_name) - src_vocab.index_dataset( - *info.datasets.values(), 
- field_name=input_name, new_field_name=input_name) - tgt_vocab.index_dataset( - *info.datasets.values(), - field_name=target_name, new_field_name=target_name) - info.vocabs = { - input_name: src_vocab, - target_name: tgt_vocab - } - - - if src_embed_op is not None: - src_embed_op.vocab = src_vocab - init_emb = EmbedLoader.load_with_vocab(**src_embed_op) - info.embeddings[input_name] = init_emb - - - for name, dataset in info.datasets.items(): - dataset.set_input(input_name) - dataset.set_target(target_name) - return info - - - -class sst2Loader(DataSetLoader): - ''' - 数据来源"SST":'https://firebasestorage.googleapis.com/v0/b/mtl-sentence-representations.appspot.com/o/data%2FSST-2.zip?alt=media&token=aabc5f6b-e466-44a2-b9b4-cf6337f84ac8', - ''' - - def __init__(self): - super(sst2Loader, self).__init__() - self.tokenizer = get_tokenizer() - - - def _load(self, path: str) -> DataSet: - ds = DataSet() - all_count=0 - csv_reader = csv.reader(open(path, encoding='utf-8'),delimiter='\t') - skip_row = 0 - for idx,row in enumerate(csv_reader): - if idx<=skip_row: - continue - target = row[1] - words=self.tokenizer(row[0]) - ds.append(Instance(words=words,target=target)) - all_count+=1 - print("all count:", all_count) - return ds - - - - def process(self, - paths: Union[str, Dict[str, str]], - src_vocab_opt: VocabularyOption = None, - tgt_vocab_opt: VocabularyOption = None, - src_embed_opt: EmbeddingOption = None, - char_level_op=False): - - paths = check_dataloader_paths(paths) - datasets = {} - info = DataBundle() - for name, path in paths.items(): - dataset = self.load(path) - datasets[name] = dataset - - def wordtochar(words): - chars = [] - for word in words: - word = word.lower() - for char in word: - chars.append(char) - chars.append('') - chars.pop() - return chars - - input_name, target_name = 'words', 'target' - info.vocabs={} - - # 就分隔为char形式 - if char_level_op: - for dataset in datasets.values(): - dataset.apply_field(wordtochar, field_name="words", new_field_name='chars') - src_vocab = Vocabulary() if src_vocab_opt is None else Vocabulary(**src_vocab_opt) - src_vocab.from_dataset(datasets['train'], field_name='words') - src_vocab.index_dataset(*datasets.values(), field_name='words') - - tgt_vocab = Vocabulary(unknown=None, padding=None) \ - if tgt_vocab_opt is None else Vocabulary(**tgt_vocab_opt) - tgt_vocab.from_dataset(datasets['train'], field_name='target') - tgt_vocab.index_dataset(*datasets.values(), field_name='target') - - - info.vocabs = { - "words": src_vocab, - "target": tgt_vocab - } - - info.datasets = datasets - - if src_embed_opt is not None: - embed = EmbedLoader.load_with_vocab(**src_embed_opt, vocab=src_vocab) - info.embeddings['words'] = embed - - for name, dataset in info.datasets.items(): - dataset.set_input("words") - dataset.set_target("target") - - return info - - - -if __name__=="__main__": - datapath = {"train": "/remote-home/ygwang/workspace/GLUE/SST-2/train.tsv", - "dev": "/remote-home/ygwang/workspace/GLUE/SST-2/dev.tsv"} - datainfo=sst2Loader().process(datapath,char_level_op=True) - #print(datainfo.datasets["train"]) - - len_count = 0 - for instance in datainfo.datasets["train"]: - len_count += len(instance["chars"]) - ave_len = len_count / len(datainfo.datasets["train"]) - print(ave_len) \ No newline at end of file diff --git a/reproduction/text_classification/data/yelpLoader.py b/reproduction/text_classification/data/yelpLoader.py deleted file mode 100644 index 1f7634fc..00000000 --- a/reproduction/text_classification/data/yelpLoader.py +++ /dev/null @@ 
-1,200 +0,0 @@ -import ast -import csv -from typing import Iterable -from fastNLP import DataSet, Instance, Vocabulary -from fastNLP.core.vocabulary import VocabularyOption -from fastNLP.io import JsonLoader -from fastNLP.io.data_bundle import DataBundle,DataSetLoader -from fastNLP.io.embed_loader import EmbeddingOption -from fastNLP.io.file_reader import _read_json -from typing import Union, Dict -from reproduction.utils import check_dataloader_paths, get_tokenizer - -def clean_str(sentence, tokenizer, char_lower=False): - """ - heavily borrowed from github - https://github.com/LukeZhuang/Hierarchical-Attention-Network/blob/master/yelp-preprocess.ipynb - :param sentence: is a str - :return: - """ - if char_lower: - sentence = sentence.lower() - import re - nonalpnum = re.compile('[^0-9a-zA-Z?!\']+') - words = tokenizer(sentence) - words_collection = [] - for word in words: - if word in ['-lrb-', '-rrb-', '', '-r', '-l', 'b-']: - continue - tt = nonalpnum.split(word) - t = ''.join(tt) - if t != '': - words_collection.append(t) - - return words_collection - - -class yelpLoader(DataSetLoader): - - """ - 读取Yelp_full/Yelp_polarity数据集, DataSet包含fields: - words: list(str), 需要分类的文本 - target: str, 文本的标签 - chars:list(str),未index的字符列表 - - 数据集:yelp_full/yelp_polarity - :param fine_grained: 是否使用SST-5标准,若 ``False`` , 使用SST-2。Default: ``False`` - """ - - def __init__(self, fine_grained=False,lower=False): - super(yelpLoader, self).__init__() - tag_v = {'1.0': 'very negative', '2.0': 'negative', '3.0': 'neutral', - '4.0': 'positive', '5.0': 'very positive'} - if not fine_grained: - tag_v['1.0'] = tag_v['2.0'] - tag_v['5.0'] = tag_v['4.0'] - self.fine_grained = fine_grained - self.tag_v = tag_v - self.lower = lower - self.tokenizer = get_tokenizer() - - ''' - 读取Yelp数据集, DataSet包含fields: - - review_id: str, 22 character unique review id - user_id: str, 22 character unique user id - business_id: str, 22 character business id - useful: int, number of useful votes received - funny: int, number of funny votes received - cool: int, number of cool votes received - date: str, date formatted YYYY-MM-DD - words: list(str), 需要分类的文本 - target: str, 文本的标签 - - 数据来源: https://www.yelp.com/dataset/download - - - def _load_json(self, path): - ds = DataSet() - for idx, d in _read_json(path, fields=self.fields_list, dropna=self.dropna): - d = ast.literal_eval(d) - d["words"] = d.pop("text").split() - d["target"] = self.tag_v[str(d.pop("stars"))] - ds.append(Instance(**d)) - return ds - - def _load_yelp2015_broken(self,path): - ds = DataSet() - with open (path,encoding='ISO 8859-1') as f: - row=f.readline() - all_count=0 - exp_count=0 - while row: - row=row.split("\t\t") - all_count+=1 - if len(row)>=3: - words=row[-1].split() - try: - target=self.tag_v[str(row[-2])+".0"] - ds.append(Instance(words=words, target=target)) - except KeyError: - exp_count+=1 - else: - exp_count+=1 - row = f.readline() - print("error sample count:",exp_count) - print("all count:",all_count) - return ds - ''' - - def _load(self, path): - ds = DataSet() - csv_reader=csv.reader(open(path,encoding='utf-8')) - all_count=0 - real_count=0 - for row in csv_reader: - all_count+=1 - if len(row)==2: - target=self.tag_v[row[0]+".0"] - words = clean_str(row[1], self.tokenizer, self.lower) - if len(words)!=0: - ds.append(Instance(words=words,target=target)) - real_count += 1 - print("all count:", all_count) - print("real count:", real_count) - return ds - - - - def process(self, paths: Union[str, Dict[str, str]], - train_ds: Iterable[str] = None, - src_vocab_op: 
VocabularyOption = None, - tgt_vocab_op: VocabularyOption = None, - embed_opt: EmbeddingOption = None, - char_level_op=False, - split_dev_op=True - ): - paths = check_dataloader_paths(paths) - datasets = {} - info = DataBundle(datasets=self.load(paths)) - src_vocab = Vocabulary() if src_vocab_op is None else Vocabulary(**src_vocab_op) - tgt_vocab = Vocabulary(unknown=None, padding=None) \ - if tgt_vocab_op is None else Vocabulary(**tgt_vocab_op) - _train_ds = [info.datasets[name] - for name in train_ds] if train_ds else info.datasets.values() - - def wordtochar(words): - chars = [] - for word in words: - word = word.lower() - for char in word: - chars.append(char) - chars.append('') - chars.pop() - return chars - - input_name, target_name = 'words', 'target' - info.vocabs={} - #就分隔为char形式 - if char_level_op: - for dataset in info.datasets.values(): - dataset.apply_field(wordtochar, field_name="words",new_field_name='chars') - # if embed_opt is not None: - # embed = EmbedLoader.load_with_vocab(**embed_opt, vocab=vocab) - # info.embeddings['words'] = embed - else: - src_vocab.from_dataset(*_train_ds, field_name=input_name) - src_vocab.index_dataset(*info.datasets.values(),field_name=input_name, new_field_name=input_name) - info.vocabs[input_name]=src_vocab - - tgt_vocab.from_dataset(*_train_ds, field_name=target_name) - tgt_vocab.index_dataset( - *info.datasets.values(), - field_name=target_name, new_field_name=target_name) - - info.vocabs[target_name]=tgt_vocab - - if split_dev_op: - info.datasets['train'], info.datasets['dev'] = info.datasets['train'].split(0.1, shuffle=False) - - for name, dataset in info.datasets.items(): - dataset.set_input("words") - dataset.set_target("target") - - return info - -if __name__=="__main__": - testloader=yelpLoader() - # datapath = {"train": "/remote-home/ygwang/yelp_full/train.csv", - # "test": "/remote-home/ygwang/yelp_full/test.csv"} - #datapath={"train": "/remote-home/ygwang/yelp_full/test.csv"} - datapath = {"train": "/remote-home/ygwang/yelp_polarity/train.csv", - "test": "/remote-home/ygwang/yelp_polarity/test.csv"} - datainfo=testloader.process(datapath,char_level_op=True) - - len_count=0 - for instance in datainfo.datasets["train"]: - len_count+=len(instance["chars"]) - - ave_len=len_count/len(datainfo.datasets["train"]) - print(ave_len) diff --git a/reproduction/text_classification/model/BertTC.py b/reproduction/text_classification/model/BertTC.py deleted file mode 100644 index 702c0cd1..00000000 --- a/reproduction/text_classification/model/BertTC.py +++ /dev/null @@ -1,24 +0,0 @@ -from fastNLP.embeddings import BertEmbedding -import torch -import torch.nn as nn -from fastNLP.core.const import Const as C - -class BertTC(nn.Module): - def __init__(self, vocab,num_class,bert_model_dir_or_name,fine_tune=False): - super(BertTC, self).__init__() - self.embed=BertEmbedding(vocab, requires_grad=fine_tune, - model_dir_or_name=bert_model_dir_or_name,include_cls_sep=True) - self.classifier = nn.Linear(self.embed.embedding_dim, num_class) - - def forward(self, words): - embedding_cls=self.embed(words)[:,0] - output=self.classifier(embedding_cls) - return {C.OUTPUT: output} - - def predict(self,words): - return self.forward(words) - -if __name__=="__main__": - ta=torch.tensor([[1,2,3],[4,5,6],[7,8,9]]) - tb=ta[:,0] - print(tb) diff --git a/reproduction/text_classification/model/HAN.py b/reproduction/text_classification/model/HAN.py deleted file mode 100644 index 7ebbe30f..00000000 --- a/reproduction/text_classification/model/HAN.py +++ /dev/null @@ -1,109 
+0,0 @@ -import torch -import torch.nn as nn -from torch.autograd import Variable -from fastNLP.embeddings.utils import get_embeddings -from fastNLP.core import Const as C - - -def pack_sequence(tensor_seq, padding_value=0.0): - if len(tensor_seq) <= 0: - return - length = [v.size(0) for v in tensor_seq] - max_len = max(length) - size = [len(tensor_seq), max_len] - size.extend(list(tensor_seq[0].size()[1:])) - ans = torch.Tensor(*size).fill_(padding_value) - if tensor_seq[0].data.is_cuda: - ans = ans.cuda() - ans = Variable(ans) - for i, v in enumerate(tensor_seq): - ans[i, :length[i], :] = v - return ans - - -class HANCLS(nn.Module): - def __init__(self, init_embed, num_cls): - super(HANCLS, self).__init__() - - self.embed = get_embeddings(init_embed) - self.han = HAN(input_size=300, - output_size=num_cls, - word_hidden_size=50, word_num_layers=1, word_context_size=100, - sent_hidden_size=50, sent_num_layers=1, sent_context_size=100 - ) - - def forward(self, input_sents): - # input_sents [B, num_sents, seq-len] dtype long - # target - B, num_sents, seq_len = input_sents.size() - input_sents = input_sents.view(-1, seq_len) # flat - words_embed = self.embed(input_sents) # should be [B*num-sent, seqlen , word-dim] - words_embed = words_embed.view(B, num_sents, seq_len, -1) # recover # [B, num-sent, seqlen , word-dim] - out = self.han(words_embed) - - return {C.OUTPUT: out} - - def predict(self, input_sents): - x = self.forward(input_sents)[C.OUTPUT] - return {C.OUTPUT: torch.argmax(x, 1)} - - -class HAN(nn.Module): - def __init__(self, input_size, output_size, - word_hidden_size, word_num_layers, word_context_size, - sent_hidden_size, sent_num_layers, sent_context_size): - super(HAN, self).__init__() - - self.word_layer = AttentionNet(input_size, - word_hidden_size, - word_num_layers, - word_context_size) - self.sent_layer = AttentionNet(2 * word_hidden_size, - sent_hidden_size, - sent_num_layers, - sent_context_size) - self.output_layer = nn.Linear(2 * sent_hidden_size, output_size) - self.softmax = nn.LogSoftmax(dim=1) - - def forward(self, batch_doc): - # input is a sequence of matrix - doc_vec_list = [] - for doc in batch_doc: - sent_mat = self.word_layer(doc) # doc's dim (num_sent, seq_len, word_dim) - doc_vec_list.append(sent_mat) # sent_mat's dim (num_sent, vec_dim) - doc_vec = self.sent_layer(pack_sequence(doc_vec_list)) - output = self.softmax(self.output_layer(doc_vec)) - return output - - -class AttentionNet(nn.Module): - def __init__(self, input_size, gru_hidden_size, gru_num_layers, context_vec_size): - super(AttentionNet, self).__init__() - - self.input_size = input_size - self.gru_hidden_size = gru_hidden_size - self.gru_num_layers = gru_num_layers - self.context_vec_size = context_vec_size - - # Encoder - self.gru = nn.GRU(input_size=input_size, - hidden_size=gru_hidden_size, - num_layers=gru_num_layers, - batch_first=True, - bidirectional=True) - # Attention - self.fc = nn.Linear(2 * gru_hidden_size, context_vec_size) - self.tanh = nn.Tanh() - self.softmax = nn.Softmax(dim=1) - # context vector - self.context_vec = nn.Parameter(torch.Tensor(context_vec_size, 1)) - self.context_vec.data.uniform_(-0.1, 0.1) - - def forward(self, inputs): - # GRU part - h_t, hidden = self.gru(inputs) # inputs's dim (batch_size, seq_len, word_dim) - u = self.tanh(self.fc(h_t)) - # Attention part - alpha = self.softmax(torch.matmul(u, self.context_vec)) # u's dim (batch_size, seq_len, context_vec_size) - output = torch.bmm(torch.transpose(h_t, 1, 2), alpha) # alpha's dim (batch_size, seq_len, 1) 
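- # At each step t the layer computes u_t = tanh(W h_t + b) and alpha_t = softmax_t(u_t . c); - # the bmm above therefore yields sum_t alpha_t * h_t, the context-weighted sequence summary.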
- return torch.squeeze(output, dim=2) # output's dim (batch_size, 2*hidden_size) after squeezing the last axis 
diff --git a/reproduction/text_classification/model/awd_lstm.py b/reproduction/text_classification/model/awd_lstm.py deleted file mode 100644 index c9c8a153..00000000 --- a/reproduction/text_classification/model/awd_lstm.py +++ /dev/null @@ -1,31 +0,0 @@ -import torch -import torch.nn as nn -from fastNLP.core.const import Const as C -from .awdlstm_module import LSTM -from fastNLP.embeddings.utils import get_embeddings -from fastNLP.modules.decoder.mlp import MLP - - -class AWDLSTMSentiment(nn.Module): - def __init__(self, init_embed, - num_classes, - hidden_dim=256, - num_layers=1, - nfc=128, - wdrop=0.5): - super(AWDLSTMSentiment, self).__init__() - self.embed = get_embeddings(init_embed) - self.lstm = LSTM(input_size=self.embed.embedding_dim, hidden_size=hidden_dim, num_layers=num_layers, bidirectional=True, wdrop=wdrop) - self.mlp = MLP(size_layer=[hidden_dim * 2, nfc, num_classes]) - - def forward(self, words): - x_emb = self.embed(words) - output, _ = self.lstm(x_emb) - output = self.mlp(output[:, -1, :]) # classify from the last time step - return {C.OUTPUT: output} - - def predict(self, words): - output = self(words) - _, predict = output[C.OUTPUT].max(dim=1) - return {C.OUTPUT: predict} - 
diff --git a/reproduction/text_classification/model/awdlstm_module.py b/reproduction/text_classification/model/awdlstm_module.py deleted file mode 100644 index a586ed2d..00000000 --- a/reproduction/text_classification/model/awdlstm_module.py +++ /dev/null @@ -1,84 +0,0 @@ -""" -A lightweight wrapper around the PyTorch LSTM. Sequence lengths can be passed to forward, in which case padding is handled automatically. -""" -__all__ = [ - "LSTM" -] - -import torch -import torch.nn as nn -import torch.nn.utils.rnn as rnn - -from fastNLP.modules.utils import initial_parameter -from torch import autograd -from .weight_drop import WeightDrop - - -class LSTM(nn.Module): - """ - A thin wrapper around the PyTorch LSTM. When seq_len is provided, pack_padded_sequence is used automatically; the forget-gate bias is initialized to 1 by default; and the wrapper works around issues with LSTMs under DataParallel. - - :param input_size: feature dimension of the input `x` - :param hidden_size: feature dimension of the hidden state `h` - :param num_layers: number of recurrent layers. Default: 1 - :param dropout: dropout probability between layers. Default: 0 - :param bidirectional: if ``True``, use a bidirectional RNN. Default: ``False`` - :param batch_first: if ``True``, input and output ``Tensor`` shapes are (batch, seq, feature). Default: ``True`` - :param bias: if ``False``, the layers use no bias. Default: ``True`` - :param wdrop: weight-drop probability applied to the hidden-to-hidden weights. Default: 0.5 - """ - - def __init__(self, input_size, hidden_size=100, num_layers=1, dropout=0.0, batch_first=True, - bidirectional=False, bias=True, wdrop=0.5): - super(LSTM, self).__init__() - self.batch_first = batch_first - self.lstm = nn.LSTM(input_size, hidden_size, num_layers, bias=bias, batch_first=batch_first, - dropout=dropout, bidirectional=bidirectional) - self.lstm = WeightDrop(self.lstm, ['weight_hh_l0'], dropout=wdrop) - self.init_param() - - def init_param(self): - for name, param in self.named_parameters(): - if 'bias' in name: - # based on https://github.com/pytorch/pytorch/issues/750#issuecomment-280671871 - param.data.fill_(0) - n = param.size(0) - start, end = n // 4, n // 2 - param.data[start:end].fill_(1) # forget-gate bias slice set to 1 - else: - nn.init.xavier_uniform_(param) - - def forward(self, x, seq_len=None, h0=None, c0=None): - """ - - :param x: [batch, seq_len, input_size] the input sequence - :param seq_len: [batch, ] the sequence lengths; if ``None``, all inputs are treated as equally long. Default: ``None`` - :param h0: [batch, hidden_size] the initial hidden state; if ``None``, a zero vector is used. Default: ``None`` - :param c0: [batch, hidden_size] the initial cell state; if ``None``, a zero vector is used. Default: ``None`` - :return: (output, hx): the [batch, seq_len, hidden_size*num_direction] output sequence and the final hidden/cell states. - """ - batch_size, max_len, _ = x.size() - if h0 is not None and c0 is not None: - hx = (h0, c0) - else: - hx = None - if seq_len is not None and not isinstance(x, rnn.PackedSequence): - sort_lens, sort_idx = torch.sort(seq_len, dim=0, descending=True) - if self.batch_first: - x = x[sort_idx] - else: - x = x[:, sort_idx] - x = rnn.pack_padded_sequence(x, sort_lens, batch_first=self.batch_first) - output, hx = self.lstm(x, hx) # -> [N,L,C] - output, _ = rnn.pad_packed_sequence(output, batch_first=self.batch_first, total_length=max_len) - _, unsort_idx = torch.sort(sort_idx, dim=0, descending=False) - if self.batch_first: - output = output[unsort_idx] - else: - output = output[:, unsort_idx] - else: - output, hx = self.lstm(x, hx) - return output, hx 
diff --git a/reproduction/text_classification/model/bert.py b/reproduction/text_classification/model/bert.py deleted file mode 100644 index f87f5c14..00000000 --- a/reproduction/text_classification/model/bert.py +++ /dev/null @@ -1 +0,0 @@ -# TODO \ No newline at end of file 
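The character-level CNN that follows consumes fixed-length sequences of character indices over a fixed alphabet. A self-contained sketch of that quantization step (the alphabet matches the config below; the short max_length is illustrative, the deleted config uses 1014):

# Hedged sketch: map a string to fixed-length indices over a small alphabet.
alphabet = "abcdefghijklmnopqrstuvwxyz0123456789-,;.!?:'\"/\\|_@#$%^&*~`+-=<>()[]{}"
max_length = 16      # illustrative only
unk = len(alphabet)  # out-of-alphabet characters share one extra index, also used as padding

def quantize(text):
    idx = [alphabet.index(c) if c in alphabet else unk for c in text.lower()]
    return idx[:max_length] + [unk] * max(0, max_length - len(idx))

print(quantize("Yelp!"))  # [24, 4, 11, 15, 40, 69, 69, ...]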
diff --git a/reproduction/text_classification/model/char_cnn.py b/reproduction/text_classification/model/char_cnn.py deleted file mode 100644 index ac370082..00000000 --- a/reproduction/text_classification/model/char_cnn.py +++ /dev/null @@ -1,90 +0,0 @@ -''' -@author: https://github.com/ahmedbesbes/character-based-cnn -The char-CNN model code is adapted from the link above; the main change is making it fit the fastNLP pipeline. -''' -import torch -import torch.nn as nn -from fastNLP.core.const import Const as C - -class CharacterLevelCNN(nn.Module): - def __init__(self, args, embedding): - super(CharacterLevelCNN, self).__init__() - - self.config = args.char_cnn_config - self.embedding = embedding - - conv_layers = [] - for i, conv_layer_parameter in enumerate(self.config['model_parameters'][args.model_size]['conv']): - if i == 0: - # in_channels = args.number_of_characters + len(args.extra_characters) - in_channels = args.embedding_dim - out_channels = conv_layer_parameter[0] - else: - in_channels, out_channels = conv_layer_parameter[0], conv_layer_parameter[0] - - if conv_layer_parameter[2] != -1: - conv_layer = nn.Sequential(nn.Conv1d(in_channels, - out_channels, - kernel_size=conv_layer_parameter[1], padding=0), - nn.ReLU(), - nn.MaxPool1d(conv_layer_parameter[2])) - else: - conv_layer = nn.Sequential(nn.Conv1d(in_channels, - out_channels, - kernel_size=conv_layer_parameter[1], padding=0), - nn.ReLU()) - conv_layers.append(conv_layer) - self.conv_layers = nn.ModuleList(conv_layers) - - input_shape = (args.batch_size, args.max_length, - args.number_of_characters + len(args.extra_characters)) - dimension = self._get_conv_output(input_shape) - - print('dimension :', dimension) - - fc_layer_parameter = self.config['model_parameters'][args.model_size]['fc'][0] - fc_layers = nn.ModuleList([ - nn.Sequential( - nn.Linear(dimension, fc_layer_parameter), nn.Dropout(0.5)), - nn.Sequential(nn.Linear(fc_layer_parameter, - fc_layer_parameter), nn.Dropout(0.5)), - nn.Linear(fc_layer_parameter, args.num_classes), - ]) - - self.fc_layers = fc_layers - - if args.model_size == 'small': - self._create_weights(mean=0.0, std=0.05) - elif args.model_size == 'large': - self._create_weights(mean=0.0, std=0.02) - - def _create_weights(self, mean=0.0, std=0.05): - for module in self.modules(): - if isinstance(module, nn.Conv1d) or isinstance(module, nn.Linear): - module.weight.data.normal_(mean, std) - - def _get_conv_output(self, shape): - input = torch.rand(shape) - output = input.transpose(1, 2) - # forward pass through conv layers to probe the flattened feature size - for i in range(len(self.conv_layers)): - output = self.conv_layers[i](output) - - output = output.view(output.size(0), -1) - n_size = output.size(1) - return n_size - - def forward(self, chars): - input = self.embedding(chars) - output = input.transpose(1, 2) - # forward pass through conv layers - for i in range(len(self.conv_layers)): - output = self.conv_layers[i](output) - - output = output.view(output.size(0), -1) - - # forward pass through fc layers - for i in range(len(self.fc_layers)): - output = self.fc_layers[i](output) - - return {C.OUTPUT: output} \ No newline at end of file 
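The _get_conv_output probe above sizes the first fully connected layer by pushing a dummy batch through the convolutions instead of deriving L_out analytically (for each unpadded conv/pool, L_out = floor((L_in - kernel_size) / stride) + 1). A self-contained sketch of the same trick, with illustrative layer sizes:

# Hedged sketch: infer the flattened feature size with a dummy forward pass.
import torch
import torch.nn as nn

convs = nn.Sequential(
    nn.Conv1d(69, 256, kernel_size=7), nn.ReLU(), nn.MaxPool1d(3),
    nn.Conv1d(256, 256, kernel_size=3), nn.ReLU(),
)
dummy = torch.rand(1, 69, 1014)          # [batch, channels, length]
flat = convs(dummy).view(1, -1).size(1)  # flattened size that feeds the first fc layer
print(flat)                              # 256 * 334 = 85504 for these layers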
diff --git a/reproduction/text_classification/model/dpcnn.py b/reproduction/text_classification/model/dpcnn.py deleted file mode 100644 index b63c6d38..00000000 --- a/reproduction/text_classification/model/dpcnn.py +++ /dev/null @@ -1,97 +0,0 @@ -import torch -import torch.nn as nn -from fastNLP.core import Const as C - - -class DPCNN(nn.Module): - def __init__(self, init_embed, num_cls, n_filters=256, - kernel_size=3, n_layers=7, embed_dropout=0.1, cls_dropout=0.1): - super().__init__() - self.region_embed = RegionEmbedding( - init_embed, out_dim=n_filters, kernel_sizes=[1, 3, 5]) - embed_dim = self.region_embed.embedding_dim - self.conv_list = nn.ModuleList() - for i in range(n_layers): - self.conv_list.append(nn.Sequential( - nn.ReLU(), - nn.Conv1d(n_filters, n_filters, kernel_size, - padding=kernel_size//2), - nn.Conv1d(n_filters, n_filters, kernel_size, - padding=kernel_size//2), - )) - self.pool = nn.MaxPool1d(kernel_size=3, stride=2, padding=1) - self.embed_drop = nn.Dropout(embed_dropout) - self.classifier = nn.Sequential( - nn.Dropout(cls_dropout), - nn.Linear(n_filters, num_cls), - ) - self.reset_parameters() - - def reset_parameters(self): - for m in self.modules(): - if isinstance(m, (nn.Conv1d, nn.Conv2d, nn.Linear)): - nn.init.normal_(m.weight, mean=0, std=0.01) - if m.bias is not None: - nn.init.normal_(m.bias, mean=0, std=0.01) - - def forward(self, words, seq_len=None): - words = words.long() - # get region embeddings - x = self.region_embed(words) - x = self.embed_drop(x) - - # no pooling on the first conv block - x = self.conv_list[0](x) + x - for conv in self.conv_list[1:]: - x = self.pool(x) - x = conv(x) + x - - # B, C, L => B, C - x, _ = torch.max(x, dim=2) - x = self.classifier(x) - return {C.OUTPUT: x} - - def predict(self, words, seq_len=None): - x = self.forward(words, seq_len)[C.OUTPUT] - return {C.OUTPUT: torch.argmax(x, 1)} - - -class RegionEmbedding(nn.Module): - def __init__(self, init_embed, out_dim=300, kernel_sizes=None): - super().__init__() - if kernel_sizes is None: - kernel_sizes = [5, 9] - assert isinstance( - kernel_sizes, list), 'kernel_sizes should be List(int)' - # self.embed = nn.Embedding.from_pretrained(torch.tensor(init_embed).float(), freeze=False) - self.embed = init_embed - try: - embed_dim = self.embed.embedding_dim - except Exception: - embed_dim = self.embed.embed_size - self.region_embeds = nn.ModuleList() - for ksz in kernel_sizes: - self.region_embeds.append(nn.Sequential( - nn.Conv1d(embed_dim, embed_dim, ksz, padding=ksz // 2), - )) - self.linears = nn.ModuleList([nn.Conv1d(embed_dim, out_dim, 1) - for _ in range(len(kernel_sizes))]) - self.embedding_dim = embed_dim - - def forward(self, x): - x = self.embed(x) - x = x.transpose(1, 2) - # B, C, L - out = 0 - for conv, fc in zip(self.region_embeds, self.linears): # pair every conv branch with its own 1x1 projection - conv_i = conv(x) - out = out + fc(conv_i) - # B, C, L - return out - - -if __name__ == '__main__': - x = torch.randint(0, 10000, size=(5, 15), dtype=torch.long) - model = DPCNN((10000, 300), 20) - y = model(x)[C.OUTPUT] # forward returns a dict keyed by Const.OUTPUT - print(y.size(), y.mean(1), y.std(1)) 
diff --git a/reproduction/text_classification/model/lstm.py b/reproduction/text_classification/model/lstm.py deleted file mode 100644 index 16c7652c..00000000 --- a/reproduction/text_classification/model/lstm.py +++ /dev/null @@ -1,30 +0,0 @@ -import torch -import torch.nn as nn -from fastNLP.core.const import Const as C -from fastNLP.modules.encoder.lstm import LSTM -from fastNLP.embeddings.utils import get_embeddings -from fastNLP.modules.decoder.mlp import MLP - - -class BiLSTMSentiment(nn.Module): - def __init__(self, init_embed, - num_classes, - hidden_dim=256, - num_layers=1, - nfc=128): - super(BiLSTMSentiment, self).__init__() - self.embed = get_embeddings(init_embed) - self.lstm = LSTM(input_size=self.embed.embedding_dim, hidden_size=hidden_dim, num_layers=num_layers, bidirectional=True) - self.mlp = MLP(size_layer=[hidden_dim * 2, nfc, num_classes]) - - def forward(self, words): - x_emb = self.embed(words) - output, _ = self.lstm(x_emb) - output = self.mlp(torch.max(output, dim=1)[0]) # max-pool over time - return {C.OUTPUT: output} - - def predict(self, words): - output = self(words) - _, predict = output[C.OUTPUT].max(dim=1) - return {C.OUTPUT: predict} - 
diff --git a/reproduction/text_classification/model/lstm_self_attention.py b/reproduction/text_classification/model/lstm_self_attention.py deleted file mode 100644 index b79cb1b0..00000000 --- a/reproduction/text_classification/model/lstm_self_attention.py +++ /dev/null @@ -1,34 +0,0 @@ -import torch.nn as nn -from fastNLP.core.const import Const as C -from fastNLP.modules.encoder.lstm import LSTM -from fastNLP.embeddings.utils import get_embeddings -from fastNLP.modules.attention import SelfAttention -from fastNLP.modules.decoder.mlp import MLP - - -class BiLSTM_SELF_ATTENTION(nn.Module): - def __init__(self, init_embed, - num_classes, - hidden_dim=256, - num_layers=1, - attention_unit=256, - attention_hops=1, - nfc=128): - super(BiLSTM_SELF_ATTENTION, self).__init__() - self.embed = get_embeddings(init_embed) - self.lstm = LSTM(input_size=self.embed.embedding_dim, hidden_size=hidden_dim, num_layers=num_layers, bidirectional=True) - self.attention = SelfAttention(input_size=hidden_dim * 2, attention_unit=attention_unit, attention_hops=attention_hops) - self.mlp = MLP(size_layer=[hidden_dim * 2 * attention_hops, nfc, num_classes]) - - def forward(self, words): - x_emb = self.embed(words) - output, _ = self.lstm(x_emb) - after_attention, penalty = self.attention(output, words) - after_attention = after_attention.view(after_attention.size(0), -1) - output = self.mlp(after_attention) - return {C.OUTPUT: output} - - def predict(self, words): - output = self(words) - _, predict = output[C.OUTPUT].max(dim=1) - return {C.OUTPUT: predict} 
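The next deleted file, weight_drop.py, implements DropConnect on the recurrent weights: the raw hidden-to-hidden matrix is stashed under a `_raw` name and a freshly dropped-out copy is re-materialized before every forward pass. A condensed sketch of just that core idea (not the full class):

# Hedged sketch: DropConnect on a recurrent weight matrix.
import torch
import torch.nn.functional as F

raw_w = torch.randn(40, 10)                 # stands in for weight_hh_l0_raw
w = F.dropout(raw_w, p=0.5, training=True)  # a fresh mask is drawn per forward pass
# the wrapper then does setattr(module, 'weight_hh_l0', w) before calling module.forward
print((w == 0).float().mean())              # roughly half of the entries are zeroed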
diff --git a/reproduction/text_classification/model/weight_drop.py b/reproduction/text_classification/model/weight_drop.py deleted file mode 100644 index 688c8d54..00000000 --- a/reproduction/text_classification/model/weight_drop.py +++ /dev/null @@ -1,99 +0,0 @@ -import torch -from torch.nn import Parameter -from functools import wraps - -class WeightDrop(torch.nn.Module): - def __init__(self, module, weights, dropout=0, variational=False): - super(WeightDrop, self).__init__() - self.module = module - self.weights = weights - self.dropout = dropout - self.variational = variational - self._setup() - - def widget_demagnetizer_y2k_edition(*args, **kwargs): - # We need to replace flatten_parameters with a nothing function - # It must be a function rather than a lambda as otherwise pickling explodes - # We can't write boring code though, so ... WIDGET DEMAGNETIZER Y2K EDITION! - # (╯°□°)╯︵ ┻━┻ - return - - def _setup(self): - # Terrible temporary solution to an issue regarding compacting weights re: CUDNN RNN - if issubclass(type(self.module), torch.nn.RNNBase): - self.module.flatten_parameters = self.widget_demagnetizer_y2k_edition - - for name_w in self.weights: - print('Applying weight drop of {} to {}'.format(self.dropout, name_w)) - w = getattr(self.module, name_w) - del self.module._parameters[name_w] - self.module.register_parameter(name_w + '_raw', Parameter(w.data)) - - def _setweights(self): - for name_w in self.weights: - raw_w = getattr(self.module, name_w + '_raw') - w = None - if self.variational: - mask = torch.autograd.Variable(torch.ones(raw_w.size(0), 1)) - if raw_w.is_cuda: mask = mask.cuda() - mask = torch.nn.functional.dropout(mask, p=self.dropout, training=True) - w = mask.expand_as(raw_w) * raw_w - else: - w = torch.nn.functional.dropout(raw_w, p=self.dropout, training=self.training) - setattr(self.module, name_w, w) - - def forward(self, *args): - self._setweights() - return self.module.forward(*args) - -if __name__ == '__main__': - import torch - from weight_drop import WeightDrop - - # Input is (seq, batch, input) - x = torch.autograd.Variable(torch.randn(2, 1, 10)).cuda() - h0 = None - - ### - - print('Testing WeightDrop') - print('=-=-=-=-=-=-=-=-=-=') - - ### - - print('Testing WeightDrop with Linear') - - lin = WeightDrop(torch.nn.Linear(10, 10), ['weight'], dropout=0.9) - lin.cuda() - run1 = [x.sum() for x in lin(x).data] - run2 = [x.sum() for x in lin(x).data] - - print('All items should be different') - print('Run 1:', run1) - print('Run 2:', run2) - - assert run1[0] != run2[0] - assert run1[1] != run2[1] - - print('---') - - ### - - print('Testing WeightDrop with LSTM') - - wdrnn = WeightDrop(torch.nn.LSTM(10, 10), ['weight_hh_l0'], dropout=0.9) - wdrnn.cuda() - - run1 = [x.sum() for x in wdrnn(x, h0)[0].data] - run2 = [x.sum() for x in wdrnn(x, h0)[0].data] - - print('First timesteps should be equal, all others should differ') - print('Run 1:', run1) - print('Run 2:', run2) - - # First time step, not influenced by hidden to hidden weights, should be equal - assert run1[0] == run2[0] - # Second step should not - assert run1[1] != run2[1] - - print('---') 
diff --git a/reproduction/text_classification/test/TODO.txt b/reproduction/text_classification/test/TODO.txt deleted file mode 100644 index e69de29b..00000000 
diff --git a/reproduction/text_classification/test/sample_MTL16.txt b/reproduction/text_classification/test/sample_MTL16.txt deleted file mode 100644 index f1efeb4c..00000000 --- a/reproduction/text_classification/test/sample_MTL16.txt +++ /dev/null @@ -1,10 +0,0 @@ -1 the only thing better than these sunglasses is the customer service i got , after i dropped and broke the lenses on these i called 80 's purple and they actually sent me out a replacement free of charge . i was blown away -0 this light worked for one day . i should have known better because in the past , i bought a tap light , and it worked for only a few days , too . do n't waste your money -1 i 've tried 6 different nursing bras . this one , with the center snap closure , is the easiest to use . 
it is also the lightest and most comfortable , while providing good support . my only complaint is that after about 50 washes the underwire begins to poke free from the fabric . even when i try to sew it back into place , it breaks loose after a few washes . perhaps if i handwashed the bra instead of using a machine , it would last longer . this bra is less durabe than my other nursing bras ( particularly the leading lady bra , which seems to be indestructible ) , but it is well worth the sacrifice for comfort , lightness , and ease of use . it is by far my favorite -0 i have had my bag for a couple of months . the liner on the inside has already ripped -0 the photo is quite deceiving . this suit is made out of cheap polyester fabric that looks cheap , shiny , and is horrible to the touch . my three year olds hate the uncomfortable stiffness . spend the extra money for a decent fabric that is actually practical for a toddler if they really need a suit -1 i had bought a bra of this model at a discount store , just got lucky . it quickly became my favorite , and i was glad to find it at amazon . -0 lookslike it would be a nice product , but it 's only for very small babies up to 12 pounds and 23 inches . my baby is very long and just does n't fit - wish target/amazon would have been more upfront with the sizing -0 i purchased the non-premium kit ( $ 9.99 ) with a silicone skin case cover and 2 screen protectors ( one for each screen ) , but it is the same case . the problem is that the silicone skin cover is slippery , twice as slippery as the nintendo lite without the cover . we thought that washing them in dove dish soap would wash away the slipperyness , but that did n't work . after handling the cover , your hands have a slippery residue on them . the other issue is that the cover is so thin that it is little more than scratch protection , not impact protection . the screen covers that come with the non-premium kit are ok , i guess , but one of them had 2 defect particles that were raised ( trust me , the screen was clean ) . i purchased 2 kits , and i had one screen protector defect and my wife accidentally broke one of the silicone covers hinge straps with little effort . i do not recommend this product at all -1 good quality jeans at an affordable price . size is just right , quite comfortable -0 not the best fabric , scratchy and see thru . you get what you pay for on these diff --git a/reproduction/text_classification/test/sample_yelp.json b/reproduction/text_classification/test/sample_yelp.json deleted file mode 100644 index 053dc4bc..00000000 --- a/reproduction/text_classification/test/sample_yelp.json +++ /dev/null @@ -1,20 +0,0 @@ -"{\"review_id\":\"Q1sbwvVQXV2734tPgoKj4Q\",\"user_id\":\"hG7b0MtEbXx5QzbzE6C_VA\",\"business_id\":\"ujmEBvifdJM6h6RLv4wQIg\",\"stars\":1.0,\"useful\":6,\"funny\":1,\"cool\":0,\"text\":\"Total bill for this horrible service? Over $8Gs. These crooks actually had the nerve to charge us $69 for 3 pills. I checked online the pills can be had for 19 cents EACH! Avoid Hospital ERs at all costs.\",\"date\":\"2013-05-07 04:34:36\"}\n" -"{\"review_id\":\"GJXCdrto3ASJOqKeVWPi6Q\",\"user_id\":\"yXQM5uF2jS6es16SJzNHfg\",\"business_id\":\"NZnhc2sEQy3RmzKTZnqtwQ\",\"stars\":5.0,\"useful\":0,\"funny\":0,\"cool\":0,\"text\":\"I *adore* Travis at the Hard Rock's new Kelly Cardenas Salon! I'm always a fan of a great blowout and no stranger to the chains that offer this service; however, Travis has taken the flawless blowout to a whole new level! 
\\n\\nTravis's greets you with his perfectly green swoosh in his otherwise perfectly styled black hair and a Vegas-worthy rockstar outfit. Next comes the most relaxing and incredible shampoo -- where you get a full head message that could cure even the very worst migraine in minutes --- and the scented shampoo room. Travis has freakishly strong fingers (in a good way) and use the perfect amount of pressure. That was superb! Then starts the glorious blowout... where not one, not two, but THREE people were involved in doing the best round-brush action my hair has ever seen. The team of stylists clearly gets along extremely well, as it's evident from the way they talk to and help one another that it's really genuine and not some corporate requirement. It was so much fun to be there! \\n\\nNext Travis started with the flat iron. The way he flipped his wrist to get volume all around without over-doing it and making me look like a Texas pagent girl was admirable. It's also worth noting that he didn't fry my hair -- something that I've had happen before with less skilled stylists. At the end of the blowout & style my hair was perfectly bouncey and looked terrific. The only thing better? That this awesome blowout lasted for days! \\n\\nTravis, I will see you every single time I'm out in Vegas. You make me feel beauuuutiful!\",\"date\":\"2017-01-14 21:30:33\"}\n" -"{\"review_id\":\"2TzJjDVDEuAW6MR5Vuc1ug\",\"user_id\":\"n6-Gk65cPZL6Uz8qRm3NYw\",\"business_id\":\"WTqjgwHlXbSFevF32_DJVw\",\"stars\":5.0,\"useful\":3,\"funny\":0,\"cool\":0,\"text\":\"I have to say that this office really has it together, they are so organized and friendly! Dr. J. Phillipp is a great dentist, very friendly and professional. The dental assistants that helped in my procedure were amazing, Jewel and Bailey helped me to feel comfortable! I don't have dental insurance, but they have this insurance through their office you can purchase for $80 something a year and this gave me 25% off all of my dental work, plus they helped me get signed up for care credit which I knew nothing about before this visit! I highly recommend this office for the nice synergy the whole office has!\",\"date\":\"2016-11-09 20:09:03\"}\n" -"{\"review_id\":\"yi0R0Ugj_xUx_Nek0-_Qig\",\"user_id\":\"dacAIZ6fTM6mqwW5uxkskg\",\"business_id\":\"ikCg8xy5JIg_NGPx-MSIDA\",\"stars\":5.0,\"useful\":0,\"funny\":0,\"cool\":0,\"text\":\"Went in for a lunch. Steak sandwich was delicious, and the Caesar salad had an absolutely delicious dressing, with a perfect amount of dressing, and distributed perfectly across each leaf. I know I'm going on about the salad ... But it was perfect.\\n\\nDrink prices were pretty good.\\n\\nThe Server, Dawn, was friendly and accommodating. Very happy with her.\\n\\nIn summation, a great pub experience. Would go again!\",\"date\":\"2018-01-09 20:56:38\"}\n" -"{\"review_id\":\"11a8sVPMUFtaC7_ABRkmtw\",\"user_id\":\"ssoyf2_x0EQMed6fgHeMyQ\",\"business_id\":\"b1b1eb3uo-w561D0ZfCEiQ\",\"stars\":1.0,\"useful\":7,\"funny\":0,\"cool\":0,\"text\":\"Today was my second out of three sessions I had paid for. Although my first session went well, I could tell Meredith had a particular enjoyment for her male clients over her female. However, I returned because she did my teeth fine and I was pleased with the results. When I went in today, I was in the whitening room with three other gentlemen. My appointment started out well, although, being a person who is in the service industry, I always attend to my female clientele first when a couple arrives. 
Unbothered by those signs, I waited my turn. She checked on me once after my original 30 minute timer to ask if I was ok. She attended my boyfriend on numerous occasions, as well as the other men, and would exit the room without even asking me or looking to see if I had any irritation. Half way through, another woman had showed up who she was explaining the deals to in the lobby. While she admits timers must be reset half way through the process, she reset my boyfriends, left, rest the gentleman furthest away from me who had time to come in, redeem his deal, get set, and gave his timer done, before me, then left, and at this point my time was at 10 minutes. So, she should have reset it 5 minutes ago, according to her. While I sat there patiently this whole time with major pain in my gums, i watched the time until the lamp shut off. Not only had she reset two others, explained deals to other guest, but she never once checked on my time. When my light turned off, I released the stance of my mouth to a more relaxed state, assuming I was only getting a thirty minute session instead of the usual 45, because she had yet to come in. At this point, the teeth formula was not only burning the gum she neglected for 25 minutes now, but it began to burn my lips. I began squealing and slapping my chair trying to get her attention from the other room in a panic. I was in so much pain, that by the time she entered the room I was already out of my chair. She finally then acknowledged me, and asked if she could put vitamin E on my gum burn (pictured below). At this point, she has treated two other gums burns, while neglecting me, and I was so irritated that I had to suffer, all I wanted was to leave. While I waited for my boyfriend, she kept harassing me about the issue. Saying, \\\"well burns come with teeth whitening.\\\" While I totally agree, and under justifiable circumstances would not be as irritate, it could have easily been avoid if she had checked on me even a second time, so I could let her know. Not only did she never check on my physical health, she couldn't even take two seconds to reset the timer, which she even admitted to me. Her accuse was that she was coming in to do it, but I had the light off for a solid two minutes before I couldn't stand the pain. She admitted it should be reset every 15 minutes, which means for 25 minutes she did not bother to help me at all. Her guest in the lobby then proceeded to attack me as well, simply because I wanted to leave after the way I was treated. I also expected a refund for not getting a complete session today, due to the neglect, and the fact I won't be returning for my last, she had failed to do that. She was even screaming from the door, and continued to until my boyfriend and I were down the steps. I have never in my life been more appalled by a grown woman's behavior, who claims to be in the business for \\\"10 years.\\\" Admit your wrongs, but don't make your guest feel unwelcome because you can't do you job properly.\",\"date\":\"2018-01-30 23:07:38\"}\n" -"{\"review_id\":\"fdiNeiN_hoCxCMy2wTRW9g\",\"user_id\":\"w31MKYsNFMrjhWxxAb5wIw\",\"business_id\":\"eU_713ec6fTGNO4BegRaww\",\"stars\":4.0,\"useful\":0,\"funny\":0,\"cool\":0,\"text\":\"I'll be the first to admit that I was not excited about going to La Tavolta. Being a food snob, when a group of friends suggested we go for dinner I looked online at the menu and to me there was nothing special and it seemed overpriced. Im also not big on ordering pasta when I go out. Alas, I was outnumbered. 
Thank goodness! I ordered the sea bass special. It was to die for. Cooked perfectly, seasoned perfectly, perfect portion. I can not say enough good things about this dish. When the server asked how it was he seemed very proud of the dish and said, \\\" doesn't she (the chef) do an incredible job?\\\" She does. \\n\\nMy hubby got the crab tortellini and also loved his. I heard \\\"mmmm this is so good\\\" from all around the table. Our waiter was super nice and even gave us free desserts because we were some of the last people in the restaurant. Service was very slow and the place was PACKED but we had our jugs of wine and a large group with good conversation so it didn't seem to bother anyone.\\n\\nSo-\\n\\nDo order the calamari and fried zucchini appetizers. Leave out the mussels. \\n\\nIf they have the sea bass special, I highly recommend it. The chicken parm and crab tortellini were also very good and very big. The chicken Romano was a bit bland. The house salads were teeny. \\n\\nDo make a reservation but still expect to wait for your food. Go with a large group of people and plan for it to be loud. Don't go with a date unless you're fighting and don't feel like hearing anything they have to say. Ask to sit in the side room if it's available.\",\"date\":\"2013-01-20 13:25:59\"}\n" -"{\"review_id\":\"G7XHMxG0bx9oBJNECG4IFg\",\"user_id\":\"jlu4CztcSxrKx56ba1a5AQ\",\"business_id\":\"3fw2X5bZYeW9xCz_zGhOHg\",\"stars\":3.0,\"useful\":5,\"funny\":4,\"cool\":5,\"text\":\"Tracy dessert had a big name in Hong Kong and the one in First Markham place has been here for many years now! \\n\\nCame in for some Chinese dessert, and I must say their selection has increased tremendously over the years. I might as well add that the price has also increased tremendously as well. The waitress gave us tea, which I could taste had red date in it. Fancy!\\n\\nA simple taro with coconut with tapioca pearls was like $5.25 or something. Basically all the desserts were more than $5. That's crazy! I can literally just make this dessert at home and for a bowl, it would probably cost like $0.50. A few years ago, I think I can still get it for like $3-$4, which is more reasonable, but wow, more than $5 is a little over the top for this dessert. Though I must say, it is Tracy Dessert, and they are a little more on the expensive side. \\n\\nI also saw other items on the menu like fish balls, chicken wings, shaved ice. My friend got a mango drink with fresh mango in it! \\n\\nI'm also surprised how many people come to Tracy Dessert after work. We came on a Sunday and the tables were always filled. I think the amount of tables they had were just perfect because no one really waited for seats for a long time, but the tables kept filling up once a table was finished.\",\"date\":\"2016-05-07 01:21:02\"}\n" -"{\"review_id\":\"8e9HxxLjjqc9ez5ezzN7iQ\",\"user_id\":\"d6xvYpyzcfbF_AZ8vMB7QA\",\"business_id\":\"zvO-PJCpNk4fgAVUnExYAA\",\"stars\":1.0,\"useful\":3,\"funny\":1,\"cool\":1,\"text\":\"This place has gone down hill. Clearly they have cut back on staff and food quality\\n\\nMany of the reviews were written before the menu changed. 
I've been going for years and the food quality has gone down hill.\\n\\nThe service is slow & my salad, which was $15, was as bad as it gets.\\n\\nIt's just not worth spending the money on this place when there are so many other options.\",\"date\":\"2010-10-05 19:12:35\"}\n" -"{\"review_id\":\"qrffudO73zsslZbe8B9D3Q\",\"user_id\":\"sG_h0dIzTKWa3Q6fmb4u-g\",\"business_id\":\"b2jN2mm9Wf3RcrZCgfo1cg\",\"stars\":2.0,\"useful\":1,\"funny\":0,\"cool\":0,\"text\":\"I was really looking forward to visiting after having some of their beers. The \\\"Man O'War\\\" quickly became my favorite DIPA; the Rusulka Vanilla Stout is a good thick, sweet stout; and the Ironclad is a top notch IPA. \\nThe only big miss on their beers I've had is the Big Chuck Barleywine. It could probably benefit greatly with age, but at this age all there is to taste is the alcohol. \\nNonetheless, I had enough to convince me that the other beers I hadn't had from them would be top notch... and they are! \\nThe reason for the 2 stars should not reflect the quality of the brewers, they obviously know their craft well! \\nThe servers are great and friendly.... but relying on two servers to wait on 100+ customers says a lot about how inexperienced management must be. In fact, after waiting 15 mins at a dirty table I was finally able to track down someone I guessed was an employee to let them know we were even there! \\nAfter another 5+ mins, the GM finally stopped over to take our drink order. The smugness of this guy was amazing. The thought of offering a simple apology never seemed to enter into his head. \\nThis is the time a server finally stopped by to pick up the non-final check left by the party before us... who didn't seem very pleased when leaving. \\nThe toast & cheese was good, but by the time we were able to dig into their heartiest offering of food, saltines and butter may have been equally pleasing.\",\"date\":\"2015-01-18 14:04:18\"}\n" -"{\"review_id\":\"RS_GTIT6836bCaPy637kNQ\",\"user_id\":\"nMeCE5-xsdleyxYuNZ_7rA\",\"business_id\":\"oxwGyA17NL6c5t1Etg5WgQ\",\"stars\":3.0,\"useful\":1,\"funny\":0,\"cool\":1,\"text\":\"It's a giant Best Buy with 66 registers. I don't get it. What's the big deal about this place??\",\"date\":\"2012-02-29 21:52:43\"}\n" -"{\"review_id\":\"kbtscdyz6lvrtGjD1quQTg\",\"user_id\":\"FIk4lQQu1eTe2EpzQ4xhBA\",\"business_id\":\"8mIrX_LrOnAqWsB5JrOojQ\",\"stars\":4.0,\"useful\":0,\"funny\":0,\"cool\":0,\"text\":\"Like walking back in time, every Saturday morning my sister and I was in a bowling league and after we were done, we'd spend a few quarters playing the pin ball machines until our mother came to pick us up.\\n\\nMy sister was daring and play the machines hard, she was afraid of that \\\"tilt\\\" showing up and freezing the game. I, on the other hand was a bit more gentler and wanted to make sure I got my quarter's worth.\\n\\nThis place has rows and rows of machines, some are really old and some are more of a mid 80's theme. There is even a Ms pac man! It was fun to spend an afternoon playing the machines and remembering all the fun of my early teen years.\",\"date\":\"2011-11-30 02:11:15\"}\n" -"{\"review_id\":\"-I5umRTkhw15RqpKMl_o1Q\",\"user_id\":\"-mA3-1mN4JIEkqOtdbNXCQ\",\"business_id\":\"mRUVMJkUGxrByzMQ2MuOpA\",\"stars\":1.0,\"useful\":0,\"funny\":1,\"cool\":0,\"text\":\"Walked in around 4 on a Friday afternoon, we sat at a table just off the bar and walked out after 5 min or so. Don't even think they realized we walked in. However everyone at the bar noticed we walked in!!! 
Service was non existent at best. Not a good way for a new business to start out. Oh well, the location they are at has been about 5 different things over the past several years, so they will just be added to the list. SMDH!!!\",\"date\":\"2017-12-15 23:27:08\"}\n" -"{\"review_id\":\"Z7wgXp98wYB57QdRY3HQ3w\",\"user_id\":\"GYNnVehQeXjty0xH7-6Fhw\",\"business_id\":\"FxLfqxdYPA6Z85PFKaqLrg\",\"stars\":4.0,\"useful\":0,\"funny\":0,\"cool\":0,\"text\":\"Wow. So surprised at the one and two star reviews! We started with the most tender calamari. Although the marinara sauce was a bit bland, but a touch of salt made it just right. My husband had the veal with peppers and said it was so delicious and tender. The mashed potatoes were perfect. I had the salmon Diablo which was also delicious. Our salad was beautiful! Dressing was served on the salad and it was a nice amount. We ended our delicious meal with a piece of tiramisu. Our server Matt was right on!! Very pleasant and knowledgeable about the menu. Our appetizer, salad and entrees were timed perfectly. I love salad and did not mind that my entree was served while I was still eating it! No problem it let my dinner cool to just the right temp for me to eat it comfortably. \\nI wonder sometimes if people just don't appreciate relaxing and taking time to eat a wonderful and beautifully prepared meal. A wonderful atmosphere. So relaxing. The chairs are super comfortable too!!! We will certainly be back. \\nGive it a try. Don't always go by the reviews. \\nA bottle of Riesling, calamari app, two delicious entrees and dessert for $92! \\nWell with it.\",\"date\":\"2016-05-07 01:36:53\"}\n" -"{\"review_id\":\"qlXw1JQ0UodW7qrmVgwCXw\",\"user_id\":\"bAhqAPoWaZYcyYi7bs024Q\",\"business_id\":\"LUN6swQYa4xJKaM_UEUOEw\",\"stars\":4.0,\"useful\":0,\"funny\":0,\"cool\":0,\"text\":\"Michael from Red Carpet VIP is amazing ! I reached out because I needed help planning my soon to be sister in law's bachelorette. It was a group of 10 girls so I was a little overwhelmed but Michael saved the day! Everything was super smooth and easy! We got good deals and had the best time ever! We booked hotel and a bachelorette package for a great price. I have saved contact info because I will for sure reach out again on next Vegas trip!!!\",\"date\":\"2018-04-27 20:25:26\"}\n" -"{\"review_id\":\"JVcjMhlavKKn3UIt9p9OXA\",\"user_id\":\"TpyOT5E16YASd7EWjLQlrw\",\"business_id\":\"AakkkTuGZA2KBodKi2_u8A\",\"stars\":1.0,\"useful\":1,\"funny\":1,\"cool\":0,\"text\":\"I cannot believe how things have changed in 3 years. I picked up duck congee sometime in the winter when my hubby was sick. I was very disappointed because the ginger fish sauce tasted like it had gone bad (it should never be bitter). Today, my hubby wanted to eat there since he was craving the duck congee and most places don't serve the duck & coleslaw side. We waited about 10 minutes to get our menu. After we placed our orders, we waited another 5 minutes to get the tea that most places bring with the menu. I could go on with the details but the gist of the story is they were understaffed or the staff was slow. The worst part of it was that the service. The servers make us feel bad for asking for anything (like when they took our order). We had arrived and placed our order before another couple bside us at least 10 minutes ahead but somehow, this couple received their pho before mine. 
They were almost done eating their pho before mine came out.\",\"date\":\"2012-07-16 00:37:14\"}\n" -"{\"review_id\":\"svK3nBU7Rk8VfGorlrN52A\",\"user_id\":\"NJlxGtouq06hhC7sS2ECYw\",\"business_id\":\"YvrylyuWgbP90RgMqZQVnQ\",\"stars\":5.0,\"useful\":0,\"funny\":0,\"cool\":0,\"text\":\"You can't really find anything wrong with this place, the pastas and pizzas are both amazing and high quality, the price is very reasonable, the owner and the staff are very friendly, if you're in downtown check this place out, a lot of people think just because it's downtown there are lots of options around but that's not always the case as there is also a lot of poor quality food in downtown as well.\",\"date\":\"2017-04-07 21:27:49\"}\n" -"{\"review_id\":\"1wVA2-vQIuW_ClmXkDxqMQ\",\"user_id\":\"86J5DwcFk4f4In1Vxe2TvA\",\"business_id\":\"NyLYY8q1-H3hfsTwuwLPCg\",\"stars\":4.0,\"useful\":0,\"funny\":0,\"cool\":0,\"text\":\"Great lunch today. Staff was very helpful in assisting with selections and knowledgeable on the ingredients. We enjoyed the BBQ chicken with tika masala sauce and really good naan bread. The biryani with chicken was also yummy! Fun to see the food being prepared in the tandoori ovens. Great addition to the fast casual scene in Cleveland.\",\"date\":\"2015-01-03 22:47:34\"}\n" -"{\"review_id\":\"6BnQwlxRn7ZuWdzninM9sQ\",\"user_id\":\"JSrP-dUmLlwZiI7Dp3PQ2A\",\"business_id\":\"cHdJXLlKNWixBXpDwEGb_A\",\"stars\":3.0,\"useful\":1,\"funny\":7,\"cool\":1,\"text\":\"I love chinese food and I love mexican food. What can go wrong? A couple of things. First things first, this place is more of a \\\"rice bowl\\\" kind of place. I thought it was going to be more diverse as far as the menu goes, but its mainly rice bowls you get with different kinds of meats. The ordering was a little confusing at first, but one of the employees helped us out and I got the 2-item bowl and got the jade chicken and hengrenade chicken with all rice(jerk). I also ordered a jade chicken quesadilla on the side.\\n\\nI'm gonna admit, this place looks kinda dirty. I don't think Arizona uses those health department letter grade system like California does, but if I were to just judge by how it looked inside, i'd give it a \\\"C\\\" grade lol. We waited for about 15 minutes or so and finally got our food. We took it to go and ate at our hotel room. \\n\\nMmmm... the food was just alright. The jade chicken was nothing special. It tasted like any generic chinese fast food orange chicken\\/sesame chicken variant. The hengrenade chicken, although was the less spicier version of the jerk chicken, was still pretty spicy for me. Just be warned the jerk chicken is super spicy. If you aren't sure, ask for a sample at the restaurant before ordering, but it was way too spicy for me. \\n\\nThe jade chicken quesadilla was decent, but nothing special. Just imagine orange chicken in between a tortilla and cheese. A friend of mine ordered a jade chicken burrito and we were confused when we pulled it out of the bag because it was literally the size of Mcdonald's apple pie. If you order the burrito, be warned that it's a burrito for gnomes and smurfs, but he said it was tasty. \\n\\nThey provide a snicker doodle sugar cookie for each meal and it was decent, again nothing special. \\n\\nNot gonna lie, the next day my stomach felt like a little mexican dude and chinese dude were wrestling and throwing molotov cocktails inside. I used the bathroom like 5 times. 
I don't recommend eating this place if you have a lot to do the next day.\",\"date\":\"2015-04-01 16:30:00\"}\n" -"{\"review_id\":\"rEITo90tpyKmEfNDp3Ou3A\",\"user_id\":\"6Fz_nus_OG4gar721OKgZA\",\"business_id\":\"6lj2BJ4tJeu7db5asGHQ4w\",\"stars\":5.0,\"useful\":0,\"funny\":0,\"cool\":0,\"text\":\"We've been a huge Slim's fan since they opened one up in Texas about two years ago when we used to live there. This place never disappoints. They even have great salads and grilled chicken. Plus they have fresh brewed sweet tea, it's the best!\",\"date\":\"2017-05-26 01:23:19\"}\n" -"{\"review_id\":\"4bUyL7lzoWzDZaJETAKREg\",\"user_id\":\"_N7Ndn29bpll_961oPeEfw\",\"business_id\":\"y-Iw6dZflNix4BdwIyTNGA\",\"stars\":3.0,\"useful\":0,\"funny\":0,\"cool\":0,\"text\":\"Good selection of classes of beers and mains. I've been here twice.\\n\\nFirst time I had the fried chicken. It was delicious, but be warned, extremely salty. I couldn't even finish the last piece of chicken after experiencing a salt overload.\\n\\nSecond time we came on a wednesday. We didn't know it was BBQ night, where they have a completely different menu, and don't offer anything from their original vegetarian-friendly menu. This menu has one vegetarian-friendly option - an eggplant sandwich. The vegetarian in my party said it was awful. Also, on BBQ night you choose 2 sides. Except they were out of all their sides except 2 - fries and potato salad. I can't say I was thrilled to have carb heavy sides with my carb heavy main. How do you run out of sides so early in the evening?\\n\\nService not so great.\\n\\nI'd avoid coming here on wednesdays.\",\"date\":\"2014-06-27 21:19:23\"}\n" 
diff --git a/reproduction/text_classification/test/test_MTL16Loader.py b/reproduction/text_classification/test/test_MTL16Loader.py deleted file mode 100644 index 0ae6adc1..00000000 --- a/reproduction/text_classification/test/test_MTL16Loader.py +++ /dev/null @@ -1,10 +0,0 @@ -import unittest -from reproduction.text_classification.data.MTL16Loader import MTL16Loader - - -class TestDataLoader(unittest.TestCase): - def test_MTL16Loader(self): - loader = MTL16Loader() - data = loader.process('sample_MTL16.txt') - print(data.datasets) - 
diff --git a/reproduction/text_classification/test/test_yelp.py b/reproduction/text_classification/test/test_yelp.py deleted file mode 100644 index 2c390d46..00000000 --- a/reproduction/text_classification/test/test_yelp.py +++ /dev/null @@ -1,7 +0,0 @@ -import unittest -from reproduction.text_classification.data.yelpLoader import yelpLoader - -class TestDatasetLoader(unittest.TestCase): - def test_yelpLoader(self): - ds = yelpLoader().load('sample_yelp.json') - assert len(ds) == 20 \ No newline at end of file 
diff --git a/reproduction/text_classification/train_HAN.py b/reproduction/text_classification/train_HAN.py deleted file mode 100644 index a8b06146..00000000 --- a/reproduction/text_classification/train_HAN.py +++ /dev/null @@ -1,107 +0,0 @@ -# The paths below must first be added to the environment; the data is currently only available for internal testing, so they have to be declared manually. - -import os -import sys -sys.path.append('../../') -os.environ['FASTNLP_BASE_URL'] = 'http://10.141.222.118:8888/file/download/' -os.environ['FASTNLP_CACHE_DIR'] = '/remote-home/hyan01/fastnlp_caches' -os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" - -from fastNLP.core.const import Const as C -from fastNLP.core import LRScheduler -from fastNLP.io.data_loader import YelpLoader -from reproduction.text_classification.model.HAN import HANCLS -from fastNLP.embeddings import StaticEmbedding -from fastNLP import CrossEntropyLoss, 
AccuracyMetric -from fastNLP.core.trainer import Trainer -from torch.optim import SGD -import torch.cuda -from torch.optim.lr_scheduler import CosineAnnealingLR - - -## hyper-parameters - -class Config(): - model_dir_or_name = "en-base-uncased" - embedding_grad = False - train_epoch = 30 - batch_size = 100 - num_classes = 5 - task = "yelp" - # datadir = '/remote-home/lyli/fastNLP/yelp_polarity/' - datadir = '/remote-home/ygwang/yelp_polarity/' - datafile = {"train": "train.csv", "test": "test.csv"} - lr = 1e-3 - - def __init__(self): - self.datapath = {k: os.path.join(self.datadir, v) - for k, v in self.datafile.items()} - - -ops = Config() - -## 1. task setup: load the DataBundle with the data loader - -datainfo = YelpLoader(fine_grained=True).process(paths=ops.datapath, train_ds=['train']) -print(len(datainfo.datasets['train'])) -print(len(datainfo.datasets['test'])) - - -# post-process: wrap each flat word sequence into a single-sentence document for HAN -def make_sents(words): - sents = [words] - return sents - - -for dataset in datainfo.datasets.values(): - dataset.apply_field(make_sents, field_name='words', new_field_name='input_sents') - -datainfo.datasets['train'].set_input('input_sents') -datainfo.datasets['test'].set_input('input_sents') -datainfo.datasets['train'].set_target('target') -datainfo.datasets['test'].set_target('target') - -## 2. build the model (or directly reuse a model fastNLP already provides) - -vocab = datainfo.vocabs['words'] -# embedding = StackEmbedding([StaticEmbedding(vocab), CNNCharEmbedding(vocab, 100)]) -embedding = StaticEmbedding(vocab) - -print(len(vocab)) -print(len(datainfo.vocabs['target'])) - -# model = DPCNN(init_embed=embedding, num_cls=ops.num_classes) -model = HANCLS(init_embed=embedding, num_cls=ops.num_classes) - -## 3. declare loss, metric and optimizer -loss = CrossEntropyLoss(pred=C.OUTPUT, target=C.TARGET) -metric = AccuracyMetric(pred=C.OUTPUT, target=C.TARGET) -optimizer = SGD([param for param in model.parameters() if param.requires_grad], - lr=ops.lr, momentum=0.9, weight_decay=0) - -callbacks = [] -callbacks.append(LRScheduler(CosineAnnealingLR(optimizer, 5))) - -device = 'cuda:0' if torch.cuda.is_available() else 'cpu' - -print(device) - -for ds in datainfo.datasets.values(): - ds.apply_field(len, C.INPUT, C.INPUT_LEN) - ds.set_input(C.INPUT, C.INPUT_LEN) - ds.set_target(C.TARGET) - - -## 4. define the train procedure -def train(model, datainfo, loss, metrics, optimizer, num_epochs=ops.train_epoch): - trainer = Trainer(datainfo.datasets['train'], model, optimizer=optimizer, loss=loss, - metrics=[metrics], dev_data=datainfo.datasets['test'], device=device, - check_code_level=-1, batch_size=ops.batch_size, callbacks=callbacks, - n_epochs=num_epochs) - - print(trainer.train()) - - -if __name__ == "__main__": - train(model, datainfo, loss, metric, optimizer) 
diff --git a/reproduction/text_classification/train_awdlstm.py b/reproduction/text_classification/train_awdlstm.py deleted file mode 100644 index 7537e6f7..00000000 --- a/reproduction/text_classification/train_awdlstm.py +++ /dev/null @@ -1,59 +0,0 @@ -# This model has to be run under pytorch==0.4; weight_drop does not support 1.0 - -import sys -sys.path.append('../..') - -from fastNLP.io.pipe.classification import IMDBPipe -from fastNLP.embeddings import StaticEmbedding -from model.awd_lstm import AWDLSTMSentiment - -from fastNLP import CrossEntropyLoss, AccuracyMetric -from fastNLP import Trainer -from torch.optim import Adam - - -class Config(): - train_epoch = 10 - lr = 0.001 - - num_classes = 2 - hidden_dim = 256 - num_layers = 1 - nfc = 128 - wdrop = 0.5 - - task_name = "IMDB" - datapath = {"train": "IMDB_data/train.csv", "test": "IMDB_data/test.csv"} - save_model_path = "./result_IMDB_test/" - -opt = Config() - - -# load data -data_bundle = IMDBPipe().process_from_file(opt.datapath) - -# print(data_bundle.datasets["train"]) -# print(data_bundle) - - -# define model -vocab = data_bundle.vocabs['words'] -embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-840b-300', requires_grad=True) -model = AWDLSTMSentiment(init_embed=embed, num_classes=opt.num_classes, hidden_dim=opt.hidden_dim, num_layers=opt.num_layers, nfc=opt.nfc, wdrop=opt.wdrop) - - -# define loss function and metrics -loss = CrossEntropyLoss() -metrics = AccuracyMetric() -optimizer = Adam([param for param in model.parameters() if param.requires_grad], lr=opt.lr) - - -def train(datainfo, model, optimizer, loss, metrics, opt): - trainer = Trainer(data_bundle.datasets['train'], model, optimizer=optimizer, loss=loss, - metrics=metrics, dev_data=data_bundle.datasets['test'], device=0, check_code_level=-1, - n_epochs=opt.train_epoch, save_path=opt.save_model_path) - trainer.train() - - -if __name__ == "__main__": - train(data_bundle, model, optimizer, loss, metrics, opt) 
diff --git a/reproduction/text_classification/train_bert.py b/reproduction/text_classification/train_bert.py deleted file mode 100644 index 25337d9e..00000000 --- a/reproduction/text_classification/train_bert.py +++ /dev/null @@ -1,33 +0,0 @@ -import sys -sys.path.append('../../') - -from reproduction.text_classification.data.IMDBLoader import IMDBLoader -from fastNLP.embeddings import BertEmbedding -from reproduction.text_classification.model.lstm import BiLSTMSentiment -from fastNLP import Trainer -from fastNLP import CrossEntropyLoss, AccuracyMetric -from fastNLP import cache_results -from fastNLP import Tester - -# cache the returned result so the next run skips preprocessing automatically -@cache_results('imdb.pkl') -def get_data(): - data_bundle = IMDBLoader().process('imdb/') - return data_bundle -data_bundle = get_data() - -print(data_bundle) - -# drop examples beyond the 512-token BERT limit; English words are split into word pieces, so truncate with some margin -data_bundle.datasets['train'].drop(lambda x: len(x['words']) > 400) -data_bundle.datasets['dev'].drop(lambda x: len(x['words']) > 400) -data_bundle.datasets['test'].drop(lambda x: len(x['words']) > 400) -bert_embed = BertEmbedding(data_bundle.vocabs['words'], requires_grad=False, - model_dir_or_name="en-base-uncased") -model = BiLSTMSentiment(bert_embed, len(data_bundle.vocabs['target'])) - -Trainer(data_bundle.datasets['train'], model, optimizer=None, loss=CrossEntropyLoss(), device=0, - batch_size=10, dev_data=data_bundle.datasets['dev'], metrics=AccuracyMetric()).train() - -# evaluate on the test set -Tester(data_bundle.datasets['test'], model, batch_size=32, metrics=AccuracyMetric()).test() \ No newline at end of file 
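train_bert.py above leans on fastNLP's cache_results decorator: the first call pickles the returned object, and later runs load the pickle instead of recomputing. A minimal sketch with a hypothetical cache file name:

# Hedged sketch of cache_results; 'preprocessed.pkl' is a hypothetical file name.
from fastNLP import cache_results

@cache_results('preprocessed.pkl')
def expensive_preprocessing():
    # build and return e.g. a DataBundle here
    return {"built": True}

data = expensive_preprocessing()  # computed once, then reloaded from the pickle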
diff --git a/reproduction/text_classification/train_char_cnn.py b/reproduction/text_classification/train_char_cnn.py deleted file mode 100644 index a4a97dc4..00000000 --- a/reproduction/text_classification/train_char_cnn.py +++ /dev/null @@ -1,220 +0,0 @@ -import sys -sys.path.append('../..') -from fastNLP.core.const import Const as C -import torch.nn as nn -from fastNLP.io.pipe.classification import YelpFullPipe, YelpPolarityPipe, SST2Pipe, IMDBPipe -from model.char_cnn import CharacterLevelCNN -from fastNLP import CrossEntropyLoss, AccuracyMetric -from fastNLP.core.trainer import Trainer -from torch.optim import SGD -from torch.autograd import Variable -import torch -from torch.optim.lr_scheduler import LambdaLR -from fastNLP.core import LRScheduler - - -## hyper-parameters -# TODO: hook up fastNLP logging here -class Config(): - # seed = 7777 - model_dir_or_name = "en-base-uncased" - embedding_grad = False - bert_embedding_layers = '4,-2,-1' - train_epoch = 100 - num_classes = 2 - task = "yelp_p" - lr = 0.01 - batch_size = 128 - model_size = "large" - number_of_characters = 69 - extra_characters = '' - max_length = 1014 - weight_decay = 1e-5 - to_lower = True - tokenizer = 'spacy' # tokenize with spaCy - - char_cnn_config = { - "alphabet": { - "en": { - "lower": { - "alphabet": "abcdefghijklmnopqrstuvwxyz0123456789-,;.!?:'\"/\\|_@#$%^&*~`+-=<>()[]{}", - "number_of_characters": 69 - }, - "both": { - "alphabet": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-,;.!?:'\"/\\|_@#$%^&*~`+-=<>()[]{}", - "number_of_characters": 95 - } - } - }, - "model_parameters": { - "small": { - "conv": [ - # each entry is [out_channels, kernel_size, max_pooling_size] - [256, 7, 3], - [256, 7, 3], - [256, 3, -1], - [256, 3, -1], - [256, 3, -1], - [256, 3, 3] - ], - "fc": [1024, 1024] - }, - "large": { - "conv": [ - [1024, 7, 3], - [1024, 7, 3], - [1024, 3, -1], - [1024, 3, -1], - [1024, 3, -1], - [1024, 3, 3] - ], - "fc": [2048, 2048] - } - }, - "data": { - "text_column": "SentimentText", - "label_column": "Sentiment", - "max_length": 1014, - "num_of_classes": 2, - "encoding": None, - "chunksize": 50000, - "max_rows": 100000, - "preprocessing_steps": ["lower", "remove_hashtags", "remove_urls", "remove_user_mentions"] - }, - "training": { - "batch_size": 128, - "learning_rate": 0.01, - "epochs": 10, - "optimizer": "sgd" - } - } -ops = Config  # the class object itself is used; all settings are class attributes - -# set_rng_seeds(ops.seed) -# print('RNG SEED: {}'.format(ops.seed)) - - -## 1. task setup: load the DataBundle with the corresponding pipe -char_vocab = ops.char_cnn_config["alphabet"]["en"]["lower"]["alphabet"] -ops.number_of_characters = len(char_vocab) -ops.embedding_dim = ops.number_of_characters - -# load data set -if ops.task == 'yelp_p': - data_bundle = YelpPolarityPipe(lower=ops.to_lower, tokenizer=ops.tokenizer).process_from_file() -elif ops.task == 'yelp_f': - data_bundle = YelpFullPipe(lower=ops.to_lower, tokenizer=ops.tokenizer).process_from_file() -elif ops.task == 'imdb': - data_bundle = IMDBPipe(lower=ops.to_lower, tokenizer=ops.tokenizer).process_from_file() -elif ops.task == 'sst-2': - data_bundle = SST2Pipe(lower=ops.to_lower, tokenizer=ops.tokenizer).process_from_file() -else: - raise RuntimeError(f'NOT support {ops.task} task yet!') - -print(data_bundle) - -def wordtochar(words): - chars = [] - - # for word in words: - # word = word.lower() - for char in words: - chars.append(char) - # chars.append('') - # chars.pop() - return chars - -# char-to-index -def chartoindex(chars): - max_seq_len = ops.max_length - zero_index = len(char_vocab) - char_index_list = [] - for char in chars: - if char in char_vocab: - char_index_list.append(char_vocab.index(char)) - else: - # out-of-alphabet characters all share the last index as their embedding - char_index_list.append(zero_index) - if len(char_index_list) > max_seq_len: - char_index_list = char_index_list[:max_seq_len] - elif 0 < len(char_index_list) < max_seq_len: - char_index_list = char_index_list + [zero_index] * (max_seq_len - len(char_index_list)) - elif len(char_index_list) == 0: - char_index_list = [zero_index] * max_seq_len - return char_index_list - - -for dataset in data_bundle.datasets.values(): - dataset.apply_field(wordtochar, field_name="raw_words", new_field_name='chars') - dataset.apply_field(chartoindex, field_name='chars', new_field_name='chars') - -# print(data_bundle.datasets['train'][0]['chars']) -# print(data_bundle.datasets['train'][0]['raw_words']) - -data_bundle.set_input('chars') -data_bundle.set_target('target') - 
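- # Hedged sanity check (illustrative): after the two apply_field calls above, every example - # holds exactly ops.max_length indices and no index exceeds the shared unk/pad slot: - # for ins in data_bundle.get_dataset('train'): - #     assert len(ins['chars']) == ops.max_length and max(ins['chars']) <= len(char_vocab)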
##2. Define/assemble the model. Anything goes here: if the model is one fastNLP already wraps, such as CNNText, simply instantiate it directly. ModelFactory below is only a pseudo-framework placeholder marking where to build a model that follows fastNLP's input/output conventions -class ModelFactory(nn.Module): - """ - Assembles the embedding, encoder and decoder, and designs the forward pass - - :param embedding: embedding model - :param encoder: encoder model - :param decoder: decoder model - - """ - def __init__(self,embedding,encoder,decoder,**kwargs): - super(ModelFactory,self).__init__() - self.embedding=embedding - self.encoder=encoder - self.decoder=decoder - - def forward(self,x): - return {C.OUTPUT:None} - -## 2. Or directly reuse a fastNLP model -vocab_label=data_bundle.get_vocab('target') -''' -# embed_char=CNNCharEmbedding(vocab) -# embed_word = StaticEmbedding(vocab, model_dir_or_name='en-glove-6b-50', requires_grad=True) -# embedding=StackEmbedding([embed_char, embed_word]) -# cnn_char_embed = CNNCharEmbedding(vocab) -# lstm_char_embed = LSTMCharEmbedding(vocab) -# embedding = StackEmbedding([cnn_char_embed, lstm_char_embed]) -''' -#one-hot embedding -embedding_weight= Variable(torch.zeros(len(char_vocab)+1, len(char_vocab))) - -for i in range(len(char_vocab)): - embedding_weight[i][i]=1 -embedding=nn.Embedding(num_embeddings=len(char_vocab)+1,embedding_dim=len(char_vocab),padding_idx=len(char_vocab),_weight=embedding_weight) -for para in embedding.parameters(): - para.requires_grad=False -# CNNText is too simplistic for this task -#model=CNNText(embed=embedding, num_classes=ops.num_classes) -model=CharacterLevelCNN(ops,embedding) - -## 3. Declare the loss, metric and optimizer -loss=CrossEntropyLoss -metric=AccuracyMetric -optimizer = SGD([param for param in model.parameters() if param.requires_grad == True], - lr=ops.lr, momentum=0.9, weight_decay=ops.weight_decay) -callbacks = [] -# callbacks.append(LRScheduler(CosineAnnealingLR(optimizer, 5))) -callbacks.append( - LRScheduler(LambdaLR(optimizer, lambda epoch: ops.lr if epoch < - ops.train_epoch * 0.8 else ops.lr * 0.1)) -) - -## 4. Define the train routine -def train(model,datainfo,loss,metrics,optimizer,num_epochs=100): - trainer = Trainer(datainfo.datasets['train'], model, optimizer=optimizer, loss=loss(target='target'),batch_size=ops.batch_size, - metrics=[metrics(target='target')], dev_data=datainfo.datasets['test'], device=[0,1,2], check_code_level=-1, - n_epochs=num_epochs,callbacks=callbacks) - print(trainer.train()) - - - -if __name__=="__main__": - train(model,data_bundle,loss,metric,optimizer,num_epochs=ops.train_epoch)
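A quick way to convince yourself that the identity-weight trick in train_char_cnn.py really yields a frozen one-hot encoder (a standalone sketch reusing the same shapes; row len(char_vocab) is left all-zero, so the shared padding/unknown index embeds to the zero vector):

import torch
import torch.nn as nn

n = 69  # len(char_vocab) for the lowercase alphabet
weight = torch.zeros(n + 1, n)
for i in range(n):
    weight[i][i] = 1  # row i is the one-hot vector for character i
onehot = nn.Embedding(num_embeddings=n + 1, embedding_dim=n, padding_idx=n, _weight=weight)
out = onehot(torch.LongTensor([[0, 1, n]]))  # 'a', 'b', padding
assert out.shape == (1, 3, n)
assert out[0, 0, 0] == 1 and out[0, 0].sum() == 1  # one-hot row for 'a'
assert out[0, 2].sum() == 0                        # padding embeds to zeros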
= "yelp_f" - #datadir = 'workdir/datasets/SST' - # datadir = 'workdir/datasets/yelp_polarity' - datadir = 'workdir/datasets/yelp_full' - #datafile = {"train": "train.txt", "dev": "dev.txt", "test": "test.txt"} - datafile = {"train": "train.csv", "test": "test.csv"} - lr = 1e-3 - src_vocab_op = VocabularyOption(max_size=100000) - embed_dropout = 0.3 - cls_dropout = 0.1 - weight_decay = 1e-5 - - def __init__(self): - self.datadir = os.path.join(os.environ['HOME'], self.datadir) - self.datapath = {k: os.path.join(self.datadir, v) - for k, v in self.datafile.items()} - -ops = Config() - -set_rng_seeds(ops.seed) -logger.info('RNG SEED %d'%ops.seed) - -# 1.task相关信息:利用dataloader载入dataInfo - - -@cache_results(ops.model_dir_or_name+'-data-cache') -def load_data(): - datainfo = YelpFullPipe(lower=True, tokenizer='raw').process_from_file(ops.datapath) - for ds in datainfo.datasets.values(): - ds.apply_field(len, C.INPUT, C.INPUT_LEN) - ds.set_input(C.INPUT, C.INPUT_LEN) - ds.set_target(C.TARGET) - - return datainfo - - -datainfo = load_data() -embedding = StaticEmbedding( - datainfo.vocabs['words'], model_dir_or_name='en-glove-6b-100d', requires_grad=ops.embedding_grad, - normalize=False) -embedding.embedding.weight.data /= embedding.embedding.weight.data.std() -print(embedding.embedding.weight.data.mean(), embedding.embedding.weight.data.std()) - -# 2.或直接复用fastNLP的模型 - -# datainfo.datasets['train'] = datainfo.datasets['train'][:1000] # for debug purpose -# datainfo.datasets['test'] = datainfo.datasets['test'][:1000] -logger.info(datainfo) - -model = DPCNN(init_embed=embedding, num_cls=len(datainfo.vocabs[C.TARGET]), - embed_dropout=ops.embed_dropout, cls_dropout=ops.cls_dropout) -# print(model) - -# 3. 声明loss,metric,optimizer -loss = CrossEntropyLoss(pred=C.OUTPUT, target=C.TARGET) -metric = AccuracyMetric(pred=C.OUTPUT, target=C.TARGET) -optimizer = SGD([param for param in model.parameters() if param.requires_grad == True], - lr=ops.lr, momentum=0.9, weight_decay=ops.weight_decay) - -callbacks = [] - -callbacks.append(LRScheduler(CosineAnnealingLR(optimizer, 5))) - - -device = 'cuda:0' if torch.cuda.is_available() else 'cpu' - -# print(device) -logger.info(device) - -# 4.定义train方法 -# normal trainer -trainer = Trainer(datainfo.datasets['train'], model, optimizer=optimizer, loss=loss, - sampler=BucketSampler(num_buckets=50, batch_size=ops.batch_size), - metrics=[metric], use_tqdm=False, save_path='save', - dev_data=datainfo.datasets['test'], device=device, - check_code_level=-1, batch_size=ops.batch_size, callbacks=callbacks, - n_epochs=ops.train_epoch, num_workers=4) - -# distributed trainer -# trainer = DistTrainer(datainfo.datasets['train'], model, optimizer=optimizer, loss=loss, -# metrics=[metric], -# dev_data=datainfo.datasets['test'], device='cuda', -# batch_size_per_gpu=ops.batch_size, callbacks_all=callbacks, -# n_epochs=ops.train_epoch, num_workers=4) - - -if __name__ == "__main__": - print(trainer.train()) diff --git a/reproduction/text_classification/train_lstm.py b/reproduction/text_classification/train_lstm.py deleted file mode 100644 index a23be0cb..00000000 --- a/reproduction/text_classification/train_lstm.py +++ /dev/null @@ -1,56 +0,0 @@ -import sys -sys.path.append('../..') - -from fastNLP.io.pipe.classification import IMDBPipe -from fastNLP.embeddings import StaticEmbedding -from model.lstm import BiLSTMSentiment - -from fastNLP import CrossEntropyLoss, AccuracyMetric -from fastNLP import Trainer -from torch.optim import Adam - - -class Config(): - train_epoch= 10 - lr=0.001 - - 
num_classes=2 - hidden_dim=256 - num_layers=1 - nfc=128 - - task_name = "IMDB" - datapath={"train":"IMDB_data/train.csv", "test":"IMDB_data/test.csv"} - save_model_path="./result_IMDB_test/" - -opt=Config() - - -# load data -data_bundle=IMDBPipe.process_from_file(opt.datapath) - -# print(data_bundle.datasets["train"]) -# print(data_bundle) - - -# define model -vocab=data_bundle.vocabs['words'] -embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-840b-300', requires_grad=True) -model=BiLSTMSentiment(init_embed=embed, num_classes=opt.num_classes, hidden_dim=opt.hidden_dim, num_layers=opt.num_layers, nfc=opt.nfc) - - -# define loss_function and metrics -loss=CrossEntropyLoss() -metrics=AccuracyMetric() -optimizer= Adam([param for param in model.parameters() if param.requires_grad==True], lr=opt.lr) - - -def train(data_bundle, model, optimizer, loss, metrics, opt): - trainer = Trainer(data_bundle.datasets['train'], model, optimizer=optimizer, loss=loss, - metrics=metrics, dev_data=data_bundle.datasets['test'], device=0, check_code_level=-1, - n_epochs=opt.train_epoch, save_path=opt.save_model_path) - trainer.train() - - -if __name__ == "__main__": - train(data_bundle, model, optimizer, loss, metrics, opt) \ No newline at end of file diff --git a/reproduction/text_classification/train_lstm_att.py b/reproduction/text_classification/train_lstm_att.py deleted file mode 100644 index a2b8612d..00000000 --- a/reproduction/text_classification/train_lstm_att.py +++ /dev/null @@ -1,58 +0,0 @@ -import sys -sys.path.append('../..') - -from fastNLP.io.pipe.classification import IMDBPipe -from fastNLP.embeddings import StaticEmbedding -from model.lstm_self_attention import BiLSTM_SELF_ATTENTION - -from fastNLP import CrossEntropyLoss, AccuracyMetric -from fastNLP import Trainer -from torch.optim import Adam - - -class Config(): - train_epoch= 10 - lr=0.001 - - num_classes=2 - hidden_dim=256 - num_layers=1 - attention_unit=256 - attention_hops=1 - nfc=128 - - task_name = "IMDB" - datapath={"train":"IMDB_data/train.csv", "test":"IMDB_data/test.csv"} - save_model_path="./result_IMDB_test/" - -opt=Config() - - -# load data -data_bundle=IMDBPipe.process_from_file(opt.datapath) - -# print(data_bundle.datasets["train"]) -# print(data_bundle) - - -# define model -vocab=data_bundle.vocabs['words'] -embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-840b-300', requires_grad=True) -model=BiLSTM_SELF_ATTENTION(init_embed=embed, num_classes=opt.num_classes, hidden_dim=opt.hidden_dim, num_layers=opt.num_layers, attention_unit=opt.attention_unit, attention_hops=opt.attention_hops, nfc=opt.nfc) - - -# define loss_function and metrics -loss=CrossEntropyLoss() -metrics=AccuracyMetric() -optimizer= Adam([param for param in model.parameters() if param.requires_grad==True], lr=opt.lr) - - -def train(data_bundle, model, optimizer, loss, metrics, opt): - trainer = Trainer(data_bundle.datasets['train'], model, optimizer=optimizer, loss=loss, - metrics=metrics, dev_data=data_bundle.datasets['test'], device=0, check_code_level=-1, - n_epochs=opt.train_epoch, save_path=opt.save_model_path) - trainer.train() - - -if __name__ == "__main__": - train(data_bundle, model, optimizer, loss, metrics, opt) diff --git a/reproduction/text_classification/utils/place_holder.txt b/reproduction/text_classification/utils/place_holder.txt deleted file mode 100644 index e69de29b..00000000 diff --git a/reproduction/text_classification/utils/util_init.py b/reproduction/text_classification/utils/util_init.py deleted file mode 100644 index 
fcb8fffb..00000000 --- a/reproduction/text_classification/utils/util_init.py +++ /dev/null @@ -1,11 +0,0 @@ -import numpy -import torch -import random - - -def set_rng_seeds(seed): - random.seed(seed) - numpy.random.seed(seed) - torch.random.manual_seed(seed) - torch.cuda.manual_seed_all(seed) - # print('RNG_SEED {}'.format(seed)) diff --git a/reproduction/utils.py b/reproduction/utils.py deleted file mode 100644 index d6cd1af3..00000000 --- a/reproduction/utils.py +++ /dev/null @@ -1,71 +0,0 @@ -import os - -from typing import Union, Dict - - -def check_dataloader_paths(paths:Union[str, Dict[str, str]])->Dict[str, str]: - """ - Check that the file paths passed to a dataloader are valid. If they are, return a dict containing at least the key 'train', similar to - { - 'train': '/some/path/to/', # always present; the vocabulary should be built on this split, the remaining files only need to be processed and indexed. - 'test': 'xxx' # may or may not be present - ... - } - If paths is invalid, the corresponding error is raised directly - - :param paths: the path(s). May be a single file path (that file is then taken to be the train file); a directory, in which a train file (filename containing 'train'), test.txt and dev.txt are looked up; or a dict whose keys are user-defined file names and whose values are the paths to those files. - :return: - """ - if isinstance(paths, str): - if os.path.isfile(paths): - return {'train': paths} - elif os.path.isdir(paths): - filenames = os.listdir(paths) - files = {} - for filename in filenames: - path_pair = None - if 'train' in filename: - path_pair = ('train', filename) - if 'dev' in filename: - if path_pair: - raise Exception("File:{} in {} contains both `{}` and `dev`.".format(filename, paths, path_pair[0])) - path_pair = ('dev', filename) - if 'test' in filename: - if path_pair: - raise Exception("File:{} in {} contains both `{}` and `test`.".format(filename, paths, path_pair[0])) - path_pair = ('test', filename) - if path_pair: - if path_pair[0] in files: - raise RuntimeError(f"Multiple files under {paths} have '{path_pair[0]}' in their filename.") - files[path_pair[0]] = os.path.join(paths, path_pair[1]) - return files - else: - raise FileNotFoundError(f"{paths} is not a valid file path.") - - elif isinstance(paths, dict): - if paths: - if 'train' not in paths: - raise KeyError("You have to include `train` in your dict.") - for key, value in paths.items(): - if isinstance(key, str) and isinstance(value, str): - if not os.path.isfile(value): - raise TypeError(f"{value} is not a valid file.") - else: - raise TypeError("All keys and values in paths should be str.") - return paths - else: - raise ValueError("Empty paths is not allowed.") - else: - raise TypeError(f"paths only supports str and dict, not {type(paths)}.")
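# Illustrative usage of check_dataloader_paths (the directory contents are hypothetical):
#   check_dataloader_paths('/data/imdb/')            # directory with train.txt / dev.txt / test.txt
#   -> {'train': '/data/imdb/train.txt', 'dev': '/data/imdb/dev.txt', 'test': '/data/imdb/test.txt'}
#   check_dataloader_paths('/data/imdb/train.txt')   # a single file is taken as the train split
#   -> {'train': '/data/imdb/train.txt'}
#   check_dataloader_paths({'train': 'a.txt', 'test': 'b.txt'})  # dicts are validated; 'train' is mandatory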
not {type(paths)}.") - -def get_tokenizer(): - try: - import spacy - spacy.prefer_gpu() - en = spacy.load('en') - print('use spacy tokenizer') - return lambda x: [w.text for w in en.tokenizer(x)] - except Exception as e: - print('use raw tokenizer') - return lambda x: x.split() diff --git a/requirements.txt b/requirements.txt index 09501827..2e2808d1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,6 @@ numpy>=1.14.2 -torch>=1.0.0 -tqdm>=4.28.1 prettytable>=0.7.2 requests -prettytable>=0.7.2 -regex!=2019.12.17 \ No newline at end of file +regex!=2019.12.17 +rich==11.2.0 +# fsspec[http]>=2021.05.0, !=2021.06.0 \ No newline at end of file diff --git a/setup.py b/setup.py index 1ebd4b7f..cde5680c 100644 --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ print(pkgs) setup( name='FastNLP', - version='0.7.1', + version='0.8.0alpha', url='https://gitee.com/fastnlp/fastNLP', description='fastNLP: Deep Learning Toolkit for NLP, developed by Fudan FastNLP Team', long_description=readme, diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index c7a5f082..00000000 --- a/tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -import fastNLP - -__all__ = ["fastNLP"] diff --git a/tests/core/__init__.py b/tests/core/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/core/test_batch.py b/tests/core/test_batch.py deleted file mode 100644 index 1e218800..00000000 --- a/tests/core/test_batch.py +++ /dev/null @@ -1,487 +0,0 @@ -import unittest - -import numpy as np -import torch - -from fastNLP import DataSetIter, TorchLoaderIter -from fastNLP import DataSet -from fastNLP import Instance -from fastNLP import SequentialSampler, ConstantTokenNumSampler -from fastNLP import ConcatCollateFn - - -def generate_fake_dataset(num_samples=1000): - """ - 产生的DataSet包含以下的field {'1':[], '2':[], '3': [], '4':[]} - :param num_samples: sample的数量 - :return: - """ - - max_len = 50 - min_len = 10 - num_features = 4 - - data_dict = {} - for i in range(num_features): - data = [] - lengths = np.random.randint(min_len, max_len, size=(num_samples)) - for length in lengths: - data.append(np.random.randint(1, 100, size=length)) - data_dict[str(i)] = data - - dataset = DataSet(data_dict) - - for i in range(num_features): - if np.random.randint(2) == 0: - dataset.set_input(str(i)) - else: - dataset.set_target(str(i)) - return dataset - - -def construct_dataset(sentences): - """Construct a data set from a list of sentences. 
- - :param sentences: list of list of str - :return dataset: a DataSet object - """ - dataset = DataSet() - for sentence in sentences: - instance = Instance() - instance['raw_sentence'] = sentence - dataset.append(instance) - return dataset - - -class TestCase1(unittest.TestCase): - def test_simple(self): - dataset = construct_dataset( - [["FastNLP", "is", "the", "most", "beautiful", "tool", "in", "the", "world"] for _ in range(40)]) - dataset.set_target() - batch = DataSetIter(dataset, batch_size=4, sampler=SequentialSampler(), as_numpy=True) - - cnt = 0 - for _, _ in batch: - cnt += 1 - self.assertEqual(cnt, 10) - - def test_dataset_batching(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 40, "y": [[5, 6]] * 40}) - ds.set_input("x") - ds.set_target("y") - iter = DataSetIter(ds, batch_size=4, sampler=SequentialSampler(), as_numpy=True) - for x, y in iter: - self.assertTrue(isinstance(x["x"], np.ndarray) and isinstance(y["y"], np.ndarray)) - self.assertEqual(len(x["x"]), 4) - self.assertEqual(len(y["y"]), 4) - self.assertListEqual(list(x["x"][-1]), [1, 2, 3, 4]) - self.assertListEqual(list(y["y"][-1]), [5, 6]) - - def test_list_padding(self): - ds = DataSet({"x": [[1], [1, 2], [1, 2, 3], [1, 2, 3, 4]] * 10, - "y": [[4, 3, 2, 1], [3, 2, 1], [2, 1], [1]] * 10}) - ds.set_input("x") - ds.set_target("y") - iter = DataSetIter(ds, batch_size=4, sampler=SequentialSampler(), as_numpy=True) - for x, y in iter: - self.assertEqual(x["x"].shape, (4, 4)) - self.assertEqual(y["y"].shape, (4, 4)) - - def test_numpy_padding(self): - ds = DataSet({"x": np.array([[1], [1, 2], [1, 2, 3], [1, 2, 3, 4]] * 10), - "y": np.array([[4, 3, 2, 1], [3, 2, 1], [2, 1], [1]] * 10)}) - ds.set_input("x") - ds.set_target("y") - iter = DataSetIter(ds, batch_size=4, sampler=SequentialSampler(), as_numpy=True) - for x, y in iter: - self.assertEqual(x["x"].shape, (4, 4)) - self.assertEqual(y["y"].shape, (4, 4)) - - def test_list_to_tensor(self): - ds = DataSet({"x": [[1], [1, 2], [1, 2, 3], [1, 2, 3, 4]] * 10, - "y": [[4, 3, 2, 1], [3, 2, 1], [2, 1], [1]] * 10}) - ds.set_input("x") - ds.set_target("y") - iter = DataSetIter(ds, batch_size=4, sampler=SequentialSampler(), as_numpy=False) - for x, y in iter: - self.assertTrue(isinstance(x["x"], torch.Tensor)) - self.assertEqual(tuple(x["x"].shape), (4, 4)) - self.assertTrue(isinstance(y["y"], torch.Tensor)) - self.assertEqual(tuple(y["y"].shape), (4, 4)) - - def test_numpy_to_tensor(self): - ds = DataSet({"x": np.array([[1], [1, 2], [1, 2, 3], [1, 2, 3, 4]] * 10), - "y": np.array([[4, 3, 2, 1], [3, 2, 1], [2, 1], [1]] * 10)}) - ds.set_input("x") - ds.set_target("y") - iter = DataSetIter(ds, batch_size=4, sampler=SequentialSampler(), as_numpy=False) - for x, y in iter: - self.assertTrue(isinstance(x["x"], torch.Tensor)) - self.assertEqual(tuple(x["x"].shape), (4, 4)) - self.assertTrue(isinstance(y["y"], torch.Tensor)) - self.assertEqual(tuple(y["y"].shape), (4, 4)) - - def test_list_of_list_to_tensor(self): - ds = DataSet([Instance(x=[1, 2], y=[3, 4]) for _ in range(2)] + - [Instance(x=[1, 2, 3, 4], y=[3, 4, 5, 6]) for _ in range(2)]) - ds.set_input("x") - ds.set_target("y") - iter = DataSetIter(ds, batch_size=4, sampler=SequentialSampler(), as_numpy=False) - for x, y in iter: - self.assertTrue(isinstance(x["x"], torch.Tensor)) - self.assertEqual(tuple(x["x"].shape), (4, 4)) - self.assertTrue(isinstance(y["y"], torch.Tensor)) - self.assertEqual(tuple(y["y"].shape), (4, 4)) - - def test_list_of_numpy_to_tensor(self): - ds = DataSet([Instance(x=np.array([1, 2]), y=np.array([3, 4])) for _ 
in range(2)] + - [Instance(x=np.array([1, 2, 3, 4]), y=np.array([3, 4, 5, 6])) for _ in range(2)]) - ds.set_input("x") - ds.set_target("y") - iter = DataSetIter(ds, batch_size=4, sampler=SequentialSampler(), as_numpy=False) - for x, y in iter: - print(x, y) - - def test_sequential_batch(self): - batch_size = 32 - num_samples = 1000 - dataset = generate_fake_dataset(num_samples) - - batch = DataSetIter(dataset, batch_size=batch_size, sampler=SequentialSampler()) - for batch_x, batch_y in batch: - pass - - def test_udf_padder(self): - from fastNLP.core.field import Padder - alphas = list('abcdefghijk') - class UDFPadder(Padder): - def __init__(self): - super().__init__() - - def __call__(self, contents, field_name, field_ele_dtype, dim): - results = [alphas[:con] for con in contents] - return results - - batch_size = 32 - num_samples = 1000 - dataset = generate_fake_dataset(num_samples) - contents = np.random.randint(5, size=(num_samples)) - dataset.add_field('test', contents, is_input=True, padder=UDFPadder(), - ignore_type=True) - - batch = DataSetIter(dataset, batch_size=batch_size, sampler=SequentialSampler()) - for batch_x, batch_y in batch: - test = batch_x['test'] - indices = batch.cur_batch_indices - cons = contents[indices] - for con,t in zip(cons, test): - self.assertEqual(alphas[:con], t) - - def test_collate_fn(self): - batch_size = 32 - num_samples = 1000 - dataset = generate_fake_dataset(num_samples) - dataset.set_input('1','2') - dataset.set_target('0','3') - - fn = ConcatCollateFn(inputs=['1', '2'], output='12', pad_val=0, max_len=0, is_input=True, is_target=False) - dataset.add_collate_fn(fn, name='demo') - batch = DataSetIter(dataset, batch_size=batch_size, sampler=SequentialSampler(), drop_last=True) - for batch_x, batch_y in batch: - for i in range(batch_size): - # print(i) - self.assertEqual(batch_x['12'][i].sum(), batch_x['1'][i].sum() + batch_x['2'][i].sum()) - dataset.delete_collate_fn(name='demo') - - # test the case where the concatenated fields are not input - dataset.set_input('1', '2', flag=False) # - fn = ConcatCollateFn(inputs=['1', '2'], output='12', pad_val=0, max_len=0, is_input=True, is_target=False) - dataset.add_collate_fn(fn, name='demo') - batch = DataSetIter(dataset, batch_size=batch_size, sampler=SequentialSampler(), drop_last=True) - for batch_x, batch_y in batch: - for i in range(batch_size): - self.assertTrue('12' in batch_x) - dataset.delete_collate_fn(name='demo') - dataset.set_input('1', '2', flag=True) # - - # test overwriting another field - fn = ConcatCollateFn(inputs=['1', '2'], output='3', pad_val=0, max_len=0, is_input=True, is_target=True) - dataset.add_collate_fn(fn, name='demo') - batch = DataSetIter(dataset, batch_size=batch_size, sampler=SequentialSampler(), drop_last=True) - for batch_x, batch_y in batch: - for i in range(batch_size): - # print(i) - self.assertEqual(batch_y['3'][i].sum(), batch_x['1'][i].sum() + batch_x['2'][i].sum()) - dataset.delete_collate_fn(name='demo') - - # test the case where the fields are neither input nor target - dataset.set_input('1', '2', flag=False) - fn = ConcatCollateFn(inputs=['1', '2'], output='3', pad_val=0, max_len=0, is_input=True, is_target=True) - dataset.add_collate_fn(fn, name='demo') - batch = DataSetIter(dataset, batch_size=batch_size, sampler=SequentialSampler(), drop_last=True) - for batch_x, batch_y in batch: - for i in range(batch_size): - # print(i) - self.assertTrue('3' in batch_x) - self.assertTrue('3' in batch_y) - dataset.delete_collate_fn(name='demo') - - # test adding an illegal fn - with self.assertRaises(AssertionError): - dataset.add_collate_fn(1) - - # test the case where collate_fn returns only one value - def
demo_collate_fn(ins_list): - return {'3':1} - dataset.add_collate_fn(demo_collate_fn, name='demo') - with self.assertRaises(BaseException): - batch = DataSetIter(dataset, batch_size=batch_size, sampler=SequentialSampler(), drop_last=True) - for batch_x, batch_y in batch: - pass - dataset.delete_collate_fn(name='demo') - - # test multiple collate_fns - dataset.add_collate_fn(demo_collate_fn, name='demo') - dataset.add_collate_fn(demo_collate_fn, name='demo') - # test deletion - dataset.delete_collate_fn() - dataset.delete_collate_fn() - self.assertTrue(dataset.collater.is_empty()) - - def test_demo(self): - import torch - - data = DataSet({ - 'x1': [[0, 1], - [2]], - 'x2': [[3], - [2, 4, 5] - ], - 'y': [0, 1] - }) - data.set_target('y') - - # Every collate_fn receives list[(ind1, instance1), (ind2, instance2), ...] as input, where ind1/ind2 is the index of that instance - # in the dataset and instance1/instance2 is the data fetched for this batch, containing all fields. - def concat_collate_fn(ins_list): - x1 = [ins['x1'] for ind,ins in ins_list] - x2 = [ins['x2'] for ind,ins in ins_list] - xs = [] - for i in range(len(ins_list)): - xs.append(torch.LongTensor(x1[i] + x2[i])) - # you have to pad and convert to tensor yourself, but you do not need to move anything to the gpu - arr = torch.nn.utils.rnn.pad_sequence(xs, batch_first=True, padding_value=0) - b_x = {'x': arr} - b_y = {} - # The return value must be two dicts: values of the first dict are treated as input, values of the second as target. - # If a name clashes with an existing input or target, the returned value takes precedence. - return b_x, b_y - - data.add_collate_fn(concat_collate_fn) - - for batch_x, batch_y in DataSetIter(data, batch_size=2, sampler=SequentialSampler()): - print("batch_x:", batch_x) - print("batch_y:", batch_y) - # batch_x: {'x': tensor([[0, 1, 3, 0], - # [2, 2, 4, 5]])} - # batch_y: {'y': array([0, 1])} - - # if batching needs extra parameters, implement the collate_fn as a class - class ConCollateFn: - def __init__(self, max_len=3): - self.max_len = max_len - def __call__(self, ins_list): - x1 = [ins['x1'] for ind, ins in ins_list] - x2 = [ins['x2'] for ind, ins in ins_list] - xs = [] - for i in range(len(ins_list)): - xs.append(torch.LongTensor(x1[i] + x2[i])[:self.max_len]) - arr = torch.nn.utils.rnn.pad_sequence(xs, batch_first=True, padding_value=0) - b_x = {'x': arr} - b_y = {} - return b_x, b_y - data.delete_collate_fn() # remove the previous collate_fn - data.add_collate_fn(ConCollateFn(max_len=3)) - for batch_x, batch_y in DataSetIter(data, batch_size=2, sampler=SequentialSampler()): - print("batch_x:", batch_x) - print("batch_y:", batch_y) - # batch_x: {'x': tensor([[0, 1, 3], - # [2, 2, 4]])} - # batch_y: {'y': array([0, 1])} - - def testTensorLoaderIter(self): - class FakeData: - def __init__(self, return_dict=True): - self.x = [[1,2,3], [4,5,6]] - self.return_dict = return_dict - - def __len__(self): - return len(self.x) - - def __getitem__(self, i): - x = self.x[i] - y = 0 - if self.return_dict: - return {'x':x}, {'y':y} - return x, y - - data1 = FakeData() - def collate_fn(ins_list): - xs = [ins[0]['x'] for ins in ins_list] - ys = [ins[1]['y'] for ins in ins_list] - return {'x':xs}, {'y':ys} - dataiter = TorchLoaderIter(data1, collate_fn=collate_fn, batch_size=2) - for x, y in dataiter: - print(x, y) - - def test_batch_sampler(self): - # test whether the batch_sampler of DataSetIter and TorchLoaderIter works correctly - # DataSetIter - ds = generate_fake_dataset(5) - ds.set_input('1') - class BatchSampler: - def __init__(self, dataset): - self.num_samples = len(dataset) - - def __iter__(self): - index = 0 - indexes = list(range(self.num_samples)) - np.random.shuffle(indexes) - start_idx = 0 - while index < self.num_samples: - if start_idx == 0: - end_index = self.num_samples//2 - else: - end_index = self.num_samples - yield indexes[start_idx:end_index] - index = end_index - start_idx = end_index
- - def __len__(self): - return 2 - - batch_sampler = BatchSampler(ds) - - data_iter = DataSetIter(ds, batch_size=10, sampler=batch_sampler, as_numpy=False, num_workers=0, - pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None, - batch_sampler=batch_sampler) - num_samples = [len(ds)//2, len(ds)-len(ds)//2] - for idx, (batch_x, batch_y) in enumerate(data_iter): - self.assertEqual(num_samples[idx], len(batch_x['1'])) - - # TorchLoaderIter - class FakeData: - def __init__(self): - self.x = [[1,2,3], [4,5,6], [1,2]] - - def __len__(self): - return len(self.x) - - def __getitem__(self, i): - x = self.x[i] - y = 0 - return x,y - - def collate_fn(ins_list): - xs = [ins[0] for ins in ins_list] - ys = [ins[1] for ins in ins_list] - return {'x':xs}, {'y':ys} - - ds = FakeData() - batch_sampler = BatchSampler(ds) - data_iter = TorchLoaderIter(ds, batch_size=10, sampler=batch_sampler, - num_workers=0, pin_memory=False, drop_last=False, - timeout=0, worker_init_fn=None, collate_fn=collate_fn, - batch_sampler=batch_sampler) - num_samples = [len(ds)//2, len(ds)-len(ds)//2] - for idx, (batch_x, batch_y) in enumerate(data_iter): - self.assertEqual(num_samples[idx], len(batch_x['x'])) - - def test_ConstantTokenNumSampler(self): - num_samples = 100 - ds = generate_fake_dataset(num_samples) - ds.set_input('1') - ds.add_seq_len('1', 'seq_len') - ds.set_input('seq_len') - - # test that the total token count per batch is not exceeded - batch_sampler = ConstantTokenNumSampler(ds.get_field('seq_len'), max_token=120) - data_iter = DataSetIter(ds, batch_size=10, sampler=batch_sampler, as_numpy=False, num_workers=0, - pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None, - batch_sampler=batch_sampler) - sample_count = 0 - for batch_x, batch_y in data_iter: - self.assertTrue(sum(batch_x['seq_len'])<120) - sample_count += len(batch_x['seq_len']) - self.assertEqual(sample_count, num_samples) - - # test that the sentence count per batch is not exceeded - batch_sampler = ConstantTokenNumSampler(ds.get_field('seq_len'), max_token=120, max_sentence=1) - data_iter = DataSetIter(ds, batch_size=10, sampler=batch_sampler, as_numpy=False, num_workers=0, - pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None, - batch_sampler=batch_sampler) - sample_count = 0 - for batch_x, batch_y in data_iter: - sample_count += len(batch_x['seq_len']) - self.assertTrue(sum(batch_x['seq_len'])<120 and len(batch_x['seq_len'])==1) - self.assertEqual(sample_count, num_samples) - - # test need_be_multiple_of - sample_count = 0 - batch_sampler = ConstantTokenNumSampler(ds.get_field('seq_len'), max_token=120, max_sentence=2, need_be_multiple_of=2) - data_iter = DataSetIter(ds, batch_size=10, sampler=batch_sampler, as_numpy=False, num_workers=0, - pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None, - batch_sampler=batch_sampler) - for batch_x, batch_y in data_iter: - sample_count += len(batch_x['seq_len']) - self.assertTrue(sum(batch_x['seq_len'])<120 and len(batch_x['seq_len'])==2) - self.assertEqual(sample_count, num_samples) - - # test that the token count is not exceeded, with buckets as similar in length as possible - batch_sampler = ConstantTokenNumSampler(ds.get_field('seq_len'), max_token=120, num_bucket=10) - data_iter = DataSetIter(ds, batch_size=10, sampler=batch_sampler, as_numpy=False, num_workers=0, - pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None, - batch_sampler=batch_sampler) - sample_count = 0 - for batch_x, batch_y in data_iter: - sample_count += len(batch_x['seq_len']) - self.assertTrue(sum(batch_x['seq_len'])<=120) - self.assertEqual(sample_count, num_samples)
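# The contract these assertions pin down -- every batch keeps its total token
# count under max_token, optionally capped at max_sentence sentences -- can be
# sketched as a greedy grouping over sequence lengths. This is a simplified
# illustration, not fastNLP's actual implementation (which also sorts by length
# and supports num_bucket / need_be_multiple_of):
def greedy_token_batches(seq_lens, max_token, max_sentence=None):
    # Group sample indices so each batch stays below the token budget and,
    # if given, within the sentence cap.
    batches, cur, cur_tokens = [], [], 0
    for idx, n in enumerate(seq_lens):
        over_tokens = cur_tokens + n >= max_token
        over_sents = max_sentence is not None and len(cur) >= max_sentence
        if cur and (over_tokens or over_sents):
            batches.append(cur)
            cur, cur_tokens = [], 0
        cur.append(idx)
        cur_tokens += n
    if cur:
        batches.append(cur)
    return batches

lens = [30, 40, 45, 20, 90]
for b in greedy_token_batches(lens, max_token=120):
    assert sum(lens[i] for i in b) < 120  # mirrors the first assertion above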
""" - def test_multi_workers_batch(self): - batch_size = 32 - pause_seconds = 0.01 - num_samples = 1000 - dataset = generate_fake_dataset(num_samples) - - num_workers = 1 - batch = Batch(dataset, batch_size=batch_size, sampler=SequentialSampler(), num_workers=num_workers) - for batch_x, batch_y in batch: - time.sleep(pause_seconds) - - num_workers = 2 - batch = Batch(dataset, batch_size=batch_size, sampler=SequentialSampler(), num_workers=num_workers) - end1 = time.time() - for batch_x, batch_y in batch: - time.sleep(pause_seconds) - """ - """ - def test_pin_memory(self): - batch_size = 32 - pause_seconds = 0.01 - num_samples = 1000 - dataset = generate_fake_dataset(num_samples) - - batch = Batch(dataset, batch_size=batch_size, sampler=SequentialSampler(), pin_memory=True) - # OOM happens here - # for batch_x, batch_y in batch: - # time.sleep(pause_seconds) - - num_workers = 2 - batch = Batch(dataset, batch_size=batch_size, sampler=SequentialSampler(), num_workers=num_workers, - pin_memory=True) - # OOM happens here - # for batch_x, batch_y in batch: - # time.sleep(pause_seconds) - """ diff --git a/tests/core/test_callbacks.py b/tests/core/test_callbacks.py deleted file mode 100644 index 165d7004..00000000 --- a/tests/core/test_callbacks.py +++ /dev/null @@ -1,256 +0,0 @@ -import os -import tempfile -import unittest - -import numpy as np -import torch - -from fastNLP import AccuracyMetric -from fastNLP import BCELoss -from fastNLP import DataSet -from fastNLP import Instance -from fastNLP import SGD -from fastNLP import Trainer -from fastNLP.core.callback import EarlyStopCallback, GradientClipCallback, LRScheduler, ControlC, \ - LRFinder, TensorboardCallback, Callback -from fastNLP.core.callback import EvaluateCallback, FitlogCallback, SaveModelCallback -from fastNLP.core.callback import WarmupCallback -from fastNLP.models.base_model import NaiveClassifier - - -def prepare_env(): - mean = np.array([-3, -3]) - cov = np.array([[1, 0], [0, 1]]) - class_A = np.random.multivariate_normal(mean, cov, size=(1000,)) - - mean = np.array([3, 3]) - cov = np.array([[1, 0], [0, 1]]) - class_B = np.random.multivariate_normal(mean, cov, size=(1000,)) - - data_set = DataSet([Instance(x=[float(item[0]), float(item[1])], y=[0.0]) for item in class_A] + - [Instance(x=[float(item[0]), float(item[1])], y=[1.0]) for item in class_B]) - - data_set.set_input("x") - data_set.set_target("y") - model = NaiveClassifier(2, 1) - return data_set, model - - -class TestCallback(unittest.TestCase): - def setUp(self): - self.tempdir = tempfile.mkdtemp() - - def tearDown(self): - import shutil - shutil.rmtree(self.tempdir) - - def test_gradient_clip(self): - data_set, model = prepare_env() - trainer = Trainer(data_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=20, print_every=50, dev_data=data_set, - metrics=AccuracyMetric(pred="predict", target="y"), use_tqdm=False, - callbacks=[GradientClipCallback(model.parameters(), clip_value=2)], check_code_level=2) - trainer.train() - - def test_early_stop(self): - data_set, model = prepare_env() - trainer = Trainer(data_set, model, optimizer=SGD(lr=0.01), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=20, print_every=50, dev_data=data_set, - metrics=AccuracyMetric(pred="predict", target="y"), use_tqdm=False, - callbacks=[EarlyStopCallback(5)], check_code_level=2) - trainer.train() - - def test_lr_scheduler(self): - data_set, model = prepare_env() - optimizer = torch.optim.SGD(model.parameters(), lr=0.01) - trainer = Trainer(data_set, 
model, optimizer=optimizer, loss=BCELoss(pred="predict", target="y"), batch_size=32, - n_epochs=5, print_every=50, dev_data=data_set, - metrics=AccuracyMetric(pred="predict", target="y"), use_tqdm=False, - callbacks=[LRScheduler(torch.optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.1))], - check_code_level=2) - trainer.train() - - def test_KeyBoardInterrupt(self): - data_set, model = prepare_env() - trainer = Trainer(data_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=5, print_every=50, use_tqdm=False, callbacks=[ControlC(False)], - check_code_level=2) - trainer.train() - - def test_LRFinder(self): - data_set, model = prepare_env() - trainer = Trainer(data_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=5, print_every=50, use_tqdm=False, - callbacks=[LRFinder(len(data_set) // 32)], check_code_level=2) - trainer.train() - - def test_TensorboardCallback(self): - data_set, model = prepare_env() - trainer = Trainer(data_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=5, print_every=50, dev_data=data_set, - metrics=AccuracyMetric(pred="predict", target="y"), use_tqdm=False, - callbacks=[TensorboardCallback("loss", "metric")], check_code_level=2) - trainer.train() - import os - import shutil - path = os.path.join("./", 'tensorboard_logs_{}'.format(trainer.start_time)) - if os.path.exists(path): - shutil.rmtree(path) - - def test_readonly_property(self): - from fastNLP.core.callback import Callback - passed_epochs = [] - total_epochs = 5 - - class MyCallback(Callback): - def __init__(self): - super(MyCallback, self).__init__() - - def on_epoch_begin(self): - passed_epochs.append(self.epoch) - print(self.n_epochs, self.n_steps, self.batch_size) - print(self.model) - print(self.optimizer) - - data_set, model = prepare_env() - trainer = Trainer(data_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=total_epochs, print_every=50, dev_data=data_set, - metrics=AccuracyMetric(pred="predict", target="y"), use_tqdm=False, callbacks=[MyCallback()], - check_code_level=2) - trainer.train() - assert passed_epochs == list(range(1, total_epochs + 1)) - - def test_evaluate_callback(self): - data_set, model = prepare_env() - from fastNLP import Tester - tester = Tester(data=data_set, model=model, metrics=AccuracyMetric(pred="predict", target="y")) - evaluate_callback = EvaluateCallback(data_set, tester) - - trainer = Trainer(data_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=5, print_every=50, dev_data=data_set, - metrics=AccuracyMetric(pred="predict", target="y"), use_tqdm=False, - callbacks=evaluate_callback, check_code_level=2) - trainer.train() - - def test_fitlog_callback(self): - import fitlog - fitlog.set_log_dir(self.tempdir, new_log=True) - data_set, model = prepare_env() - from fastNLP import Tester - tester = Tester(data=data_set, model=model, metrics=AccuracyMetric(pred="predict", target="y")) - fitlog_callback = FitlogCallback(data_set, tester) - - trainer = Trainer(data_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=5, print_every=50, dev_data=data_set, - metrics=AccuracyMetric(pred="predict", target="y"), use_tqdm=True, - callbacks=fitlog_callback, check_code_level=2) - trainer.train() - - def test_CheckPointCallback(self): - - from fastNLP import 
CheckPointCallback, Callback - from fastNLP import Tester - - class RaiseCallback(Callback): - def __init__(self, stop_step=10): - super().__init__() - self.stop_step = stop_step - - def on_backward_begin(self, loss): - if self.step > self.stop_step: - raise RuntimeError() - - data_set, model = prepare_env() - tester = Tester(data=data_set, model=model, metrics=AccuracyMetric(pred="predict", target="y")) - import fitlog - - fitlog.set_log_dir(self.tempdir, new_log=True) - tempfile_path = os.path.join(self.tempdir, 'chkt.pt') - callbacks = [CheckPointCallback(tempfile_path)] - - fitlog_callback = FitlogCallback(data_set, tester) - callbacks.append(fitlog_callback) - - callbacks.append(RaiseCallback(100)) - try: - trainer = Trainer(data_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=5, print_every=50, dev_data=data_set, - metrics=AccuracyMetric(pred="predict", target="y"), use_tqdm=True, - callbacks=callbacks, check_code_level=2) - trainer.train() - except: - pass - # the code below simulates a fresh run that resumes from the checkpoint - data_set, model = prepare_env() - callbacks = [CheckPointCallback(tempfile_path)] - tester = Tester(data=data_set, model=model, metrics=AccuracyMetric(pred="predict", target="y")) - fitlog_callback = FitlogCallback(data_set, tester) - callbacks.append(fitlog_callback) - - trainer = Trainer(data_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=5, print_every=50, dev_data=data_set, - metrics=AccuracyMetric(pred="predict", target="y"), use_tqdm=True, - callbacks=callbacks, check_code_level=2) - trainer.train() - - def test_save_model_callback(self): - data_set, model = prepare_env() - top = 3 - save_model_callback = SaveModelCallback(self.tempdir, top=top) - trainer = Trainer(data_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=5, print_every=50, dev_data=data_set, - metrics=AccuracyMetric(pred="predict", target="y"), use_tqdm=True, - callbacks=save_model_callback, check_code_level=2) - trainer.train() - - timestamp = os.listdir(self.tempdir)[0] - self.assertEqual(len(os.listdir(os.path.join(self.tempdir, timestamp))), top) - - def test_warmup_callback(self): - data_set, model = prepare_env() - warmup_callback = WarmupCallback() - trainer = Trainer(data_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=5, print_every=50, dev_data=data_set, - metrics=AccuracyMetric(pred="predict", target="y"), use_tqdm=True, - callbacks=warmup_callback, check_code_level=2) - trainer.train() - - def test_early_stop_callback(self): - """ - One has to watch the run to see whether it really early-stops - """ - data_set, model = prepare_env() - trainer = Trainer(data_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=2, n_epochs=10, print_every=5, dev_data=data_set, - metrics=AccuracyMetric(pred="predict", target="y"), use_tqdm=True, - callbacks=EarlyStopCallback(1), check_code_level=2) - trainer.train() - - def test_control_C_callback(self): - - class Raise(Callback): - def on_epoch_end(self): - raise KeyboardInterrupt - - flags = [False] - - def set_flag(): - flags[0] = not flags[0] - - data_set, model = prepare_env() - - trainer = Trainer(data_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=20, dev_data=data_set, - metrics=AccuracyMetric(pred="predict", target="y"), use_tqdm=True, - callbacks=[Raise(), ControlC(False, set_flag)], check_code_level=2) - trainer.train() - - 
self.assertEqual(flags[0], False) - - trainer = Trainer(data_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=20, dev_data=data_set, - metrics=AccuracyMetric(pred="predict", target="y"), use_tqdm=True, - callbacks=[Raise(), ControlC(True, set_flag)], check_code_level=2) - trainer.train() - - self.assertEqual(flags[0], True) diff --git a/tests/core/test_dataset.py b/tests/core/test_dataset.py deleted file mode 100644 index 7d38601c..00000000 --- a/tests/core/test_dataset.py +++ /dev/null @@ -1,373 +0,0 @@ -import os -import sys -import unittest - -from fastNLP import DataSet -from fastNLP.core.dataset import ApplyResultException -from fastNLP import FieldArray -from fastNLP import Instance -from fastNLP.io import CSVLoader - - -class TestDataSetInit(unittest.TestCase): - """A DataSet can be initialized in the following ways: - 1) with a dict: - 1.1) a 2-d list DataSet({"x": [[1, 2], [3, 4]]}) - 1.2) a 2-d array DataSet({"x": np.array([[1, 2], [3, 4]])}) - 1.3) a 3-d list DataSet({"x": [[[1, 2], [3, 4]], [[1, 2], [3, 4]]]}) - 2) with a list of Instance: - 2.1) a 1-d list DataSet([Instance(x=[1, 2, 3, 4])]) - 2.2) a 1-d array DataSet([Instance(x=np.array([1, 2, 3, 4]))]) - 2.3) a 2-d list DataSet([Instance(x=[[1, 2], [3, 4]])]) - 2.4) a 2-d array DataSet([Instance(x=np.array([[1, 2], [3, 4]]))]) - - Only plain lists or an ndarray at the outermost level are accepted - """ - def test_init_v1(self): - # a 1-d list - ds = DataSet([Instance(x=[1, 2, 3, 4], y=[5, 6])] * 40) - self.assertTrue("x" in ds.field_arrays and "y" in ds.field_arrays) - self.assertEqual(ds.field_arrays["x"].content, [[1, 2, 3, 4], ] * 40) - self.assertEqual(ds.field_arrays["y"].content, [[5, 6], ] * 40) - - def test_init_v2(self): - # with a dict - ds = DataSet({"x": [[1, 2, 3, 4]] * 40, "y": [[5, 6]] * 40}) - self.assertTrue("x" in ds.field_arrays and "y" in ds.field_arrays) - self.assertEqual(ds.field_arrays["x"].content, [[1, 2, 3, 4], ] * 40) - self.assertEqual(ds.field_arrays["y"].content, [[5, 6], ] * 40) - - def test_init_assert(self): - with self.assertRaises(AssertionError): - _ = DataSet({"x": [[1, 2, 3, 4]] * 40, "y": [[5, 6]] * 100}) - with self.assertRaises(AssertionError): - _ = DataSet([[1, 2, 3, 4]] * 10) - with self.assertRaises(ValueError): - _ = DataSet(0.00001) - - -class TestDataSetMethods(unittest.TestCase): - def test_append(self): - dd = DataSet() - for _ in range(3): - dd.append(Instance(x=[1, 2, 3, 4], y=[5, 6])) - self.assertEqual(len(dd), 3) - self.assertEqual(dd.field_arrays["x"].content, [[1, 2, 3, 4]] * 3) - self.assertEqual(dd.field_arrays["y"].content, [[5, 6]] * 3) - - def test_add_field(self): - dd = DataSet() - dd.add_field("x", [[1, 2, 3]] * 10) - dd.add_field("y", [[1, 2, 3, 4]] * 10) - dd.add_field("z", [[5, 6]] * 10) - self.assertEqual(len(dd), 10) - self.assertEqual(dd.field_arrays["x"].content, [[1, 2, 3]] * 10) - self.assertEqual(dd.field_arrays["y"].content, [[1, 2, 3, 4]] * 10) - self.assertEqual(dd.field_arrays["z"].content, [[5, 6]] * 10) - - with self.assertRaises(RuntimeError): - dd.add_field("??", [[1, 2]] * 40) - - def test_add_field_ignore_type(self): - dd = DataSet() - dd.add_field("x", [(1, "1"), (2, "2"), (3, "3"), (4, "4")], ignore_type=True, is_target=True) - dd.add_field("y", [{1, "1"}, {2, "2"}, {3, "3"}, {4, "4"}], ignore_type=True, is_target=True) - - def test_delete_field(self): - dd = DataSet() - dd.add_field("x", [[1, 2, 3]] * 10) - dd.add_field("y", [[1, 2, 3, 4]] * 10) - dd.delete_field("x") - self.assertFalse("x" in dd.field_arrays) - self.assertTrue("y" in dd.field_arrays) - - def test_delete_instance(self): - dd = DataSet() - 
old_length = 2 - dd.add_field("x", [[1, 2, 3]] * old_length) - dd.add_field("y", [[1, 2, 3, 4]] * old_length) - dd.delete_instance(0) - self.assertEqual(len(dd), old_length-1) - dd.delete_instance(0) - self.assertEqual(len(dd), old_length-2) - - def test_getitem(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 40, "y": [[5, 6]] * 40}) - ins_0, ins_1 = ds[0], ds[1] - self.assertTrue(isinstance(ins_1, Instance) and isinstance(ins_0, Instance)) - self.assertEqual(ins_1["x"], [1, 2, 3, 4]) - self.assertEqual(ins_1["y"], [5, 6]) - self.assertEqual(ins_0["x"], [1, 2, 3, 4]) - self.assertEqual(ins_0["y"], [5, 6]) - - sub_ds = ds[:10] - self.assertTrue(isinstance(sub_ds, DataSet)) - self.assertEqual(len(sub_ds), 10) - - def test_get_item_error(self): - with self.assertRaises(RuntimeError): - ds = DataSet({"x": [[1, 2, 3, 4]] * 10, "y": [[5, 6]] * 10}) - _ = ds[40:] - - with self.assertRaises(KeyError): - ds = DataSet({"x": [[1, 2, 3, 4]] * 10, "y": [[5, 6]] * 10}) - _ = ds["kom"] - - def test_len_(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 40, "y": [[5, 6]] * 40}) - self.assertEqual(len(ds), 40) - - ds = DataSet() - self.assertEqual(len(ds), 0) - - def test_apply(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 40, "y": [[5, 6]] * 40}) - ds.apply(lambda ins: ins["x"][::-1], new_field_name="rx") - self.assertTrue("rx" in ds.field_arrays) - self.assertEqual(ds.field_arrays["rx"].content[0], [4, 3, 2, 1]) - - ds.apply(lambda ins: len(ins["y"]), new_field_name="y") - self.assertEqual(ds.field_arrays["y"].content[0], 2) - - res = ds.apply(lambda ins: len(ins["x"])) - self.assertTrue(isinstance(res, list) and len(res) > 0) - self.assertEqual(res[0], 4) - - ds.apply(lambda ins: (len(ins["x"]), "hahaha"), new_field_name="k", ignore_type=True) - # expect no exception raised - - def test_apply_tqdm(self): - import time - ds = DataSet({"x": [[1, 2, 3, 4]] * 40, "y": [[5, 6]] * 40}) - def do_nothing(ins): - time.sleep(0.01) - ds.apply(do_nothing, use_tqdm=True) - ds.apply_field(do_nothing, field_name='x', use_tqdm=True) - - def test_apply_cannot_modify_instance(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 40, "y": [[5, 6]] * 40}) - def modify_inplace(instance): - instance['words'] = 1 - - with self.assertRaises(TypeError): - ds.apply(modify_inplace) - - def test_apply_more(self): - - T = DataSet({"a": [1, 2, 3], "b": [2, 4, 5]}) - func_1 = lambda x: {"c": x["a"] * 2, "d": x["a"] ** 2} - func_2 = lambda x: {"c": x * 3, "d": x ** 3} - - def func_err_1(x): - if x["a"] == 1: - return {"e": x["a"] * 2, "f": x["a"] ** 2} - else: - return {"e": x["a"] * 2} - - def func_err_2(x): - if x == 1: - return {"e": x * 2, "f": x ** 2} - else: - return {"e": x * 2} - - T.apply_more(func_1) - self.assertEqual(list(T["c"]), [2, 4, 6]) - self.assertEqual(list(T["d"]), [1, 4, 9]) - - res = T.apply_field_more(func_2, "a", modify_fields=False) - self.assertEqual(list(T["c"]), [2, 4, 6]) - self.assertEqual(list(T["d"]), [1, 4, 9]) - self.assertEqual(list(res["c"]), [3, 6, 9]) - self.assertEqual(list(res["d"]), [1, 8, 27]) - - with self.assertRaises(ApplyResultException) as e: - T.apply_more(func_err_1) - print(e) - - with self.assertRaises(ApplyResultException) as e: - T.apply_field_more(func_err_2, "a") - print(e) - - def test_drop(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 40, "y": [[5, 6], [7, 8, 9, 0]] * 20}) - ds.drop(lambda ins: len(ins["y"]) < 3, inplace=True) - self.assertEqual(len(ds), 20) - - def test_contains(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 40, "y": [[5, 6]] * 40}) - self.assertTrue("x" in ds) - 
self.assertTrue("y" in ds) - self.assertFalse("z" in ds) - - def test_rename_field(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 10, "y": [[5, 6]] * 10}) - ds.rename_field("x", "xx") - self.assertTrue("xx" in ds) - self.assertFalse("x" in ds) - - with self.assertRaises(KeyError): - ds.rename_field("yyy", "oo") - - def test_input_target(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 10, "y": [[5, 6]] * 10}) - ds.set_input("x") - ds.set_target("y") - self.assertTrue(ds.field_arrays["x"].is_input) - self.assertTrue(ds.field_arrays["y"].is_target) - - with self.assertRaises(KeyError): - ds.set_input("xxx") - with self.assertRaises(KeyError): - ds.set_input("yyy") - - def test_get_input_name(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 10, "y": [[5, 6]] * 10}) - self.assertEqual(ds.get_input_name(), [_ for _ in ds.field_arrays if ds.field_arrays[_].is_input]) - - def test_get_target_name(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 10, "y": [[5, 6]] * 10}) - self.assertEqual(ds.get_target_name(), [_ for _ in ds.field_arrays if ds.field_arrays[_].is_target]) - - def test_split(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 10, "y": [[5, 6]] * 10}) - d1, d2 = ds.split(0.1) - - def test_apply2(self): - def split_sent(ins): - return ins['raw_sentence'].split() - csv_loader = CSVLoader(headers=['raw_sentence', 'label'], sep='\t') - data_bundle = csv_loader.load('tests/data_for_tests/tutorial_sample_dataset.csv') - dataset = data_bundle.datasets['train'] - dataset.drop(lambda x: len(x['raw_sentence'].split()) == 0, inplace=True) - dataset.apply(split_sent, new_field_name='words', is_input=True) - # print(dataset) - - def test_add_field_v2(self): - ds = DataSet({"x": [3, 4]}) - ds.add_field('y', [['hello', 'world'], ['this', 'is', 'a', 'test']], is_input=True, is_target=True) - # ds.apply(lambda x:[x['x']]*3, is_input=True, is_target=True, new_field_name='y') - print(ds) - - def test_save_load(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 10, "y": [[5, 6]] * 10}) - ds.save("./my_ds.pkl") - self.assertTrue(os.path.exists("./my_ds.pkl")) - - ds_1 = DataSet.load("./my_ds.pkl") - os.remove("my_ds.pkl") - - def test_get_all_fields(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 10, "y": [[5, 6]] * 10}) - ans = ds.get_all_fields() - self.assertEqual(ans["x"].content, [[1, 2, 3, 4]] * 10) - self.assertEqual(ans["y"].content, [[5, 6]] * 10) - - def test_get_field(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 10, "y": [[5, 6]] * 10}) - ans = ds.get_field("x") - self.assertTrue(isinstance(ans, FieldArray)) - self.assertEqual(ans.content, [[1, 2, 3, 4]] * 10) - ans = ds.get_field("y") - self.assertTrue(isinstance(ans, FieldArray)) - self.assertEqual(ans.content, [[5, 6]] * 10) - - def test_add_null(self): - ds = DataSet() - with self.assertRaises(RuntimeError) as RE: - ds.add_field('test', []) - - def test_concat(self): - """ - 测试两个dataset能否正确concat - - """ - ds1 = DataSet({"x": [[1, 2, 3, 4] for i in range(10)], "y": [[5, 6] for i in range(10)]}) - ds2 = DataSet({"x": [[4,3,2,1] for i in range(10)], "y": [[6,5] for i in range(10)]}) - ds3 = ds1.concat(ds2) - - self.assertEqual(len(ds3), 20) - - self.assertListEqual(ds1[9]['x'], [1, 2, 3, 4]) - self.assertListEqual(ds1[10]['x'], [4,3,2,1]) - - ds2[0]['x'][0] = 100 - self.assertEqual(ds3[10]['x'][0], 4) # 不改变copy后的field了 - - ds3[10]['x'][0] = -100 - self.assertEqual(ds2[0]['x'][0], 100) # 不改变copy前的field了 - - # 测试inplace - ds1 = DataSet({"x": [[1, 2, 3, 4] for i in range(10)], "y": [[5, 6] for i in range(10)]}) - ds2 = DataSet({"x": [[4, 3, 2, 1] for i in range(10)], "y": 
[[6, 5] for i in range(10)]}) - ds3 = ds1.concat(ds2, inplace=True) - - ds2[0]['x'][0] = 100 - self.assertEqual(ds3[10]['x'][0], 4) # mutating ds2 does not change the copied field - - ds3[10]['x'][0] = -100 - self.assertEqual(ds2[0]['x'][0], 100) # mutating the copy does not change the source field - - ds3[0]['x'][0] = 100 - self.assertEqual(ds1[0]['x'][0], 100) # with inplace=True this does change the source field - - # test field mapping - ds1 = DataSet({"x": [[1, 2, 3, 4] for i in range(10)], "y": [[5, 6] for i in range(10)]}) - ds2 = DataSet({"X": [[4, 3, 2, 1] for i in range(10)], "Y": [[6, 5] for i in range(10)]}) - ds3 = ds1.concat(ds2, field_mapping={'X':'x', 'Y':'y'}) - self.assertEqual(len(ds3), 20) - - # test that extra fields are ignored - ds1 = DataSet({"x": [[1, 2, 3, 4] for i in range(10)], "y": [[5, 6] for i in range(10)]}) - ds2 = DataSet({"X": [[4, 3, 2, 1] for i in range(10)], "Y": [[6, 5] for i in range(10)], 'Z':[0]*10}) - ds3 = ds1.concat(ds2, field_mapping={'X':'x', 'Y':'y'}) - - # test the error case - ds1 = DataSet({"x": [[1, 2, 3, 4] for i in range(10)], "y": [[5, 6] for i in range(10)]}) - ds2 = DataSet({"X": [[4, 3, 2, 1] for i in range(10)]}) - with self.assertRaises(RuntimeError): - ds3 = ds1.concat(ds2, field_mapping={'X':'x'}) - - def test_no_padder(self): - ds = DataSet() - ds.add_field('idx', [1, 2, 3], padder=None) - self.assertEqual(ds['idx'].padder, None) # should be None, but AutoPadder - - def test_copy_padder(self): - from fastNLP.core.field import AutoPadder - ds = DataSet() - ds.add_field('idx', [1, 2, 3]) - ds['idx'].set_padder(None) # workaround of problem 1 - ds.apply_field(lambda x: x, 'idx', 'idx') - self.assertEqual(ds['idx'].padder, None) # should be None, but AutoPadder - - ds = DataSet() - ds.add_field('idx', [1, 2, 3]) - ds.apply_field(lambda x: x, 'idx', 'idx') - self.assertTrue(isinstance(ds.get_field('idx').padder, AutoPadder)) # should be None, but AutoPadder - - def test_instance_field_disappear_bug(self): - data = DataSet({'raw_chars': [[0,1],[2]], 'target': [0, 1]}) - data.copy_field(field_name='raw_chars', new_field_name='chars') - _data = data[:1] - for field_name in ['raw_chars', 'target', 'chars']: - self.assertTrue(_data.has_field(field_name)) - - -class TestDataSetIter(unittest.TestCase): - def test__repr__(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 10, "y": [[5, 6]] * 10}) - for iter in ds: - self.assertEqual(iter.__repr__(), """+--------------+--------+ -| x | y | -+--------------+--------+ -| [1, 2, 3, 4] | [5, 6] | -+--------------+--------+""") - - -class TestDataSetFieldMeta(unittest.TestCase): - def test_print_field_meta(self): - ds = DataSet({"x": [[1, 2, 3, 4]] * 10, "y": [[5, 6]] * 10}) - ds.print_field_meta() - - ds.set_input('x') - ds.print_field_meta() diff --git a/tests/core/test_dist_trainer.py b/tests/core/test_dist_trainer.py deleted file mode 100644 index 2f1408c8..00000000 --- a/tests/core/test_dist_trainer.py +++ /dev/null @@ -1,183 +0,0 @@ -import os -# have to add this, otherwise cannot import fastNLP when check_call() -import sys -sys.path.append(os.sep.join(os.path.abspath(__file__).split(os.sep)[:-3])) -import shutil -import subprocess -import unittest -from argparse import ArgumentParser - -import numpy as np -import torch.cuda -import torch.distributed as dist - -from fastNLP import AccuracyMetric -from fastNLP import CrossEntropyLoss, BCELoss -from fastNLP import DataSet -from fastNLP import Instance -from fastNLP import SGD -from fastNLP.core.callback import EchoCallback, GradientClipCallback -from fastNLP.core.dist_trainer import DistTrainer, get_local_rank -from fastNLP.models.base_model import NaiveClassifier - - -def prepare_fake_dataset(): 
- mean = np.array([-3, -3]) - cov = np.array([[1, 0], [0, 1]]) - class_A = np.random.multivariate_normal(mean, cov, size=(1000,)) - - mean = np.array([3, 3]) - cov = np.array([[1, 0], [0, 1]]) - class_B = np.random.multivariate_normal(mean, cov, size=(1000,)) - - data_set = DataSet([Instance(x=[float(item[0]), float(item[1])], y=0) for item in class_A] + - [Instance(x=[float(item[0]), float(item[1])], y=1) for item in class_B]) - return data_set - - -def prepare_fake_dataset2(*args, size=100): - ys = np.random.randint(4, size=size, dtype=np.int64) - data = {'y': ys} - for arg in args: - data[arg] = np.random.randn(size, 5) - return DataSet(data=data) - - -def set_rng_seed(seed): - np.random.seed(seed) - - -def prepare_env(): - def prepare_fake_dataset(): - mean = np.array([-3, -3]) - cov = np.array([[1, 0], [0, 1]]) - class_A = np.random.multivariate_normal(mean, cov, size=(1000,)) - - mean = np.array([3, 3]) - cov = np.array([[1, 0], [0, 1]]) - class_B = np.random.multivariate_normal(mean, cov, size=(1000,)) - - data_set = DataSet([Instance(x=[float(item[0]), float(item[1])], y=[0.0]) for item in class_A] + - [Instance(x=[float(item[0]), float(item[1])], y=[1.0]) for item in class_B]) - return data_set - - data_set = prepare_fake_dataset() - data_set.set_input("x") - data_set.set_target("y") - model = NaiveClassifier(2, 1) - return data_set, model - - -class TestDistTrainer(unittest.TestCase): - save_path = './save_cp' - - def run1(self): - # test distributed training - print('local rank', get_local_rank()) - set_rng_seed(100) - data_set = prepare_fake_dataset() - data_set.set_input("x", flag=True) - data_set.set_target("y", flag=True) - - model = NaiveClassifier(2, 2) - - trainer = DistTrainer( - model=model, train_data=data_set, optimizer=SGD(lr=0.1), - loss=CrossEntropyLoss(pred="predict", target="y"), - batch_size_per_gpu=8, n_epochs=3, print_every=50, save_path=self.save_path, - ) - trainer.train() - """ - # should run correctly - """ - if trainer.is_master and os.path.exists(self.save_path): - shutil.rmtree(self.save_path) - - def run2(self): - # test fp16 with distributed training - print('local rank', get_local_rank()) - set_rng_seed(100) - data_set = prepare_fake_dataset() - data_set.set_input("x", flag=True) - data_set.set_target("y", flag=True) - - model = NaiveClassifier(2, 2) - - trainer = DistTrainer( - model=model, train_data=data_set, optimizer=SGD(lr=0.1), - loss=CrossEntropyLoss(pred="predict", target="y"), - batch_size_per_gpu=8, n_epochs=3, print_every=50, save_path=self.save_path, - fp16=True - ) - trainer.train() - """ - # should run correctly - """ - if trainer.is_master and os.path.exists(self.save_path): - shutil.rmtree(self.save_path) - - def run3(self): - # test callbacks, especially clip-norm - set_rng_seed(100) - data_set, model = prepare_env() - trainer = DistTrainer( - data_set, model, optimizer=None, - loss=BCELoss(pred="predict", target="y"), - n_epochs=3, print_every=50, - callbacks_all=[GradientClipCallback()], - callbacks_master=[EchoCallback('callbacks_master')] - ) - trainer.train() - - def run4(self): - # test metrics, save, and others - set_rng_seed(100) - data_set, model = prepare_env() - - train_set, dev_set = data_set.split(0.3) - - model = NaiveClassifier(2, 1) - - trainer = DistTrainer( - train_set, model, optimizer=SGD(lr=0.1), - loss=BCELoss(pred="predict", target="y"), - batch_size_per_gpu=32, n_epochs=3, print_every=50, dev_data=dev_set, - metrics=AccuracyMetric(pred="predict", target="y"), validate_every=-1, save_path=self.save_path, - ) - trainer.train() - """ - # should run correctly - """
should run without error - """ - if trainer.is_master and os.path.exists(self.save_path): - shutil.rmtree(self.save_path) - - def run_dist(self, run_id): - if torch.cuda.is_available(): - ngpu = min(2, torch.cuda.device_count()) - path = os.path.abspath(__file__) - cmd = ['python', '-m', 'torch.distributed.launch', - '--nproc_per_node', str(ngpu), path, '--test', str(run_id)] - print(' '.join(cmd)) - subprocess.check_call(cmd) - - def test_normal_run(self): - self.run_dist(1) - - def no_test_fp16(self): - self.run_dist(2) - - def test_callback(self): - self.run_dist(3) - - def test_dev_data(self): - self.run_dist(4) - - -if __name__ == '__main__': - runner = TestDistTrainer() - parser = ArgumentParser() - parser.add_argument('--test', type=int) - args, _ = parser.parse_known_args() - if args.test and hasattr(runner, 'run%s' % args.test): - dist.init_process_group("nccl") - getattr(runner, 'run%s' % args.test)() diff --git a/tests/core/test_field.py b/tests/core/test_field.py deleted file mode 100644 index c46e2de2..00000000 --- a/tests/core/test_field.py +++ /dev/null @@ -1,316 +0,0 @@ -import unittest - -import numpy as np -import torch - -from fastNLP import FieldArray -from fastNLP.core.field import _get_ele_type_and_dim -from fastNLP import AutoPadder - -class TestFieldArrayTypeDimDetect(unittest.TestCase): - """ - Check that FieldArray correctly detects type and ndim. - - """ - def test_case1(self): - # 1.1 regular types - for value in [1, True, 1.0, 'abc']: - type_ = type(value) - _type, _dim = _get_ele_type_and_dim(cell=value) - self.assertListEqual([_type, _dim], [type_, 0]) - # 1.2 mixed types should raise - with self.assertRaises(Exception): - value = [1, 2, 1.0] - _get_ele_type_and_dim(value) - # tests with numpy - # 2.1 - value = np.array([1, 2, 3]) - type_ = value.dtype - dim_ = 1 - self.assertSequenceEqual(_get_ele_type_and_dim(cell=value), [type_, dim_]) - # 2.2 - value = np.array([[1, 2], [3, 4, 5]]) # the char embedding case (ragged rows) - self.assertSequenceEqual([int, 2], _get_ele_type_and_dim(value)) - # 2.3 - value = np.zeros((3, 4)) - self.assertSequenceEqual([value.dtype, 2], _get_ele_type_and_dim(value)) - # 2.4 invalid dimensions should raise - with self.assertRaises(Exception): - value = np.array([[1, 2], [3, [1]]]) - _get_ele_type_and_dim(value) - # 2.5 mixed types should raise - with self.assertRaises(Exception): - value = np.array([[1, 2], [3.0]]) - _get_ele_type_and_dim(value) - - # tests with tensors - # 3.1 the word embedding case - value = torch.zeros(3, 10) - self.assertSequenceEqual([value.dtype, 2], _get_ele_type_and_dim(value)) - # 3.2 the char embedding / image case - value = torch.zeros(3, 32, 32) - self.assertSequenceEqual([value.dtype, 3], _get_ele_type_and_dim(value)) - - -class TestFieldArrayInit(unittest.TestCase): - """ - 1) If a DataSet is initialized from a dict, add_field constructs the FieldArray: - 1.1) 2-d list DataSet({"x": [[1, 2], [3, 4]]}) - 1.2) 2-d array DataSet({"x": np.array([[1, 2], [3, 4]])}) - 1.3) 3-d list DataSet({"x": [[[1, 2], [3, 4]], [[1, 2], [3, 4]]]}) - 2) If a DataSet is initialized from a list of Instance, append first initializes the FieldArray with the first sample; - subsequent samples are then added via FieldArray.append. - 2.1) 1-d list DataSet([Instance(x=[1, 2, 3, 4])]) - 2.2) 1-d array DataSet([Instance(x=np.array([1, 2, 3, 4]))]) - 2.3) 2-d list DataSet([Instance(x=[[1, 2], [3, 4]])]) - 2.4) 2-d array DataSet([Instance(x=np.array([[1, 2], [3, 4]]))]) - """ - - def test_init_v1(self): - # 2-d list - fa = FieldArray("x", [[1, 2], [3, 4]] * 5, is_input=True) - - def test_init_v2(self): - # 2-d array - fa = FieldArray("x", np.array([[1, 2], [3, 4]] * 5), is_input=True) - - def test_init_v3(self): - # 3-d list - fa = FieldArray("x", [[[1, 2], [3, 4]], [[1, 2], [3,
4]]], is_input=True) - - def test_init_v4(self): - # 1-d list - val = [1, 2, 3, 4] - fa = FieldArray("x", [val], is_input=True) - fa.append(val) - - def test_init_v5(self): - # 1-d array - val = np.array([1, 2, 3, 4]) - fa = FieldArray("x", [val], is_input=True) - fa.append(val) - - def test_init_v6(self): - # 2-d list - val = [[1, 2], [3, 4]] - fa = FieldArray("x", [val], is_input=True) - fa.append(val) - - def test_init_v7(self): - # list of array - fa = FieldArray("x", [np.array([[1, 2], [3, 4]]), np.array([[1, 2], [3, 4]])], is_input=True) - self.assertEqual(fa.dtype, np.array([1]).dtype) - - def test_init_v8(self): - # 2-d array - val = np.array([[1, 2], [3, 4]]) - fa = FieldArray("x", [val], is_input=True) - fa.append(val) - - -class TestFieldArray(unittest.TestCase): - def test_main(self): - fa = FieldArray("x", [1, 2, 3, 4, 5], is_input=True) - self.assertEqual(len(fa), 5) - fa.append(6) - self.assertEqual(len(fa), 6) - - self.assertEqual(fa[-1], 6) - self.assertEqual(fa[0], 1) - fa[-1] = 60 - self.assertEqual(fa[-1], 60) - - self.assertEqual(fa.get(0), 1) - self.assertTrue(isinstance(fa.get([0, 1, 2]), np.ndarray)) - self.assertListEqual(list(fa.get([0, 1, 2])), [1, 2, 3]) - - def test_type_conversion(self): - fa = FieldArray("x", [1, 2, 3, 4, 5], is_input=True) - self.assertEqual(fa.dtype, int) - - fa = FieldArray("y", [1.1, 2.2, 3.3, 4.4, 5.5], is_input=True) - fa.append(10.0) - self.assertEqual(fa.dtype, float) - - fa = FieldArray("y", ["a", "b", "c", "d"], is_input=True) - fa.append("e") - self.assertEqual(fa.dtype, str) - - def test_support_np_array(self): - fa = FieldArray("y", np.array([[1.1, 2.2, 3.3, 4.4, 5.5]]), is_input=True) - self.assertEqual(fa.dtype, np.float64) - - fa.append(np.array([1.1, 2.2, 3.3, 4.4, 5.5])) - self.assertEqual(fa.dtype, np.float64) - - fa = FieldArray("my_field", np.random.rand(3, 5), is_input=True) - # in this case, pytype is actually a float. We do not care about it.
- self.assertEqual(fa.dtype, np.float64) - - def test_nested_list(self): - fa = FieldArray("y", [[1.1, 2.2, 3.3, 4.4, 5.5], [1.1, 2.2, 3.3, 4.4, 5.5]], is_input=True) - self.assertEqual(fa.dtype, float) - - def test_getitem_v1(self): - fa = FieldArray("y", [[1.1, 2.2, 3.3, 4.4, 5.5], [1.0, 2.0, 3.0, 4.0, 5.0]], is_input=True) - self.assertEqual(fa[0], [1.1, 2.2, 3.3, 4.4, 5.5]) - ans = fa[[0, 1]] - self.assertTrue(isinstance(ans, np.ndarray)) - self.assertTrue(isinstance(ans[0], np.ndarray)) - self.assertEqual(ans[0].tolist(), [1.1, 2.2, 3.3, 4.4, 5.5]) - self.assertEqual(ans[1].tolist(), [1, 2, 3, 4, 5]) - self.assertEqual(ans.dtype, np.float64) - - def test_getitem_v2(self): - x = np.random.rand(10, 5) - fa = FieldArray("my_field", x, is_input=True) - indices = [0, 1, 3, 4, 6] - for a, b in zip(fa[indices], x[indices]): - self.assertListEqual(a.tolist(), b.tolist()) - - def test_append(self): - with self.assertRaises(Exception): - fa = FieldArray("y", [[1.1, 2.2, 3.3, 4.4, 5.5], [1, 2, 3, 4, 5]], is_input=True, use_1st_ins_infer_dim_type=False) - fa.append(0) - - with self.assertRaises(Exception): - fa = FieldArray("y", [1.1, 2.2, 3.3, 4.4, 5.5], is_input=True, use_1st_ins_infer_dim_type=False) - fa.append([1, 2, 3, 4, 5]) - - with self.assertRaises(Exception): - fa = FieldArray("y", [[1.1, 2.2, 3.3, 4.4, 5.5], [1, 2, 3, 4, 5]], is_input=True, use_1st_ins_infer_dim_type=False) - fa.append([]) - - with self.assertRaises(Exception): - fa = FieldArray("y", [[1.1, 2.2, 3.3, 4.4, 5.5], [1, 2, 3, 4, 5]], is_input=True, use_1st_ins_infer_dim_type=False) - fa.append(["str", 0, 0, 0, 1.89]) - - fa = FieldArray("y", [[1.1, 2.2, 3.3, 4.4, 5.5], [1.0, 2.0, 3.0, 4.0, 5.0]], is_input=True, use_1st_ins_infer_dim_type=False) - fa.append([1.2, 2.3, 3.4, 4.5, 5.6]) - self.assertEqual(len(fa), 3) - self.assertEqual(fa[2], [1.2, 2.3, 3.4, 4.5, 5.6]) - - def test_ignore_type(self): - # test the newly added ignore_type parameter, which skips type checking - fa = FieldArray("y", [[1.1, 2.2, "jin", {}, "hahah"], [int, 2, "$", 4, 5]], is_input=True, ignore_type=True) - fa.append([1.2, 2.3, str, 4.5, print]) - - fa = FieldArray("y", [(1, "1"), (2, "2"), (3, "3"), (4, "4")], is_target=True, ignore_type=True) - - -class TestAutoPadder(unittest.TestCase): - def test00(self): - padder = AutoPadder() - # when there is no type - contents = [(1, 2), ('str', 'a')] - padder(contents, None, None, None) - - def test01(self): - # test multi-dimensional bool, int, str, float inputs - # str - padder = AutoPadder() - content = ['This is a str', 'this is another str'] - self.assertListEqual(content, padder(content, None, str, 0).tolist()) - - # 1-d int - content = [[1, 2, 3], [4,], [5, 6, 7, 8]] - padded_content = [[1, 2, 3, 0], [4, 0, 0, 0], [5, 6, 7, 8]] - self.assertListEqual(padder(content, None, int, 1).tolist(), padded_content) - - # 2-d int - padded_content = [[[1, 2, 3, 0], [4, 5, 0, 0], [7, 8, 9, 10]], [[1, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]] - content = [ - [[1, 2, 3], [4, 5], [7, 8, 9, 10]], - [[1]] - ] - self.assertListEqual(padder(content, None, int, 2).tolist(), padded_content) - - # 3-d images - contents = [np.random.rand(3, 4, 4).tolist() for _ in range(5)] - self.assertTrue(padder(contents, None, float, 3).shape==(5, 3, 4, 4)) - - # higher dimensions are returned as-is - contents = [np.random.rand(24, 3, 4, 4).tolist() for _ in range(5)] - self.assertTrue(isinstance(padder(contents, None, float, 4), np.ndarray)) - - def test02(self): - padder = AutoPadder() - # test numpy inputs - # 0-d - contents = np.arange(12) - self.assertListEqual(padder(contents, None, contents.dtype, 0).tolist(), contents.tolist()) - - # 1-d - contents =
np.arange(12).reshape((3, 4)) - self.assertListEqual(padder(contents, None, contents.dtype, 1).tolist(), contents.tolist()) - - # 2-d - contents = np.ones((3, 10, 5)) - self.assertListEqual(padder(contents, None, contents.dtype, 2).tolist(), contents.tolist()) - - # 3-d - contents = [np.random.rand(3, 4, 4) for _ in range(5)] - l_contents = [content.tolist() for content in contents] - self.assertListEqual(padder(contents, None, contents[0].dtype, 3).tolist(), l_contents) - - def test03(self): - padder = AutoPadder() - # test tensor inputs - # 0-d - contents = torch.arange(12) - r_contents = padder(contents, None, contents.dtype, 0) - self.assertSequenceEqual(r_contents.tolist(), contents.tolist()) - self.assertTrue(r_contents.dtype==contents.dtype) - - # 0-d - contents = [torch.tensor(1) for _ in range(10)] - self.assertSequenceEqual(padder(contents, None, torch.int64, 0).tolist(), contents) - - # 1-d - contents = torch.randn(3, 4) - padder(contents, None, torch.float64, 1) - - # 3-d - contents = [torch.randn(3, 4, 4) for _ in range(5)] - padder(contents, None, torch.float64, 3) - - - -class TestEngChar2DPadder(unittest.TestCase): - def test01(self): - """ - Test that EngChar2DPadder can be used correctly. - :return: - """ - from fastNLP import EngChar2DPadder - padder = EngChar2DPadder(pad_length=0) - - contents = [1, 2] - # 0-d is not allowed - with self.assertRaises(Exception): - padder(contents, None, np.int64, 0) - contents = [[1, 2]] - # 1-d is not allowed - with self.assertRaises(Exception): - padder(contents, None, np.int64, 1) - contents = [ - [[[[1, 2]]]] - ] - # more than 2-d is not allowed - with self.assertRaises(Exception): - padder(contents, None, np.int64, 3) - - contents = [ - [[1, 2, 3], [4, 5], [7,8,9,10]], - [[1]] - ] - self.assertListEqual([[[1, 2, 3, 0], [4, 5, 0, 0], [7, 8, 9, 10]], [[1, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]], - padder(contents, None, np.int64, 2).tolist()) - - padder = EngChar2DPadder(pad_length=5, pad_val=-100) - self.assertListEqual( - [[[1, 2, 3, -100, -100], [4, 5, -100, -100, -100], [7, 8, 9, 10, -100]], - [[1, -100, -100, -100, -100], [-100, -100, -100, -100, -100], [-100, -100, -100, -100, -100]]], - padder(contents, None, np.int64, 2).tolist() - ) - diff --git a/tests/core/test_instance.py b/tests/core/test_instance.py deleted file mode 100644 index 207b44e9..00000000 --- a/tests/core/test_instance.py +++ /dev/null @@ -1,35 +0,0 @@ -import unittest - -from fastNLP import Instance - - -class TestCase(unittest.TestCase): - - def test_init(self): - fields = {"x": [1, 2, 3], "y": [4, 5, 6]} - ins = Instance(x=[1, 2, 3], y=[4, 5, 6]) - self.assertTrue(isinstance(ins.fields, dict)) - self.assertEqual(ins.fields, fields) - - ins = Instance(**fields) - self.assertEqual(ins.fields, fields) - - def test_add_field(self): - fields = {"x": [1, 2, 3], "y": [4, 5, 6]} - ins = Instance(**fields) - ins.add_field("z", [1, 1, 1]) - fields.update({"z": [1, 1, 1]}) - self.assertEqual(ins.fields, fields) - - def test_get_item(self): - fields = {"x": [1, 2, 3], "y": [4, 5, 6], "z": [1, 1, 1]} - ins = Instance(**fields) - self.assertEqual(ins["x"], [1, 2, 3]) - self.assertEqual(ins["y"], [4, 5, 6]) - self.assertEqual(ins["z"], [1, 1, 1]) - - def test_repr(self): - fields = {"x": [1, 2, 3], "y": [4, 5, 6], "z": [1, 1, 1]} - ins = Instance(**fields) - # simple print, that is enough.
- print(ins) diff --git a/tests/core/test_logger.py b/tests/core/test_logger.py deleted file mode 100644 index 628ed94d..00000000 --- a/tests/core/test_logger.py +++ /dev/null @@ -1,34 +0,0 @@ -from fastNLP import logger -import unittest -from unittest.mock import patch -import os -import io -import tempfile -import shutil - -class TestLogger(unittest.TestCase): - msg = 'some test logger msg' - - def setUp(self): - self.tmpdir = tempfile.mkdtemp() - - def tearDown(self): - pass - # shutil.rmtree(self.tmpdir) - - def test_add_file(self): - fn = os.path.join(self.tmpdir, 'log.txt') - logger.add_file(fn) - logger.info(self.msg) - with open(fn, 'r') as f: - line = f.read() - print(line) - self.assertTrue(self.msg in line) - - @patch('sys.stdout', new_callable=io.StringIO) - def test_stdout(self, mock_out): - for i in range(3): - logger.info(self.msg) - logger.debug('aabbc') - - self.assertEqual([self.msg for i in range(3)], mock_out.getvalue().strip().split('\n')) diff --git a/tests/core/test_loss.py b/tests/core/test_loss.py deleted file mode 100644 index 976285a9..00000000 --- a/tests/core/test_loss.py +++ /dev/null @@ -1,86 +0,0 @@ -import unittest - -import torch -import torch.nn.functional as F - -import fastNLP as loss - - -class TestLoss(unittest.TestCase): - def test_CrossEntropyLoss(self): - ce = loss.CrossEntropyLoss(pred="my_predict", target="my_truth") - a = torch.randn(3, 5, requires_grad=False) - b = torch.empty(3, dtype=torch.long).random_(5) - ans = ce({"my_predict": a}, {"my_truth": b}) - self.assertEqual(ans, torch.nn.functional.cross_entropy(a, b)) - - ce = loss.CrossEntropyLoss(pred="my_predict", target="my_truth", class_in_dim=1) - a = torch.randn(3, 4, 3) - b = torch.randint(3, (3, 3)) - ans = ce({"my_predict": a}, {"my_truth": b}) - self.assertAlmostEqual(ans.item(), torch.nn.functional.cross_entropy(a, b).item(), places=4) - - ce = loss.CrossEntropyLoss(pred="my_predict", target="my_truth", class_in_dim=2) - a = torch.randn(3, 4, 3) - b = torch.randint(3, (3, 4)) - ans = ce({"my_predict": a}, {"my_truth": b}) - self.assertAlmostEqual(ans.item(), torch.nn.functional.cross_entropy(a.transpose(1, 2), b).item(), places=4) - - def test_BCELoss(self): - bce = loss.BCELoss(pred="my_predict", target="my_truth") - a = torch.sigmoid(torch.randn((3, 5), requires_grad=False)) - b = torch.randn((3, 5), requires_grad=False) - ans = bce({"my_predict": a}, {"my_truth": b}) - self.assertEqual(ans, torch.nn.functional.binary_cross_entropy(a, b)) - - def test_L1Loss(self): - l1 = loss.L1Loss(pred="my_predict", target="my_truth") - a = torch.randn(3, 5, requires_grad=False) - b = torch.randn(3, 5) - ans = l1({"my_predict": a}, {"my_truth": b}) - self.assertEqual(ans, torch.nn.functional.l1_loss(a, b)) - - def test_NLLLoss(self): - l1 = loss.NLLLoss(pred="my_predict", target="my_truth") - a = F.log_softmax(torch.randn(3, 5, requires_grad=False), dim=0) - b = torch.tensor([1, 0, 4]) - ans = l1({"my_predict": a}, {"my_truth": b}) - self.assertEqual(ans, torch.nn.functional.nll_loss(a, b)) - - -class TestLosserError(unittest.TestCase): - def test_losser1(self): - # (1) only input, targets passed - pred_dict = {"pred": torch.zeros(4, 3)} - target_dict = {'target': torch.zeros(4).long()} - los = loss.CrossEntropyLoss() - - print(los(pred_dict=pred_dict, target_dict=target_dict)) - - # - def test_losser2(self): - # (2) with corrupted size - pred_dict = {"pred": torch.zeros(16, 3)} - target_dict = {'target': torch.zeros(16, 3).long()} - los = loss.CrossEntropyLoss() - - with
self.assertRaises(RuntimeError): - print(los(pred_dict=pred_dict, target_dict=target_dict)) - - def test_losser3(self): - # (3) with stop_fast_param - pred_dict = {"pred": torch.zeros(16, 3), 'stop_fast_param': 0} - target_dict = {'target': torch.zeros(16).long()} - los = loss.CrossEntropyLoss() - - print(los(pred_dict=pred_dict, target_dict=target_dict)) - - def test_check_error(self): - l1 = loss.NLLLoss(pred="my_predict", target="my_truth") - a = F.log_softmax(torch.randn(3, 5, requires_grad=False), dim=0) - b = torch.tensor([1, 0, 4]) - with self.assertRaises(Exception): - ans = l1({"wrong_predict": a, "my": b}, {"my_truth": b}) - - with self.assertRaises(Exception): - ans = l1({"my_predict": a}, {"truth": b, "my": a}) diff --git a/tests/core/test_metrics.py b/tests/core/test_metrics.py deleted file mode 100644 index 4330ebc2..00000000 --- a/tests/core/test_metrics.py +++ /dev/null @@ -1,637 +0,0 @@ -import unittest -from collections import Counter - -import numpy as np -import torch -from fastNLP import AccuracyMetric -from fastNLP.core.metrics import (ClassifyFPreRecMetric, CMRC2018Metric, - ConfusionMatrixMetric, SpanFPreRecMetric, - _accuracy_topk, _pred_topk) -from fastNLP.core.vocabulary import Vocabulary - - -def _generate_tags(encoding_type, number_labels=4): - """ - - :param encoding_type: e.g. BIOES, BMES, BIO - :param number_labels: number of labels, greater than 1 - :return: - """ - vocab = {} - for i in range(number_labels): - label = str(i) - for tag in encoding_type: - if tag == 'O': - if tag not in vocab: - vocab['O'] = len(vocab) + 1 - continue - vocab['{}-{}'.format(tag, label)] = len(vocab) + 1 # the value actually records this tag's count - return vocab - - -def _convert_res_to_fastnlp_res(metric_result): - allen_result = {} - key_map = {'f1-measure-overall': "f", "recall-overall": "rec", "precision-overall": "pre"} - for key, value in metric_result.items(): - if key in key_map: - key = key_map[key] - else: - label = key.split('-')[-1] - if key.startswith('f1'): - key = 'f-{}'.format(label) - else: - key = '{}-{}'.format(key[:3], label) - allen_result[key] = round(value, 6) - return allen_result - - - -class TestConfusionMatrixMetric(unittest.TestCase): - def test_ConfusionMatrixMetric1(self): - pred_dict = {"pred": torch.zeros(4,3)} - target_dict = {'target': torch.zeros(4)} - metric = ConfusionMatrixMetric() - - metric(pred_dict=pred_dict, target_dict=target_dict) - print(metric.get_metric()) - - def test_ConfusionMatrixMetric2(self): - # (2) with corrupted size - - with self.assertRaises(Exception): - pred_dict = {"pred": torch.zeros(4, 3, 2)} - target_dict = {'target': torch.zeros(4)} - metric = ConfusionMatrixMetric() - - metric(pred_dict=pred_dict, target_dict=target_dict, ) - print(metric.get_metric()) - - def test_ConfusionMatrixMetric3(self): - # (3) the second batch is corrupted size - with self.assertRaises(Exception): - metric = ConfusionMatrixMetric() - pred_dict = {"pred": torch.zeros(4, 3, 2)} - target_dict = {'target': torch.zeros(4, 3)} - metric(pred_dict=pred_dict, target_dict=target_dict) - - pred_dict = {"pred": torch.zeros(4, 3, 2)} - target_dict = {'target': torch.zeros(4)} - metric(pred_dict=pred_dict, target_dict=target_dict) - - print(metric.get_metric()) - - def test_ConfusionMatrixMetric4(self): - # (4) check reset - metric = ConfusionMatrixMetric() - pred_dict = {"pred": torch.randn(4, 3, 2)} - target_dict = {'target': torch.ones(4, 3)} - metric(pred_dict=pred_dict, target_dict=target_dict) - res = metric.get_metric() - self.assertTrue(isinstance(res, dict)) - print(res) - - def
test_ConfusionMatrixMetric5(self): - # (5) check numpy array is not acceptable - - with self.assertRaises(Exception): - metric = ConfusionMatrixMetric() - pred_dict = {"pred": np.zeros((4, 3, 2))} - target_dict = {'target': np.zeros((4, 3))} - metric(pred_dict=pred_dict, target_dict=target_dict) - - def test_ConfusionMatrixMetric6(self): - # (6) check map, match - metric = ConfusionMatrixMetric(pred='predictions', target='targets') - pred_dict = {"predictions": torch.randn(4, 3, 2)} - target_dict = {'targets': torch.zeros(4, 3)} - metric(pred_dict=pred_dict, target_dict=target_dict) - res = metric.get_metric() - print(res) - - def test_ConfusionMatrixMetric7(self): - # (7) check map, include unused - metric = ConfusionMatrixMetric(pred='prediction', target='targets') - pred_dict = {"prediction": torch.zeros(4, 3, 2), 'unused': 1} - target_dict = {'targets': torch.zeros(4, 3)} - metric(pred_dict=pred_dict, target_dict=target_dict) - - def test_ConfusionMatrixMetric8(self): - # (8) check _fast_metric - with self.assertRaises(Exception): - metric = ConfusionMatrixMetric() - pred_dict = {"predictions": torch.zeros(4, 3, 2), "seq_len": torch.ones(3) * 3} - target_dict = {'targets': torch.zeros(4, 3)} - metric(pred_dict=pred_dict, target_dict=target_dict) - print(metric.get_metric()) - - - def test_duplicate(self): - # latent bug in 0.4.1: duplicated parameter names must not occur - metric = ConfusionMatrixMetric(pred='predictions', target='targets') - pred_dict = {"predictions": torch.zeros(4, 3, 2), "seq_len": torch.ones(4) * 3, 'pred':0} - target_dict = {'targets':torch.zeros(4, 3), 'target': 0} - metric(pred_dict=pred_dict, target_dict=target_dict) - print(metric.get_metric()) - - - def test_seq_len(self): - N = 256 - seq_len = torch.zeros(N).long() - seq_len[0] = 2 - pred = {'pred': torch.ones(N, 2)} - target = {'target': torch.ones(N, 2), 'seq_len': seq_len} - metric = ConfusionMatrixMetric() - metric(pred_dict=pred, target_dict=target) - metric.get_metric(reset=False) - seq_len[1:] = 1 - metric(pred_dict=pred, target_dict=target) - metric.get_metric() - - def test_vocab(self): - vocab = Vocabulary() - word_list = "this is a word list".split() - vocab.update(word_list) - - pred_dict = {"pred": torch.zeros(4,3)} - target_dict = {'target': torch.zeros(4)} - metric = ConfusionMatrixMetric(vocab=vocab) - metric(pred_dict=pred_dict, target_dict=target_dict) - print(metric.get_metric()) - - - -class TestAccuracyMetric(unittest.TestCase): - def test_AccuracyMetric1(self): - # (1) only input, targets passed - pred_dict = {"pred": torch.zeros(4, 3)} - target_dict = {'target': torch.zeros(4)} - metric = AccuracyMetric() - - metric(pred_dict=pred_dict, target_dict=target_dict) - print(metric.get_metric()) - - def test_AccuracyMetric2(self): - # (2) with corrupted size - try: - pred_dict = {"pred": torch.zeros(4, 3, 2)} - target_dict = {'target': torch.zeros(4)} - metric = AccuracyMetric() - - metric(pred_dict=pred_dict, target_dict=target_dict, ) - print(metric.get_metric()) - except Exception as e: - print(e) - return - self.fail("No exception was raised.") - - def test_AccuracyMetric3(self): - # (3) the second batch is corrupted size - try: - metric = AccuracyMetric() - pred_dict = {"pred": torch.zeros(4, 3, 2)} - target_dict = {'target': torch.zeros(4, 3)} - metric(pred_dict=pred_dict, target_dict=target_dict) - - pred_dict = {"pred": torch.zeros(4, 3, 2)} - target_dict = {'target': torch.zeros(4)} - metric(pred_dict=pred_dict, target_dict=target_dict) - - print(metric.get_metric()) - except Exception as e: - print(e) - return -
self.fail("No exception was raised.") - - def test_AccuracyMetric4(self): - # (4) check the computed accuracy - metric = AccuracyMetric() - pred_dict = {"pred": torch.randn(4, 3, 2)} - target_dict = {'target': torch.ones(4, 3)} - metric(pred_dict=pred_dict, target_dict=target_dict) - ans = torch.argmax(pred_dict["pred"], dim=2).to(target_dict["target"]) == target_dict["target"] - res = metric.get_metric() - self.assertTrue(isinstance(res, dict)) - self.assertTrue("acc" in res) - self.assertAlmostEqual(res["acc"], float(ans.float().mean()), places=3) - - def test_AccuracyMetric5(self): - # (5) check reset - metric = AccuracyMetric() - pred_dict = {"pred": torch.randn(4, 3, 2)} - target_dict = {'target': torch.zeros(4, 3)} - metric(pred_dict=pred_dict, target_dict=target_dict) - res = metric.get_metric(reset=False) - ans = (torch.argmax(pred_dict["pred"], dim=2).float() == target_dict["target"]).float().mean() - self.assertAlmostEqual(res["acc"], float(ans), places=4) - - def test_AccuracyMetric6(self): - # (6) check numpy array is not acceptable - try: - metric = AccuracyMetric() - pred_dict = {"pred": np.zeros((4, 3, 2))} - target_dict = {'target': np.zeros((4, 3))} - metric(pred_dict=pred_dict, target_dict=target_dict) - except Exception as e: - print(e) - return - self.fail("No exception was raised.") - - def test_AccuracyMetric7(self): - # (7) check map, match - metric = AccuracyMetric(pred='predictions', target='targets') - pred_dict = {"predictions": torch.randn(4, 3, 2)} - target_dict = {'targets': torch.zeros(4, 3)} - metric(pred_dict=pred_dict, target_dict=target_dict) - res = metric.get_metric() - ans = (torch.argmax(pred_dict["predictions"], dim=2).float() == target_dict["targets"]).float().mean() - self.assertAlmostEqual(res["acc"], float(ans), places=4) - - def test_AccuracyMetric8(self): - metric = AccuracyMetric(pred='predictions', target='targets') - pred_dict = {"predictions": torch.zeros(4, 3, 2)} - target_dict = {'targets': torch.zeros(4, 3)} - metric(pred_dict=pred_dict, target_dict=target_dict, ) - self.assertDictEqual(metric.get_metric(), {'acc': 1}) - - def test_AccuracyMetric9(self): - # (9) check map, include unused - metric = AccuracyMetric(pred='prediction', target='targets') - pred_dict = {"prediction": torch.zeros(4, 3, 2), 'unused': 1} - target_dict = {'targets': torch.zeros(4, 3)} - metric(pred_dict=pred_dict, target_dict=target_dict) - self.assertDictEqual(metric.get_metric(), {'acc': 1}) - - def test_AccuracyMetric10(self): - # (10) check _fast_metric - try: - metric = AccuracyMetric() - pred_dict = {"predictions": torch.zeros(4, 3, 2), "seq_len": torch.ones(3) * 3} - target_dict = {'targets': torch.zeros(4, 3)} - metric(pred_dict=pred_dict, target_dict=target_dict) - self.assertDictEqual(metric.get_metric(), {'acc': 1}) - except Exception as e: - print(e) - return - self.fail("No exception was raised.")
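The corrupted-size tests above all follow one pattern: build a pred tensor whose shape cannot be reconciled with target, then require the metric call to raise. A minimal standalone sketch of that pattern, using the same AccuracyMetric call convention as these tests (the class and method names here are hypothetical):

import unittest
import torch
from fastNLP import AccuracyMetric

class CorruptedSizeSketch(unittest.TestCase):
    def test_shape_mismatch_raises(self):
        metric = AccuracyMetric()
        pred_dict = {"pred": torch.zeros(4, 3, 2)}  # 3-d predictions
        target_dict = {"target": torch.zeros(4)}    # incompatible 1-d target
        # assertRaises replaces the try/except-plus-flag bookkeeping used above
        with self.assertRaises(Exception):
            metric(pred_dict=pred_dict, target_dict=target_dict)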
- - def test_duplicate(self): - # latent bug in 0.4.1: duplicated parameter names must not occur - metric = AccuracyMetric(pred='predictions', target='targets') - pred_dict = {"predictions": torch.zeros(4, 3, 2), "seq_len": torch.ones(4) * 3, 'pred':0} - target_dict = {'targets':torch.zeros(4, 3), 'target': 0} - metric(pred_dict=pred_dict, target_dict=target_dict) - - - def test_seq_len(self): - N = 256 - seq_len = torch.zeros(N).long() - seq_len[0] = 2 - pred = {'pred': torch.ones(N, 2)} - target = {'target': torch.ones(N, 2), 'seq_len': seq_len} - metric = AccuracyMetric() - metric(pred_dict=pred, target_dict=target) - self.assertDictEqual(metric.get_metric(), {'acc': 1.}) - seq_len[1:] = 1 - metric(pred_dict=pred, target_dict=target) - self.assertDictEqual(metric.get_metric(), {'acc': 1.}) - - -class SpanFPreRecMetricTest(unittest.TestCase): - def test_case1(self): - from fastNLP.core.metrics import _bmes_tag_to_spans - from fastNLP.core.metrics import _bio_tag_to_spans - - bmes_lst = ['M-8', 'S-2', 'S-0', 'B-9', 'B-6', 'E-5', 'B-7', 'S-2', 'E-7', 'S-8'] - bio_lst = ['O-8', 'O-2', 'B-0', 'O-9', 'I-6', 'I-5', 'I-7', 'I-2', 'I-7', 'O-8'] - expect_bmes_res = set() - expect_bmes_res.update([('8', (0, 1)), ('2', (1, 2)), ('0', (2, 3)), ('9', (3, 4)), ('6', (4, 5)), - ('5', (5, 6)), ('7', (6, 7)), ('2', (7, 8)), ('7', (8, 9)), ('8', (9, 10))]) - expect_bio_res = set() - expect_bio_res.update([('7', (8, 9)), ('0', (2, 3)), ('2', (7, 8)), ('5', (5, 6)), - ('6', (4, 5)), ('7', (6, 7))]) - self.assertSetEqual(expect_bmes_res, set(_bmes_tag_to_spans(bmes_lst))) - self.assertSetEqual(expect_bio_res, set(_bio_tag_to_spans(bio_lst))) - - def test_case2(self): - # test tags without labels - from fastNLP.core.metrics import _bmes_tag_to_spans - from fastNLP.core.metrics import _bio_tag_to_spans - - bmes_lst = ['B', 'E', 'B', 'S', 'B', 'M', 'E', 'M', 'B', 'E'] - bio_lst = ['I', 'B', 'O', 'O', 'I', 'O', 'I', 'B', 'O', 'O'] - expect_bmes_res = set() - expect_bmes_res.update([('', (0, 2)), ('', (2, 3)), ('', (3, 4)), ('', (4, 7)), ('', (7, 8)), ('', (8, 10))]) - expect_bio_res = set() - expect_bio_res.update([('', (7, 8)), ('', (6, 7)), ('', (4, 5)), ('', (0, 1)), ('', (1, 2))]) - self.assertSetEqual(expect_bmes_res, set(_bmes_tag_to_spans(bmes_lst))) - self.assertSetEqual(expect_bio_res, set(_bio_tag_to_spans(bio_lst))) - - def test_case3(self): - number_labels = 4 - # bio tag - fastnlp_bio_vocab = Vocabulary(unknown=None, padding=None) - fastnlp_bio_vocab.word_count = Counter(_generate_tags('BIO', number_labels)) - fastnlp_bio_metric = SpanFPreRecMetric(tag_vocab=fastnlp_bio_vocab, only_gross=False) - bio_sequence = torch.FloatTensor([[[-0.4424, -0.4579, -0.7376, 1.8129, 0.1316, 1.6566, -1.2169, - -0.3782, 0.8240], - [-1.2348, -0.1876, -0.1462, -0.4834, -0.6692, -0.9735, 1.1563, - -0.3562, -1.4116], - [ 1.6550, -0.9555, 0.3782, -1.3160, -1.5835, -0.3443, -1.7858, - 2.0023, 0.7075], - [-0.3772, -0.5447, -1.5631, 1.1614, 1.4598, -1.2764, 0.5186, - 0.3832, -0.1540], - [-0.1011, 0.0600, 1.1090, -0.3545, 0.1284, 1.1484, -1.0120, - -1.3508, -0.9513], - [ 1.8948, 0.8627, -2.1359, 1.3740, -0.7499, 1.5019, 0.6919, - -0.0842, -0.4294]], - - [[-0.2802, 0.6941, -0.4788, -0.3845, 1.7752, 1.2950, -1.9490, - -1.4138, -0.8853], - [-1.3752, -0.5457, -0.5305, 0.4018, 0.2934, 0.7931, 2.3845, - -1.0726, 0.0364], - [ 0.3621, 0.2609, 0.1269, -0.5950, 0.7212, 0.5959, 1.6264, - -0.8836, -0.9320], - [ 0.2003, -1.0758, -1.1560, -0.6472, -1.7549, 0.1264, 0.6044, - -1.6857, 1.1571], - [ 1.4277, -0.4915, 0.4496, 2.2027, 0.0730, -3.1792, -0.5125, - -0.5837, 1.0184], - [ 1.9495, 1.7145,
-0.2143, -0.1230, -0.2205, 0.8250, 0.4943, - -0.9025, 0.0864]]]) - bio_target = torch.LongTensor([[3, 6, 0, 8, 2, 4], - [4, 1, 7, 0, 4, 7]]) - fastnlp_bio_metric({'pred': bio_sequence, 'seq_len': torch.LongTensor([6, 6])}, {'target': bio_target}) - expect_bio_res = {'pre-1': 0.333333, 'rec-1': 0.333333, 'f-1': 0.333333, 'pre-2': 0.5, 'rec-2': 0.5, - 'f-2': 0.5, 'pre-0': 0.0, 'rec-0': 0.0, 'f-0': 0.0, 'pre-3': 0.0, 'rec-3': 0.0, - 'f-3': 0.0, 'pre': 0.222222, 'rec': 0.181818, 'f': 0.2} - - self.assertDictEqual(expect_bio_res, fastnlp_bio_metric.get_metric()) - - def test_case4(self): - # bmes tag - def _generate_samples(): - target = [] - seq_len = [] - vocab = Vocabulary(unknown=None, padding=None) - for i in range(3): - target_i = [] - seq_len_i = 0 - for j in range(1, 10): - word_len = np.random.randint(1, 5) - seq_len_i += word_len - if word_len==1: - target_i.append('S') - else: - target_i.append('B') - target_i.extend(['M']*(word_len-2)) - target_i.append('E') - vocab.add_word_lst(target_i) - target.append(target_i) - seq_len.append(seq_len_i) - target_ = np.zeros((3, max(seq_len))) - for i in range(3): - target_i = [vocab.to_index(t) for t in target[i]] - target_[i, :seq_len[i]] = target_i - return target_, target, seq_len, vocab - def get_eval(raw_target, pred, vocab, seq_len): - pred = pred.argmax(dim=-1).tolist() - tp = 0 - gold = 0 - seg = 0 - pred_target = [] - for i in range(len(seq_len)): - tags = [vocab.to_word(p) for p in pred[i][:seq_len[i]]] - spans = [] - prev_bmes_tag = None - for idx, tag in enumerate(tags): - if tag in ('B', 'S'): - spans.append([idx, idx]) - elif tag in ('M', 'E') and prev_bmes_tag in ('B', 'M'): - spans[-1][1] = idx - else: - spans.append([idx, idx]) - prev_bmes_tag = tag - tmp = [] - for span in spans: - if span[1]-span[0]>0: - tmp.extend(['B'] + ['M']*(span[1]-span[0]-1) + ['E']) - else: - tmp.append('S') - pred_target.append(tmp) - for i in range(len(seq_len)): - raw_pred = pred_target[i] - start = 0 - for j in range(seq_len[i]): - if raw_target[i][j] in ('E', 'S'): - flag = True - for k in range(start, j+1): - if raw_target[i][k]!=raw_pred[k]: - flag = False - break - if flag: - tp += 1 - start = j + 1 - gold += 1 - if raw_pred[j] in ('E', 'S'): - seg += 1 - - pre = round(tp/seg, 6) - rec = round(tp/gold, 6) - return {'f': round(2*pre*rec/(pre+rec), 6), 'pre': pre, 'rec':rec} - - target, raw_target, seq_len, vocab = _generate_samples() - pred = torch.randn(3, max(seq_len), 4) - - expected_metric = get_eval(raw_target, pred, vocab, seq_len) - metric = SpanFPreRecMetric(vocab, encoding_type='bmes') - metric({'pred': pred, 'seq_len':torch.LongTensor(seq_len)}, {'target': torch.from_numpy(target)}) - # print(metric.get_metric(reset=False)) - # print(expected_metric) - metric_value = metric.get_metric() - for key, value in expected_metric.items(): - self.assertAlmostEqual(value, metric_value[key], places=5) - - def test_auto_encoding_type_infer(self): - # check that the encoding type can be inferred automatically - vocabs = {} - import random - for encoding_type in ['bio', 'bioes', 'bmeso']: - vocab = Vocabulary(unknown=None, padding=None) - for i in range(random.randint(10, 100)): - label = str(random.randint(1, 10)) - for tag in encoding_type: - if tag!='o': - vocab.add_word(f'{tag}-{label}') - else: - vocab.add_word('o') - vocabs[encoding_type] = vocab - for e in ['bio', 'bioes', 'bmeso']: - with self.subTest(e=e): - metric = SpanFPreRecMetric(tag_vocab=vocabs[e]) - assert metric.encoding_type == e - - bmes_vocab = _generate_tags('bmes') - vocab = Vocabulary() - for tag, index in
bmes_vocab.items(): - vocab.add_word(tag) - metric = SpanFPreRecMetric(vocab) - assert metric.encoding_type == 'bmes' - - # some cases that cannot be inferred - vocab = Vocabulary() - for i in range(10): - vocab.add_word(str(i)) - with self.assertRaises(Exception): - metric = SpanFPreRecMetric(vocab) - - def test_encoding_type(self): - # check that an error is raised when the given tag_vocab does not match encoding_type - vocabs = {} - import random - from itertools import product - for encoding_type in ['bio', 'bioes', 'bmeso']: - vocab = Vocabulary(unknown=None, padding=None) - for i in range(random.randint(10, 100)): - label = str(random.randint(1, 10)) - for tag in encoding_type: - if tag!='o': - vocab.add_word(f'{tag}-{label}') - else: - vocab.add_word('o') - vocabs[encoding_type] = vocab - for e1, e2 in product(['bio', 'bioes', 'bmeso'], ['bio', 'bioes', 'bmeso']): - with self.subTest(e1=e1, e2=e2): - if e1==e2: - metric = SpanFPreRecMetric(vocabs[e1], encoding_type=e2) - else: - s2 = set(e2) - s2.update(set(e1)) - if s2==set(e2): - continue - with self.assertRaises(AssertionError): - metric = SpanFPreRecMetric(vocabs[e1], encoding_type=e2) - for encoding_type in ['bio', 'bioes', 'bmeso']: - with self.assertRaises(AssertionError): - metric = SpanFPreRecMetric(vocabs[encoding_type], encoding_type='bmes') - - with self.assertWarns(Warning): - vocab = Vocabulary(unknown=None, padding=None).add_word_lst(list('bmes')) - metric = SpanFPreRecMetric(vocab, encoding_type='bmeso') - vocab = Vocabulary().add_word_lst(list('bmes')) - metric = SpanFPreRecMetric(vocab, encoding_type='bmeso') - - -class TestCMRC2018Metric(unittest.TestCase): - def test_case1(self): - # test that the computation is correct - import torch - metric = CMRC2018Metric() - - raw_chars = [list("abcsdef"), list("123456s789")] - context_len = torch.LongTensor([3, 6]) - answers = [["abc", "abc", "abc"], ["12", "12", "12"]] - pred_start = torch.randn(2, max(map(len, raw_chars))) - pred_end = torch.randn(2, max(map(len, raw_chars))) - pred_start[0, 0] = 1000 # exactly "abc" - pred_end[0, 2] = 1000 - pred_start[1, 1] = 1000 # extracts "234" - pred_end[1, 3] = 1000 - - metric.evaluate(answers=answers, raw_chars=raw_chars, pred_start=pred_start, - pred_end=pred_end, context_len=context_len) - - eval_res = metric.get_metric() - self.assertDictEqual(eval_res, {'f1': 70.0, 'em': 50.0}) - - -class TestUsefulFunctions(unittest.TestCase): - # test some useful-looking helper functions in metrics.py - def test_case_1(self): - # multi-class - _ = _accuracy_topk(np.random.randint(0, 3, size=(10, 1)), np.random.randint(0, 3, size=(10, 1)), k=3) - _ = _pred_topk(np.random.randint(0, 3, size=(10, 1))) - - # just needs to run without error - - - -class TestClassifyFPreRecMetric(unittest.TestCase): - def test_case_1(self): - pred = torch.tensor([[-0.4375, -0.1779, -1.0985, -1.1592, 0.4910], - [ 1.3410, 0.2889, -0.8667, -1.8580, 0.3029], - [ 0.7459, -1.1957, 0.3231, 0.0308, -0.1847], - [ 1.1439, -0.0057, 0.8203, 0.0312, -1.0051], - [-0.4870, 0.3215, -0.8290, 0.9221, 0.4683], - [ 0.9078, 1.0674, -0.5629, 0.3895, 0.8917], - [-0.7743, -0.4041, -0.9026, 0.2112, 1.0892], - [ 1.8232, -1.4188, -2.5615, -2.4187, 0.5907], - [-1.0592, 0.4164, -0.1192, 1.4238, -0.9258], - [-1.1137, 0.5773, 2.5778, 0.5398, -0.3323], - [-0.3868, -0.5165, 0.2286, -1.3876, 0.5561], - [-0.3304, 1.3619, -1.5744, 0.4902, -0.7661], - [ 1.8387, 0.5234, 0.4269, 1.3748, -1.2793], - [ 0.6692, 0.2571, 1.2425, -0.5894, -0.0184], - [ 0.4165, 0.4084, -0.1280, 1.4489, -2.3058], - [-0.5826, -0.5469, 1.5898, -0.2786, -0.9882], - [-1.5548, -2.2891, 0.2983, -1.2145, -0.1947], - [-0.7222, 2.3543, -0.5801, -0.0640, -1.5614], - [-1.4978, 1.9297, -1.3652, -0.2358,
2.5566], - [ 0.1561, -0.0316, 0.9331, 1.0363, 2.3949], - [ 0.2650, -0.8459, 1.3221, 0.1321, -1.1900], - [ 0.0664, -1.2353, -0.5242, -1.4491, 1.3300], - [-0.2744, 0.0941, 0.7157, 0.1404, 1.2046], - [ 0.9341, -0.6652, 1.4512, 0.9608, -0.3623], - [-1.1641, 0.0873, 0.1163, -0.2068, -0.7002], - [ 1.4775, -2.0025, -0.5634, -0.1589, 0.0247], - [ 1.0151, 1.0304, -0.1042, -0.6955, -0.0629], - [-0.3119, -0.4558, 0.7757, 0.0758, -1.6297], - [ 1.0654, 0.0313, -0.7716, 0.1194, 0.6913], - [-0.8088, -0.6648, -0.5018, -0.0230, -0.8207], - [-0.7753, -0.3508, 1.6163, 0.7158, 1.5207], - [ 0.8692, 0.7718, -0.6734, 0.6515, 0.0641]]) - arg_max_pred = torch.argmax(pred, dim=-1) - target = torch.tensor([0, 2, 4, 1, 4, 0, 1, 3, 3, 3, 1, 3, 4, 4, 3, 4, 0, 2, 4, 4, 3, 4, 4, 3, - 0, 3, 0, 0, 0, 1, 3, 1]) - - metric = ClassifyFPreRecMetric(f_type='macro') - metric.evaluate(pred, target) - result_dict = metric.get_metric() - f1_score = 0.1882051282051282 - recall = 0.1619047619047619 - pre = 0.23928571428571427 - - ground_truth = {'f': f1_score, 'pre': pre, 'rec': recall} - for keys in ['f', 'pre', 'rec']: - self.assertAlmostEqual(result_dict[keys], ground_truth[keys], delta=0.000001) - - metric = ClassifyFPreRecMetric(f_type='micro') - metric.evaluate(pred, target) - result_dict = metric.get_metric() - f1_score = 0.21875 - recall = 0.21875 - pre = 0.21875 - - ground_truth = {'f': f1_score, 'pre': pre, 'rec': recall} - for keys in ['f', 'pre', 'rec']: - self.assertAlmostEqual(result_dict[keys], ground_truth[keys], delta=0.000001) - - metric = ClassifyFPreRecMetric(only_gross=False, f_type='macro') - metric.evaluate(pred, target) - result_dict = metric.get_metric(reset=True) - ground_truth = {'0': {'f1-score': 0.13333333333333333, 'precision': 0.125, 'recall': 0.14285714285714285, 'support': 7}, '1': {'f1-score': 0.0, 'precision': 0.0, 'recall': 0.0, 'support': 5}, '2': {'f1-score': 0.0, 'precision': 0.0, 'recall': 0.0, 'support': 2}, '3': {'f1-score': 0.30769230769230765, 'precision': 0.5, 'recall': 0.2222222222222222, 'support': 9}, '4': {'f1-score': 0.5, 'precision': 0.5714285714285714, 'recall': 0.4444444444444444, 'support': 9}, 'macro avg': {'f1-score': 0.1882051282051282, 'precision': 0.23928571428571427, 'recall': 0.1619047619047619, 'support': 32}, 'micro avg': {'f1-score': 0.21875, 'precision': 0.21875, 'recall': 0.21875, 'support': 32}, 'weighted avg': {'f1-score': 0.2563301282051282, 'precision': 0.3286830357142857, 'recall': 0.21875, 'support': 32}} - for keys in result_dict.keys(): - if keys in ("f", "pre", "rec"): - continue - gl = str(keys[-1]) - tmp_d = {"p": "precision", "r": "recall", "f": "f1-score"} - gk = tmp_d[keys[0]] - self.assertAlmostEqual(result_dict[keys], ground_truth[gl][gk], delta=0.000001) diff --git a/tests/core/test_optimizer.py b/tests/core/test_optimizer.py deleted file mode 100644 index 2f2487c7..00000000 --- a/tests/core/test_optimizer.py +++ /dev/null @@ -1,63 +0,0 @@ -import unittest - -import torch - -from fastNLP import SGD, Adam, AdamW - - -class TestOptim(unittest.TestCase): - def test_SGD(self): - optim = SGD(model_params=torch.nn.Linear(10, 3).parameters()) - self.assertTrue("lr" in optim.__dict__["settings"]) - self.assertTrue("momentum" in optim.__dict__["settings"]) - res = optim.construct_from_pytorch(torch.nn.Linear(10, 3).parameters()) - self.assertTrue(isinstance(res, torch.optim.SGD)) - - optim = SGD(lr=0.001) - self.assertEqual(optim.__dict__["settings"]["lr"], 0.001) - res = optim.construct_from_pytorch(torch.nn.Linear(10, 3).parameters()) - self.assertTrue(isinstance(res,
torch.optim.SGD)) - - optim = SGD(lr=0.002, momentum=0.989) - self.assertEqual(optim.__dict__["settings"]["lr"], 0.002) - self.assertEqual(optim.__dict__["settings"]["momentum"], 0.989) - - optim = SGD(0.001) - self.assertEqual(optim.__dict__["settings"]["lr"], 0.001) - res = optim.construct_from_pytorch(torch.nn.Linear(10, 3).parameters()) - self.assertTrue(isinstance(res, torch.optim.SGD)) - - with self.assertRaises(TypeError): - _ = SGD("???") - with self.assertRaises(TypeError): - _ = SGD(0.001, lr=0.002) - - def test_Adam(self): - optim = Adam(model_params=torch.nn.Linear(10, 3).parameters()) - self.assertTrue("lr" in optim.__dict__["settings"]) - self.assertTrue("weight_decay" in optim.__dict__["settings"]) - res = optim.construct_from_pytorch(torch.nn.Linear(10, 3).parameters()) - self.assertTrue(isinstance(res, torch.optim.Adam)) - - optim = Adam(lr=0.001) - self.assertEqual(optim.__dict__["settings"]["lr"], 0.001) - res = optim.construct_from_pytorch(torch.nn.Linear(10, 3).parameters()) - self.assertTrue(isinstance(res, torch.optim.Adam)) - - optim = Adam(lr=0.002, weight_decay=0.989) - self.assertEqual(optim.__dict__["settings"]["lr"], 0.002) - self.assertEqual(optim.__dict__["settings"]["weight_decay"], 0.989) - - optim = Adam(0.001) - self.assertEqual(optim.__dict__["settings"]["lr"], 0.001) - res = optim.construct_from_pytorch(torch.nn.Linear(10, 3).parameters()) - self.assertTrue(isinstance(res, torch.optim.Adam)) - - def test_AdamW(self): - optim = AdamW(params=torch.nn.Linear(10, 3).parameters()) - self.assertTrue('lr' in optim.defaults) - self.assertTrue('weight_decay' in optim.defaults) - - optim = AdamW(params=torch.nn.Linear(10, 3).parameters(), lr=0.002, weight_decay=0.989) - self.assertEqual(optim.defaults['lr'], 0.002) - self.assertEqual(optim.defaults['weight_decay'], 0.989) diff --git a/tests/core/test_predictor.py b/tests/core/test_predictor.py deleted file mode 100644 index 701353dc..00000000 --- a/tests/core/test_predictor.py +++ /dev/null @@ -1,48 +0,0 @@ -import unittest -from collections import defaultdict - -import numpy as np -import torch - -from fastNLP.core.dataset import DataSet -from fastNLP.core.instance import Instance -from fastNLP.core.predictor import Predictor - - -def prepare_fake_dataset(): - mean = np.array([-3, -3]) - cov = np.array([[1, 0], [0, 1]]) - class_A = np.random.multivariate_normal(mean, cov, size=(1000,)) - - mean = np.array([3, 3]) - cov = np.array([[1, 0], [0, 1]]) - class_B = np.random.multivariate_normal(mean, cov, size=(1000,)) - - data_set = DataSet([Instance(x=[float(item[0]), float(item[1])], y=[0.0]) for item in class_A] + - [Instance(x=[float(item[0]), float(item[1])], y=[1.0]) for item in class_B]) - return data_set - - -class LinearModel(torch.nn.Module): - def __init__(self): - super(LinearModel, self).__init__() - self.linear = torch.nn.Linear(2, 1) - - def forward(self, x): - return {"predict": self.linear(x)} - - -class TestPredictor(unittest.TestCase): - def test_simple(self): - model = LinearModel() - predictor = Predictor(model) - data = prepare_fake_dataset() - data.set_input("x") - ans = predictor.predict(data) - self.assertTrue(isinstance(ans, defaultdict)) - self.assertTrue("predict" in ans) - self.assertTrue(isinstance(ans["predict"], list)) - - def test_sequence(self): - # test sequence input/output - pass diff --git a/tests/core/test_sampler.py b/tests/core/test_sampler.py deleted file mode 100644 index 40d196f0..00000000 --- a/tests/core/test_sampler.py +++ /dev/null @@ -1,54 +0,0 @@ -import random -import
unittest - -import torch - -from fastNLP import DataSet -from fastNLP import SequentialSampler, RandomSampler, BucketSampler -from fastNLP.core.sampler import k_means_1d, k_means_bucketing, simple_sort_bucketing - - -class TestSampler(unittest.TestCase): - def test_sequential_sampler(self): - sampler = SequentialSampler() - data = [1, 3, 5, 7, 9, 2, 4, 6, 8, 10] - for idx, i in enumerate(sampler(data)): - assert idx == i - - def test_random_sampler(self): - sampler = RandomSampler() - data = [1, 3, 5, 7, 9, 2, 4, 6, 8, 10] - ans = [data[i] for i in sampler(data)] - assert len(ans) == len(data) - for d in ans: - assert d in data - - def test_k_means(self): - centroids, assign = k_means_1d([21, 3, 25, 7, 9, 22, 4, 6, 28, 10], 2, max_iter=5) - centroids, assign = list(centroids), list(assign) - assert len(centroids) == 2 - assert len(assign) == 10 - - def test_k_means_bucketing(self): - res = k_means_bucketing([21, 3, 25, 7, 9, 22, 4, 6, 28, 10], [None, None]) - assert len(res) == 2 - - def test_simple_sort_bucketing(self): - _ = simple_sort_bucketing([21, 3, 25, 7, 9, 22, 4, 6, 28, 10]) - assert len(_) == 10 - - def test_BucketSampler(self): - sampler = BucketSampler(num_buckets=3, batch_size=16, seq_len_field_name="seq_len") - data_set = DataSet({"x": [[0] * random.randint(1, 10)] * 10, "y": [[5, 6]] * 10}) - data_set.apply(lambda ins: len(ins["x"]), new_field_name="seq_len") - indices = sampler(data_set) - self.assertEqual(len(indices), 10) - # just needs to run; the resulting order is not verified - - def test_ConstantTokenNumSampler(self): - # what needs checking: whether the token numbers are close - pass - - def test_ConstTokenNumSampler(self): - # what needs checking: whether it runs directly - pass diff --git a/tests/core/test_tester.py b/tests/core/test_tester.py deleted file mode 100644 index d0267cce..00000000 --- a/tests/core/test_tester.py +++ /dev/null @@ -1,63 +0,0 @@ -import unittest -import numpy as np -from torch import nn -import time -from fastNLP import DataSet -from fastNLP import Instance -from fastNLP import AccuracyMetric -from fastNLP import Tester - -data_name = "pku_training.utf8" -pickle_path = "data_for_tests" - - -def prepare_fake_dataset(): - mean = np.array([-3, -3]) - cov = np.array([[1, 0], [0, 1]]) - class_A = np.random.multivariate_normal(mean, cov, size=(1000,)) - - mean = np.array([3, 3]) - cov = np.array([[1, 0], [0, 1]]) - class_B = np.random.multivariate_normal(mean, cov, size=(1000,)) - - data_set = DataSet([Instance(x=[float(item[0]), float(item[1])], y=[0.0]) for item in class_A] + - [Instance(x=[float(item[0]), float(item[1])], y=[1.0]) for item in class_B]) - return data_set - - -def prepare_fake_dataset2(*args, size=100): - ys = np.random.randint(4, size=size, dtype=np.int64) - data = {'y': ys} - for arg in args: - data[arg] = np.random.randn(size, 5) - return DataSet(data=data) - - -class TestTester(unittest.TestCase): - def test_case_1(self): - # check that the error message correctly alerts the user - dataset = prepare_fake_dataset2('x1', 'x_unused') - dataset.rename_field('x_unused', 'x2') - dataset.set_input('x1', 'x2') - dataset.set_target('y', 'x1') - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(5, 4) - - def forward(self, x1, x2): - x1 = self.fc(x1) - x2 = self.fc(x2) - x = x1 + x2 - time.sleep(0.1) - # loss = F.cross_entropy(x, y) - return {'preds': x} - - model = Model() - with self.assertRaises(NameError): - tester = Tester( - data=dataset, - model=model, - metrics=AccuracyMetric()) - tester.test() diff --git a/tests/core/test_trainer.py b/tests/core/test_trainer.py deleted file mode 100644 index f9a7ae42..00000000 ---
a/tests/core/test_trainer.py +++ /dev/null @@ -1,726 +0,0 @@ -import time -import unittest -import os - -import numpy as np -import torch.nn.functional as F -from torch import nn -import torch - -from fastNLP import DataSet -from fastNLP import Instance -from fastNLP import BCELoss, BCEWithLogits -from fastNLP import CrossEntropyLoss -from fastNLP import AccuracyMetric -from fastNLP import SGD -from fastNLP import Trainer -from fastNLP.models.base_model import NaiveClassifier -from fastNLP import TorchLoaderIter -from fastNLP.models import BaseModel -from fastNLP.modules import MLP -from pkg_resources import parse_version - - - -def prepare_fake_dataset(): - mean = np.array([-3, -3]) - cov = np.array([[1, 0], [0, 1]]) - class_A = np.random.multivariate_normal(mean, cov, size=(1000,)) - - mean = np.array([3, 3]) - cov = np.array([[1, 0], [0, 1]]) - class_B = np.random.multivariate_normal(mean, cov, size=(1000,)) - - data_set = DataSet([Instance(x=[float(item[0]), float(item[1])], y=[0.0]) for item in class_A] + - [Instance(x=[float(item[0]), float(item[1])], y=[1.0]) for item in class_B]) - return data_set - - -def prepare_fake_dataset2(*args, size=100): - ys = np.random.randint(4, size=size, dtype=np.int64) - data = {'y': ys} - for arg in args: - data[arg] = np.random.randn(size, 5) - return DataSet(data=data) - - -class TrainerTestGround(unittest.TestCase): - def test_case(self): - data_set = prepare_fake_dataset() - data_set.set_input("x", flag=True) - data_set.set_target("y", flag=True) - - train_set, dev_set = data_set.split(0.3) - - model = NaiveClassifier(2, 1) - - trainer = Trainer(train_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=10, print_every=50, dev_data=dev_set, - metrics=AccuracyMetric(pred="predict", target="y"), validate_every=-1, save_path=None, - use_tqdm=True, check_code_level=2) - trainer.train() - """ - # should run without error - """ - - def test_save_path(self): - data_set = prepare_fake_dataset() - data_set.set_input("x", flag=True) - data_set.set_target("y", flag=True) - - train_set, dev_set = data_set.split(0.3) - - model = NaiveClassifier(2, 1) - - save_path = 'test_save_models' - - trainer = Trainer(train_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=10, print_every=50, dev_data=dev_set, - metrics=AccuracyMetric(pred="predict", target="y"), validate_every=-1, save_path=save_path, - use_tqdm=True, check_code_level=2) - trainer.train() - import os - import shutil - self.assertTrue(os.path.exists(save_path)) - if os.path.exists(save_path): - shutil.rmtree(save_path) - - # training without dev_data - trainer = Trainer(train_set, model, optimizer=SGD(lr=0.1), loss=BCELoss(pred="predict", target="y"), - batch_size=32, n_epochs=10, print_every=50, dev_data=None, - metrics=None, validate_every=-1, save_path=save_path, - use_tqdm=True, check_code_level=2) - trainer.train() - self.assertTrue(os.path.exists(save_path)) - if os.path.exists(save_path): - shutil.rmtree(save_path) - - def test_trainer_suggestion1(self): - # check that the error message correctly alerts the user. - # the data required by forward is not provided here; the trainer should tell the user how to set it. - dataset = prepare_fake_dataset2('x') - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(5, 4) - - def forward(self, x1, x2, y): - x1 = self.fc(x1) - x2 = self.fc(x2) - x = x1 + x2 - loss = F.cross_entropy(x, y) - return {'loss': loss} - - model = Model() - - with self.assertRaises(RuntimeError): - trainer = Trainer(train_data=dataset, model=model) - """ - # the expected error message -
NameError: - The following problems occurred when calling Model.forward(self, x1, x2, y) - missing param: ['y', 'x1', 'x2'] - Suggestion: (1). You might need to set ['y'] as input. - (2). You need to provide ['x1', 'x2'] in DataSet and set it as input. - - """ - - def test_trainer_suggestion2(self): - # check that the error message correctly alerts the user - # the data required by forward is provided here; check that it runs - dataset = prepare_fake_dataset2('x1', 'x2') - dataset.set_input('x1', 'x2', 'y', flag=True) - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(5, 4) - - def forward(self, x1, x2, y): - x1 = self.fc(x1) - x2 = self.fc(x2) - x = x1 + x2 - loss = F.cross_entropy(x, y) - return {'loss': loss} - - model = Model() - trainer = Trainer(train_data=dataset, model=model, print_every=2, use_tqdm=False) - trainer.train() - """ - # should run without error - """ - - def test_trainer_suggestion3(self): - # check that the error message correctly alerts the user - # the data required by forward is provided, but forward does not return a 'loss' key - dataset = prepare_fake_dataset2('x1', 'x2') - dataset.set_input('x1', 'x2', 'y', flag=True) - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(5, 4) - - def forward(self, x1, x2, y): - x1 = self.fc(x1) - x2 = self.fc(x2) - x = x1 + x2 - loss = F.cross_entropy(x, y) - return {'wrong_loss_key': loss} - - model = Model() - with self.assertRaises(NameError): - trainer = Trainer(train_data=dataset, model=model, print_every=2, use_tqdm=False) - trainer.train() - - def test_trainer_suggestion4(self): - # check that the error message correctly alerts the user - # the data required by forward is provided; check that unused fields are correctly reported - dataset = prepare_fake_dataset2('x1', 'x2') - dataset.set_input('x1', 'x2', 'y', flag=True) - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(5, 4) - - def forward(self, x1, x2, y): - x1 = self.fc(x1) - x2 = self.fc(x2) - x = x1 + x2 - loss = F.cross_entropy(x, y) - return {'losses': loss} - - model = Model() - with self.assertRaises(NameError): - trainer = Trainer(train_data=dataset, model=model, print_every=2, use_tqdm=False) - - def test_trainer_suggestion5(self): - # check that the error message correctly alerts the user - # extra parameters are passed to create duplicates, but since y is never used, this actually raises no error - dataset = prepare_fake_dataset2('x1', 'x_unused') - dataset.rename_field('x_unused', 'x2') - dataset.set_input('x1', 'x2', 'y') - dataset.set_target('y') - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(5, 4) - - def forward(self, x1, x2, y): - x1 = self.fc(x1) - x2 = self.fc(x2) - x = x1 + x2 - loss = F.cross_entropy(x, y) - return {'loss': loss} - - model = Model() - trainer = Trainer(train_data=dataset, model=model, print_every=2, use_tqdm=False) - - def test_trainer_suggestion6(self): - # check that the error message correctly alerts the user - # extra parameters are passed to create duplicates - dataset = prepare_fake_dataset2('x1', 'x_unused') - dataset.rename_field('x_unused', 'x2') - dataset.set_input('x1', 'x2') - dataset.set_target('y', 'x1') - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(5, 4) - - def forward(self, x1, x2): - x1 = self.fc(x1) - x2 = self.fc(x2) - x = x1 + x2 - time.sleep(0.1) - # loss = F.cross_entropy(x, y) - return {'preds': x} - - model = Model() - with self.assertRaises(NameError): - trainer = Trainer(train_data=dataset, model=model, loss=CrossEntropyLoss(), print_every=2, dev_data=dataset, - metrics=AccuracyMetric(), use_tqdm=False) - - @unittest.skipIf('TRAVIS' in os.environ, "Needs to be tested on hosts with more than 1 GPU") - def test_trainer_data_parallel(self): - if torch.cuda.device_count()>1: - from fastNLP import AccuracyMetric - dataset =
prepare_fake_dataset2('x1', 'x2') - dataset.set_input('x1', 'x2', 'y', flag=True) - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(5, 4) - - def forward(self, x1, x2, y=None): - x1 = self.fc(x1) - x2 = self.fc(x2) - x = x1 + x2 - if self.training: - loss = F.cross_entropy(x, y) - return {'loss': loss} - else: - return {'pred':x, 'target':y} - - model = Model() - trainer = Trainer(train_data=dataset, model=model, print_every=2, use_tqdm=False, - dev_data=dataset, metrics=AccuracyMetric(), device=[0, 1]) - trainer.train(load_best_model=False) - - def test_udf_dataiter(self): - import random - import torch - class UdfDataSet: - def __init__(self, num_samples): - self.num_samples = num_samples - - def __getitem__(self, idx): - x = [random.random() for _ in range(3)] - y = random.random() - return x,y - - def __len__(self): - return self.num_samples - - def collate_fn(data_list): - # [(x1,y1), (x2,y2), ...]; the inputs here are the UdfDataSet.__getitem__ outputs collected into a list - xs, ys = [], [] - for l in data_list: - x, y = l - xs.append(x) - ys.append(y) - x,y = torch.FloatTensor(xs), torch.FloatTensor(ys) - return {'x':x, 'y':y}, {'y':y} - - dataset = UdfDataSet(10) - dataset = TorchLoaderIter(dataset, collate_fn=collate_fn) - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(3, 1) - def forward(self, x, y): - return {'loss':torch.pow(self.fc(x).squeeze(-1)-y, 2).sum()} - def predict(self, x): - return {'pred':self.fc(x).squeeze(0)} - model = Model() - trainer = Trainer(train_data=dataset, model=model, loss=None, print_every=2, dev_data=dataset, - metrics=AccuracyMetric(target='y'), use_tqdm=False) - trainer.train(load_best_model=False) - - def test_batch_sampler_dataiter(self): - import random - import torch - class BatchSampler: - def __init__(self, dataset): - self.num_samples = len(dataset) - - def __iter__(self): - index = 0 - indexes = list(range(self.num_samples)) - np.random.shuffle(indexes) - start_idx = 0 - while index < self.num_samples: - if start_idx == 0: - end_index = self.num_samples//2 - else: - end_index = self.num_samples - yield indexes[start_idx:end_index] - index = end_index - start_idx = end_index - def __len__(self): - return 2 - - class UdfDataSet: - def __init__(self, num_samples): - self.num_samples = num_samples - - def __getitem__(self, idx): - x = [random.random() for _ in range(3)] - y = random.random() - return x,y - - def __len__(self): - return self.num_samples - - def collate_fn(data_list): - # [(x1,y1), (x2,y2), ...]; the inputs here are the UdfDataSet.__getitem__ outputs collected into a list - xs, ys = [], [] - for l in data_list: - x, y = l - xs.append(x) - ys.append(y) - x,y = torch.FloatTensor(xs), torch.FloatTensor(ys) - return {'x':x, 'y':y}, {'y':y} - - dataset = UdfDataSet(11) - batch_sampler = BatchSampler(dataset) - dataset = TorchLoaderIter(dataset, collate_fn=collate_fn, batch_sampler=batch_sampler) - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(3, 1) - def forward(self, x, y): - return {'loss':torch.pow(self.fc(x).squeeze(-1)-y, 2).sum()} - def predict(self, x): - return {'pred':self.fc(x).squeeze(-1)} - model = Model() - trainer = Trainer(train_data=dataset, model=model, loss=None, print_every=2, dev_data=dataset, - metrics=AccuracyMetric(target='y'), use_tqdm=False) - trainer.train(load_best_model=False) - - def test_onthefly_iter(self): - import tempfile - import random - import torch - tmp_file_handler, tmp_file_path = tempfile.mkstemp(text=True) - try: - num_samples = 10 -
data = [] - for _ in range(num_samples): - x, y = [random.random() for _ in range(3)], random.random() - data.append(x + [y]) - with open(tmp_file_path, 'w') as f: - for d in data: - f.write(' '.join(map(str, d)) + '\n') - - class FileDataSet: - def __init__(self, tmp_file): - num_samples = 0 - line_pos = [0] # line_pos[idx] is the file offset where line idx starts - self.tmp_file_handler = open(tmp_file, 'r', encoding='utf-8') - line = self.tmp_file_handler.readline() - while line: - if line.strip(): - num_samples += 1 - line_pos.append(self.tmp_file_handler.tell()) - line = self.tmp_file_handler.readline() - self.tmp_file_handler.seek(0) - self.num_samples = num_samples - self.line_pos = line_pos - - def __getitem__(self, idx): - line_start, line_end = self.line_pos[idx], self.line_pos[idx + 1] - self.tmp_file_handler.seek(line_start) - line = self.tmp_file_handler.read(line_end - line_start).strip() - values = list(map(float, line.split())) - gold_d = data[idx] - assert all([g == v for g, v in zip(gold_d, values)]), "Should have the same data" - x, y = values[:3], values[-1] - return x, y - - def __len__(self): - return self.num_samples - - def collate_fn(data_list): - # [(x1,y1), (x2,y2), ...]; the input here is a list of the outputs of FileDataSet.__getitem__ - xs, ys = [], [] - for l in data_list: - x, y = l - xs.append(x) - ys.append(y) - x, y = torch.FloatTensor(xs), torch.FloatTensor(ys) - return {'x': x, 'y': y}, {'y': y} - - dataset = FileDataSet(tmp_file_path) - dataset = TorchLoaderIter(dataset, collate_fn=collate_fn) - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(3, 1) - - def forward(self, x, y): - return {'loss': torch.pow(self.fc(x).squeeze(-1) - y, 2).sum()} - - def predict(self, x): - return {'pred': self.fc(x).squeeze(-1)} - - model = Model() - trainer = Trainer(train_data=dataset, model=model, loss=None, print_every=2, dev_data=dataset, - metrics=AccuracyMetric(target='y'), use_tqdm=False, n_epochs=2) - trainer.train(load_best_model=False) - - finally: - if os.path.exists(tmp_file_path): - os.remove(tmp_file_path) - - def test_collect_fn(self): - dataset = prepare_fake_dataset2('x1', 'x2') - dataset.set_input('x1', 'x2') - dataset.set_target('y', 'x1') - import torch - def fn(ins_list): - x = [] - for ind, ins in ins_list: - x.append(ins['x1'] + ins['x2']) - x = torch.FloatTensor(x) - return {'x': x}, {} - dataset.add_collate_fn(fn) - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(5, 4) - - def forward(self, x1, x2, x): - x1 = self.fc(x1) - x2 = self.fc(x2) - x = self.fc(x) - sum_x = x1 + x2 + x - time.sleep(0.1) - # loss = F.cross_entropy(x, y) - return {'pred': sum_x} - - model = Model() - trainer = Trainer(train_data=dataset, model=model, loss=CrossEntropyLoss(target='y'), print_every=2, - dev_data=dataset, metrics=AccuracyMetric(target='y'), use_tqdm=False) - trainer.train() - - def test_collate_fn2(self): - """Test that a collate_fn can produce both batch_x and batch_y""" - dataset = prepare_fake_dataset2('x1', 'x2') - dataset.set_input('x1', 'x2') - dataset.set_target('y', 'x1') - import torch - def fn(ins_list): - x = [] - for ind, ins in ins_list: - x.append(ins['x1'] + ins['x2']) - x = torch.FloatTensor(x) - return {'x': x}, {'target': x[:, :4].argmax(dim=-1)} - dataset.add_collate_fn(fn) - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(5, 4) - - def forward(self, x1, x2, x): - x1 = self.fc(x1) - x2 = self.fc(x2) - x = self.fc(x) - sum_x = x1 + x2 + x - time.sleep(0.1) - # loss = F.cross_entropy(x, y) - return {'pred': sum_x} 
- - model = Model() - trainer = Trainer(train_data=dataset, model=model, loss=CrossEntropyLoss(), print_every=2, - dev_data=dataset, metrics=AccuracyMetric(), use_tqdm=False) - trainer.train() - - def test_collate_fn3(self): - """ - The collate_fn output should override the original fields. - """ - dataset = prepare_fake_dataset2('x1', 'x2') - dataset.set_input('x1', 'x2') - dataset.set_target('y') - import torch - def fn(ins_list): - x = [] - for ind, ins in ins_list: - x.append(ins['x1'] + ins['x2']) - x = torch.FloatTensor(x) - return {'x1': torch.zeros_like(x)}, {'target': torch.zeros(x.size(0)).long(), 'y': x} - dataset.add_collate_fn(fn) - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(5, 1, bias=False) - - def forward(self, x1): - x1 = self.fc(x1) - assert x1.sum() == 0, "x1 should have been replaced with zeros" - # loss = F.cross_entropy(x, y) - return {'pred': x1} - - model = Model() - trainer = Trainer(train_data=dataset, model=model, loss=CrossEntropyLoss(), print_every=2, - dev_data=dataset, metrics=AccuracyMetric(), use_tqdm=False, n_epochs=1) - best_metric = trainer.train()['best_eval']['AccuracyMetric']['acc'] - self.assertTrue(best_metric == 1) - - """ - def test_trainer_multiprocess(self): - dataset = prepare_fake_dataset2('x1', 'x2') - dataset.set_input('x1', 'x2', 'y', flag=True) - - class Model(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(5, 4) - - def forward(self, x1, x2, y): - x1 = self.fc(x1) - x2 = self.fc(x2) - x = x1 + x2 - loss = F.cross_entropy(x, y) - return {'loss': loss} - - model = Model() - trainer = Trainer( - train_data=dataset, - model=model, - use_tqdm=True, - print_every=2, - num_workers=2, - pin_memory=False, - timeout=0, - ) - trainer.train() - """ - - -class NaiveClassifier2(BaseModel): - r""" - A simple classifier example that can be used in various tests - """ - - def __init__(self, in_feature_dim, out_feature_dim): - super(NaiveClassifier2, self).__init__() - self.mlp = MLP([in_feature_dim, in_feature_dim, out_feature_dim]) - - def forward(self, x): - return {"predict": self.mlp(x)} - - def predict(self, x): - return {"predict": torch.sigmoid(self.mlp(x)) > 0.5} - - -class Fp16TrainerTest(unittest.TestCase): - def test_raise_error(self): - data_set = prepare_fake_dataset() - data_set.set_input("x", flag=True) - data_set.set_target("y", flag=True) - - train_set, dev_set = data_set.split(0.3) - - model = NaiveClassifier2(2, 1) - - with self.assertRaises(RuntimeError): - trainer = Trainer(train_set, model, optimizer=SGD(lr=0.1), loss=BCEWithLogits(pred="predict", target="y"), - batch_size=32, n_epochs=10, print_every=50, dev_data=dev_set, - metrics=AccuracyMetric(pred="predict", target="y"), validate_every=-1, save_path=None, - use_tqdm=True, check_code_level=2, fp16=True) - - with self.assertRaises(RuntimeError): - trainer = Trainer(train_set, model, optimizer=SGD(lr=0.1), loss=BCEWithLogits(pred="predict", target="y"), - batch_size=32, n_epochs=10, print_every=50, dev_data=dev_set, - metrics=AccuracyMetric(pred="predict", target="y"), validate_every=-1, save_path=None, - use_tqdm=True, check_code_level=2, fp16=True, device='cpu') - - with self.assertRaises(RuntimeError): - trainer = Trainer(train_set, model, optimizer=SGD(lr=0.1), loss=BCEWithLogits(pred="predict", target="y"), - batch_size=32, n_epochs=10, print_every=50, dev_data=dev_set, - metrics=AccuracyMetric(pred="predict", target="y"), validate_every=-1, save_path=None, - use_tqdm=True, check_code_level=2, fp16=True, device=torch.device('cpu')) - - @unittest.skipIf(not torch.cuda.is_available() or 
parse_version(torch.__version__) < parse_version('1.6'), "Skip when no CUDA device is detected") - def test_run_fp16(self): - data_set = prepare_fake_dataset() - data_set.set_input("x", flag=True) - data_set.set_target("y", flag=True) - - train_set, dev_set = data_set.split(0.3) - - model = NaiveClassifier2(2, 1) - trainer = Trainer(train_set, model, optimizer=SGD(lr=0.1), loss=BCEWithLogits(pred="predict", target="y"), - batch_size=32, n_epochs=10, print_every=50, dev_data=dev_set, - metrics=AccuracyMetric(pred="predict", target="y"), validate_every=-1, save_path=None, - use_tqdm=True, check_code_level=2, fp16=True, device=0) - trainer.train(load_best_model=False) - - model = NaiveClassifier2(2, 1) - trainer = Trainer(train_set, model, optimizer=SGD(lr=0.1), loss=BCEWithLogits(pred="predict", target="y"), - batch_size=32, n_epochs=10, print_every=50, dev_data=dev_set, - metrics=AccuracyMetric(pred="predict", target="y"), validate_every=-1, save_path=None, - use_tqdm=True, check_code_level=2, fp16=True, device=0, test_use_fp16=False) - trainer.train(load_best_model=False) - - @unittest.skipIf(torch.cuda.device_count() < 2 or parse_version(torch.__version__) < parse_version('1.6'), "Skip when fewer than 2 GPUs are available.") - def test_run_data_parallel(self): - data_set = prepare_fake_dataset() - data_set.set_input("x", flag=True) - data_set.set_target("y", flag=True) - - train_set, dev_set = data_set.split(0.3) - - class NaiveClassifier2(BaseModel): - r""" - A simple classifier example that can be used in various tests - """ - - def __init__(self, in_feature_dim, out_feature_dim): - super(NaiveClassifier2, self).__init__() - self.mlp = MLP([in_feature_dim, in_feature_dim, out_feature_dim]) - - def forward(self, x): - return {"predict": self.mlp(x)} - - def predict(self, x): - return {"predict": torch.sigmoid(self.mlp(x)) > 0.5} - - model = NaiveClassifier2(2, 1) - with self.assertRaises(RuntimeError): - trainer = Trainer(train_set, model, optimizer=SGD(lr=0.1), loss=BCEWithLogits(pred="predict", target="y"), - batch_size=32, n_epochs=10, print_every=50, dev_data=dev_set, - metrics=AccuracyMetric(pred="predict", target="y"), validate_every=-1, save_path=None, - use_tqdm=True, check_code_level=2, fp16=True, device=[0, 1]) - - with self.assertRaises(RuntimeError): - class NaiveClassifier3(BaseModel): - r""" - A simple classifier example that can be used in various tests - """ - - def __init__(self, in_feature_dim, out_feature_dim): - super(NaiveClassifier3, self).__init__() - self.mlp = MLP([in_feature_dim, in_feature_dim, out_feature_dim]) - - @torch.cuda.amp.autocast() - def forward(self, x): - return {"predict": self.mlp(x)} - - @torch.cuda.amp.autocast() - def predict(self, x): - return {"predict": torch.sigmoid(self.mlp(x)) > 0.5} - - model = NaiveClassifier3(2, 1) - trainer = Trainer(train_set, model, optimizer=SGD(lr=0.1), loss=BCEWithLogits(pred="predict", target="y"), - batch_size=32, n_epochs=10, print_every=50, dev_data=dev_set, - metrics=AccuracyMetric(pred="predict", target="y"), validate_every=-1, save_path=None, - use_tqdm=True, check_code_level=2, fp16=True, device=[0, 1], test_use_fp16=True) - - class NaiveClassifier4(BaseModel): - r""" - A simple classifier example that can be used in various tests - """ - - def __init__(self, in_feature_dim, out_feature_dim): - super(NaiveClassifier4, self).__init__() - self.mlp = MLP([in_feature_dim, in_feature_dim, out_feature_dim]) - - def forward(self, x): - with torch.cuda.amp.autocast(): - return {"predict": self.mlp(x)} - - def predict(self, x): - with torch.cuda.amp.autocast(): - return {"predict": torch.sigmoid(self.mlp(x)) > 0.5} - - model = NaiveClassifier4(2, 1) - trainer = 
Trainer(train_set, model, optimizer=SGD(lr=0.1), loss=BCEWithLogits(pred="predict", target="y"), - batch_size=32, n_epochs=10, print_every=50, dev_data=dev_set, - metrics=AccuracyMetric(pred="predict", target="y"), validate_every=-1, save_path=None, - use_tqdm=True, check_code_level=2, fp16=True, device=[0, 1], test_use_fp16=True) - trainer.train(load_best_model=False) diff --git a/tests/core/test_utils.py b/tests/core/test_utils.py deleted file mode 100644 index f43a526c..00000000 --- a/tests/core/test_utils.py +++ /dev/null @@ -1,315 +0,0 @@ -import unittest -import _pickle -from fastNLP import cache_results -from fastNLP.io import EmbedLoader -from fastNLP import DataSet -from fastNLP import Instance -import time -import os -import torch -from torch import nn -from fastNLP.core.utils import _move_model_to_device, _get_model_device -import numpy as np -from fastNLP.core.utils import seq_len_to_mask, get_seq_len -from fastNLP.core.utils import iob2, iob2bioes - -class Model(nn.Module): - def __init__(self): - super().__init__() - self.param = nn.Parameter(torch.zeros(0)) - - -class TestMoveModelDevice(unittest.TestCase): - def test_case1(self): - # test a str device - model = Model() - model = _move_model_to_device(model, 'cpu') - assert model.param.device == torch.device('cpu') - # a nonexistent device should raise an error - with self.assertRaises(Exception): - _move_model_to_device(model, 'cpuu') - # test GPU devices - if torch.cuda.is_available(): - model = _move_model_to_device(model, 'cuda') - assert model.param.is_cuda - model = _move_model_to_device(model, 'cuda:0') - assert model.param.device == torch.device('cuda:0') - with self.assertRaises(Exception): - _move_model_to_device(model, 'cuda:1000') - # test None - model = _move_model_to_device(model, None) - - def test_case2(self): - # test initialization with an int - model = Model() - if torch.cuda.is_available(): - model = _move_model_to_device(model, 0) - assert model.param.device == torch.device('cuda:0'), "The model should be on cuda:0" - with self.assertRaises(Exception): - _move_model_to_device(model, 100) - with self.assertRaises(Exception): - _move_model_to_device(model, -1) - - def test_case3(self): - # test None - model = Model() - device = _get_model_device(model) - model = _move_model_to_device(model, None) - assert device == _get_model_device(model), "The device should not change." - if torch.cuda.is_available(): - model.cuda() - device = _get_model_device(model) - model = _move_model_to_device(model, None) - assert device == _get_model_device(model), "The device should not change." 
- - model = nn.DataParallel(model, device_ids=[0]) - _move_model_to_device(model, None) - with self.assertRaises(Exception): - _move_model_to_device(model, 'cpu') - - def test_case4(self): - # test passing a list of devices - model = Model() - device = ['cpu'] - with self.assertRaises(Exception): - _move_model_to_device(model, device) - if torch.cuda.is_available(): - device = [0] - _model = _move_model_to_device(model, device) - assert not isinstance(_model, nn.DataParallel) - device = [torch.device('cuda:0'), torch.device('cuda:0')] - with self.assertRaises(Exception): - _model = _move_model_to_device(model, device) - if torch.cuda.device_count() > 1: - device = [0, 1] - _model = _move_model_to_device(model, device) - assert isinstance(_model, nn.DataParallel) - device = ['cuda', 'cuda:1'] - with self.assertRaises(Exception): - _move_model_to_device(model, device) - - def test_case5(self): - if not torch.cuda.is_available(): - return - # test torch.device objects - device = torch.device('cpu') - model = Model() - _move_model_to_device(model, device) - device = torch.device('cuda') - model = _move_model_to_device(model, device) - assert model.param.device == torch.device('cuda:0') - with self.assertRaises(Exception): - _move_model_to_device(model, torch.device('cuda:100')) - - -@cache_results('test/demo1.pkl') -def process_data_1(embed_file, cws_train): - embed, vocab = EmbedLoader.load_without_vocab(embed_file) - time.sleep(1) # delay so we can tell whether the result was read from the cache - with open(cws_train, 'r', encoding='utf-8') as f: - d = DataSet() - for line in f: - line = line.strip() - if len(line) > 0: - d.append(Instance(raw=line)) - return embed, vocab, d - - -class TestCache(unittest.TestCase): - def test_cache_save(self): - try: - start_time = time.time() - embed, vocab, d = process_data_1('tests/data_for_tests/embedding/small_static_embedding/word2vec_test.txt', - 'tests/data_for_tests/cws_train') - end_time = time.time() - pre_time = end_time - start_time - with open('test/demo1.pkl', 'rb') as f: - _embed, _vocab, _d = _pickle.load(f) - self.assertEqual(embed.shape, _embed.shape) - for i in range(embed.shape[0]): - self.assertListEqual(embed[i].tolist(), _embed[i].tolist()) - start_time = time.time() - embed, vocab, d = process_data_1('tests/data_for_tests/embedding/small_static_embedding/word2vec_test.txt', - 'tests/data_for_tests/cws_train') - end_time = time.time() - read_time = end_time - start_time - print("Read using {:.3f}, while prepare using:{:.3f}".format(read_time, pre_time)) - self.assertGreater(pre_time - 0.5, read_time) - finally: - os.remove('test/demo1.pkl') - - def test_cache_save_overwrite_path(self): - try: - start_time = time.time() - embed, vocab, d = process_data_1('tests/data_for_tests/embedding/small_static_embedding/word2vec_test.txt', 'tests/data_for_tests/cws_train', - _cache_fp='test/demo_overwrite.pkl') - end_time = time.time() - pre_time = end_time - start_time - with open('test/demo_overwrite.pkl', 'rb') as f: - _embed, _vocab, _d = _pickle.load(f) - self.assertEqual(embed.shape, _embed.shape) - for i in range(embed.shape[0]): - self.assertListEqual(embed[i].tolist(), _embed[i].tolist()) - start_time = time.time() - embed, vocab, d = process_data_1('tests/data_for_tests/embedding/small_static_embedding/word2vec_test.txt', - 'tests/data_for_tests/cws_train', - _cache_fp='test/demo_overwrite.pkl') - end_time = time.time() - read_time = end_time - start_time - print("Read using {:.3f}, while prepare using:{:.3f}".format(read_time, pre_time)) - self.assertGreater(pre_time - 0.5, read_time) - finally: - 
os.remove('test/demo_overwrite.pkl') - - def test_cache_refresh(self): - try: - start_time = time.time() - embed, vocab, d = process_data_1('tests/data_for_tests/embedding/small_static_embedding/word2vec_test.txt', - 'tests/data_for_tests/cws_train', - _refresh=True) - end_time = time.time() - pre_time = end_time - start_time - with open('test/demo1.pkl', 'rb') as f: - _embed, _vocab, _d = _pickle.load(f) - self.assertEqual(embed.shape, _embed.shape) - for i in range(embed.shape[0]): - self.assertListEqual(embed[i].tolist(), _embed[i].tolist()) - start_time = time.time() - embed, vocab, d = process_data_1('tests/data_for_tests/embedding/small_static_embedding/word2vec_test.txt', - 'tests/data_for_tests/cws_train', - _refresh=True) - end_time = time.time() - read_time = end_time - start_time - print("Read using {:.3f}, while prepare using:{:.3f}".format(read_time, pre_time)) - self.assertGreater(0.1, pre_time - read_time) - finally: - os.remove('test/demo1.pkl') - - def test_duplicate_keyword(self): - with self.assertRaises(RuntimeError): - @cache_results(None) - def func_verbose(a, _verbose): - pass - - func_verbose(0, 1) - with self.assertRaises(RuntimeError): - @cache_results(None) - def func_cache(a, _cache_fp): - pass - - func_cache(1, 2) - with self.assertRaises(RuntimeError): - @cache_results(None) - def func_refresh(a, _refresh): - pass - - func_refresh(1, 2) - - def test_create_cache_dir(self): - @cache_results('test/demo1/demo.pkl') - def cache(): - return 1, 2 - - try: - results = cache() - print(results) - finally: - os.remove('test/demo1/demo.pkl') - os.rmdir('test/demo1') - - -class TestSeqLenToMask(unittest.TestCase): - - def evaluate_mask_seq_len(self, seq_len, mask): - max_len = int(max(seq_len)) - for i in range(len(seq_len)): - length = seq_len[i] - mask_i = mask[i] - for j in range(max_len): - self.assertEqual(mask_i[j], j#S_N -年#年#S_N -提#提#B_N -出#出#E_N -了#了#S_N -興#興#B_N -建#建#E_N -人#人#B_N -文#文#E_N -大#大#B_N -樓#樓#E_N -的#的#S_N -構#構#B_N -想#想#E_N -,#,#S_N -企#企#B_N -業#業#E_N -界#界#S_N -陸#陸#B_N -續#續#E_N -有#有#S_N -人#人#S_N -提#提#B_N -供#供#E_N -捐#捐#B_N -款#款#E_N -.#.#S_N - -杜#杜#B_N -鵑#鵑#M_N -花#花#E_N -為#為#S_N -溫#溫#B_N -帶#帶#E_N -植#植#B_N -物#物#E_N -,#,#S_N -台#台#B_N -北#北#E_N -雖#雖#B_N -然#然#E_N -在#在#S_N -亞#亞#S_N -熱#熱#B_N -帶#帶#E_N -,#,#S_N -但#但#S_N -冬#冬#B_N -季#季#E_N -的#的#S_N -東#東#B_N -北#北#E_N -季#季#B_N -風#風#E_N -卻#卻#S_N -使#使#B_N -得#得#E_N -杜#杜#B_N -鵑#鵑#M_N -花#花#E_N -在#在#S_N -臺#臺#B_N -大#大#E_N -宜#宜#B_N -然#然#M_N -自#自#M_N -得#得#E_N -.#.#S_N - -臺#臺#B_N -大#大#E_N -醫#醫#B_N -學#學#E_N -人#人#B_N -文#文#E_N -博#博#B_N -物#物#E_N -館#館#S_N -是#是#S_N -一#一#S_N -棟#棟#S_N -兩#兩#S_N -層#層#S_N -樓#樓#S_N -的#的#S_N -建#建#B_N -築#築#E_N -,#,#S_N -沿#沿#S_N -中#中#B_N -山#山#E_N -南#南#S_N -路#路#S_N -與#與#S_N -仁#仁#B_N -愛#愛#E_N -路#路#S_N -成#成#S_N -L#L#S_N -型#型#S_N -.#.#S_N - -樓#樓#B_N -頂#頂#E_N -有#有#S_N -天#天#B_N -文#文#E_N -台#台#S_N -,#,#S_N -現#現#S_N -為#為#S_N -天#天#B_N -文#文#E_N -社#社#S_N -使#使#B_N -用#用#E_N -.#.#S_N - -國#國#B_N -際#際#E_N -北#北#B_N -極#極#E_N -研#研#B_N -究#究#E_N -中#中#B_N -心#心#E_N -的#的#S_N -主#主#B_N -要#要#E_N -夥#夥#B_N -伴#伴#E_N -是#是#S_N -日#日#B_N -本#本#E_N -和#和#S_N -美#美#B_N -國#國#E_N -,#,#S_N -參#參#B_N -與#與#E_N -會#會#B_N -務#務#E_N -的#的#S_N -還#還#S_N -有#有#S_N -來#來#S_N -自#自#S_N -加#加#B_N -拿#拿#M_N -大#大#E_N -、#、#S_N -中#中#B_N -國#國#E_N -、#、#S_N -丹#丹#B_N -麥#麥#E_N -、#、#S_N -德#德#B_N -國#國#E_N -、#、#S_N -日#日#B_N -本#本#E_N -、#、#S_N -挪#挪#B_N -威#威#E_N -、#、#S_N -俄#俄#B_N -羅#羅#M_N -斯#斯#E_N -、#、#S_N -英#英#B_N -國#國#E_N -和#和#S_N -美#美#B_N -國#國#E_N -的#的#S_N -代#代#B_N -表#表#E_N -.#.#S_N - -其#其#B_N -中#中#E_N -參#參#B_N -賽#賽#E_N -者#者#S_N -年#年#B_N -齡#齡#E_N -不#不#B_N -可#可#E_N -超#超#B_N -過#過#E_N 
-18##S_N -歲#歲#S_N -(#(#S_N -以#以#S_N -當#當#B_N -年#年#E_N -7##S_N -月#月#S_N -1##S_N -日#日#S_N -為#為#S_N -準#準#S_N -)#)#S_N -,#,#S_N -且#且#S_N -必#必#B_N -須#須#E_N -就#就#B_N -讀#讀#E_N -於#於#S_N -中#中#S_N -學#學#B_N -校#校#E_N -(#(#S_N -S#S#B_N -e#e#M_N -c#c#M_N -o#o#M_N -n#n#M_N -d#d#M_N -a#a#M_N -r#r#M_N -y#y#E_N -S#S#B_N -c#c#M_N -h#h#M_N -o#o#M_N -o#o#M_N -l#l#E_N -)#)#S_N -.#.#S_N - -同#同#B_N -年#年#E_N -9##S_N -月#月#S_N -7##S_N -日#日#S_N -,#,#S_N -亞#亞#B_N -奧#奧#E_N -理#理#B_N -事#事#E_N -會#會#S_N -主#主#B_N -席#席#E_N -薩#薩#B_N -巴#巴#M_N -赫#赫#E_N -親#親#B_N -王#王#E_N -為#為#S_N -國#國#B_N -際#際#E_N -射#射#B_N -擊#擊#E_N -中#中#B_N -心#心#E_N -主#主#B_N -持#持#E_N -銅#銅#B_N -像#像#E_N -揭#揭#B_N -幕#幕#E_N -儀#儀#B_N -式#式#E_N -.#.#S_N - -這#這#B_N -些#些#E_N -電#電#B_N -話#話#E_N -經#經#S_N -交#交#B_N -換#換#E_N -機#機#S_N -處#處#B_N -理#理#E_N -,#,#S_N -使#使#B_N -用#用#E_N -的#的#S_N -媒#媒#B_N -介#介#E_N -包#包#B_N -括#括#E_N -海#海#B_N -底#底#E_N -電#電#B_N -纜#纜#E_N -、#、#S_N -人#人#S_N -造#造#S_N -衛#衛#B_N -星#星#E_N -、#、#S_N -無#無#B_N -線#線#E_N -電#電#S_N -、#、#S_N -光#光#B_N -纖#纖#E_N -及#及#S_N -I#I#B_N -P#P#E_N -電#電#B_N -話#話#E_N -(#(#S_N -V#V#B_N -O#O#M_N -I#I#M_N -P#P#E_N -)#)#S_N -.#.#S_N - -《#《#S_N -圓#圓#S_N -月#月#S_N -彎#彎#S_N -刀#刀#S_N -》#》#S_N -為#為#S_N -古#古#B_N -龍#龍#E_N -晚#晚#B_N -期#期#E_N -作#作#B_N -品#品#E_N -,#,#S_N -1976##S_N -年#年#S_N -6##S_N -月#月#S_N -至#至#S_N -1978##S_N -年#年#S_N -5##S_N -月#月#S_N -,#,#S_N -香#香#B_N -港#港#E_N -〈#〈#S_N -武#武#B_N -俠#俠#E_N -春#春#B_N -秋#秋#E_N -〉#〉#S_N -282##S_N -至#至#S_N -348##S_N -期#期#S_N -斷#斷#B_N -續#續#E_N -連#連#B_N -載#載#E_N -,#,#S_N -原#原#B_N -名#名#E_N -《#《#S_N -刀#刀#B_N -神#神#E_N -》#》#S_N -,#,#S_N -1978##S_N -年#年#S_N -漢#漢#B_N -麟#麟#E_N -出#出#B_N -版#版#E_N -改#改#B_N -名#名#E_N -《#《#S_N -圓#圓#S_N -月#月#S_N -彎#彎#S_N -刀#刀#S_N -》#》#S_N -.#.#S_N - -圓#圓#B_N -齒#齒#M_N -龍#龍#E_N -(#(#S_N -G#G#B_N -l#l#M_N -o#o#M_N -b#b#M_N -i#i#M_N -d#d#M_N -e#e#M_N -n#n#M_N -s#s#E_N -)#)#S_N -意#意#S_N -為#為#S_N -「#「#S_N -球#球#B_N -狀#狀#E_N -牙#牙#B_N -齒#齒#E_N -」#」#S_N -,#,#S_N -是#是#S_N -滄#滄#B_N -龍#龍#E_N -科#科#S_N -的#的#S_N -一#一#S_N -個#個#S_N -屬#屬#S_N -.#.#S_N - -圖#圖#B_N -波#波#M_N -列#列#M_N -夫#夫#E_N -設#設#B_N -計#計#E_N -局#局#S_N -在#在#S_N -1960##S_N -年#年#B_N -代#代#E_N -末#末#B_N -期#期#E_N -推#推#B_N -出#出#E_N -圖#圖#S_N --#-#E_N -154##S_N -客#客#B_N -機#機#E_N -後#後#S_N -,#,#S_N -圖#圖#B_N -波#波#M_N -列#列#M_N -夫#夫#E_N -便#便#S_N -成#成#S_N -為#為#S_N -社#社#B_N -會#會#E_N -主#主#B_N -義#義#E_N -國#國#B_N -家#家#E_N -民#民#B_N -航#航#E_N -飛#飛#B_N -機#機#E_N -的#的#S_N -主#主#B_N -要#要#E_N -供#供#B_N -應#應#E_N -商#商#S_N -.#.#S_N - -事#事#B_N -實#實#E_N -上#上#S_N -,#,#S_N -團#團#B_N -購#購#E_N -網#網#B_N -站#站#E_N -的#的#S_N -產#產#B_N -品#品#E_N -原#原#B_N -價#價#E_N -和#和#S_N -購#購#B_N -買#買#E_N -數#數#B_N -量#量#E_N -經#經#B_N -常#常#E_N -被#被#S_N -「#「#S_N -注#注#B_N -水#水#E_N -」#」#S_N -,#,#S_N -而#而#S_N -產#產#B_N -品#品#E_N -和#和#S_N -服#服#B_N -務#務#E_N -的#的#S_N -品#品#B_N -質#質#E_N -則#則#S_N -經#經#B_N -常#常#E_N -「#「#S_N -縮#縮#B_N -水#水#E_N -」#」#S_N -.#.#S_N - -團#團#B_N -購#購#E_N -網#網#B_N -站#站#E_N -的#的#S_N -主#主#B_N -要#要#E_N -產#產#B_N -品#品#E_N -分#分#S_N -為#為#S_N -家#家#B_N -居#居#E_N -類#類#S_N -、#、#S_N -日#日#B_N -用#用#E_N -品#品#S_N -類#類#S_N -、#、#S_N -旅#旅#B_N -遊#遊#E_N -優#優#B_N -惠#惠#E_N -、#、#S_N -機#機#B_N -票#票#E_N -、#、#S_N -酒#酒#B_N -店#店#E_N -及#及#S_N -郵#郵#B_N -輪#輪#E_N -等#等#S_N -.#.#S_N - -可#可#B_N -是#是#E_N -,#,#S_N -魔#魔#B_N -牛#牛#M_N -肝#肝#M_N -菌#菌#E_N -的#的#S_N -菌#菌#B_N -肉#肉#E_N -在#在#S_N -被#被#S_N -切#切#B_N -割#割#E_N -或#或#S_N -撞#撞#B_N -傷#傷#E_N -後#後#S_N -會#會#S_N -變#變#B_N -成#成#E_N -藍#藍#B_N -色#色#E_N -的#的#S_N -,#,#S_N -而#而#S_N -這#這#S_N -種#種#S_N -菌#菌#S_N -的#的#S_N -菌#菌#B_N -肉#肉#E_N -無#無#B_N -論#論#M_N -如#如#M_N -何#何#E_N -都#都#B_N -是#是#E_N -白#白#B_N -色#色#E_N -的#的#S_N -,#,#S_N -因#因#S_N -此#此#S_N -透#透#B_N -過#過#E_N -切#切#B_N -割#割#E_N -菌#菌#B_N -肉#肉#E_N -便#便#S_N 
-能#能#S_N -分#分#B_N -辨#辨#E_N -二#二#S_N -者#者#S_N -.#.#S_N - -站#站#B_N -牌#牌#E_N -上#上#S_N -標#標#B_N -示#示#E_N -著#著#S_N -「#「#S_N -浪#浪#B_N -漫#漫#E_N -和#和#S_N -傳#傳#B_N -奇#奇#E_N -的#的#S_N -入#入#B_N -野#野#M_N -松#松#M_N -原#原#E_N -」#」#S_N -.#.#S_N - -自#自#B_N -然#然#E_N -界#界#S_N -的#的#S_N -土#土#S_N -是#是#S_N -由#由#S_N -岩#岩#B_N -石#石#E_N -經#經#S_N -風#風#B_N -化#化#E_N -、#、#S_N -搬#搬#B_N -運#運#E_N -、#、#S_N -堆#堆#B_N -積#積#E_N -而#而#S_N -形#形#B_N -成#成#E_N -的#的#S_N -.#.#S_N - -毛#毛#S_N -澤#澤#B_N -東#東#E_N -早#早#S_N -在#在#S_N -1949##S_N -年#年#S_N -3##S_N -月#月#S_N -中#中#B_N -共#共#E_N -七#七#S_N -屆#屆#S_N -二#二#B_N -中#中#M_N -全#全#M_N -會#會#E_N -的#的#S_N -報#報#B_N -告#告#E_N -中#中#S_N -就#就#S_N -明#明#B_N -確#確#E_N -地#地#S_N -說#說#S_N -:#:#S_N -「#「#S_N -占#占#S_N -國#國#B_N -民#民#E_N -經#經#B_N -濟#濟#E_N -總#總#S_N -產#產#B_N -值#值#E_N -90%##S_N -的#的#S_N -分#分#B_N -散#散#E_N -的#的#S_N -個#個#B_N -體#體#E_N -的#的#S_N -農#農#B_N -業#業#E_N -經#經#B_N -濟#濟#E_N -和#和#S_N -手#手#B_N -工#工#E_N -業#業#S_N -經#經#B_N -濟#濟#E_N -,#,#S_N -是#是#S_N -可#可#B_N -能#能#E_N -和#和#S_N -必#必#B_N -須#須#E_N -謹#謹#B_N -慎#慎#E_N -地#地#S_N -、#、#S_N -逐#逐#B_N -步#步#E_N -地#地#S_N -而#而#S_N -又#又#S_N -積#積#B_N -極#極#E_N -地#地#S_N -引#引#B_N -導#導#E_N -它#它#B_N -們#們#E_N -向#向#S_N -著#著#S_N -現#現#B_N -代#代#E_N -化#化#S_N -和#和#S_N -集#集#B_N -體#體#E_N -化#化#S_N -的#的#S_N -方#方#B_N -向#向#E_N -發#發#B_N -展#展#E_N -的#的#S_N -,#,#S_N -任#任#B_N -其#其#M_N -自#自#M_N -流#流#E_N -的#的#S_N -觀#觀#B_N -點#點#E_N -是#是#S_N -錯#錯#B_N -誤#誤#E_N -的#的#S_N -.#.#S_N -」#」#S_N - -它#它#S_N -主#主#B_N -要#要#E_N -分#分#B_N -布#布#E_N -在#在#S_N -表#表#B_N -土#土#E_N -層#層#S_N -或#或#S_N -耕#耕#B_N -層#層#E_N -中#中#S_N -,#,#S_N -深#深#B_N -受#受#E_N -耕#耕#B_N -作#作#E_N -施#施#B_N -肥#肥#E_N -等#等#S_N -人#人#B_N -為#為#E_N -因#因#B_N -素#素#E_N -的#的#S_N -影#影#B_N -響#響#E_N -而#而#S_N -極#極#S_N -不#不#S_N -穩#穩#B_N -定#定#E_N -.#.#S_N - -土#土#B_N -壤#壤#E_N -是#是#S_N -重#重#B_N -要#要#E_N -的#的#S_N -自#自#B_N -然#然#E_N -資#資#B_N -源#源#E_N -和#和#S_N -生#生#B_N -產#產#E_N -資#資#B_N -料#料#E_N -,#,#S_N -土#土#B_N -壤#壤#E_N -的#的#S_N -植#植#B_N -物#物#E_N -生#生#B_N -產#產#E_N -能#能#B_N -力#力#E_N -是#是#S_N -衡#衡#B_N -量#量#E_N -土#土#B_N -壤#壤#E_N -資#資#B_N -源#源#E_N -質#質#B_N -量#量#E_N -的#的#S_N -標#標#B_N -誌#誌#E_N -.#.#S_N - -一#一#B_N -些#些#E_N -土#土#B_N -星#星#E_N -1##S_N -號#號#S_N -儀#儀#B_N -錶#錶#E_N -組#組#S_N -中#中#S_N -的#的#S_N -部#部#B_N -分#分#E_N -也#也#S_N -被#被#S_N -用#用#S_N -在#在#S_N -土#土#B_N -星#星#E_N -1B##S_N -中#中#S_N -了#了#S_N -.#.#S_N - -它#它#S_N -在#在#S_N -位#位#S_N -於#於#S_N -亨#亨#B_N -茨#茨#M_N -維#維#M_N -爾#爾#E_N -的#的#S_N -空#空#B_N -間#間#E_N -系#系#B_N -統#統#E_N -中#中#B_N -心#心#E_N -建#建#B_N -造#造#E_N -.#.#S_N - -這#這#S_N -個#個#S_N -計#計#B_N -算#算#E_N -機#機#S_N -控#控#B_N -制#制#E_N -了#了#S_N -火#火#B_N -箭#箭#E_N -從#從#S_N -起#起#B_N -飛#飛#E_N -前#前#S_N -一#一#B_N -直#直#E_N -到#到#S_N -拋#拋#B_N -棄#棄#E_N -S#S#B_N --#-#M_N -I#I#M_N -V#V#M_N -B#B#E_N -推#推#B_N -進#進#E_N -器#器#S_N -的#的#S_N -操#操#B_N -作#作#E_N -過#過#B_N -程#程#E_N -.#.#S_N - -2007##S_N -年#年#S_N -7##S_N -月#月#S_N -6##S_N -日#日#S_N -,#,#S_N -聖#聖#B_N -喬#喬#M_N -治#治#E_N -教#教#B_N -堂#堂#E_N -被#被#S_N -馬#馬#B_N -來#來#M_N -西#西#M_N -亞#亞#E_N -政#政#B_N -府#府#E_N -列#列#S_N -為#為#S_N -50##S_N -個#個#S_N -馬#馬#B_N -來#來#M_N -西#西#M_N -亞#亞#E_N -國#國#B_N -家#家#E_N -寶#寶#B_N -藏#藏#E_N -之#之#S_N -一#一#S_N -.#.#S_N - -聖#聖#B_N -伯#伯#M_N -多#多#M_N -祿#祿#E_N -堂#堂#S_N -(#(#S_N -I#I#B_N -g#g#M_N -l#l#M_N -e#e#M_N -s#s#M_N -i#i#M_N -a#a#E_N -d#d#B_N -e#e#E_N -S#S#B_N -a#a#M_N -n#n#E_N -P#P#B_N -e#e#M_N -d#d#M_N -r#r#M_N -o#o#E_N -)#)#S_N -是#是#S_N -西#西#B_N -班#班#M_N -牙#牙#E_N -南#南#B_N -部#部#E_N -城#城#B_N -市#市#E_N -科#科#B_N -爾#爾#M_N -多#多#M_N -瓦#瓦#E_N -的#的#S_N -一#一#S_N -座#座#S_N -羅#羅#B_N -馬#馬#E_N -天#天#B_N -主#主#E_N -教#教#S_N -教#教#B_N -堂#堂#E_N -,#,#S_N -供#供#B_N -奉#奉#E_N -聖#聖#B_N -伯#伯#M_N -多#多#M_N -祿#祿#E_N -,#,#S_N -位#位#S_N -於#於#S_N -同#同#B_N -名#名#E_N 
-的#的#S_N -廣#廣#B_N -場#場#E_N -上#上#S_N -.#.#S_N diff --git a/tests/data_for_tests/cws_train b/tests/data_for_tests/cws_train deleted file mode 100644 index 085eb912..00000000 --- a/tests/data_for_tests/cws_train +++ /dev/null @@ -1,1002 +0,0 @@ -看#看#B_N -似#似#E_N -簡#簡#B_N -單#單#E_N -,#,#S_N -只#只#S_N -是#是#S_N -二#二#S_N -選#選#S_N -一#一#S_N -做#做#S_N -決#決#B_N -擇#擇#E_N -,#,#S_N -但#但#S_N -其#其#B_N -實#實#E_N -他#他#B_N -們#們#E_N -代#代#B_N -表#表#E_N -的#的#S_N -是#是#S_N -你#你#S_N -周#周#B_N -遭#遭#E_N -的#的#S_N -親#親#B_N -朋#朋#E_N -好#好#B_N -友#友#E_N -,#,#S_N -試#試#S_N -著#著#S_N -給#給#S_N -你#你#S_N -不#不#B_N -同#同#E_N -的#的#S_N -意#意#B_N -見#見#E_N -,#,#S_N -但#但#S_N -追#追#B_N -根#根#M_N -究#究#M_N -底#底#E_N -,#,#S_N -最#最#B_N -後#後#E_N -決#決#B_N -定#定#E_N -的#的#S_N -還#還#B_N -是#是#E_N -自#自#B_N -己#己#E_N -.#.#S_N - -其#其#S_N -便#便#B_N -當#當#E_N -都#都#B_N -是#是#E_N -買#買#B_N -來#來#E_N -的#的#S_N -,#,#S_N -就#就#B_N -算#算#E_N -加#加#B_N -熱#熱#E_N -也#也#B_N -是#是#E_N -由#由#S_N -媽#媽#B_N -媽#媽#E_N -負#負#B_N -責#責#E_N -(#(#S_N -後#後#B_N -來#來#E_N -揭#揭#B_N -曉#曉#E_N -其#其#B_N -實#實#E_N -是#是#S_N -避#避#B_N -免#免#E_N -帶#帶#B_N -來#來#E_N -厄#厄#B_N -運#運#E_N -)#)#S_N -,#,#S_N -父#父#B_N -親#親#E_N -則#則#S_N -在#在#S_N -電#電#B_N -視#視#E_N -台#台#S_N -上#上#B_N -班#班#E_N -.#.#S_N - -這#這#S_N -次#次#S_N -遊#遊#B_N -行#行#E_N -最#最#B_N -大#大#E_N -的#的#S_N -特#特#B_N -色#色#E_N -,#,#S_N -在#在#S_N -於#於#S_N -越#越#B_N -來#來#M_N -越#越#E_N -多#多#S_N -年#年#B_N -輕#輕#E_N -人#人#S_N -上#上#B_N -街#街#E_N -遊#遊#B_N -行#行#E_N -,#,#S_N -而#而#B_N -且#且#E_N -當#當#B_N -中#中#E_N -不#不#B_N -乏#乏#E_N -行#行#B_N -動#動#E_N -激#激#B_N -烈#烈#E_N -的#的#S_N -躁#躁#S_N -少#少#B_N -年#年#E_N -.#.#S_N - -懷#懷#B_N -孕#孕#E_N -期#期#S_N -為#為#S_N -421##S_N -至#至#S_N -457##S_N -日#日#S_N -.#.#S_N - -婷#婷#B_N -婷#婷#E_N -向#向#S_N -昏#昏#B_N -迷#迷#E_N -中#中#S_N -的#的#S_N -婆#婆#B_N -婆#婆#E_N -訴#訴#B_N -說#說#E_N -,#,#S_N -為#為#S_N -什#什#B_N -麼#麼#E_N -生#生#B_N -活#活#E_N -會#會#S_N -與#與#S_N -她#她#S_N -想#想#B_N -像#像#E_N -的#的#S_N -不#不#S_N -一#一#B_N -樣#樣#E_N -.#.#S_N - -就#就#B_N -算#算#E_N -數#數#B_N -論#論#E_N -的#的#S_N -應#應#B_N -用#用#E_N -被#被#S_N -找#找#B_N -到#到#E_N -了#了#S_N -,#,#S_N -也#也#S_N -不#不#B_N -會#會#E_N -有#有#S_N -人#人#S_N -會#會#S_N -因#因#S_N -此#此#S_N -罷#罷#B_N -黜#黜#E_N -這#這#S_N -一#一#S_N -數#數#B_N -學#學#E_N -的#的#S_N -皇#皇#B_N -后#后#E_N -.#.#S_N - -一#一#B_N -中#中#E_N -商#商#B_N -圈#圈#E_N -另#另#S_N -一#一#S_N -特#特#B_N -色#色#E_N -為#為#S_N -同#同#S_N -類#類#B_N -型#型#E_N -商#商#B_N -店#店#E_N -會#會#S_N -聚#聚#B_N -集#集#E_N -,#,#S_N -短#短#B_N -短#短#E_N -的#的#S_N -育#育#B_N -才#才#E_N -街#街#S_N -聚#聚#B_N -集#集#E_N -了#了#S_N -十#十#B_N -數#數#E_N -家#家#S_N -知#知#B_N -名#名#E_N -眼#眼#B_N -鏡#鏡#E_N -連#連#B_N -鎖#鎖#E_N -店#店#S_N -,#,#S_N -而#而#S_N -體#體#B_N -育#育#E_N -用#用#B_N -品#品#E_N -店#店#S_N -沿#沿#B_N -著#著#E_N -太#太#B_N -平#平#E_N -路#路#S_N -連#連#B_N -成#成#E_N -一#一#S_N -線#線#S_N -,#,#S_N -在#在#S_N -激#激#B_N -烈#烈#E_N -競#競#B_N -爭#爭#E_N -下#下#S_N -價#價#B_N -格#格#E_N -比#比#S_N -外#外#B_N -地#地#E_N -便#便#B_N -宜#宜#E_N -不#不#B_N -少#少#E_N -,#,#S_N -貨#貨#S_N -比#比#S_N -三#三#S_N -家#家#S_N -更#更#S_N -增#增#B_N -加#加#E_N -購#購#B_N -物#物#E_N -樂#樂#B_N -趣#趣#E_N -.#.#S_N - -《#《#S_N -一#一#S_N -代#代#S_N -女#女#B_N -皇#皇#E_N -》#》#S_N -開#開#B_N -錄#錄#E_N -當#當#B_N -日#日#E_N -掌#掌#B_N -鏡#鏡#E_N -者#者#S_N -是#是#S_N -導#導#B_N -播#播#E_N -出#出#B_N -身#身#E_N -的#的#S_N -當#當#B_N -時#時#E_N -中#中#B_N -視#視#E_N -節#節#B_N -目#目#E_N -部#部#S_N -經#經#B_N -理#理#E_N -王#王#S_N -世#世#B_N -綱#綱#E_N -.#.#S_N - -我#我#B_N -們#們#E_N -只#只#S_N -希#希#B_N -望#望#E_N -,#,#S_N -藉#藉#B_N -著#著#E_N -這#這#S_N -個#個#S_N -歷#歷#B_N -史#史#E_N -上#上#S_N -真#真#B_N -實#實#E_N -人#人#B_N -物#物#E_N -的#的#S_N -一#一#S_N -生#生#S_N -,#,#S_N -利#利#B_N -用#用#E_N -一#一#B_N -些#些#E_N -稗#稗#B_N -官#官#M_N -野#野#M_N -史#史#E_N -的#的#S_N -片#片#B_N -段#段#E_N -資#資#B_N -料#料#E_N -,#,#S_N -再#再#S_N -加#加#B_N -上#上#E_N -一#一#B_N -些#些#E_N -善#善#B_N -意#意#E_N -改#改#B_N 
-編#編#E_N -的#的#S_N -部#部#B_N -分#分#E_N -情#情#B_N -節#節#E_N -,#,#S_N -而#而#S_N -能#能#S_N -帶#帶#B_N -給#給#E_N -觀#觀#B_N -眾#眾#E_N -一#一#B_N -些#些#E_N -啟#啟#B_N -示#示#E_N -.#.#S_N -」#」#S_N - -當#當#B_N -時#時#E_N -外#外#B_N -界#界#E_N -傳#傳#B_N -聞#聞#E_N -樊#樊#S_N -日#日#B_N -行#行#E_N -是#是#S_N -在#在#S_N -中#中#B_N -視#視#E_N -主#主#B_N -管#管#E_N -授#授#B_N -意#意#E_N -下#下#S_N -裝#裝#B_N -病#病#E_N -,#,#S_N -樊#樊#S_N -日#日#B_N -行#行#E_N -否#否#B_N -認#認#E_N -:#:#S_N -「#「#S_N -人#人#S_N -都#都#B_N -是#是#E_N -吃#吃#S_N -五#五#S_N -穀#穀#S_N -雜#雜#B_N -糧#糧#E_N -長#長#B_N -大#大#E_N -,#,#S_N -本#本#B_N -來#來#E_N -就#就#S_N -會#會#S_N -生#生#B_N -病#病#E_N -;#;#S_N -而#而#B_N -且#且#E_N -裝#裝#B_N -病#病#E_N -萬#萬#B_N -一#一#E_N -被#被#S_N -拆#拆#B_N -穿#穿#E_N -了#了#S_N -,#,#S_N -豈#豈#S_N -不#不#B_N -是#是#E_N -無#無#B_N -法#法#E_N -對#對#S_N -廣#廣#B_N -大#大#E_N -的#的#S_N -觀#觀#B_N -眾#眾#E_N -交#交#B_N -代#代#E_N -?#?#S_N - -該#該#B_N -劇#劇#E_N -兩#兩#S_N -位#位#S_N -編#編#B_N -劇#劇#E_N -獨#獨#B_N -孤#孤#E_N -紅#紅#S_N -、#、#S_N -羅#羅#S_N -文#文#B_N -忠#忠#E_N -在#在#S_N -寫#寫#S_N -劇#劇#B_N -本#本#E_N -時#時#S_N -,#,#S_N -幾#幾#B_N -乎#乎#E_N -參#參#B_N -考#考#E_N -了#了#S_N -所#所#B_N -有#有#E_N -有#有#B_N -關#關#E_N -武#武#S_N -則#則#B_N -天#天#E_N -的#的#S_N -資#資#B_N -料#料#E_N -如#如#S_N -林#林#S_N -語#語#B_N -堂#堂#E_N -《#《#S_N -武#武#S_N -則#則#B_N -天#天#E_N -傳#傳#S_N -》#》#S_N -、#、#S_N -陳#陳#S_N -虹#虹#S_N -《#《#S_N -武#武#S_N -則#則#B_N -天#天#E_N -與#與#S_N -狄#狄#S_N -仁#仁#B_N -傑#傑#E_N -》#》#S_N -、#、#S_N -《#《#S_N -歷#歷#B_N -代#代#E_N -通#通#B_N -俗#俗#E_N -演#演#B_N -義#義#E_N -》#》#S_N -、#、#S_N -童#童#S_N -煦#煦#S_N -《#《#S_N -中#中#B_N -國#國#E_N -后#后#B_N -妃#妃#E_N -列#列#B_N -傳#傳#E_N -》#》#S_N -、#、#S_N -黃#黃#S_N -柏#柏#B_N -松#松#E_N -《#《#S_N -中#中#B_N -國#國#E_N -艷#艷#B_N -姬#姬#E_N -》#》#S_N -.#.#B_N -.#.#M_N -.#.#M_N -.#.#M_N -.#.#M_N -.#.#E_N -等#等#S_N -.#.#S_N - -電#電#B_N -視#視#E_N -台#台#S_N -把#把#S_N -一#一#S_N -個#個#S_N -跋#跋#B_N -扈#扈#E_N -專#專#B_N -橫#橫#E_N -、#、#S_N -亂#亂#B_N -倫#倫#E_N -篡#篡#B_N -國#國#E_N -的#的#S_N -武#武#S_N -則#則#B_N -天#天#E_N -演#演#B_N -成#成#E_N -柔#柔#B_N -弱#弱#E_N -堪#堪#B_N -憐#憐#E_N -的#的#S_N -女#女#B_N -子#子#E_N -,#,#S_N -這#這#S_N -是#是#S_N -我#我#B_N -們#們#E_N -所#所#S_N -不#不#B_N -能#能#E_N -容#容#B_N -忍#忍#E_N -的#的#S_N -.#.#S_N - -U#U#B_N -M#M#M_N -L#L#M_N -S#S#E_N -可#可#S_N -用#用#S_N -於#於#S_N -設#設#B_N -計#計#E_N -信#信#B_N -息#息#E_N -檢#檢#B_N -索#索#E_N -或#或#S_N -病#病#B_N -歷#歷#E_N -系#系#B_N -統#統#E_N -,#,#S_N -促#促#B_N -進#進#E_N -不#不#B_N -同#同#E_N -系#系#B_N -統#統#E_N -之#之#B_N -間#間#E_N -的#的#S_N -通#通#B_N -訊#訊#E_N -交#交#B_N -流#流#E_N -,#,#S_N -或#或#B_N -者#者#E_N -用#用#S_N -於#於#S_N -開#開#B_N -發#發#E_N -能#能#B_N -夠#夠#E_N -解#解#B_N -析#析#E_N -生#生#B_N -物#物#E_N -醫#醫#B_N -學#學#E_N -文#文#B_N -獻#獻#E_N -的#的#S_N -系#系#B_N -統#統#E_N -.#.#S_N - -一#一#S_N -支#支#S_N -由#由#S_N -白#白#B_N -人#人#E_N -軍#軍#B_N -官#官#E_N -帶#帶#B_N -領#領#E_N -的#的#S_N -黑#黑#B_N -人#人#E_N -民#民#B_N -兵#兵#E_N -洗#洗#B_N -劫#劫#E_N -了#了#S_N -卡#卡#B_N -梅#梅#M_N -隆#隆#E_N -莊#莊#B_N -園#園#E_N -,#,#S_N -並#並#B_N -且#且#E_N -企#企#B_N -圖#圖#E_N -強#強#B_N -暴#暴#E_N -卡#卡#B_N -梅#梅#M_N -隆#隆#E_N -家#家#S_N -的#的#S_N -每#每#S_N -一#一#S_N -個#個#S_N -女#女#B_N -眷#眷#E_N -,#,#S_N -但#但#S_N -邦#邦#B_N -聯#聯#E_N -軍#軍#B_N -隊#隊#E_N -擊#擊#B_N -潰#潰#E_N -這#這#B_N -些#些#E_N -民#民#B_N -兵#兵#E_N -,#,#S_N -拯#拯#B_N -救#救#E_N -了#了#S_N -她#她#B_N -們#們#E_N -.#.#S_N - -2006##S_N -年#年#S_N -剩#剩#B_N -下#下#E_N -的#的#S_N -時#時#B_N -光#光#E_N -,#,#S_N -樂#樂#B_N -團#團#E_N -都#都#S_N -用#用#S_N -來#來#S_N -編#編#B_N -寫#寫#E_N -與#與#S_N -錄#錄#B_N -製#製#E_N -樂#樂#B_N -團#團#E_N -的#的#S_N -第#第#B_N -三#三#E_N -張#張#S_N -專#專#B_N -輯#輯#E_N -閒#閒#B_N -言#言#M_N -閒#閒#M_N -語#語#E_N -就#就#B_N -是#是#E_N -不#不#B_N -道#道#E_N -自#自#B_N -己#己#E_N -,#,#S_N -該#該#S_N -專#專#B_N -輯#輯#E_N -並#並#S_N -在#在#S_N -2007##S_N -年#年#S_N -5##S_N -月#月#S_N -發#發#B_N -行#行#E_N -.#.#S_N - -當#當#B_N -時#時#E_N -他#他#S_N -對#對#S_N -巴#巴#B_N -黎#黎#E_N -這#這#S_N -個#個#S_N -法#法#B_N 
-國#國#E_N -首#首#B_N -都#都#E_N -興#興#B_N -奮#奮#E_N -不#不#B_N -已#已#E_N -,#,#S_N -並#並#S_N -決#決#B_N -定#定#E_N -將#將#S_N -各#各#B_N -種#種#E_N -印#印#B_N -象#象#E_N -和#和#S_N -想#想#B_N -像#像#E_N -化#化#B_N -成#成#E_N -音#音#B_N -樂#樂#E_N -作#作#S_N -為#為#S_N -表#表#B_N -達#達#E_N -;#;#S_N -他#他#S_N -在#在#S_N -巴#巴#B_N -黎#黎#E_N -譜#譜#B_N -寫#寫#E_N -此#此#S_N -樂#樂#B_N -曲#曲#E_N -,#,#S_N -在#在#S_N -一#一#S_N -次#次#S_N -前#前#B_N -往#往#E_N -維#維#B_N -也#也#M_N -納#納#E_N -行#行#B_N -程#程#E_N -的#的#S_N -途#途#B_N -中#中#E_N -進#進#B_N -行#行#E_N -配#配#B_N -器#器#E_N -工#工#B_N -作#作#E_N -,#,#S_N -首#首#B_N -演#演#E_N -則#則#S_N -在#在#S_N -紐#紐#B_N -約#約#E_N -,#,#S_N -於#於#S_N -1928##S_N -年#年#S_N -12##S_N -月#月#S_N -31##S_N -日#日#S_N -由#由#S_N -瓦#瓦#B_N -德#德#E_N -·#·#S_N -丹#丹#B_N -路#路#M_N -殊#殊#E_N -(#(#S_N -W#W#B_N -a#a#M_N -l#l#M_N -t#t#M_N -e#e#M_N -r#r#E_N -D#D#B_N -a#a#M_N -m#m#M_N -r#r#M_N -o#o#M_N -s#s#M_N -c#c#M_N -h#h#E_N -)#)#S_N -指#指#B_N -揮#揮#E_N -紐#紐#B_N -約#約#E_N -愛#愛#B_N -樂#樂#E_N -演#演#B_N -出#出#E_N -.#.#S_N - -而#而#S_N -雪#雪#B_N -莉#莉#E_N -的#的#S_N -卧#卧#B_N -底#底#E_N -身#身#B_N -份#份#E_N -也#也#S_N -被#被#S_N -拆#拆#B_N -穿#穿#E_N -,#,#S_N -原#原#B_N -來#來#E_N -雪#雪#B_N -莉#莉#E_N -的#的#S_N -真#真#B_N -正#正#E_N -身#身#B_N -份#份#E_N -是#是#S_N -雲#雲#B_N -想#想#E_N -集#集#B_N -團#團#E_N -董#董#B_N -事#事#E_N -長#長#S_N -高#高#S_N -超#超#S_N -之#之#S_N -女#女#S_N -高#高#S_N -蓓#蓓#S_N -,#,#S_N -受#受#S_N -後#後#B_N -母#母#E_N -品#品#S_N -緹#緹#S_N -(#(#S_N -王#王#S_N -琳#琳#S_N -飾#飾#S_N -)#)#S_N -指#指#B_N -使#使#E_N -,#,#S_N -化#化#B_N -名#名#E_N -雪#雪#B_N -莉#莉#E_N -潛#潛#B_N -入#入#E_N -天#天#B_N -衣#衣#E_N -集#集#B_N -團#團#E_N -搞#搞#B_N -垮#垮#E_N -天#天#B_N -衣#衣#E_N -.#.#S_N - -靜#靜#B_N -華#華#E_N -後#後#B_N -來#來#E_N -發#發#B_N -現#現#E_N -葉#葉#S_N -廣#廣#B_N -義#義#E_N -不#不#B_N -愛#愛#E_N -自#自#B_N -己#己#E_N -便#便#S_N -偷#偷#B_N -取#取#E_N -美#美#B_N -麗#麗#E_N -的#的#S_N -設#設#B_N -計#計#E_N -創#創#B_N -意#意#E_N -投#投#B_N -靠#靠#E_N -雲#雲#B_N -想#想#E_N -集#集#B_N -團#團#E_N -.#.#S_N - -吳#吳#S_N -湯#湯#B_N -興#興#E_N -將#將#S_N -髮#髮#B_N -辮#辮#E_N -交#交#B_N -給#給#E_N -妻#妻#B_N -子#子#E_N -,#,#S_N -並#並#B_N -且#且#E_N -囑#囑#B_N -咐#咐#E_N -若#若#S_N -不#不#B_N -幸#幸#E_N -戰#戰#B_N -死#死#E_N -,#,#S_N -以#以#S_N -髮#髮#S_N -為#為#S_N -塚#塚#S_N -.#.#S_N diff --git a/tests/data_for_tests/embedding/small_bert/config.json b/tests/data_for_tests/embedding/small_bert/config.json deleted file mode 100644 index da4cda35..00000000 --- a/tests/data_for_tests/embedding/small_bert/config.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "attention_probs_dropout_prob": 0.1, - "hidden_act": "gelu", - "hidden_dropout_prob": 0.1, - "hidden_size": 16, - "initializer_range": 0.02, - "intermediate_size": 64, - "max_position_embeddings": 32, - "num_attention_heads": 4, - "num_hidden_layers": 2, - "type_vocab_size": 2, - "vocab_size": 21 -} \ No newline at end of file diff --git a/tests/data_for_tests/embedding/small_bert/small_pytorch_model.bin b/tests/data_for_tests/embedding/small_bert/small_pytorch_model.bin deleted file mode 100644 index a0811def..00000000 Binary files a/tests/data_for_tests/embedding/small_bert/small_pytorch_model.bin and /dev/null differ diff --git a/tests/data_for_tests/embedding/small_bert/vocab.txt b/tests/data_for_tests/embedding/small_bert/vocab.txt deleted file mode 100644 index 5c873094..00000000 --- a/tests/data_for_tests/embedding/small_bert/vocab.txt +++ /dev/null @@ -1,21 +0,0 @@ -[PAD] -[UNK] -[CLS] -[SEP] -this -is -a -small -bert -model -vocab -file -and -only -twenty -line -for -the -whole -text -##a \ No newline at end of file diff --git a/tests/data_for_tests/embedding/small_elmo/char.dic b/tests/data_for_tests/embedding/small_elmo/char.dic deleted file mode 100644 index 74285f34..00000000 --- 
a/tests/data_for_tests/embedding/small_elmo/char.dic +++ /dev/null @@ -1,229 +0,0 @@ -! 33 -" 34 -# 35 -$ 36 -% 37 -& 38 -' 39 -( 40 -) 41 -* 42 -+ 43 -, 44 -- 45 -. 46 -/ 47 -0 48 -1 49 -2 50 -3 51 -4 52 -5 53 -6 54 -7 55 -8 56 -9 57 -: 58 -; 59 -< 60 -= 61 -> 62 -? 63 -@ 64 -A 65 -B 66 -C 67 -D 68 -E 69 -F 70 -G 71 -H 72 -I 73 -J 74 -K 75 -L 76 -M 77 -N 78 -O 79 -P 80 -Q 81 -R 82 -S 83 -T 84 -U 85 -V 86 -W 87 -X 88 -Y 89 -Z 90 -[ 91 -\ 92 -] 93 -^ 94 -_ 95 -` 96 -a 97 -b 98 -c 99 -d 100 -e 101 -f 102 -g 103 -h 104 -i 105 -j 106 -k 107 -l 108 -m 109 -n 110 -o 111 -p 112 -q 113 -r 114 -s 115 -t 116 -u 117 -v 118 -w 119 -x 120 -y 121 -z 122 -{ 123 -| 124 -} 125 -~ 126 - 127 -€ 128 - 129 -‚ 130 -ƒ 131 -„ 132 -† 134 -‡ 135 -ˆ 136 -‰ 137 -Š 138 -‹ 139 -Œ 140 - 141 -Ž 142 - 143 - 144 -‘ 145 -’ 146 -“ 147 -” 148 -• 149 -– 150 -— 151 -˜ 152 -™ 153 -š 154 -› 155 -œ 156 - 157 -ž 158 -Ÿ 159 -  160 -¡ 161 -¢ 162 -£ 163 -¤ 164 -¥ 165 -¦ 166 -§ 167 -¨ 168 -© 169 -ª 170 -« 171 -¬ 172 -­ 173 -® 174 -¯ 175 -° 176 -± 177 -² 178 -³ 179 -´ 180 -µ 181 -¶ 182 -· 183 -¸ 184 -¹ 185 -º 186 -» 187 -¼ 188 -½ 189 -¾ 190 -¿ 191 -À 192 -Á 193 - 194 -à 195 -Ä 196 -Å 197 -Æ 198 -Ç 199 -È 200 -É 201 -Ê 202 -Ë 203 -Ì 204 -Í 205 -Î 206 -Ï 207 -Ð 208 -Ñ 209 -Ò 210 -Ó 211 -Ô 212 -Õ 213 -Ö 214 -× 215 -Ø 216 -Ù 217 -Ú 218 -Û 219 -Ü 220 -Ý 221 -Þ 222 -ß 223 -à 224 -á 225 -â 226 -ã 227 -ä 228 -å 229 -æ 230 -ç 231 -è 232 -é 233 -ê 234 -ë 235 -ì 236 -í 237 -î 238 -ï 239 -ð 240 -ñ 241 -ò 242 -ó 243 -ô 244 -õ 245 -ö 246 -÷ 247 -ø 248 -ù 249 -ú 250 -û 251 -ü 252 -ý 253 -þ 254 -ÿ 255 - 256 - 257 - 258 - 259 - 260 - 1 - -1 diff --git a/tests/data_for_tests/embedding/small_elmo/elmo_1x16_16_32cnn_1xhighway_options.json b/tests/data_for_tests/embedding/small_elmo/elmo_1x16_16_32cnn_1xhighway_options.json deleted file mode 100644 index 9c02ef72..00000000 --- a/tests/data_for_tests/embedding/small_elmo/elmo_1x16_16_32cnn_1xhighway_options.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "lstm": { - "use_skip_connections": true, - "projection_dim": 16, - "cell_clip": 3, - "proj_clip": 3, - "dim": 16, - "n_layers": 1 - }, - "char_cnn": { - "activation": "relu", - "filters": [ - [ - 1, - 16 - ], - [ - 2, - 16 - ] - ], - "n_highway": 1, - "embedding": { - "dim": 4 - }, - "n_characters": 262, - "max_characters_per_token": 50 - } -} diff --git a/tests/data_for_tests/embedding/small_elmo/elmo_mini_for_testing.pkl b/tests/data_for_tests/embedding/small_elmo/elmo_mini_for_testing.pkl deleted file mode 100644 index 4c72f3d5..00000000 Binary files a/tests/data_for_tests/embedding/small_elmo/elmo_mini_for_testing.pkl and /dev/null differ diff --git a/tests/data_for_tests/embedding/small_gpt2/config.json b/tests/data_for_tests/embedding/small_gpt2/config.json deleted file mode 100644 index b2f61bdc..00000000 --- a/tests/data_for_tests/embedding/small_gpt2/config.json +++ /dev/null @@ -1 +0,0 @@ -{"architectures": ["GPT2LMHeadModel"], "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "n_ctx": 20, "n_embd": 16, "n_head": 4, "n_layer": 2, "n_positions": 20, "vocab_size": 64} \ No newline at end of file diff --git a/tests/data_for_tests/embedding/small_gpt2/merges.txt b/tests/data_for_tests/embedding/small_gpt2/merges.txt deleted file mode 100644 index 5e4f2b9b..00000000 --- a/tests/data_for_tests/embedding/small_gpt2/merges.txt +++ /dev/null @@ -1,39 +0,0 @@ -#version: small -a b -c e -e l -e m -e n -en ce -en t -h e -he r -i s -o c -o d -o t -ot her -x t -Ġ T -Ġ a -Ġ d -Ġ is -Ġ m -Ġ s -Ġ t -Ġ v -ĠT h -ĠTh is -Ġa n -Ġan other -Ġd em -Ġdem o -Ġm od 
-Ġmod el -Ġs ent -Ġsent ence -Ġt e -Ġt h -Ġte xt -Ġth is -Ġv oc diff --git a/tests/data_for_tests/embedding/small_gpt2/small_pytorch_model.bin b/tests/data_for_tests/embedding/small_gpt2/small_pytorch_model.bin deleted file mode 100644 index ec2f48d7..00000000 Binary files a/tests/data_for_tests/embedding/small_gpt2/small_pytorch_model.bin and /dev/null differ diff --git a/tests/data_for_tests/embedding/small_gpt2/vocab.json b/tests/data_for_tests/embedding/small_gpt2/vocab.json deleted file mode 100644 index 8f9feeda..00000000 --- a/tests/data_for_tests/embedding/small_gpt2/vocab.json +++ /dev/null @@ -1 +0,0 @@ -{"\u0120This": 0, "\u0120is": 1, "\u0120a": 2, "\u0120demo": 3, "\u0120sentence": 4, "\u0120another": 5, "\u0120this": 6, "\u0120text": 7, "a": 8, "\u0120model": 9, "\u0120voc": 10, "ab": 11, "<|endoftext|>": 12, "A": 13, "B": 14, "C": 15, "D": 16, "E": 17, "F": 18, "G": 19, "H": 20, "I": 21, "J": 22, "K": 23, "L": 24, "M": 25, "N": 26, "O": 27, "P": 28, "Q": 29, "R": 30, "S": 31, "T": 32, "U": 33, "V": 34, "W": 35, "X": 36, "Y": 37, "Z": 38, "b": 39, "c": 40, "d": 41, "e": 42, "f": 43, "g": 44, "h": 45, "i": 46, "j": 47, "k": 48, "l": 49, "m": 50, "n": 51, "o": 52, "p": 53, "q": 54, "r": 55, "s": 56, "t": 57, "u": 58, "v": 59, "w": 60, "x": 61, "y": 62, "z": 63} \ No newline at end of file diff --git a/tests/data_for_tests/embedding/small_roberta/config.json b/tests/data_for_tests/embedding/small_roberta/config.json deleted file mode 100644 index 4814927b..00000000 --- a/tests/data_for_tests/embedding/small_roberta/config.json +++ /dev/null @@ -1 +0,0 @@ -{"architectures": ["RobertaForMaskedLM"], "attention_probs_dropout_prob": 0.1, "finetuning_task": null, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 16, "initializer_range": 0.02, "intermediate_size": 20, "layer_norm_eps": 1e-05, "max_position_embeddings": 20, "num_attention_heads": 4, "num_hidden_layers": 2, "num_labels": 2, "output_attentions": false, "output_hidden_states": false, "torchscript": false, "type_vocab_size": 1, "vocab_size": 68} \ No newline at end of file diff --git a/tests/data_for_tests/embedding/small_roberta/merges.txt b/tests/data_for_tests/embedding/small_roberta/merges.txt deleted file mode 100644 index 2af8d178..00000000 --- a/tests/data_for_tests/embedding/small_roberta/merges.txt +++ /dev/null @@ -1,39 +0,0 @@ -#version: tiny -a b -c e -e l -e m -e n -en ce -en t -h e -he r -i s -o c -o d -o t -ot her -x t -Ġ T -Ġ a -Ġ d -Ġ is -Ġ m -Ġ s -Ġ t -Ġ v -ĠT h -ĠTh is -Ġa n -Ġan other -Ġd em -Ġdem o -Ġm od -Ġmod el -Ġs ent -Ġsent ence -Ġt e -Ġt h -Ġte xt -Ġth is -Ġv oc diff --git a/tests/data_for_tests/embedding/small_roberta/small_pytorch_model.bin b/tests/data_for_tests/embedding/small_roberta/small_pytorch_model.bin deleted file mode 100644 index 73282346..00000000 Binary files a/tests/data_for_tests/embedding/small_roberta/small_pytorch_model.bin and /dev/null differ diff --git a/tests/data_for_tests/embedding/small_roberta/vocab.json b/tests/data_for_tests/embedding/small_roberta/vocab.json deleted file mode 100644 index 376b658f..00000000 --- a/tests/data_for_tests/embedding/small_roberta/vocab.json +++ /dev/null @@ -1 +0,0 @@ -{"": 0, "": 1, "": 2, "": 3, "": 4, "A": 5, "B": 6, "C": 7, "D": 8, "E": 9, "F": 10, "G": 11, "H": 12, "I": 13, "J": 14, "K": 15, "L": 16, "M": 17, "N": 18, "O": 19, "P": 20, "Q": 21, "R": 22, "S": 23, "T": 24, "U": 25, "V": 26, "W": 27, "X": 28, "Y": 29, "Z": 30, "a": 31, "b": 32, "c": 33, "d": 34, "e": 35, "f": 36, "g": 37, "h": 38, "i": 39, "j": 40, "k": 
41, "l": 42, "m": 43, "n": 44, "o": 45, "p": 46, "q": 47, "r": 48, "s": 49, "t": 50, "u": 51, "v": 52, "w": 53, "x": 54, "y": 55, "z": 56, "\u0120This": 57, "\u0120is": 58, "\u0120a": 59, "\u0120demo": 60, "\u0120sentence": 61, "\u0120another": 62, "\u0120this": 63, "\u0120text": 64, "\u0120model": 65, "\u0120voc": 66, "ab": 67} \ No newline at end of file diff --git a/tests/data_for_tests/embedding/small_static_embedding/glove.6B.50d_test.txt b/tests/data_for_tests/embedding/small_static_embedding/glove.6B.50d_test.txt deleted file mode 100644 index 707e48e8..00000000 --- a/tests/data_for_tests/embedding/small_static_embedding/glove.6B.50d_test.txt +++ /dev/null @@ -1,6 +0,0 @@ -the 0.418 0.24968 -0.41242 0.1217 0.34527 -0.044457 -0.49688 -0.17862 -0.00066023 -0.6566 0.27843 -0.14767 -0.55677 0.14658 -0.0095095 0.011658 0.10204 -0.12792 -0.8443 -0.12181 -0.016801 -0.33279 -0.1552 -0.23131 -0.19181 -1.8823 -0.76746 0.099051 -0.42125 -0.19526 4.0071 -0.18594 -0.52287 -0.31681 0.00059213 0.0074449 0.17778 -0.15897 0.012041 -0.054223 -0.29871 -0.15749 -0.34758 -0.045637 -0.44251 0.18785 0.0027849 -0.18411 -0.11514 -0.78581 -of 0.70853 0.57088 -0.4716 0.18048 0.54449 0.72603 0.18157 -0.52393 0.10381 -0.17566 0.078852 -0.36216 -0.11829 -0.83336 0.11917 -0.16605 0.061555 -0.012719 -0.56623 0.013616 0.22851 -0.14396 -0.067549 -0.38157 -0.23698 -1.7037 -0.86692 -0.26704 -0.2589 0.1767 3.8676 -0.1613 -0.13273 -0.68881 0.18444 0.0052464 -0.33874 -0.078956 0.24185 0.36576 -0.34727 0.28483 0.075693 -0.062178 -0.38988 0.22902 -0.21617 -0.22562 -0.093918 -0.80375 -to 0.68047 -0.039263 0.30186 -0.17792 0.42962 0.032246 -0.41376 0.13228 -0.29847 -0.085253 0.17118 0.22419 -0.10046 -0.43653 0.33418 0.67846 0.057204 -0.34448 -0.42785 -0.43275 0.55963 0.10032 0.18677 -0.26854 0.037334 -2.0932 0.22171 -0.39868 0.20912 -0.55725 3.8826 0.47466 -0.95658 -0.37788 0.20869 -0.32752 0.12751 0.088359 0.16351 -0.21634 -0.094375 0.018324 0.21048 -0.03088 -0.19722 0.082279 -0.09434 -0.073297 -0.064699 -0.26044 -and 0.26818 0.14346 -0.27877 0.016257 0.11384 0.69923 -0.51332 -0.47368 -0.33075 -0.13834 0.2702 0.30938 -0.45012 -0.4127 -0.09932 0.038085 0.029749 0.10076 -0.25058 -0.51818 0.34558 0.44922 0.48791 -0.080866 -0.10121 -1.3777 -0.10866 -0.23201 0.012839 -0.46508 3.8463 0.31362 0.13643 -0.52244 0.3302 0.33707 -0.35601 0.32431 0.12041 0.3512 -0.069043 0.36885 0.25168 -0.24517 0.25381 0.1367 -0.31178 -0.6321 -0.25028 -0.38097 -in 0.33042 0.24995 -0.60874 0.10923 0.036372 0.151 -0.55083 -0.074239 -0.092307 -0.32821 0.09598 -0.82269 -0.36717 -0.67009 0.42909 0.016496 -0.23573 0.12864 -1.0953 0.43334 0.57067 -0.1036 0.20422 0.078308 -0.42795 -1.7984 -0.27865 0.11954 -0.12689 0.031744 3.8631 -0.17786 -0.082434 -0.62698 0.26497 -0.057185 -0.073521 0.46103 0.30862 0.12498 -0.48609 -0.0080272 0.031184 -0.36576 -0.42699 0.42164 -0.11666 -0.50703 -0.027273 -0.53285 -a 0.21705 0.46515 -0.46757 0.10082 1.0135 0.74845 -0.53104 -0.26256 0.16812 0.13182 -0.24909 -0.44185 -0.21739 0.51004 0.13448 -0.43141 -0.03123 0.20674 -0.78138 -0.20148 -0.097401 0.16088 -0.61836 -0.18504 -0.12461 -2.2526 -0.22321 0.5043 0.32257 0.15313 3.9636 -0.71365 -0.67012 0.28388 0.21738 0.14433 0.25926 0.23434 0.4274 -0.44451 0.13813 0.36973 -0.64289 0.024142 -0.039315 -0.26037 0.12017 -0.043782 0.41013 0.1796 \ No newline at end of file diff --git a/tests/data_for_tests/embedding/small_static_embedding/word2vec_test.txt b/tests/data_for_tests/embedding/small_static_embedding/word2vec_test.txt deleted file mode 100644 index c16170f2..00000000 --- 
a/tests/data_for_tests/embedding/small_static_embedding/word2vec_test.txt +++ /dev/null @@ -1,7 +0,0 @@ -5 50 -the 0.418 0.24968 -0.41242 0.1217 0.34527 -0.044457 -0.49688 -0.17862 -0.00066023 -0.6566 0.27843 -0.14767 -0.55677 0.14658 -0.0095095 0.011658 0.10204 -0.12792 -0.8443 -0.12181 -0.016801 -0.33279 -0.1552 -0.23131 -0.19181 -1.8823 -0.76746 0.099051 -0.42125 -0.19526 4.0071 -0.18594 -0.52287 -0.31681 0.00059213 0.0074449 0.17778 -0.15897 0.012041 -0.054223 -0.29871 -0.15749 -0.34758 -0.045637 -0.44251 0.18785 0.0027849 -0.18411 -0.11514 -0.78581 -of 0.70853 0.57088 -0.4716 0.18048 0.54449 0.72603 0.18157 -0.52393 0.10381 -0.17566 0.078852 -0.36216 -0.11829 -0.83336 0.11917 -0.16605 0.061555 -0.012719 -0.56623 0.013616 0.22851 -0.14396 -0.067549 -0.38157 -0.23698 -1.7037 -0.86692 -0.26704 -0.2589 0.1767 3.8676 -0.1613 -0.13273 -0.68881 0.18444 0.0052464 -0.33874 -0.078956 0.24185 0.36576 -0.34727 0.28483 0.075693 -0.062178 -0.38988 0.22902 -0.21617 -0.22562 -0.093918 -0.80375 -to 0.68047 -0.039263 0.30186 -0.17792 0.42962 0.032246 -0.41376 0.13228 -0.29847 -0.085253 0.17118 0.22419 -0.10046 -0.43653 0.33418 0.67846 0.057204 -0.34448 -0.42785 -0.43275 0.55963 0.10032 0.18677 -0.26854 0.037334 -2.0932 0.22171 -0.39868 0.20912 -0.55725 3.8826 0.47466 -0.95658 -0.37788 0.20869 -0.32752 0.12751 0.088359 0.16351 -0.21634 -0.094375 0.018324 0.21048 -0.03088 -0.19722 0.082279 -0.09434 -0.073297 -0.064699 -0.26044 -and 0.26818 0.14346 -0.27877 0.016257 0.11384 0.69923 -0.51332 -0.47368 -0.33075 -0.13834 0.2702 0.30938 -0.45012 -0.4127 -0.09932 0.038085 0.029749 0.10076 -0.25058 -0.51818 0.34558 0.44922 0.48791 -0.080866 -0.10121 -1.3777 -0.10866 -0.23201 0.012839 -0.46508 3.8463 0.31362 0.13643 -0.52244 0.3302 0.33707 -0.35601 0.32431 0.12041 0.3512 -0.069043 0.36885 0.25168 -0.24517 0.25381 0.1367 -0.31178 -0.6321 -0.25028 -0.38097 -in 0.33042 0.24995 -0.60874 0.10923 0.036372 0.151 -0.55083 -0.074239 -0.092307 -0.32821 0.09598 -0.82269 -0.36717 -0.67009 0.42909 0.016496 -0.23573 0.12864 -1.0953 0.43334 0.57067 -0.1036 0.20422 0.078308 -0.42795 -1.7984 -0.27865 0.11954 -0.12689 0.031744 3.8631 -0.17786 -0.082434 -0.62698 0.26497 -0.057185 -0.073521 0.46103 0.30862 0.12498 -0.48609 -0.0080272 0.031184 -0.36576 -0.42699 0.42164 -0.11666 -0.50703 -0.027273 -0.53285 -a 0.21705 0.46515 -0.46757 0.10082 1.0135 0.74845 -0.53104 -0.26256 0.16812 0.13182 -0.24909 -0.44185 -0.21739 0.51004 0.13448 -0.43141 -0.03123 0.20674 -0.78138 -0.20148 -0.097401 0.16088 -0.61836 -0.18504 -0.12461 -2.2526 -0.22321 0.5043 0.32257 0.15313 3.9636 -0.71365 -0.67012 0.28388 0.21738 0.14433 0.25926 0.23434 0.4274 -0.44451 0.13813 0.36973 -0.64289 0.024142 -0.039315 -0.26037 0.12017 -0.043782 0.41013 0.1796 \ No newline at end of file diff --git a/tests/data_for_tests/io/20ng/dev.csv b/tests/data_for_tests/io/20ng/dev.csv deleted file mode 100644 index 1cfb7c56..00000000 --- a/tests/data_for_tests/io/20ng/dev.csv +++ /dev/null @@ -1,6 +0,0 @@ -talk.religion.misc,"sandvik newton apple com \( kent sandvik \) subject clarification organization cookamunga tourist bureau lines 14 sorry , san jose based rosicrucian order called r c , n't remember time stand r c ordo rosae crucis , words latin order rose cross sigh , seems loosing long term memory otherwise headquarters san jose pretty decent metaphysical bookstore , interested books son loves run around egyptian museum cheers , kent sandvik newton apple com alink ksand private activities net" -talk.religion.misc,"subject catholic lit nunnally acs harding edu \( john nunnally 
\) distribution world organization harding university , , ar nntp posting host acs harding edu x news reader vms news 1 reply dlphknob camelot bradley edu 's message 16 apr 93 18 57 20 gmtlines 45 lines 45 dlphknob camelot dlphknob camelot bradley edu writes 1993apr14 476 mtechca maintech com foster mtechca maintech com writes surprised saddened would expect kind behavior evangelical born gospel thumping face 're true christian protestants , always thought catholics behaved better please stoop level e b g f w c protestants , think best way witness strident , intrusive , loud , insulting self righteous \( pleading mode \) please ! i'm begging ! quit confusing religious groups , stop making generalizations ! i'm protestant ! i'm evangelical ! n't believe way way ! i'm creation scientist ! n't think homosexuals hung ! want discuss bible thumpers , would better singling \( making generalizations \) fundamentalists compared actions methodists southern baptists , would think different religions ! sarcasm sure pick correct groups bible thumpers , fundamentalists , southern baptists deserve hasty generalizations prejudicial statements n't pick methodists ! sarcasm please , prejudice thinking people group , please n't write protestants evangelicals ! \( pleading mode \) god wish could get ahold thomas stories n , n tha gb , gb n yvan sasha david cole iv chief research dlphknob camelot bradley edu" -talk.religion.misc,"sandvik newton apple com \( kent sandvik \) subject alt sex stories literary critical analysis \) organization cookamunga tourist bureau lines 16 article h7v agate berkeley edu , dzkriz ocf berkeley edu \( dennis kriz \) wrote i'm going try something , perhaps many would thought even possible want begin process initiating literary critical study pornography posted alt sex stories , identify major themes motifs present stories posted opening possibility objective moral evaluation material present dennis , i'm astounded n't know interested even study filth alt sex stories provide cheers , kent sandvik newton apple com alink ksand private activities net" -talk.religion.misc,"anthony landreneau ozonehole com \( anthony landreneau \) subject abortion distribution world organization ozone online operations , inc , dba ozone hole bbs reply anthony landreneau ozonehole com \( anthony landreneau \) lines 21 margoli watson ibm com \( larry margolis \) anthony landreneau ozonehole com lm rape passed , nothing ever take away lm true forcing remain pregnant continues violation lm body another 9 months see unbelievably cruel life violation cruel , killing living solely friend right cold anthony slmr 2 1 's difference orange \? ozone hole bbs private bulletin board service \( 504 \) 891 3142 3 full service nodes usrobotics 16 8k bps 10 gigs 100 , 000 files skydive new orleans ! rime network mail hub 500 usenet newsgroups please route questions inquiries postmaster ozonehole com" -talk.religion.misc,"kevin rotag mi org \( kevin darcy \) subject 2000 years , say christian morality organization , \? \? \? 
lines 15 article pww spac at1 59 rice edu pww spacsun rice edu \( peter walker \) writes article 1993apr18 rotag mi org , kevin rotag mi org \( kevin darcy \) wrote , one , considered intentionality primary ontological stuff built perceptions , consciousness , thoughts , etc frank means alone seeing intentionality \( values , puts \) underlying human experience , even called objective experiences , measurements natural world , output des chip others us see intellectual masturbation 'll defer greater firsthand knowledge matters kevin" -talk.religion.misc,"bil okcforum osrhe edu \( bill conner \) subject 2000 years , say christian morality nntp posting host okcforum osrhe edu organization okcforum unix users group x newsreader tin version 1 1 pl9 lines 54 mind , say science basis values bit reach science basis observable fact 'd say one chooses observe observation interpreted significance 's given depends great deal values observer science human activity , subject potential distortion human activity myth scientists moral influence ethical concern , knowledge whole pure nature biases scientist , nonsense bill one argue objective values \( moral sense \) one must first start demonstrating morality objective considering meaning word objective doubt ever happen , back original question objective morality \? may unfortunate choice words , almost self contradictory objective sense used means something immutable absolute morality describes behavior group people first term inclusive , second specific concept supposedly described may meaning however god described christians \( instance \) , existence apart independent humankind existence outside frame reference \( reality \) declares thing , necessarily since defined omnipotent , claims believed , least omnipotent relative us god intrinsically self defined reality whatever says objective sense god determines standard conduct , standard objective human beings held accountable conformance standard permitted ignore , substitute relative morality mode conduct , giving term morality nebulous , meaningless sense argued pretending misunderstand standard objective conduct required meet standard therefore objectively determined convenient pretend term morality infinitely , n't mean objective standard n't exist morality come mean little cultural norm , preferred conduct decent people , making seem subjective , derived absolute , objective , standard ironically , objective standard perfect accord true nature \( according christianity least \) , yet condemned contrary human , oppressive severe may due bill much amoral standard , like , 's x" diff --git a/tests/data_for_tests/io/20ng/test.csv b/tests/data_for_tests/io/20ng/test.csv deleted file mode 100644 index b636bc65..00000000 --- a/tests/data_for_tests/io/20ng/test.csv +++ /dev/null @@ -1,6 +0,0 @@ -talk.religion.misc,"halat pooh bears \( jim halat \) subject 2000 years , say christian morality reply halat pooh bears \( jim halat \) lines 43 article 1993apr15 wam umd edu , wam umd edu \( jay stein objectively subjective \) writes horus ap mchp sni de frank d012s658 uucp \( frank o'dwyer \) discussion christianity objective morals question effective difference objective values exist , disagreement values subjective \? n't see difference saying absolute truth exists , people think lie truth relative \? 
think examples , first statement fundamental disagreement least two people second statement agreed upon put another way , someone says objective values exist agree values subjective jim halat" -talk.religion.misc,"halat pooh bears \( jim halat \) subject 2000 years , say christian morality reply halat pooh bears \( jim halat \) lines 17 article na4 horus ap mchp sni de , frank d012s658 uucp \( frank o'dwyer \) writes really \? n't know objective value \? offered people u , collectively , 1 land america , would sound like good deal \? happens subjective example people us would happen agree continue move price point people would accept probably would accept high enough number endpoints subjective scale given homes objective viewpoints jim halat" -talk.religion.misc,"halat pooh bears \( jim halat \) subject 2000 years , say christian morality reply halat pooh bears \( jim halat \) lines 34 article horus ap mchp sni de , frank d012s658 uucp \( frank o'dwyer \) writes firstly , science basis values , way round better explain objective atoms , get subjective values , go atoms objective n't even real scientists call atom nothing mathematical model describes certain physical , observable properties surroundings subjective objective , though , approach scientist takes discussing model observations objective science objective approach subjectively selected scientist objective case means specified , unchanging set rules colleagues use discuss science contrast objective morality may objective approach subjectively discuss beliefs morality exists objective morality also , science deals discuss observations physical world around us method discussion objective \( science discussion \) science makes claims know even sometimes observe simply gives us way discuss surroundings meaningful , consistent way think bohr said \( paraphrase \) science say physical world jim halat" -talk.religion.misc,"mwilson ncratl atlantaga ncr com \( mark wilson \) subject message mr president know happened \? organization ncr engineering manufacturing atlanta atlanta , ga lines 58 noose ecn purdue edu tbrent bank ecn purdue edu \( timothy j brent \) writes probably , n't pack heavy weaponry intent use please cite evidence intending use n't really think allowed keep stuff \? \? , tell live sure steer well clear check sig public also rights , placed individual society rights individuals rights go ahead , call commie , ok , commie 'd singing different tune exercised right rape daughter think right rape anyone \? wonder n't care others broke law , please indicate law feel koresh broke , convicted said crime threat society , feel owning guns makes threat society ou going start going knives baseball bats well feel someone spouts unpopular ideas definition threat society job simple simple think job assualt civilians support first , second , fourth , fifth , sixth , eighth amendment rights , lest taken away fbi davidians think 'll support \( except 2 \) words n't support mob rule n't prettier merely mob calls government ai n't charity using someone else 's money wilson 's theory relativity go back far enough , 're related mark wilson atlantaga ncr com" -talk.religion.misc,"alizard tweekco uucp \( lizard \) subject 14 apr 93 god 's promise 1 john 1 7 organization com systems bbs , , ca \( 510 \) 631 lines 20 starowl rahul net \( michael adams \) writes anyone netland process devising new religion , use lamb bull , already reserved please choose another animal , preferably one endangered species list washed blood barney dinosaur \? 
\) judging postings 've read usenet non usenet bbs conferences , barney definitely endangered species especially runs dark alley lizard lizard internet addresses alizard tweekco boo pacbell com \( preferred \) pacbell com ! boo ! tweekco ! alizard \( bang path \) alizard com \( backup \) pgp2 2 public key available request" -talk.religion.misc,"alizard tweekco uucp \( lizard \) subject oto , ancient order oriental templars organization com systems bbs , , ca \( 510 \) 631 lines 18 thyagi cup portal com \( thyagi morgoth nagasiva \) writes organization known present time ancient order oriental templars ordo templi orientis otherwise hermetic brotherhood light organization official e mail address days \? \( address sf bay area lodges , e g would \) 93 lizard lizard internet addresses alizard tweekco boo pacbell com \( preferred \) pacbell com ! boo ! tweekco ! alizard \( bang path \) alizard com \( backup \) pgp2 2 public key available request" diff --git a/tests/data_for_tests/io/20ng/train.csv b/tests/data_for_tests/io/20ng/train.csv deleted file mode 100644 index 55307ad6..00000000 --- a/tests/data_for_tests/io/20ng/train.csv +++ /dev/null @@ -1,6 +0,0 @@ -talk.religion.misc,"deane binah cc brandeis edu \( david matthew deane \) subject flaming nazis reply deane binah cc brandeis edu organization brandeis university lines 106 okay , 'll bite probably leave alone , heck article 1993apr14 422 sun0 urz uni heidelberg de , gsmith lauren iwr uni heidelberg de \( gene w smith \) writes article brewich hou tx us popec brewich hou tx us \( pope charles \) writes name guy responsible much uniforms , props used early nazis rallies name roehm , hitler claim came swastika business n't credit actual flag design party member dentist \? believe gives credit mein kampf killed early nazi purge many associates flaming homosexuals well know also trying find actual evidence common assertion recently postings groups soc history soc culture german uncovered net experts could provide well , i'm expert , histories nazi germany assert make reference several scandals occurred long night long knives impression got homosexuality portions sa common knowledge also , book \( homosexual author whose name escapes moment \) called homosexuals history asserts roehm heines homosexuals , well others roehm 's sa circle books say roehm associate , edmund heines , homosexual able find nothing beyond , suspect sort historical urban legend well , 're one germany n't believe history books , look primary sources us outside germany access seems plenty documented instances several scandals , fact knight long knives several sa members \( including heines \) found sleeping together , etc also believe people complaining sa 's homosexual activities \( young boys , etc \) histories 've read make convincing case none sounds like urban legend \( irving , notoriously unreliable historian , says funk , nazi finance minister , homosexual gives sources \) know next nothing irving nothing funk precisely know , would contradict history books read concerning existence homosexual nazis \? trying say historians taking part anti homosexual smear \? homosexual writers agree official history \? n't think would found truth roehm heines homosexuals \? 
would think would want homosexuality nazism one use connection two bash homosexuals case challenge anyone document claim going challenge historians point \( irving \) , burden proof track references find stories originate , one germany , close archival material people net found great deal evidence many flaming heterosexuals among nazis seems include worst ones hitler , himmler , goebbels , goering , , eichmann , many eh \? agenda \? prove nazis heterosexuals , bash heterosexuals \? bother nazis might homosexuals \? make homosexuals bad true \? course bisexuals \? half nazis \? n't know would difficult believe nazis homosexuals german officer corps ww1 , instance , notorious homosexuality numerous scandals rocked german govt late 19th early 20th century many kaiser 's friends prosecuted kaiser homosexual , germany army long tradition homosexuality , going far back prussian history back frederick great least , homosexual roehm product prussian officer tradition , old german army \( like english public school system \) , well known center homosexuality , would quite willing overlook roehm 's homosexuality addition , nazis complained homosexuality hitler youth hitler youth swallowed pre nazi youth groups , various pre war , bund , youth groups known promote ideals friendship , many cases , homosexuality seems unlikely plenty homosexual nazis , regardless official nazi dogmas concerning evils homosexuality suprise anyone \? homosexuality always existed , societies would unusual nazis exception , n't sources , think kind proof accept would citations archival material , access intend reread every book nazis modern homosexuality ever read n't time nothing stopping , however , chasing sources prove otherwise , though , stick established histories david matthew deane \( deane binah cc brandeis edu \) eternal bleak wind let gods speak softly us days hereafter \( ezra pound \)" -talk.religion.misc,"psyrobtw ubvmsd cc buffalo edu \( robert weiss \) subject 18 apr 93 god 's promise philippians 4 9 organization university buffalo lines 8 news software vax vms vnews 1 41 nntp posting host ubvmsd cc buffalo edu things , ye learned , received , heard , seen , god peace shall philippians 4 9" -talk.religion.misc,"sandvik newton apple com \( kent sandvik \) subject 14 apr 93 god 's promise 1 john 1 7 organization cookamunga tourist bureau lines 14 article tweekco uucp , alizard tweekco uucp \( lizard \) wrote judging postings 've read usenet non usenet bbs conferences , barney definitely endangered species especially runs dark alley please , please n't make barney modern martyr saviour mythical figure , humans create religion name , life unbearable \) cheers , kent sandvik newton apple com alink ksand private activities net" -talk.religion.misc,"sandvik newton apple com \( kent sandvik \) subject disillusioned protestant finds christ organization cookamunga tourist bureau lines 23 article boi hp com , jburrill boi hp com \( jim burrill \) wrote jesus never taught concept trinity , deal following mat 28 18 jesus came said , authority heaven earth given mat 28 19 therefore go make disciples nations , baptizing name father son holy spirit , mat 28 20 teaching obey everything commanded surely always , end age jim , please , 's lame explanation trinity jesus provides baptizing people name three things ! 
trinity case , i'm wrong , assumed trinity implies god three entities , yet cheers , kent sandvik newton apple com alink ksand private activities net" -talk.religion.misc,"cutter gloster via mind org \( cutter \) subject biblical backing koresh 's 3 02 tape \( cites enclosed \) distribution world organization gordian knot , gloster , ga lines 22 netd susie sbc com \( \) writes article 20apr199301460499 utarlg uta edu b645zaw utarlg uta edu \( stephen think david koresh n't solid structure , sound biblical backing hour long tape broadcast , n't think anyone really cares solid structure sermon 's deaths 's responsible concern people think ought hold christ followers died hand romans also fault believing god , society reminds roman empire every day guess 'll log go watch american cutter gloster via mind org \( chris \) jobs easy person n't holt 's law" -talk.religion.misc,"subject albert sabin rfox charlie usd edu \( rich fox , univ south dakota \) reply rfox charlie usd edu organization university south dakota computer science dept nntp posting host charlie lines 91 article 1993apr15 nntpd2 cxo dec com , sharpe enet dec com \( system privileged account \) writes article 885 sunfish usd edu , rfox charlie usd edu \( rich fox , univ south dakota \) writes article 1993apr10 rambo atlanta dg com , wpr atlanta dg com \( bill rawlins \) writes earlier dialogue deleted perhaps read stop advancing bible evidence relating questions science jesus exist \? g wells great fallacy statement question origins based science alone nope , fallacy yep , science best determining religions handle rich , curious others award custody baby theists religion \? hope n't award custody , rich purposely used handle order avoid e , happens religions \( course like scientific creationism \) used best part indicate science currently time , domains mostly ignored also attempted brief , doubt confused matter aside , science written nobody seems argue theists , theologians better investigate magicians , , , athiests agnostics seems answer would vary individual individual i'm trying evasive , societal perspective , religion works hand , sometimes abused misused , many suffer , know net result seems positive , anthropological perspective human affairs might call neo insofar think masses ca n't get along without religion generally incapable n't , myriad reasons , main one seems promise immortality , immortality therefore seems theologians better equipped others mention answers suggest holds regardless truth answers simply people believe end , spiritual beliefs real scientific facts explanation \( caution take context \) suggest forever closed scientific investigation \? fact , n't think closed , least individuals n't group theoretical physicists argue matter created nothing big bang singularity \? approach might absence , except seems could argued something responsible nothing \? 
maybe something n't supernatural , maybe 's tough one people today grasp case , theory without empirical data explanation , question require data words , agree theorizing \( within scientific parameters \) scientific explaining answer , closed scientists , sense science currently inadequate data necessary improvement , seems long way , ever pretty convoluted hope 've made sense seems 200 years ago , question origin life earth considered open scientific agree generally prefer put way questions , , open inquiry enlightenment , reason questioning theological answers , , part , science thus born curiosity , eventually away largely leaving behind ignorant , selfish , intolerant , arrogant , course , still claim authority four domains rich fox , anthro , usouthdakota like discussion around , figure original post \) much obliged funny facts tend things , n't \? well , sure plenty scientific creationist somewhere , even created nothing record , , modern humans best regards \) , rich fox , anthro , usouthdakota" diff --git a/tests/data_for_tests/io/BQCorpus/dev.txt b/tests/data_for_tests/io/BQCorpus/dev.txt deleted file mode 100644 index 2bd7414e..00000000 --- a/tests/data_for_tests/io/BQCorpus/dev.txt +++ /dev/null @@ -1,6 +0,0 @@ -sentence1,sentence2,label -综合评分不足什么原因,综合评估的依据,0 -什么时候我能使用微粒贷,你就赶快给我开通就行了,0 -如何修改每个月的还款日期,可以申请延期还款日吗?,0 -没什么问的,不能登陆就是我最大的问题了,登录不上,1 -你的意思是不能取现,借到的钱可不可以提出来,1 diff --git a/tests/data_for_tests/io/BQCorpus/test.txt b/tests/data_for_tests/io/BQCorpus/test.txt deleted file mode 100644 index 949583ad..00000000 --- a/tests/data_for_tests/io/BQCorpus/test.txt +++ /dev/null @@ -1,6 +0,0 @@ -sentence1,sentence2,label -你电话号码多少,你们的客服电话是多少?,1 -10000块日利息是多少,0.05%就是借2000块,利息为1块钱一天,1 -17号还款了,我现在想提前几天还,怎么弄,一直按时还款,提前还款,怎么会评估不足,0 -我昨晚申请的,现在钱没到,也没有人联系我,审核多久才会打电话,1 -假如我贷四万还款怎么,18号还款日可以不凌晨扣款,我18日下午还款可以吗,0 diff --git a/tests/data_for_tests/io/BQCorpus/train.txt b/tests/data_for_tests/io/BQCorpus/train.txt deleted file mode 100644 index f2ac4e84..00000000 --- a/tests/data_for_tests/io/BQCorpus/train.txt +++ /dev/null @@ -1,6 +0,0 @@ -sentence1,sentence2,label -一天了还是不能登录,你好,用app干嘛但是无法登入,1 -为什么我的钱包点开,没显示微粒贷呀,点击我进入钱包,没有,借款的,提示呀!,1 -什么要求,借款没有,0 -微信注册的手机号停机了,还可以办理吗,没有邀请可以注册嘛,0 -开通微粒贷,开通微粒贷!强烈要求,1 diff --git a/tests/data_for_tests/io/ChnSentiCorp/dev.txt b/tests/data_for_tests/io/ChnSentiCorp/dev.txt deleted file mode 100644 index 9387b569..00000000 --- a/tests/data_for_tests/io/ChnSentiCorp/dev.txt +++ /dev/null @@ -1,7 +0,0 @@ -label text_a -1 基金痛所有投资项目一样,必须先要有所了解,才能把握分寸,不至于跟风而造成损失。此本基金入门的书是一个不错的选择,不像一般的书一样偏重概念,虽然也涉及到概念,但作者用自己的方式解读,使第一次接触基金的人能更好的理解。内容以非常容易理解的语言象大众普及了基金的很多观念,对于普通基民来说,要想有所收获,必须了解基金界的很多情况,在关键的时候才不会盲目跟风。对于新手,强烈推荐。 -1 系统很好装,LED屏是不错,就是16比9的比例看起来比较长,是14.0的屏。外观比较酷,适合年轻人,键盘模仿SONY的,还不错。 -1 这书的装帧很好的,既适合家庭收藏亦适合阅读了解。了解一个人,通过他的书信,而且是家书,再好不过了,而了解这个人也更了解些那个时代,那个社会,给我们现代人些许启发吧。而我从中也知道了他的学习习惯、方法以及教子方面。比较有收获。软精装的封面,封面要是每个唐老师那个照片就更好了,分上下册便于阅读。内里字体有分别:信是用的启功老师的手写字体,评点是宋体。 -0 屏幕没有坏点和暗点,这个比较不错。配置性价比较高,目前使用已有半个月,基本正常。 -0 典型的国营酒店,管理层缺乏责任心,管理混乱。房间里的大灯镜灯台灯都是坏的,只有一盏床头灯可用,不知道酒店是怎么维护的。最可气的是结帐时竟然要求客人赔偿房间里已损坏很久的鞋盒,简直是讹诈。 -0 普通游客旅馆 还三星 让我伤心 店名好大 奇差无比 补充点评 2006年12月8日 : 还说有地下车库 谁敢下去 晕 狭小 黑暗 要卡壳儿的 CTRIP上怎么让它这么忽悠顾客的 ?!!!!!!! 
diff --git a/tests/data_for_tests/io/ChnSentiCorp/test.txt b/tests/data_for_tests/io/ChnSentiCorp/test.txt deleted file mode 100644 index 35f7d2c5..00000000 --- a/tests/data_for_tests/io/ChnSentiCorp/test.txt +++ /dev/null @@ -1,7 +0,0 @@ -label text_a -0 v系统和XP系统能做到二选一就更好了,毕竟大部分人还是更偏爱XP系统。 -0 自带的Linix系统上上网还可以,想玩其他的功能毫无疑问得换XP.偶在京东订的时候为了装XP方便,一起买了阿帕奇的USB光驱。到货后,发现该USB光驱无法引导系统光盘启动,已验证过该光驱读写功能正常。 -1 非常不错的酒店,依山傍水,里面大片森林,散散步很不错,坐在湖边也休息也是不错的选择;房间很幽静,房间的设施很好,服务员态度也很好。 -0 5月8日付款成功,当当网显示5月10日发货,可是至今还没看到货物,也没收到任何通知,简不知怎么说好!!! -1 收到书,还未打开就被封面的鲜艳色彩及版样吸引,迫不急待的打开,书内的设计及彩图也不错,色泽及印刷质量都称的上好,没有味道,贴图也从简入深。价格也不贵。拿回家,小宝贝也很喜欢,我家宝宝只有2岁5个月对于她贴片不太好撕,大一些的贴片要我来帮她撕。不过,今天再玩时已经比昨天撕的好很多了,可以锻炼她的小手呢。等这几本用完了,我想我还会再给她买一些类似的书。 -0 挺失望的,还不如买一本张爱玲文集呢,以<色戒>命名,可这篇文章仅仅10多页,且无头无尾的,完全比不上里面的任意一篇其它文章. diff --git a/tests/data_for_tests/io/ChnSentiCorp/train.txt b/tests/data_for_tests/io/ChnSentiCorp/train.txt deleted file mode 100644 index 9e53f1bd..00000000 --- a/tests/data_for_tests/io/ChnSentiCorp/train.txt +++ /dev/null @@ -1,7 +0,0 @@ -label text_a -1 很好的酒店,很规范植得一住.餐厅一般不应该的,不知道为什么. 宾馆反馈 2008年4月17日 : 餐厅现已重新装修,用餐环境较以前要好的多。谢谢您的宝贵意见! -0 这是我看过文字写得很糟糕的书,因为买了,还是耐着性子看完了,但是总体来说不好,文字、内容、结构都不好 -1 拿房时没大床房了,给我们免费升成套房,这点还蛮满意的。酒店大致不错,有国内五星水准。比国际品牌的要差一点。酒店有点年纪了,维修要加强,比如我们浴室的下水就堵塞不通,这些在客人入住前就该发觉修好。其它都还可以。 -1 开始看了2005年的几位朋友的评价,都不敢去入住。没想到现在改观了很多,房间虽小,但很整洁。下次再来的话,还会选择这个酒店。只是希望宽带能一直免费! -0 本机预装的Vista跟瑞星杀软不兼容,蓝屏,不能进入系统,不能自行卸载!!千万小心别装,用卡巴可以。 -0 跟心灵鸡汤没什么本质区别嘛,至少我不喜欢这样读经典,把经典都解读成这样有点去中国化的味道了 diff --git a/tests/data_for_tests/io/LCQMC/dev.txt b/tests/data_for_tests/io/LCQMC/dev.txt deleted file mode 100644 index 3e253c93..00000000 --- a/tests/data_for_tests/io/LCQMC/dev.txt +++ /dev/null @@ -1,6 +0,0 @@ -开初婚未育证明怎么弄? 初婚未育情况证明怎么开? 1 -脚气怎么治疗 醋怎么治疗脚气 0 -世界是先有男人还是先有女人 世界上是先有男人还是先有女人 1 -有什么小说软件好用的 那个看小说的阅读器较好 1 -网上兼职是做什么的,手机可以做吗 手机可以做什么网上兼职,拍单子是什么 0 -郑州有什么好玩的地方? 郑州有什么好玩的地方啊 1 diff --git a/tests/data_for_tests/io/LCQMC/test.txt b/tests/data_for_tests/io/LCQMC/test.txt deleted file mode 100644 index bc694d3a..00000000 --- a/tests/data_for_tests/io/LCQMC/test.txt +++ /dev/null @@ -1,5 +0,0 @@ -谁有狂三这张高清的 这张高清图,谁有 0 -淘宝模特叫什么?急 淘宝的模特她叫什么 1 -不要嘛用韩语怎么说 韩语的请不要走怎么说 0 -倒瓜子脸适合什么发型 额头高又是瓜子脸的女生适合什么刘海 0 -淘宝流量怎么买 刚淘宝店如何才能有流量 0 diff --git a/tests/data_for_tests/io/LCQMC/train.txt b/tests/data_for_tests/io/LCQMC/train.txt deleted file mode 100644 index 9f6d4924..00000000 --- a/tests/data_for_tests/io/LCQMC/train.txt +++ /dev/null @@ -1,6 +0,0 @@ -喜欢打篮球的男生喜欢什么样的女生 爱打篮球的男生喜欢什么样的女生 1 -你帮我设计小说的封面吧 谁能帮我给小说设计个封面? 0 -移动手机卡刷砖 关于移动手机卡 0 -有什么好听的短信铃声啊 有什么好听的韩剧短信铃声 0 -人生的三大事是什么 人生三大事是什么? 
1 -您好是后8位的 您提供后8位即可, 1 diff --git a/tests/data_for_tests/io/MNLI/dev_matched.tsv b/tests/data_for_tests/io/MNLI/dev_matched.tsv deleted file mode 100755 index ace2dd27..00000000 --- a/tests/data_for_tests/io/MNLI/dev_matched.tsv +++ /dev/null @@ -1,6 +0,0 @@ -index promptID pairID genre sentence1_binary_parse sentence2_binary_parse sentence1_parse sentence2_parse sentence1 sentence2 label1 label2 label3 label4 label5 gold_label -0 63735 63735n slate ( ( The ( new rights ) ) ( are ( nice enough ) ) ) ( Everyone ( really ( likes ( the ( newest benefits ) ) ) ) ) (ROOT (S (NP (DT The) (JJ new) (NNS rights)) (VP (VBP are) (ADJP (JJ nice) (RB enough))))) (ROOT (S (NP (NN Everyone)) (VP (ADVP (RB really)) (VBZ likes) (NP (DT the) (JJS newest) (NNS benefits))))) The new rights are nice enough Everyone really likes the newest benefits neutral entailment neutral neutral neutral neutral -1 91383 91383c government ( ( This site ) ( ( includes ( ( ( ( a list ) ( of ( all ( award winners ) ) ) ) and ) ( ( a ( searchable database ) ) ( of ( Government ( Executive articles ) ) ) ) ) ) . ) ) ( ( ( The ( Government ( Executive articles ) ) ) ( housed ( on ( the website ) ) ) ) ( ( ( are not ) ( able ( to ( be searched ) ) ) ) . ) ) (ROOT (S (NP (DT This) (NN site)) (VP (VBZ includes) (NP (NP (NP (DT a) (NN list)) (PP (IN of) (NP (DT all) (NN award) (NNS winners)))) (CC and) (NP (NP (DT a) (JJ searchable) (NN database)) (PP (IN of) (NP (NNP Government) (NNP Executive) (NNS articles)))))) (. .))) (ROOT (S (NP (NP (DT The) (NNP Government) (NNP Executive) (NNS articles)) (VP (VBN housed) (PP (IN on) (NP (DT the) (NN website))))) (VP (VBP are) (RB not) (ADJP (JJ able) (S (VP (TO to) (VP (VB be) (ADJP (JJ searched))))))) (. .))) This site includes a list of all award winners and a searchable database of Government Executive articles. The Government Executive articles housed on the website are not able to be searched. contradiction contradiction contradiction contradiction contradiction contradiction -2 755 755e telephone ( ( ( ( uh ( i ( ( do n't ) ( know ( ( i i ) ( have ( ( mixed emotions ) ( about ( him ( ( uh sometimes ) ( i ( like him ) ) ) ) ) ) ) ) ) ) ) ) but ) ( ( at ( the ( same times ) ) ) ( i ( love ( to ( see somebody ) ) ) ) ) ) ( beat him ) ) ( I ( ( ( ( ( ( like him ) ( for ( the ( most part ) ) ) ) , ) but ) ( ( would still ) ( enjoy ( seeing ( someone ( beat him ) ) ) ) ) ) . ) ) (ROOT (SINV (S (S (INTJ (UH uh)) (NP (FW i)) (VP (VBP do) (RB n't) (VP (VB know) (NP (NP (FW i) (FW i)) (SBAR (S (VP (VBP have) (VP (VBN mixed) (NP (NNS emotions)) (PP (IN about) (S (NP (PRP him)) (VP (VBG uh) (ADVP (RB sometimes)) (NP (NP (FW i)) (PP (IN like) (NP (PRP him))))))))))))))) (CC but) (S (PP (IN at) (NP (DT the) (JJ same) (NNS times))) (NP (FW i)) (VP (VBP love) (S (VP (TO to) (VP (VB see) (NP (NN somebody)))))))) (VP (VBD beat)) (NP (PRP him)))) (ROOT (S (NP (PRP I)) (VP (VP (VBP like) (NP (PRP him)) (PP (IN for) (NP (DT the) (JJS most) (NN part)))) (, ,) (CC but) (VP (MD would) (ADVP (RB still)) (VP (VB enjoy) (S (VP (VBG seeing) (S (NP (NN someone)) (VP (VB beat) (NP (PRP him))))))))) (. .))) uh i don't know i i have mixed emotions about him uh sometimes i like him but at the same times i love to see somebody beat him I like him for the most part, but would still enjoy seeing someone beat him. 
entailment entailment entailment entailment entailment entailment -3 78013 78013c telephone ( yeah ( ( i i ) ( think ( ( my ( favorite restaurant ) ) ( ( is always ) ( been ( ( the ( one closest ) ) ( you ( ( know ( the closest ) ) ( ( as long ) ( as ( it ( 's ( it ( meets ( ( the ( minimum criteria ) ) ( you ( know ( of ( good food ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ( ( My ( favorite restaurants ) ) ( ( ( ( are always ) ( ( ( ( ( at least ) a ) hundred ) miles ) away ) ) ( from ( my house ) ) ) . ) ) (ROOT (S (VP (VB yeah) (NP (NP (FW i) (FW i)) (SBAR (S (VP (VBP think) (SBAR (S (NP (PRP$ my) (JJ favorite) (NN restaurant)) (VP (VBZ is) (ADVP (RB always)) (VP (VBN been) (NP (NP (DT the) (CD one) (JJS closest)) (SBAR (S (NP (PRP you)) (VP (VBP know) (NP (DT the) (JJS closest)) (ADVP (ADVP (RB as) (RB long)) (SBAR (IN as) (S (NP (PRP it)) (VP (VBZ 's) (SBAR (S (NP (PRP it)) (VP (VBZ meets) (NP (NP (DT the) (JJ minimum) (NNS criteria)) (SBAR (S (NP (PRP you)) (VP (VBP know) (PP (IN of) (NP (JJ good) (NN food))))))))))))))))))))))))))))) (ROOT (S (NP (PRP$ My) (JJ favorite) (NNS restaurants)) (VP (VBP are) (ADVP (RB always)) (ADVP (NP (QP (IN at) (JJS least) (DT a) (CD hundred)) (NNS miles)) (RB away)) (PP (IN from) (NP (PRP$ my) (NN house)))) (. .))) yeah i i think my favorite restaurant is always been the one closest you know the closest as long as it's it meets the minimum criteria you know of good food My favorite restaurants are always at least a hundred miles away from my house. contradiction contradiction contradiction contradiction contradiction contradiction -4 96377 96377c telephone ( i ( ( do n't ) ( know ( um ( do ( you ( do ( ( a lot ) ( of camping ) ) ) ) ) ) ) ) ) ( I ( ( know exactly ) . ) ) (ROOT (S (NP (FW i)) (VP (VBP do) (RB n't) (VP (VB know) (SBAR (S (NP (NN um)) (VP (VBP do) (SBAR (S (NP (PRP you)) (VP (VBP do) (NP (NP (DT a) (NN lot)) (PP (IN of) (NP (NN camping)))))))))))))) (ROOT (S (NP (PRP I)) (VP (VBP know) (ADVP (RB exactly))) (. .))) i don't know um do you do a lot of camping I know exactly. contradiction contradiction contradiction contradiction contradiction contradiction diff --git a/tests/data_for_tests/io/MNLI/dev_mismatched.tsv b/tests/data_for_tests/io/MNLI/dev_mismatched.tsv deleted file mode 100755 index a1da8897..00000000 --- a/tests/data_for_tests/io/MNLI/dev_mismatched.tsv +++ /dev/null @@ -1,6 +0,0 @@ -index promptID pairID genre sentence1_binary_parse sentence2_binary_parse sentence1_parse sentence2_parse sentence1 sentence2 label1 label2 label3 label4 label5 gold_label -0 75290 75290c letters ( ( Your contribution ) ( ( helped ( make ( it ( possible ( for ( us ( to ( ( provide ( our students ) ) ( with ( a ( quality education ) ) ) ) ) ) ) ) ) ) ) . ) ) ( ( Your contributions ) ( ( were ( of ( ( no help ) ( with ( ( our ( students ' ) ) education ) ) ) ) ) . ) ) (ROOT (S (NP (PRP$ Your) (NN contribution)) (VP (VBD helped) (VP (VB make) (S (NP (PRP it)) (ADJP (JJ possible)) (SBAR (IN for) (S (NP (PRP us)) (VP (TO to) (VP (VB provide) (NP (PRP$ our) (NNS students)) (PP (IN with) (NP (DT a) (NN quality) (NN education)))))))))) (. .))) (ROOT (S (NP (PRP$ Your) (NNS contributions)) (VP (VBD were) (PP (IN of) (NP (NP (DT no) (NN help)) (PP (IN with) (NP (NP (PRP$ our) (NNS students) (POS ')) (NN education)))))) (. .))) Your contribution helped make it possible for us to provide our students with a quality education. Your contributions were of no help with our students' education. 
contradiction contradiction contradiction contradiction contradiction contradiction -1 133794 133794c verbatim ( ( ( ( ( ( The answer ) ( ( ( ( has nothing ) ( to ( do ( with ( their cause ) ) ) ) ) , ) however ) ) , ) but ) ( ( with ( ( ( ( ( ( ( ( the ( simple fact ) ) ( that ( dictionaries ( ( are not ) ( exercises ( in ( bi-unique substitutability ) ) ) ) ) ) ) ; ) ( in ( ( ( other words ) , ) ( if ( ( one ( of ( ( the senses ) ( of run ) ) ) ) ( ( is ` ) ( ( ( ( operate ' ) -LRB- ) ( as ( in ( She ( runs ( an ( engine factory ) ) ) ) ) ) ) -RRB- ) ) ) ) ) ) ) , ) ( that ( ( does not ) ( ( make it ) ( ( valid ( to ( assume ( that ( one ( can ( substitute ( ( operate ( for run ) ) ( in ( We ( ( run ( in ( ( the marathon ) ( every year ) ) ) ) . ) ) ) ) ) ) ) ) ) ) ) ( Although ( ( ( ( recognizing this ) ( as ( ( a shortcoming ) ( of dictionaries ) ) ) ) and ) ( ( ( assigning it ) arbitrarily ) ( to ( what ( , ( ( for ( lack ( of ( a ( better term ) ) ) ) ) ( , ( we ( might ( call ( ( the genius ) ( of ( the language ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) , ) ( might ( seem ( trivial ( to ( the ( casual observer ) ) ) ) ) ) ) ) ( , ( it ( is ( ( a ( valid matter ) ) ( for ( concern ( in ( ( the realm ) ( of lexicology ) ) ) ) ) ) ) ) ) ) ) . ) ( Dictionaries ( ( ( are indeed ) ( exercises ( in ( bi-unique substitutability ) ) ) ) . ) ) (ROOT (S (S (NP (DT The) (NN answer)) (VP (VBZ has) (ADVP (NN nothing)) (S (VP (TO to) (VP (VB do) (PP (IN with) (NP (PRP$ their) (NN cause)))))) (, ,) (ADVP (RB however)))) (, ,) (CC but) (S (SBAR (IN with) (S (NP (NP (DT the) (JJ simple) (NN fact)) (SBAR (IN that) (S (NP (NNS dictionaries)) (VP (VBP are) (RB not) (NP (NP (NNS exercises)) (PP (IN in) (NP (JJ bi-unique) (NN substitutability))))))) (: ;) (PP (IN in) (NP (NP (JJ other) (NNS words)) (, ,) (SBAR (IN if) (S (NP (NP (CD one)) (PP (IN of) (NP (NP (DT the) (NNS senses)) (PP (IN of) (NP (NN run)))))) (VP (VBZ is) (`` `) (VP (VB operate) ('' ') (-LRB- -LRB-) (SBAR (RB as) (IN in) (S (NP (PRP She)) (VP (VBZ runs) (NP (DT an) (NN engine) (NN factory))))) (-RRB- -RRB-))))))) (, ,) (SBAR (WHNP (WDT that)) (S (VP (VBZ does) (RB not) (VP (VB make) (NP (PRP it)) (S (ADJP (JJ valid) (S (VP (TO to) (VP (VB assume) (SBAR (IN that) (S (NP (PRP one)) (VP (MD can) (VP (VB substitute) (VP (VB operate) (PP (IN for) (NP (NN run))) (SBAR (IN in) (S (NP (PRP We)) (VP (VB run) (PP (IN in) (NP (NP (DT the) (NN marathon)) (NP (DT every) (NN year)))) (. .))))))))))))) (SBAR (IN Although) (S (S (VP (VBG recognizing) (NP (DT this)) (PP (IN as) (NP (NP (DT a) (NN shortcoming)) (PP (IN of) (NP (NNS dictionaries))))))) (CC and) (S (VP (VBG assigning) (NP (PRP it)) (ADVP (RB arbitrarily)) (PP (TO to) (SBAR (WHNP (WP what)) (S (, ,) (PP (IN for) (NP (NP (NN lack)) (PP (IN of) (NP (DT a) (JJR better) (NN term))))) (, ,) (NP (PRP we)) (VP (MD might) (VP (VB call) (NP (NP (DT the) (NN genius)) (PP (IN of) (NP (DT the) (NN language)))))))))))))))))) (, ,)) (VP (MD might) (VP (VB seem) (ADJP (JJ trivial) (PP (TO to) (NP (DT the) (JJ casual) (NN observer)))))))) (, ,) (NP (PRP it)) (VP (VBZ is) (NP (NP (DT a) (JJ valid) (NN matter)) (PP (IN for) (NP (NP (NN concern)) (PP (IN in) (NP (NP (DT the) (NN realm)) (PP (IN of) (NP (NN lexicology)))))))))) (. .))) (ROOT (S (NP (NNS Dictionaries)) (VP (VBP are) (ADVP (RB indeed)) (NP (NP (NNS exercises)) (PP (IN in) (NP (JJ bi-unique) (NN substitutability))))) (. 
.))) The answer has nothing to do with their cause, however, but with the simple fact that dictionaries are not exercises in bi-unique substitutability; in other words, if one of the senses of run is `operate' (as in She runs an engine factory ), that does not make it valid to assume that one can substitute operate for run in We run in the marathon every year . Although recognizing this as a shortcoming of dictionaries and assigning it arbitrarily to what, for lack of a better term, we might call the genius of the language, might seem trivial to the casual observer, it is a valid matter for concern in the realm of lexicology. Dictionaries are indeed exercises in bi-unique substitutability. contradiction contradiction contradiction contradiction contradiction contradiction -2 3628 3628c verbatim ( We ( ( serve ( ( a ( classic ( Tuscan meal ) ) ) ( that ( includes ( ( a ( Florentine terrine ) ) ( made ( with ( dick ( and ( chicken livers ) ) ) ) ) ) ) ) ) ) . ) ) ( We ( ( serve ( ( a meal ) ( of ( Florentine terrine ) ) ) ) . ) ) (ROOT (S (NP (PRP We)) (VP (VBP serve) (NP (NP (DT a) (JJ classic) (NNP Tuscan) (NN meal)) (SBAR (WHNP (WDT that)) (S (VP (VBZ includes) (NP (NP (DT a) (JJ Florentine) (NN terrine)) (VP (VBN made) (PP (IN with) (NP (NN dick) (CC and) (NN chicken) (NNS livers)))))))))) (. .))) (ROOT (S (NP (PRP We)) (VP (VBP serve) (NP (NP (DT a) (NN meal)) (PP (IN of) (NP (NNP Florentine) (NN terrine))))) (. .))) We serve a classic Tuscan meal that includes a Florentine terrine made with dick and chicken livers. We serve a meal of Florentine terrine. contradiction neutral entailment entailment entailment entailment -3 89411 89411c letters ( ( ( A ( few months ) ) ago ) ( , ( ( ( ( Carl Newton ) and ) I ) ( ( ( wrote ( a letter ) ) ( asking ( you ( to ( ( consider ( a ( financial contribution ) ) ) ( to ( ( graduate Endodontics ) ( at ( Indiana University ) ) ) ) ) ) ) ) ) . ) ) ) ) ( ( ( ( Carl Newton ) and ) I ) ( ( ( have never ) ( ( had ( any ( other ( previous contact ) ) ) ) ( with you ) ) ) . ) ) (ROOT (S (ADVP (NP (DT A) (JJ few) (NNS months)) (RB ago)) (, ,) (NP (NP (NNP Carl) (NNP Newton)) (CC and) (NP (PRP I))) (VP (VBD wrote) (NP (DT a) (NN letter)) (S (VP (VBG asking) (S (NP (PRP you)) (VP (TO to) (VP (VB consider) (NP (DT a) (JJ financial) (NN contribution)) (PP (TO to) (NP (NP (JJ graduate) (NNS Endodontics)) (PP (IN at) (NP (NNP Indiana) (NNP University))))))))))) (. .))) (ROOT (S (NP (NP (NNP Carl) (NNP Newton)) (CC and) (NP (PRP I))) (VP (VBP have) (ADVP (RB never)) (VP (VBN had) (NP (DT any) (JJ other) (JJ previous) (NN contact)) (PP (IN with) (NP (PRP you))))) (. .))) A few months ago, Carl Newton and I wrote a letter asking you to consider a financial contribution to graduate Endodontics at Indiana University. Carl Newton and I have never had any other previous contact with you. contradiction contradiction contradiction contradiction contradiction contradiction -4 136158 136158e facetoface ( I ( ( was ( on ( ( this earth ) ( you ( know ( ( , ( ( I ( 've ( lived ( on ( ( this earth ) ( for ( some reason ) ) ) ) ) ) ) , ) ) ( I ( just ( ( do n't ) ( know ( what ( it ( is yet ) ) ) ) ) ) ) ) ) ) ) ) ) . ) ) ( I ( ( ( ( do n't ) yet ) ( ( know ( the reason ) ) ( why ( I ( have ( lived ( on earth ) ) ) ) ) ) ) . 
) ) (ROOT (S (NP (PRP I)) (VP (VBD was) (PP (IN on) (NP (NP (DT this) (NN earth)) (SBAR (S (NP (PRP you)) (VP (VBP know) (SBAR (S (PRN (, ,) (S (NP (PRP I)) (VP (VBP 've) (VP (VBN lived) (PP (IN on) (NP (NP (DT this) (NN earth)) (PP (IN for) (NP (DT some) (NN reason)))))))) (, ,)) (NP (PRP I)) (ADVP (RB just)) (VP (VBP do) (RB n't) (VP (VB know) (SBAR (WHNP (WP what)) (S (NP (PRP it)) (VP (VBZ is) (ADVP (RB yet))))))))))))))) (. .))) (ROOT (S (NP (PRP I)) (VP (VBP do) (RB n't) (ADVP (RB yet)) (VP (VB know) (NP (DT the) (NN reason)) (SBAR (WHADVP (WRB why)) (S (NP (PRP I)) (VP (VBP have) (VP (VBN lived) (PP (IN on) (NP (NN earth))))))))) (. .))) I was on this earth you know, I've lived on this earth for some reason, I just don't know what it is yet. I don't yet know the reason why I have lived on earth. entailment entailment entailment entailment entailment entailment diff --git a/tests/data_for_tests/io/MNLI/test_matched.tsv b/tests/data_for_tests/io/MNLI/test_matched.tsv deleted file mode 100755 index b90c2d2a..00000000 --- a/tests/data_for_tests/io/MNLI/test_matched.tsv +++ /dev/null @@ -1,6 +0,0 @@ -index promptID pairID genre sentence1_binary_parse sentence2_binary_parse sentence1_parse sentence2_parse sentence1 sentence2 -0 31493 31493 travel ( ( ( ( ( ( ( ( Hierbas , ) ( ans seco ) ) , ) ( ans dulce ) ) , ) and ) frigola ) ( ( ( are just ) ( ( a ( few names ) ) ( worth ( ( keeping ( a look-out ) ) for ) ) ) ) . ) ) ( Hierbas ( ( is ( ( a name ) ( worth ( ( looking out ) for ) ) ) ) . ) ) (ROOT (S (NP (NP (NNS Hierbas)) (, ,) (NP (NN ans) (NN seco)) (, ,) (NP (NN ans) (NN dulce)) (, ,) (CC and) (NP (NN frigola))) (VP (VBP are) (ADVP (RB just)) (NP (NP (DT a) (JJ few) (NNS names)) (PP (JJ worth) (S (VP (VBG keeping) (NP (DT a) (NN look-out)) (PP (IN for))))))) (. .))) (ROOT (S (NP (NNS Hierbas)) (VP (VBZ is) (NP (NP (DT a) (NN name)) (PP (JJ worth) (S (VP (VBG looking) (PRT (RP out)) (PP (IN for))))))) (. .))) Hierbas, ans seco, ans dulce, and frigola are just a few names worth keeping a look-out for. Hierbas is a name worth looking out for. -1 92164 92164 government ( ( ( The extent ) ( of ( the ( behavioral effects ) ) ) ) ( ( would ( ( depend ( in ( part ( on ( ( the structure ) ( of ( ( ( the ( individual ( account program ) ) ) and ) ( any limits ) ) ) ) ) ) ) ) ( on ( accessing ( the funds ) ) ) ) ) . ) ) ( ( Many people ) ( ( would ( be ( very ( unhappy ( to ( ( loose control ) ( over ( their ( own money ) ) ) ) ) ) ) ) ) . ) ) (ROOT (S (NP (NP (DT The) (NN extent)) (PP (IN of) (NP (DT the) (JJ behavioral) (NNS effects)))) (VP (MD would) (VP (VB depend) (PP (IN in) (NP (NP (NN part)) (PP (IN on) (NP (NP (DT the) (NN structure)) (PP (IN of) (NP (NP (DT the) (JJ individual) (NN account) (NN program)) (CC and) (NP (DT any) (NNS limits)))))))) (PP (IN on) (S (VP (VBG accessing) (NP (DT the) (NNS funds))))))) (. .))) (ROOT (S (NP (JJ Many) (NNS people)) (VP (MD would) (VP (VB be) (ADJP (RB very) (JJ unhappy) (PP (TO to) (NP (NP (JJ loose) (NN control)) (PP (IN over) (NP (PRP$ their) (JJ own) (NN money)))))))) (. .))) The extent of the behavioral effects would depend in part on the structure of the individual account program and any limits on accessing the funds. Many people would be very unhappy to loose control over their own money. -2 9662 9662 government ( ( ( Timely access ) ( to information ) ) ( ( is ( in ( ( the ( best interests ) ) ( of ( ( ( both GAO ) and ) ( the agencies ) ) ) ) ) ) . 
) ) ( It ( ( ( is ( in ( ( everyone 's ) ( best interest ) ) ) ) ( to ( ( have access ) ( to ( information ( in ( a ( timely manner ) ) ) ) ) ) ) ) . ) ) (ROOT (S (NP (NP (JJ Timely) (NN access)) (PP (TO to) (NP (NN information)))) (VP (VBZ is) (PP (IN in) (NP (NP (DT the) (JJS best) (NNS interests)) (PP (IN of) (NP (NP (DT both) (NNP GAO)) (CC and) (NP (DT the) (NNS agencies))))))) (. .))) (ROOT (S (NP (PRP It)) (VP (VBZ is) (PP (IN in) (NP (NP (NN everyone) (POS 's)) (JJS best) (NN interest))) (S (VP (TO to) (VP (VB have) (NP (NN access)) (PP (TO to) (NP (NP (NN information)) (PP (IN in) (NP (DT a) (JJ timely) (NN manner))))))))) (. .))) Timely access to information is in the best interests of both GAO and the agencies. It is in everyone's best interest to have access to information in a timely manner. -3 5991 5991 travel ( ( Based ( in ( ( the ( Auvergnat ( spa town ) ) ) ( of Vichy ) ) ) ) ( , ( ( the ( French government ) ) ( often ( ( ( ( proved ( more zealous ) ) ( than ( its masters ) ) ) ( in ( ( ( suppressing ( civil liberties ) ) and ) ( ( drawing up ) ( anti-Jewish legislation ) ) ) ) ) . ) ) ) ) ) ( ( The ( French government ) ) ( ( passed ( ( anti-Jewish laws ) ( aimed ( at ( helping ( the Nazi ) ) ) ) ) ) . ) ) (ROOT (S (PP (VBN Based) (PP (IN in) (NP (NP (DT the) (NNP Auvergnat) (NN spa) (NN town)) (PP (IN of) (NP (NNP Vichy)))))) (, ,) (NP (DT the) (JJ French) (NN government)) (ADVP (RB often)) (VP (VBD proved) (NP (JJR more) (NNS zealous)) (PP (IN than) (NP (PRP$ its) (NNS masters))) (PP (IN in) (S (VP (VP (VBG suppressing) (NP (JJ civil) (NNS liberties))) (CC and) (VP (VBG drawing) (PRT (RP up)) (NP (JJ anti-Jewish) (NN legislation))))))) (. .))) (ROOT (S (NP (DT The) (JJ French) (NN government)) (VP (VBD passed) (NP (NP (JJ anti-Jewish) (NNS laws)) (VP (VBN aimed) (PP (IN at) (S (VP (VBG helping) (NP (DT the) (JJ Nazi)))))))) (. .))) Based in the Auvergnat spa town of Vichy, the French government often proved more zealous than its masters in suppressing civil liberties and drawing up anti-Jewish legislation. The French government passed anti-Jewish laws aimed at helping the Nazi. -4 50156 50156 travel ( ( ( ( ( Built ( in 1870 ) ) ( , ( ( ( its canopy ) ( of ( stained ( glass ( and ( cast iron ) ) ) ) ) ) ( is ( ( the oldest ) ( in Dublin ) ) ) ) ) ) ; ) ( ( its ( enthusiastic ( interior decoration ) ) ) ( ( is also ) ( typical ( of ( the era ) ) ) ) ) ) . ) ( It ( ( ( ( was ( constructed ( in 1870 ) ) ) and ) ( has ( ( the ( oldest canopy ) ) ( in Dublin ) ) ) ) . ) ) (ROOT (S (S (S (VP (VBN Built) (PP (IN in) (NP (CD 1870))))) (, ,) (NP (NP (PRP$ its) (NN canopy)) (PP (IN of) (NP (JJ stained) (NN glass) (CC and) (NN cast) (NN iron)))) (VP (VBZ is) (NP (NP (DT the) (JJS oldest)) (PP (IN in) (NP (NNP Dublin)))))) (: ;) (S (NP (PRP$ its) (JJ enthusiastic) (JJ interior) (NN decoration)) (VP (VBZ is) (ADVP (RB also)) (ADJP (JJ typical) (PP (IN of) (NP (DT the) (NN era)))))) (. .))) (ROOT (S (NP (PRP It)) (VP (VP (VBD was) (VP (VBN constructed) (PP (IN in) (NP (CD 1870))))) (CC and) (VP (VBZ has) (NP (NP (DT the) (JJS oldest) (NN canopy)) (PP (IN in) (NP (NNP Dublin)))))) (. .))) Built in 1870, its canopy of stained glass and cast iron is the oldest in Dublin; its enthusiastic interior decoration is also typical of the era. It was constructed in 1870 and has the oldest canopy in Dublin. 
diff --git a/tests/data_for_tests/io/MNLI/test_mismatched.tsv b/tests/data_for_tests/io/MNLI/test_mismatched.tsv deleted file mode 100755 index 798cd395..00000000 --- a/tests/data_for_tests/io/MNLI/test_mismatched.tsv +++ /dev/null @@ -1,6 +0,0 @@ -index promptID pairID genre sentence1_binary_parse sentence2_binary_parse sentence1_parse sentence2_parse sentence1 sentence2 -0 16130 16130 facetoface ( ( What ( have ( you decided ) ) ) ( , ( what ( ( ( are you ) ( going ( to do ) ) ) ? ) ) ) ) ( So ( what ( ( 's ( your decision ) ) ? ) ) ) (ROOT (SBARQ (SBAR (WHNP (WP What)) (S (VP (VBP have) (S (NP (PRP you)) (VP (VBD decided)))))) (, ,) (WHNP (WP what)) (SQ (VBP are) (NP (PRP you)) (VP (VBG going) (S (VP (TO to) (VP (VB do)))))) (. ?))) (ROOT (SBARQ (RB So) (WHNP (WP what)) (SQ (VBZ 's) (NP (PRP$ your) (NN decision))) (. ?))) What have you decided, what are you going to do? So what's your decision? -1 128269 128269 oup ( ( ( Women 's ) clothing ) ( ( is ( characterized ( by ( ( great diversity ) ( in ( ( styles and ) ( short ( production runs ) ) ) ) ) ) ) ) . ) ) ( ( ( Men 's ) clothing ) ( typically ( ( ( has ( the ( ( most stylistic ) diversity ) ) ) ( unlike ( ( the blandness ) ( of ( ( women 's ) fashion ) ) ) ) ) . ) ) ) (ROOT (S (NP (NP (NNP Women) (POS 's)) (NN clothing)) (VP (VBZ is) (VP (VBN characterized) (PP (IN by) (NP (NP (JJ great) (NN diversity)) (PP (IN in) (NP (NP (NNS styles)) (CC and) (NP (JJ short) (NN production) (NNS runs)))))))) (. .))) (ROOT (S (NP (NP (NNP Men) (POS 's)) (NN clothing)) (ADVP (RB typically)) (VP (VBZ has) (NP (DT the) (ADJP (RBS most) (JJ stylistic)) (NN diversity)) (PP (IN unlike) (NP (NP (DT the) (NN blandness)) (PP (IN of) (NP (NP (NNS women) (POS 's)) (NN fashion)))))) (. .))) Women's clothing is characterized by great diversity in styles and short production runs. Men's clothing typically has the most stylistic diversity unlike the blandness of women's fashion. -2 130938 130938 nineeleven ( ( ( ( ( Reports ( from ( ( two ( flight attendants ) ) ( in ( the ( coach cabin ) ) ) ) ) ) , ) ( ( ( Betty Ong ) and ) ( Madeline ( Amy Sweeney ) ) ) ) , ) ( ( ( tell us ) ( ( most ( of what ) ) ( we ( know ( about ( how ( ( the hijacking ) happened ) ) ) ) ) ) ) . ) ) ( ( ( The report ) ( on ( the hijacking ) ) ) ( ( ( was ( ( over ( five hundred ) ) pages ) ) long ) . ) ) (ROOT (S (NP (NP (NP (NNS Reports)) (PP (IN from) (NP (NP (CD two) (NN flight) (NNS attendants)) (PP (IN in) (NP (DT the) (NN coach) (NN cabin)))))) (, ,) (NP (NP (NNP Betty) (NNP Ong)) (CC and) (NP (NNP Madeline) (NNP Amy) (NNP Sweeney))) (, ,)) (VP (VBP tell) (NP (PRP us)) (SBAR (WHNP (JJS most) (WHPP (IN of) (WHNP (WP what)))) (S (NP (PRP we)) (VP (VBP know) (PP (IN about) (SBAR (WHADVP (WRB how)) (S (NP (DT the) (NN hijacking)) (VP (VBD happened))))))))) (. .))) (ROOT (S (NP (NP (DT The) (NN report)) (PP (IN on) (NP (DT the) (NN hijacking)))) (VP (VBD was) (NP (QP (RB over) (CD five) (CD hundred)) (NNS pages)) (ADVP (RB long))) (. .))) Reports from two flight attendants in the coach cabin, Betty Ong and Madeline Amy Sweeney, tell us most of what we know about how the hijacking happened. The report on the hijacking was over five hundred pages long. -3 40009 40009 nineeleven ( ( At ( about 9:20 ) ) ( , ( ( ( security personnel ) ( at ( FAA headquarters ) ) ) ( ( ( ( set up ) ( a ( hijacking teleconference ) ) ) ( with ( ( ( several agencies ) , ) ( including ( the ( Defense Department ) ) ) ) ) ) . ) ) ) ) ( ( The teleconference ) ( ( lasted ( for ( 13 ( straight hours ) ) ) ) . 
) ) (ROOT (S (PP (IN At) (NP (QP (RB about) (CD 9:20)))) (, ,) (NP (NP (NN security) (NNS personnel)) (PP (IN at) (NP (NNP FAA) (NNS headquarters)))) (VP (VBD set) (PRT (RP up)) (NP (DT a) (VBG hijacking) (NN teleconference)) (PP (IN with) (NP (NP (JJ several) (NNS agencies)) (, ,) (PP (VBG including) (NP (DT the) (NNP Defense) (NNP Department)))))) (. .))) (ROOT (S (NP (DT The) (NN teleconference)) (VP (VBD lasted) (PP (IN for) (NP (CD 13) (JJ straight) (NNS hours)))) (. .))) At about 9:20, security personnel at FAA headquarters set up a hijacking teleconference with several agencies, including the Defense Department. The teleconference lasted for 13 straight hours. -4 105266 105266 nineeleven ( So ( we ( ( 've ( ( got ( ( a couple ) ( of aircraft ) ) ) ( ( up there ) ( that ( ( have ( those instructions ) ) ( at ( this ( present time ) ) ) ) ) ) ) ) ? ) ) ) ( ( At ( the ( present time ) ) ) ( , ( there ( ( ( ( ( were n't ) ( ( any aircraft ) ( in ( the air ) ) ) ) , ) right ) ? ) ) ) ) (ROOT (S (IN So) (NP (PRP we)) (VP (VBP 've) (VP (VBD got) (NP (NP (DT a) (NN couple)) (PP (IN of) (NP (NN aircraft)))) (ADVP (ADVP (RB up) (RB there)) (SBAR (WHNP (WDT that)) (S (VP (VBP have) (NP (DT those) (NNS instructions)) (PP (IN at) (NP (DT this) (JJ present) (NN time))))))))) (. ?))) (ROOT (S (PP (IN At) (NP (DT the) (JJ present) (NN time))) (, ,) (NP (EX there)) (VP (VBD were) (RB n't) (NP (NP (DT any) (NN aircraft)) (PP (IN in) (NP (DT the) (NN air)))) (, ,) (ADJP (JJ right))) (. ?))) So we've got a couple of aircraft up there that have those instructions at this present time? At the present time, there weren't any aircraft in the air, right? diff --git a/tests/data_for_tests/io/MNLI/train.tsv b/tests/data_for_tests/io/MNLI/train.tsv deleted file mode 100755 index 4ceebefd..00000000 --- a/tests/data_for_tests/io/MNLI/train.tsv +++ /dev/null @@ -1,7 +0,0 @@ -index promptID pairID genre sentence1_binary_parse sentence2_binary_parse sentence1_parse sentence2_parse sentence1 sentence2 label1 gold_label -0 31193 31193n government ( ( Conceptually ( cream skimming ) ) ( ( has ( ( ( two ( basic dimensions ) ) - ) ( ( product and ) geography ) ) ) . ) ) ( ( ( Product and ) geography ) ( ( are ( what ( make ( cream ( skimming work ) ) ) ) ) . ) ) (ROOT (S (NP (JJ Conceptually) (NN cream) (NN skimming)) (VP (VBZ has) (NP (NP (CD two) (JJ basic) (NNS dimensions)) (: -) (NP (NN product) (CC and) (NN geography)))) (. .))) (ROOT (S (NP (NN Product) (CC and) (NN geography)) (VP (VBP are) (SBAR (WHNP (WP what)) (S (VP (VBP make) (NP (NP (NN cream)) (VP (VBG skimming) (NP (NN work)))))))) (. .))) Conceptually cream skimming has two basic dimensions - product and geography. Product and geography are what make cream skimming work. neutral neutral -1 101457 101457e telephone ( you ( ( know ( during ( ( ( the season ) and ) ( i guess ) ) ) ) ( at ( at ( ( your level ) ( uh ( you ( ( ( lose them ) ( to ( the ( next level ) ) ) ) ( if ( ( if ( they ( decide ( to ( recall ( the ( the ( parent team ) ) ) ) ) ) ) ) ( ( the Braves ) ( decide ( to ( call ( to ( ( recall ( a guy ) ) ( from ( ( triple A ) ( ( ( then ( ( a ( double ( A guy ) ) ) ( ( goes up ) ( to ( replace him ) ) ) ) ) and ) ( ( a ( single ( A guy ) ) ) ( ( goes up ) ( to ( replace him ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ( You ( ( ( ( lose ( the things ) ) ( to ( the ( following level ) ) ) ) ( if ( ( the people ) recall ) ) ) . 
) ) (ROOT (S (NP (PRP you)) (VP (VBP know) (PP (IN during) (NP (NP (DT the) (NN season)) (CC and) (NP (FW i) (FW guess)))) (PP (IN at) (IN at) (NP (NP (PRP$ your) (NN level)) (SBAR (S (INTJ (UH uh)) (NP (PRP you)) (VP (VBP lose) (NP (PRP them)) (PP (TO to) (NP (DT the) (JJ next) (NN level))) (SBAR (IN if) (S (SBAR (IN if) (S (NP (PRP they)) (VP (VBP decide) (S (VP (TO to) (VP (VB recall) (NP (DT the) (DT the) (NN parent) (NN team)))))))) (NP (DT the) (NNPS Braves)) (VP (VBP decide) (S (VP (TO to) (VP (VB call) (S (VP (TO to) (VP (VB recall) (NP (DT a) (NN guy)) (PP (IN from) (NP (NP (RB triple) (DT A)) (SBAR (S (S (ADVP (RB then)) (NP (DT a) (JJ double) (NNP A) (NN guy)) (VP (VBZ goes) (PRT (RP up)) (S (VP (TO to) (VP (VB replace) (NP (PRP him))))))) (CC and) (S (NP (DT a) (JJ single) (NNP A) (NN guy)) (VP (VBZ goes) (PRT (RP up)) (S (VP (TO to) (VP (VB replace) (NP (PRP him)))))))))))))))))))))))))))) (ROOT (S (NP (PRP You)) (VP (VBP lose) (NP (DT the) (NNS things)) (PP (TO to) (NP (DT the) (JJ following) (NN level))) (SBAR (IN if) (S (NP (DT the) (NNS people)) (VP (VBP recall))))) (. .))) you know during the season and i guess at at your level uh you lose them to the next level if if they decide to recall the the parent team the Braves decide to call to recall a guy from triple A then a double A guy goes up to replace him and a single A guy goes up to replace him You lose the things to the following level if the people recall. entailment entailment -2 134793 134793e fiction ( ( One ( of ( our number ) ) ) ( ( will ( ( ( carry out ) ( your instructions ) ) minutely ) ) . ) ) ( ( ( A member ) ( of ( my team ) ) ) ( ( will ( ( execute ( your orders ) ) ( with ( immense precision ) ) ) ) . ) ) (ROOT (S (NP (NP (CD One)) (PP (IN of) (NP (PRP$ our) (NN number)))) (VP (MD will) (VP (VB carry) (PRT (RP out)) (NP (PRP$ your) (NNS instructions)) (ADVP (RB minutely)))) (. .))) (ROOT (S (NP (NP (DT A) (NN member)) (PP (IN of) (NP (PRP$ my) (NN team)))) (VP (MD will) (VP (VB execute) (NP (PRP$ your) (NNS orders)) (PP (IN with) (NP (JJ immense) (NN precision))))) (. .))) One of our number will carry out your instructions minutely. A member of my team will execute your orders with immense precision. entailment entailment -3 37397 37397e fiction ( ( How ( ( ( do you ) know ) ? ) ) ( ( All this ) ( ( ( is ( their information ) ) again ) . ) ) ) ( ( This information ) ( ( belongs ( to them ) ) . ) ) (ROOT (S (SBARQ (WHADVP (WRB How)) (SQ (VBP do) (NP (PRP you)) (VP (VB know))) (. ?)) (NP (PDT All) (DT this)) (VP (VBZ is) (NP (PRP$ their) (NN information)) (ADVP (RB again))) (. .))) (ROOT (S (NP (DT This) (NN information)) (VP (VBZ belongs) (PP (TO to) (NP (PRP them)))) (. .))) How do you know? All this is their information again. This information belongs to them. entailment entailment -4 50563 50563n telephone ( yeah ( i ( ( tell you ) ( what ( ( though ( if ( you ( go ( price ( some ( of ( those ( tennis shoes ) ) ) ) ) ) ) ) ) ( i ( can ( see ( why ( now ( you ( know ( they ( 're ( ( getting up ) ( in ( the ( hundred ( dollar range ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ( ( The ( tennis shoes ) ) ( ( have ( ( a range ) ( of prices ) ) ) . 
) ) (ROOT (S (VP (VB yeah) (S (NP (FW i)) (VP (VB tell) (NP (PRP you)) (SBAR (WHNP (WP what)) (S (SBAR (RB though) (IN if) (S (NP (PRP you)) (VP (VBP go) (VP (VB price) (NP (NP (DT some)) (PP (IN of) (NP (DT those) (NN tennis) (NNS shoes)))))))) (NP (FW i)) (VP (MD can) (VP (VB see) (SBAR (WHADVP (WRB why)) (S (ADVP (RB now)) (NP (PRP you)) (VP (VBP know) (SBAR (S (NP (PRP they)) (VP (VBP 're) (VP (VBG getting) (PRT (RP up)) (PP (IN in) (NP (DT the) (CD hundred) (NN dollar) (NN range))))))))))))))))))) (ROOT (S (NP (DT The) (NN tennis) (NNS shoes)) (VP (VBP have) (NP (NP (DT a) (NN range)) (PP (IN of) (NP (NNS prices))))) (. .))) yeah i tell you what though if you go price some of those tennis shoes i can see why now you know they're getting up in the hundred dollar range The tennis shoes have a range of prices. neutral neutral -11 11877 11877c travel ( ( Fun ( for ( ( adults and ) children ) ) ) . ) ( ( Fun ( for ( only children ) ) ) . ) (ROOT (S (VP (VB Fun) (PP (IN for) (NP (NNS adults) (CC and) (NNS children)))) (. .))) (ROOT (S (VP (VB Fun) (PP (IN for) (NP (JJ only) (NNS children)))) (. .))) Fun for adults and children. Fun for only children. contradiction contradiction diff --git a/tests/data_for_tests/io/MSRA_NER/dev.conll b/tests/data_for_tests/io/MSRA_NER/dev.conll deleted file mode 100755 index 792efce8..00000000 --- a/tests/data_for_tests/io/MSRA_NER/dev.conll +++ /dev/null @@ -1,38 +0,0 @@ -把 O -欧 B-LOC - -美 B-LOC -、 O - -港 B-LOC -台 B-LOC - -流 O -行 O - -的 O -食 O - -品 O -类 O - -图 O -谱 O - -马 B-PER -列 B-PER - -主 O -义 O - -在 O -中 B-LOC - -国 I-LOC -传 O - -播 O -的 O - -历 O -史 O \ No newline at end of file diff --git a/tests/data_for_tests/io/MSRA_NER/test.conll b/tests/data_for_tests/io/MSRA_NER/test.conll deleted file mode 100755 index d611fcdd..00000000 --- a/tests/data_for_tests/io/MSRA_NER/test.conll +++ /dev/null @@ -1,31 +0,0 @@ -中 B-ORG -共 I-ORG - -中 I-ORG -央 I-ORG - -致 O -中 B-ORG - -国 I-ORG -致 I-ORG - -公 I-ORG -党 I-ORG - -十 I-ORG -一 I-ORG - -大 I-ORG -的 O - -贺 O -词 O - - -各 O - -位 O -代 O - -表 O diff --git a/tests/data_for_tests/io/MSRA_NER/train.conll b/tests/data_for_tests/io/MSRA_NER/train.conll deleted file mode 100755 index 9edd3aef..00000000 --- a/tests/data_for_tests/io/MSRA_NER/train.conll +++ /dev/null @@ -1,60 +0,0 @@ -是 O -我 O - -们 O -收 O - -藏 O -北 B-LOC - -京 I-LOC -史 O - -料 O - -调 O -查 O - -范 O -围 O - -涉 O -及 O - -故 B-LOC -宫 I-LOC - -、 O -历 B-LOC - -博 I-LOC -、 O - -古 B-ORG -研 I-ORG - -所 I-ORG -、 O - -北 B-LOC -大 I-LOC - -清 I-LOC -华 I-LOC - -图 I-LOC -书 I-LOC - -馆 I-LOC -. O - -夏 B-PER -财 I-PER - -兴 I-PER -家 O - -分 O -到 O - -田 O diff --git a/tests/data_for_tests/io/OntoNotes/dev.txt b/tests/data_for_tests/io/OntoNotes/dev.txt deleted file mode 100644 index e99207a1..00000000 --- a/tests/data_for_tests/io/OntoNotes/dev.txt +++ /dev/null @@ -1,10 +0,0 @@ - -bc/msnbc/00/msnbc_0000 0 0 Hi UH (TOP(FRAG(INTJ*) - - - Dan_Abrams * - -bc/msnbc/00/msnbc_0000 0 1 everyone NN (NP*) - - - Dan_Abrams * - -bc/msnbc/00/msnbc_0000 0 2 /. . 
*)) - - - Dan_Abrams * - - -bc/msnbc/00/msnbc_0000 0 0 first RB (TOP(S(ADVP* - - - Dan_Abrams * (ARGM-TMP* * * * - -bc/msnbc/00/msnbc_0000 0 1 up RB * - - - Dan_Abrams * * * * * - -bc/msnbc/00/msnbc_0000 0 2 on IN (PP* - - - Dan_Abrams * * * * * - -bc/msnbc/00/msnbc_0000 0 3 the DT (NP* - - - Dan_Abrams * * * * * - -bc/msnbc/00/msnbc_0000 0 4 docket NN *)) docket - - Dan_Abrams * * * * * - diff --git a/tests/data_for_tests/io/OntoNotes/test.txt b/tests/data_for_tests/io/OntoNotes/test.txt deleted file mode 100644 index c94069e0..00000000 --- a/tests/data_for_tests/io/OntoNotes/test.txt +++ /dev/null @@ -1,10 +0,0 @@ - -bc/msnbc/00/msnbc_0007 0 0 Dealing VBG (TOP(VP* deal 01 - speaker_1 * (V*) - -bc/msnbc/00/msnbc_0007 0 1 with IN (PP* - - - speaker_1 * (ARG1* - -bc/msnbc/00/msnbc_0007 0 2 serial JJ (NP(NP* - - - speaker_1 * * (156 -bc/msnbc/00/msnbc_0007 0 3 crimes NNS *) crime - 1 speaker_1 * * 156) -bc/msnbc/00/msnbc_0007 0 4 per FW (ADVP* - - - speaker_1 * * - -bc/msnbc/00/msnbc_0007 0 5 se FW *))) - - - speaker_1 * *) - -bc/msnbc/00/msnbc_0007 0 6 /. . *)) - - - speaker_1 * * - - -bc/msnbc/00/msnbc_0007 0 0 We PRP (TOP(S(NP*) - - - speaker_1 * (ARG0*) * (90) diff --git a/tests/data_for_tests/io/OntoNotes/train.txt b/tests/data_for_tests/io/OntoNotes/train.txt deleted file mode 100644 index 36f14c73..00000000 --- a/tests/data_for_tests/io/OntoNotes/train.txt +++ /dev/null @@ -1,50 +0,0 @@ - -bc/msnbc/00/msnbc_0003 0 0 The DT (TOP(S(NP* - - - Chris_Matthews * * (ARG1* * * * * - -bc/msnbc/00/msnbc_0003 0 1 move NN *) move 02 2 Chris_Matthews * (V*) *) * * * * - -bc/msnbc/00/msnbc_0003 0 2 comes VBZ (VP* come 03 2 Chris_Matthews * * (V*) * * * * - -bc/msnbc/00/msnbc_0003 0 3 a DT (SBAR(NP* - - - Chris_Matthews (DATE* * (ARGM-TMP* * * * * - -bc/msnbc/00/msnbc_0003 0 4 month NN *) month - 2 Chris_Matthews *) * * * * * * - -bc/msnbc/00/msnbc_0003 0 5 before IN * - - - Chris_Matthews * * * * * * * - -bc/msnbc/00/msnbc_0003 0 6 the DT (S(NP* - - - Chris_Matthews * * * * (ARG1* (ARG0* * - -bc/msnbc/00/msnbc_0003 0 7 Senate NNP *) - - - Chris_Matthews (ORG) * * * *) *) * - -bc/msnbc/00/msnbc_0003 0 8 is VBZ (VP* be 03 - Chris_Matthews * * * (V*) * * * - -bc/msnbc/00/msnbc_0003 0 9 scheduled VBN (VP* schedule 01 - Chris_Matthews * * * * (V*) * * - -bc/msnbc/00/msnbc_0003 0 10 to TO (S(VP* - - - Chris_Matthews * * * * (ARG2* * * - -bc/msnbc/00/msnbc_0003 0 11 hold VB (VP* hold 04 8 Chris_Matthews * * * * * (V*) * - -bc/msnbc/00/msnbc_0003 0 12 confirmation NN (NP(NP* - - - Chris_Matthews * * * * * (ARG1* (ARG2*) - -bc/msnbc/00/msnbc_0003 0 13 hearings NNS *) hearing 01 1 Chris_Matthews * * * * * * (V*) - -bc/msnbc/00/msnbc_0003 0 14 on IN (PP* - - - Chris_Matthews * * * * * * (ARG1* - -bc/msnbc/00/msnbc_0003 0 15 President NNP (NP(NP(NP* - - - Chris_Matthews * * * * * * * (194 -bc/msnbc/00/msnbc_0003 0 16 Bush NNP * - - - Chris_Matthews (PERSON) * * * * * * - -bc/msnbc/00/msnbc_0003 0 17 's POS *) - - - Chris_Matthews * * * * * * * 194) -bc/msnbc/00/msnbc_0003 0 18 Supreme NNP (NML* - - - Chris_Matthews (ORG* * * * * * * - -bc/msnbc/00/msnbc_0003 0 19 Court NNP *) - - - Chris_Matthews *) * * * * * * - -bc/msnbc/00/msnbc_0003 0 20 nominee NN *) - - - Chris_Matthews * * * * * * * - -bc/msnbc/00/msnbc_0003 0 21 John NNP (NP* - - - Chris_Matthews (PERSON* * * * * * * - -bc/msnbc/00/msnbc_0003 0 22 Roberts NNP *)))))))))))) - - - Chris_Matthews *) * *) * *) *) *) - -bc/msnbc/00/msnbc_0003 0 23 /. . 
*)) - - - Chris_Matthews * * * * * * * - - -bc/msnbc/00/msnbc_0003 0 0 Senator NNP (TOP(S(NP(NP* - - - Chris_Matthews * (ARG1* * * (162 -bc/msnbc/00/msnbc_0003 0 1 Chris NNP * - - - Chris_Matthews (PERSON* * * * - -bc/msnbc/00/msnbc_0003 0 2 Dodd NNP *) - - - Chris_Matthews *) * * * - -bc/msnbc/00/msnbc_0003 0 3 of IN (PP* - - - Chris_Matthews * * * * - -bc/msnbc/00/msnbc_0003 0 4 Connecticut NNP (NP*))) - - - Chris_Matthews (GPE) *) * * 162) -bc/msnbc/00/msnbc_0003 0 5 was VBD (VP* be 01 1 Chris_Matthews * (V*) * * - -bc/msnbc/00/msnbc_0003 0 6 among IN (PP* - - - Chris_Matthews * (ARG2* * * - -bc/msnbc/00/msnbc_0003 0 7 those DT (NP(NP* - - - Chris_Matthews * * (ARG0* * - -bc/msnbc/00/msnbc_0003 0 8 Democrats NNPS *) - - - Chris_Matthews (NORP) * *) * - -bc/msnbc/00/msnbc_0003 0 9 who WP (SBAR(WHNP*) - - - Chris_Matthews * * (R-ARG0*) * - -bc/msnbc/00/msnbc_0003 0 10 spoke VBD (S(VP* speak 03 5 Chris_Matthews * * (V*) * - -bc/msnbc/00/msnbc_0003 0 11 out RP (PRT*) - - - Chris_Matthews * * * * - -bc/msnbc/00/msnbc_0003 0 12 against IN (PP* - - - Chris_Matthews * * (ARG1* * - -bc/msnbc/00/msnbc_0003 0 13 Bolton NNP (NP(NP* - - - Chris_Matthews (PERSON) * * (ARG1* (31|(130 -bc/msnbc/00/msnbc_0003 0 14 's POS *) - - - Chris_Matthews * * * *) 31) -bc/msnbc/00/msnbc_0003 0 15 appointment NN *)) appointment 01 1 Chris_Matthews * * *) (V*) 130) -bc/msnbc/00/msnbc_0003 0 16 today NN (NP*))))))) today - 2 Chris_Matthews (DATE) *) (ARGM-TMP*) * (121) -bc/msnbc/00/msnbc_0003 0 17 /. . *)) - - - Chris_Matthews * * * * - - -bc/msnbc/00/msnbc_0003 0 0 I PRP (TOP(S(NP*) - - - Christopher_Dodd * * (ARG0*) * (162) -bc/msnbc/00/msnbc_0003 0 1 just RB (ADVP*) - - - Christopher_Dodd * * (ARGM-ADV*) * - -bc/msnbc/00/msnbc_0003 0 2 do VBP (VP* do 01 - Christopher_Dodd * (V*) * * - -bc/msnbc/00/msnbc_0003 0 3 n't RB * - - - Christopher_Dodd * * (ARGM-NEG*) * - -bc/msnbc/00/msnbc_0003 0 4 think VB (VP* think 01 1 Christopher_Dodd * * (V*) * - diff --git a/tests/data_for_tests/io/QNLI/dev.tsv b/tests/data_for_tests/io/QNLI/dev.tsv deleted file mode 100755 index ac4ecabe..00000000 --- a/tests/data_for_tests/io/QNLI/dev.tsv +++ /dev/null @@ -1,6 +0,0 @@ -index question sentence label -0 What came into force after the new constitution was herald? As of that day, the new constitution heralding the Second Republic came into force. entailment -1 What is the first major city in the stream of the Rhine? The most important tributaries in this area are the Ill below of Strasbourg, the Neckar in Mannheim and the Main across from Mainz. not_entailment -2 What is the minimum required if you want to teach in Canada? In most provinces a second Bachelor's Degree such as a Bachelor of Education is required to become a qualified teacher. not_entailment -3 How was Temüjin kept imprisoned by the Tayichi'ud? The Tayichi'ud enslaved Temüjin (reportedly with a cangue, a sort of portable stocks), but with the help of a sympathetic guard, the father of Chilaun (who later became a general of Genghis Khan), he was able to escape from the ger (yurt) in the middle of the night by hiding in a river crevice.[citation needed] entailment -4 What did Herr Gott, dich loben wir become known as ? He paraphrased the Te Deum as "Herr Gott, dich loben wir" with a simplified form of the melody. 
not_entailment diff --git a/tests/data_for_tests/io/QNLI/test.tsv b/tests/data_for_tests/io/QNLI/test.tsv deleted file mode 100755 index 55bfbeaa..00000000 --- a/tests/data_for_tests/io/QNLI/test.tsv +++ /dev/null @@ -1,6 +0,0 @@ -index question sentence -0 What organization is devoted to Jihad against Israel? For some decades prior to the First Palestine Intifada in 1987, the Muslim Brotherhood in Palestine took a "quiescent" stance towards Israel, focusing on preaching, education and social services, and benefiting from Israel's "indulgence" to build up a network of mosques and charitable organizations. -1 In what century was the Yarrow-Schlick-Tweedy balancing system used? In the late 19th century, the Yarrow-Schlick-Tweedy balancing 'system' was used on some marine triple expansion engines. -2 The largest brand of what store in the UK is located in Kingston Park? Close to Newcastle, the largest indoor shopping centre in Europe, the MetroCentre, is located in Gateshead. -3 What does the IPCC rely on for research? In principle, this means that any significant new evidence or events that change our understanding of climate science between this deadline and publication of an IPCC report cannot be included. -4 What is the principle about relating spin and space variables? Thus in the case of two fermions there is a strictly negative correlation between spatial and spin variables, whereas for two bosons (e.g. quanta of electromagnetic waves, photons) the correlation is strictly positive. diff --git a/tests/data_for_tests/io/QNLI/train.tsv b/tests/data_for_tests/io/QNLI/train.tsv deleted file mode 100755 index fc0b966e..00000000 --- a/tests/data_for_tests/io/QNLI/train.tsv +++ /dev/null @@ -1,6 +0,0 @@ -index question sentence label -0 When did the third Digimon series begin? Unlike the two seasons before it and most of the seasons that followed, Digimon Tamers takes a darker and more realistic approach to its story featuring Digimon who do not reincarnate after their deaths and more complex character development in the original Japanese. not_entailment -1 Which missile batteries often have individual launchers several kilometres from one another? When MANPADS is operated by specialists, batteries may have several dozen teams deploying separately in small sections; self-propelled air defence guns may deploy in pairs. not_entailment -2 What two things does Popper argue Tarski's theory involves in an evaluation of truth? He bases this interpretation on the fact that examples such as the one described above refer to two things: assertions and the facts to which they refer. entailment -3 What is the name of the village 9 miles north of Calafat where the Ottoman forces attacked the Russians? On 31 December 1853, the Ottoman forces at Calafat moved against the Russian force at Chetatea or Cetate, a small village nine miles north of Calafat, and engaged them on 6 January 1854. entailment -4 What famous palace is located in London? London contains four World Heritage Sites: the Tower of London; Kew Gardens; the site comprising the Palace of Westminster, Westminster Abbey, and St Margaret's Church; and the historic settlement of Greenwich (in which the Royal Observatory, Greenwich marks the Prime Meridian, 0° longitude, and GMT). 
not_entailment diff --git a/tests/data_for_tests/io/Quora/dev.tsv b/tests/data_for_tests/io/Quora/dev.tsv deleted file mode 100644 index 8182f190..00000000 --- a/tests/data_for_tests/io/Quora/dev.tsv +++ /dev/null @@ -1,2 +0,0 @@ -1 How do I get funding for my web based startup idea ? How do I get seed funding pre product ? 327970 -0 Is honey a viable alternative to sugar for diabetics ? How would you compare the United States ' euthanasia laws to Denmark ? 90348 diff --git a/tests/data_for_tests/io/Quora/test.tsv b/tests/data_for_tests/io/Quora/test.tsv deleted file mode 100644 index 9582aa14..00000000 --- a/tests/data_for_tests/io/Quora/test.tsv +++ /dev/null @@ -1,2 +0,0 @@ -1 What should I do to avoid sleeping in class ? How do I not sleep in a boring class ? 50018 -0 Do women support each other more than men do ? Do women need more compliments than men ? 126924 diff --git a/tests/data_for_tests/io/Quora/train.tsv b/tests/data_for_tests/io/Quora/train.tsv deleted file mode 100644 index e82940c9..00000000 --- a/tests/data_for_tests/io/Quora/train.tsv +++ /dev/null @@ -1,2 +0,0 @@ -1 What is your review of Hidden Figures -LRB- 2016 movie -RRB- ? What are your impressions of Hidden Figures -LRB- 2017 movie -RRB- ? 11877 -0 Currently , all Supreme Court Justices come from very elite law schools , is it similar for the best lawyers in private practice ? What 's your type of jungle -LRB- concrete or nature -RRB- and why ? 221489 diff --git a/tests/data_for_tests/io/R52/dev.csv b/tests/data_for_tests/io/R52/dev.csv deleted file mode 100644 index 37eab6ad..00000000 --- a/tests/data_for_tests/io/R52/dev.csv +++ /dev/null @@ -1,6 +0,0 @@ -trade,canadians urge exemption u trade bill group canadian lawmakers ontario today asked u counterparts exempt canada mandatory trade retaliation provisions major trade bill considered u congress meeting northeast midwest coalition organization u legislators david cooke chairman ontario parliament select committee economic affairs said exemption would help trade relations trade legislation considered full house late april would require president reagan retaliate foreign unfair trade practices unless trade actions would harm u economy currently reagan reject trade sanctions grounds cooke member liberal party told u congressmen understand trade bill think concerns parts world would suggest best concerns canada consider country bill added canada united states largest trading partner two way trade billion dlrs according coalition u ran billion dlr deficit manufactured goods year compared billion dlr surplus services trade reuter -earn,american corp nd qtr feb shr profit one cts vs loss three cts net profit vs loss revs mln vs mln six months shr profit six cts vs loss six cts net profit mln vs loss mln revs mln vs mln note six months includes gain four cts change accounting principle reuter -earn,meyers co increases dividend qtly div eight cts vs seven cts prior payable may record april reuter -earn,meyers co year feb shr dlrs vs dlrs net mln dlrs vs mln revs mln vs mln note results reflect year month period company changed fiscal year end february march reuter -earn,kelly oil gas partners year dec shr cts vs cts net mln vs mln revs mln vs mln reuter -money-fx,japan seeks strengthen paris currency accord japan seek strengthen paris accord currency stability meeting group seven leading industrial nations tomorrow japanese officials said however officials japanese finance minister kiichi miyazawa asked identified would provide details wanted accord signed six leading 
industrial democracies february strengthened currency target zones reference ranges discussed g meeting scheduled tomorrow japanese officials said meeting held conjunction week international monetary fund world bank sessions currency pact need changing language used paris accord officials said miyazawa met u treasury secretary james baker early afternoon discussed dollar yen exchange rates officials said declined disclosed details discussion japanese officials also declined detail miyazawa baker discussed subject greater joint intervention currency markets stabilize dollar independent american intervention officials said money market action stabilize dollar benefit japan suffering sharp appreciation currency also benefit united states well u japan take steps boost domestic demand reduce trade surplus japan explain economic measures g officials said however miyazawa failed outline size japanese economic package meeting baker today japanese budget authorized parliament despite new fiscal year started april one officials said japan ruling liberal democratic party revealed economic package today calling billion yen additional spending reuter diff --git a/tests/data_for_tests/io/R52/test.csv b/tests/data_for_tests/io/R52/test.csv deleted file mode 100644 index 99497e79..00000000 --- a/tests/data_for_tests/io/R52/test.csv +++ /dev/null @@ -1,6 +0,0 @@ -pet-chem,italy eni invest venezuelan projects italy state owned ente nazionale idrocarburi eni invest mln dlrs two joint ventures coal petroleos de venezuela eni president franco said speaking news conference said two projects eventually bring mln dlrs annually foreign exchange venezuela help diversify country export base joint ventures principal instrument allowing resources industrialized countries developing world lead future growth said eni subsidiary join petrochemical subsidiary pdvsa building mln dlr plant produce gasoline additive used increase octane levels mt per year plant jose eastern venezuela fed butane produced pdvsa eastern complex eni owns pct joint venture company super c pct remaining three pct sold private investors production set begin third quarter officials said plant one saudi arabia another eni subsidiary agip sign letter intent caracas tomorrow enter partnership pdvsa mine coal deposits western state said feasibility studies still done project definitive accord slated august added agip atlantic richfield coal arco subsidiary formed consortium pct project whose total cost estimated mln dlrs company said agip invest pct mln dlrs project said reuter -earn,republicbank rpt brazil loans republicbank corp said placed mln dlrs intermediate term loans brazil non accrual basis march said reclassification reduce first quarter earnings mln dlrs taxes mln dlrs taxes brazil change position moratorium interest payments republicbank also said net income first quarter expected mln dlrs cts share fully diluted basis year ago first quarter company earned mln dlrs cts share company also said first quarter results expected include provision loan losses mln dlrs mln dlrs net loan charge offs mln dlrs said provision increase loan losses mln dlrs pct loans republicbank total assets billion dlrs announced december agreement interfirst corp form first republicbank corp merger approved regulatory agencies stockholders would create th largest bank holding company united states reuter -acq,amoskeag bank seek rehearing amoskeag bank shares inc portsmouth savings bank said file rehearing new hampshire supreme court march ruling state regulatory 
approval amoskeag acquisition portsmouth decision believe go well beyond affiliation amoskeag portsmouth savings bank said amoskeag chairman william transaction opposed group portsmouth investors wanted bank remain independent according press reports reuter -strategic-metal,doe recommends special unit uranium energy secretary john herrington told congress federally chartered corporation would best way manage operate government uranium program said letter congressmen unless program run energy department improved sales worth five billion dlrs could lost program annual commercial sales one billion dlrs holds pct free world market services department official said world market uranium power utilities increasingly competitive private entity could better tap administration plan spin department uranium operation line effort reduce federal government role areas feels private enterprise could efficient reuter -earn,declares stock dividend financial corp said declared stock dividend one class share two class shares held payable may shareholders record april reuter -acq,allegheny ag shareholders file suit allegheny international inc agreed merge jointly formed first boston inc affiliate deal worth mn dlrs said shareholders preferred stock filed class action complaint company complaint alleges among things company board agreed pay first boston illegal seven mln dlr fee received higher offer company prior buyout suit fee allegheny ability attract offers take actions would benefit holders preferred stock complaint also alleges federal securities laws violations breach fiduciary duty suit requests injunction proceeding pending offer made sunter acquisition acquire allegheny sunter acquisition corp sunter holdings corp formed first boston allegheny allegheny said sunter concerns intend vigorously defend complaint charges complaints filed robert parties believed shares allegheny preferred stock reuter diff --git a/tests/data_for_tests/io/R52/train.csv b/tests/data_for_tests/io/R52/train.csv deleted file mode 100644 index 34af13dc..00000000 --- a/tests/data_for_tests/io/R52/train.csv +++ /dev/null @@ -1,6 +0,0 @@ -earn,convertible securities sets dividend convertible securities fund inc said board declared initial quarterly dividend three cents per share payable april shareholders record april said anticipates paying regular quarterly dividend company made initial public stock offering march five reuter -jobs,n z unemployment rate pct december quarter new zealand unemployment rate pct workforce quarter ended december unchanged revised pct preliminary pct previous quarter slightly pct year earlier quarter statistics department said department citing household labour force survey said statement number unemployed october december september quarter year earlier reuter -rubber,japan rubber stocks fall march japan rubber stocks fell tonnes march february march japan rubber trade association said stocks tonnes february year earlier comparisons march feb march crude rubber synthetic latex reuter -money-fx,south korean fixed month high bank korea said fixed dollar highest level since february set yesterday risen pct dollar far year rising pct reuter -copper,nippon mining lowers copper price nippon mining co ltd said lowered selling price electrolytic copper yen per tonne effective immediately reuter -ship,australian unions launch new south wales strikes australian trade unions said launched week long strikes industrial action new south wales nsw protest new laws would reduce injury compensation payments union 
sources said talks state government broke last night two sides scheduled meet later today attempt find compromise rail freight shipping cargo movements country state first affected union officials said almost every business sector hit unless quick settlement state government recently introduced new workers compensation act would cut cash benefits injured workers third act awaiting parliamentary ratification nsw state premier said workers compensation risen recent years proposed cuts would save hundreds mlns dollars year union officials said industrial action could spread states federal government also plans make sharp cuts workers compensation reuter diff --git a/tests/data_for_tests/io/R8/dev.csv b/tests/data_for_tests/io/R8/dev.csv deleted file mode 100644 index b7271c38..00000000 --- a/tests/data_for_tests/io/R8/dev.csv +++ /dev/null @@ -1,6 +0,0 @@ -acq,amoskeag bank seek amoskeag bank shares inc portsmouth savings bank said file new hampshire supreme court march ruling state regulatory approval amoskeag acquisition portsmouth decision believe go well beyond affiliation amoskeag portsmouth savings bank said amoskeag chairman william transaction opposed group portsmouth investors wanted bank remain independent according press reports reuter -earn,declares stock dividend financial corp said declared stock dividend one class share two class shares held payable may shareholders record april reuter -acq,allegheny ag shareholders file suit allegheny international inc agreed merge jointly formed first boston inc affiliate deal worth mn dlrs said shareholders preferred stock filed class action complaint company complaint alleges among things company board agreed pay first boston illegal seven mln dlr fee received higher offer company prior buyout suit fee allegheny ability attract offers take actions would benefit holders preferred stock complaint also alleges federal securities laws violations fiduciary duty suit requests injunction proceeding pending offer made sunter acquisition acquire allegheny sunter acquisition corp sunter holdings corp formed first boston allegheny allegheny said sunter concerns intend vigorously defend complaint charges complaints filed robert parties believed shares allegheny preferred stock reuter -trade,canadians urge exemption u trade bill group canadian lawmakers ontario today asked u exempt canada mandatory trade retaliation provisions major trade bill considered u congress meeting northeast midwest coalition organization u legislators david cooke chairman ontario parliament select committee economic affairs said exemption would help trade relations trade legislation considered full house late april would require president reagan retaliate foreign unfair trade practices unless trade actions would harm u economy currently reagan reject trade sanctions grounds cooke member liberal party told u congressmen understand trade bill think concerns parts world would suggest best concerns canada consider country bill added canada united states largest trading partner two way trade billion dlrs according coalition u ran billion dlr deficit manufactured goods year compared billion dlr surplus services trade reuter -earn,american corp nd qtr feb shr profit one cts vs loss three cts net profit vs loss revs mln vs mln six months shr profit six cts vs loss six cts net profit mln vs loss mln revs mln vs mln note six months includes gain four cts change accounting principle reuter -earn,meyers co increases dividend qtly div eight cts vs seven cts prior payable may record april 
reuter diff --git a/tests/data_for_tests/io/R8/test.csv b/tests/data_for_tests/io/R8/test.csv deleted file mode 100644 index 13225334..00000000 --- a/tests/data_for_tests/io/R8/test.csv +++ /dev/null @@ -1,6 +0,0 @@ -earn,technology inc nd qtr march shr profit eight cts vs loss dlrs net profit vs loss revs mln vs avg shrs vs six mths shr loss nine cts vs loss dlrs net loss vs loss revs mln vs mln avg shrs vs reuter -earn,nacco industries report nd qtr gain nacco industries inc said report gain second quarter mln dlrs dlrs share sale stock subsidiary nacco said north american coal corp unit received notice consolidation coal co unit du pont co dd exercise option buy stock mining co subsidiary north american coal stock north american coal receive mln dlrs mln paid closing april rest company said addition pay dividend north american coal mln dlrs retained earnings closing funds previously used finance mining operations consolidation coal got option group utilities received option nacco nacco reported earnings mln dlrs dlrs share last year second quarter generated mln dlrs net income equal cts share nacco total earnings dlrs share produced mln short tons mln tons produced north american coal nacco said reuter -earn,buffton post investigation charge buffton corp said conduct investigation plant designated site result charge six cts per share second quarter year ago second quarter buffton reported net income cts share dlrs sales mln dlrs study completed nine months determine action may required inc plant former owner split cost buffton said share cost dlrs reuter -acq,american dynamics sell pct stake american dynamics corp meridian reserve inc said signed definitive agreement meridian buy mln shares pct american dynamics common stock terms agreement santa calif based meridian said pay based american dynamics one mln dlrs cash notes five years shares common stock meridian said option issue additional shares common next two years payment certain notes meridian oil gas company whose operations primarily oklahoma said acquisition increase consolidated assets mln dlrs committed gas reserves mln dlrs discounted present value american dynamics engaged gas gathering transmission liquids also oklahoma companies said five plants miles transmission lines five oklahoma counties reuter -money-fx,ussr exchange rates soviet state bank effective april roubles per hundred unless stated u stg unch fin unch yen aus aus dlr unch pak unch ind unch unch one unch unch -earn,republicbank rpt brazil loans republicbank corp said placed mln dlrs intermediate term loans brazil non accrual basis march said reclassification reduce first quarter earnings mln dlrs taxes mln dlrs taxes brazil change position moratorium interest payments republicbank also said net income first quarter expected mln dlrs cts share fully diluted basis year ago first quarter company earned mln dlrs cts share company also said first quarter results expected include provision loan losses mln dlrs mln dlrs net loan charge offs mln dlrs said provision increase loan losses mln dlrs pct loans republicbank total assets billion dlrs announced december agreement interfirst corp form first republicbank corp merger approved regulatory agencies stockholders would create th largest bank holding company united states reuter diff --git a/tests/data_for_tests/io/R8/train.csv b/tests/data_for_tests/io/R8/train.csv deleted file mode 100644 index 77897bb9..00000000 --- a/tests/data_for_tests/io/R8/train.csv +++ /dev/null @@ -1,6 +0,0 @@ -earn,meyers co year feb shr 
dlrs vs dlrs net mln dlrs vs mln revs mln vs mln note results reflect year month period company changed fiscal year end february march reuter -earn,kelly oil gas partners year dec shr cts vs cts net mln vs mln revs mln vs mln reuter -money-fx,japan seeks strengthen paris currency accord japan seek strengthen paris accord currency stability meeting group seven leading industrial nations tomorrow japanese officials said however officials japanese finance minister kiichi miyazawa asked identified would provide details wanted accord signed six leading industrial democracies february strengthened currency target zones reference ranges discussed g meeting scheduled tomorrow japanese officials said meeting held conjunction week international monetary fund world bank sessions currency pact need changing language used paris accord officials said miyazawa met u treasury secretary james baker early afternoon discussed dollar yen exchange rates officials said declined disclosed details discussion japanese officials also declined detail miyazawa baker discussed subject greater joint intervention currency markets stabilize dollar independent american intervention officials said money market action stabilize dollar benefit japan suffering sharp appreciation currency also benefit united states well u japan take steps boost domestic demand reduce trade surplus japan explain economic measures g officials said however miyazawa failed outline size japanese economic package meeting baker today japanese budget authorized parliament despite new fiscal year started april one officials said japan ruling liberal democratic party revealed economic package today calling billion yen additional spending reuter -earn,convertible securities sets dividend convertible securities fund inc said board declared initial quarterly dividend three cents per share payable april shareholders record april said anticipates paying regular quarterly dividend company made initial public stock offering march five reuter -money-fx,south korean fixed month high bank korea said fixed dollar highest level since february set yesterday risen pct dollar far year rising pct reuter -ship,australian unions launch new south wales strikes australian trade unions said launched week long strikes industrial action new south wales nsw protest new laws would reduce injury compensation payments union sources said talks state government broke last night two sides scheduled meet later today attempt find compromise rail freight shipping cargo movements country state first affected union officials said almost every business sector hit unless quick settlement state government recently introduced new workers compensation act would cut cash benefits injured workers third act awaiting parliamentary nsw state premier said workers compensation risen recent years proposed cuts would save hundreds dollars year union officials said industrial action could spread states federal government also plans make sharp cuts workers compensation reuter diff --git a/tests/data_for_tests/io/RTE/dev.tsv b/tests/data_for_tests/io/RTE/dev.tsv deleted file mode 100644 index f8f72536..00000000 --- a/tests/data_for_tests/io/RTE/dev.tsv +++ /dev/null @@ -1,6 +0,0 @@ -index sentence1 sentence2 label -0 Dana Reeve, the widow of the actor Christopher Reeve, has died of lung cancer at age 44, according to the Christopher Reeve Foundation. Christopher Reeve had an accident. not_entailment -1 Yet, we now are discovering that antibiotics are losing their effectiveness against illness. 
Disease-causing bacteria are mutating faster than we can come up with new antibiotics to fight the new variations. Bacteria is winning the war against antibiotics. entailment -2 Cairo is now home to some 15 million people - a burgeoning population that produces approximately 10,000 tonnes of rubbish per day, putting an enormous strain on public services. In the past 10 years, the government has tried hard to encourage private investment in the refuse sector, but some estimate 4,000 tonnes of waste is left behind every day, festering in the heat as it waits for someone to clear it up. It is often the people in the poorest neighbourhoods that are worst affected. But in some areas they are fighting back. In Shubra, one of the northern districts of the city, the residents have taken to the streets armed with dustpans and brushes to clean up public areas which have been used as public dumps. 15 million tonnes of rubbish are produced daily in Cairo. not_entailment -3 The Amish community in Pennsylvania, which numbers about 55,000, lives an agrarian lifestyle, shunning technological advances like electricity and automobiles. And many say their insular lifestyle gives them a sense that they are protected from the violence of American society. But as residents gathered near the school, some wearing traditional garb and arriving in horse-drawn buggies, they said that sense of safety had been shattered. "If someone snaps and wants to do something stupid, there's no distance that's going to stop them," said Jake King, 56, an Amish lantern maker who knew several families whose children had been shot. Pennsylvania has the biggest Amish community in the U.S. not_entailment -4 Security forces were on high alert after an election campaign in which more than 1,000 people, including seven election candidates, have been killed. Security forces were on high alert after a campaign marred by violence. entailment diff --git a/tests/data_for_tests/io/RTE/test.tsv b/tests/data_for_tests/io/RTE/test.tsv deleted file mode 100644 index e52dfac4..00000000 --- a/tests/data_for_tests/io/RTE/test.tsv +++ /dev/null @@ -1,6 +0,0 @@ -index sentence1 sentence2 -0 Mangla was summoned after Madhumita's sister Nidhi Shukla, who was the first witness in the case. Shukla is related to Mangla. -1 Authorities in Brazil say that more than 200 people are being held hostage in a prison in the country's remote, Amazonian-jungle state of Rondonia. Authorities in Brazil hold 200 people as hostage. -2 A mercenary group faithful to the warmongering policy of former Somozist colonel Enrique Bermudez attacked an IFA truck belonging to the interior ministry at 0900 on 26 March in El Jicote, wounded and killed an interior ministry worker and wounded five others. An interior ministry worker was killed by a mercenary group. -3 The British ambassador to Egypt, Derek Plumbly, told Reuters on Monday that authorities had compiled the list of 10 based on lists from tour companies and from families whose relatives have not been in contact since the bombings. Derek Plumbly resides in Egypt. -4 Tibone estimated diamond production at four mines operated by Debswana -- Botswana's 50-50 joint venture with De Beers -- could reach 33 million carats this year. Botswana is a business partner of De Beers. 
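Editor's note: the QNLI and RTE fixtures removed above share the GLUE TSV layout — a tab-separated header row (`index`, then the two text columns, then a trailing `label` on train/dev splits), with test splits omitting the label column. The sketch below shows how such a fixture can be read; `read_glue_tsv` is a hypothetical helper for illustration only, not fastNLP's loader API.

```python
import csv

def read_glue_tsv(path, has_label=True):
    """Minimal reader for GLUE-style TSV fixtures (illustrative sketch,
    not fastNLP's loader API). Assumes a tab-separated header row;
    test splits carry no `label` column."""
    with open(path, encoding="utf-8") as f:
        # QUOTE_NONE: GLUE TSVs contain raw, unescaped quote characters,
        # so the file must not be parsed as quoted CSV.
        reader = csv.DictReader(f, delimiter="\t", quoting=csv.QUOTE_NONE)
        for row in reader:
            example = dict(row)
            if not has_label:
                example.pop("label", None)  # no gold label on test splits
            yield example

# e.g. for ex in read_glue_tsv("tests/data_for_tests/io/RTE/dev.tsv"):
#          print(ex["sentence1"], "->", ex["label"])
```

The `quoting=csv.QUOTE_NONE` choice is the one detail worth noting: treating these files as quoted CSV silently merges rows whenever a sentence contains an unmatched quote character.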
diff --git a/tests/data_for_tests/io/RTE/train.tsv b/tests/data_for_tests/io/RTE/train.tsv deleted file mode 100644 index 70e5414f..00000000 --- a/tests/data_for_tests/io/RTE/train.tsv +++ /dev/null @@ -1,6 +0,0 @@ -index sentence1 sentence2 label -0 No Weapons of Mass Destruction Found in Iraq Yet. Weapons of Mass Destruction Found in Iraq. not_entailment -1 A place of sorrow, after Pope John Paul II died, became a place of celebration, as Roman Catholic faithful gathered in downtown Chicago to mark the installation of new Pope Benedict XVI. Pope Benedict XVI is the new leader of the Roman Catholic Church. entailment -2 Herceptin was already approved to treat the sickest breast cancer patients, and the company said, Monday, it will discuss with federal regulators the possibility of prescribing the drug for more breast cancer patients. Herceptin can be used to treat breast cancer. entailment -3 Judie Vivian, chief executive at ProMedica, a medical service company that helps sustain the 2-year-old Vietnam Heart Institute in Ho Chi Minh City (formerly Saigon), said that so far about 1,500 children have received treatment. The previous name of Ho Chi Minh City was Saigon. entailment -4 A man is due in court later charged with the murder 26 years ago of a teenager whose case was the first to be featured on BBC One's Crimewatch. Colette Aram, 16, was walking to her boyfriend's house in Keyworth, Nottinghamshire, on 30 October 1983 when she disappeared. Her body was later found in a field close to her home. Paul Stewart Hutchinson, 50, has been charged with murder and is due before Nottingham magistrates later. Paul Stewart Hutchinson is accused of having stabbed a girl. not_entailment diff --git a/tests/data_for_tests/io/SNLI/snli_1.0_dev.jsonl b/tests/data_for_tests/io/SNLI/snli_1.0_dev.jsonl deleted file mode 100755 index 2d091c73..00000000 --- a/tests/data_for_tests/io/SNLI/snli_1.0_dev.jsonl +++ /dev/null @@ -1,5 +0,0 @@ -{"annotator_labels": ["neutral", "entailment", "neutral", "neutral", "neutral"], "captionID": "4705552913.jpg#2", "gold_label": "neutral", "pairID": "4705552913.jpg#2r1n", "sentence1": "Two women are embracing while holding to go packages.", "sentence1_binary_parse": "( ( Two women ) ( ( are ( embracing ( while ( holding ( to ( go packages ) ) ) ) ) ) . ) )", "sentence1_parse": "(ROOT (S (NP (CD Two) (NNS women)) (VP (VBP are) (VP (VBG embracing) (SBAR (IN while) (S (NP (VBG holding)) (VP (TO to) (VP (VB go) (NP (NNS packages)))))))) (. .)))", "sentence2": "The sisters are hugging goodbye while holding to go packages after just eating lunch.", "sentence2_binary_parse": "( ( The sisters ) ( ( are ( ( hugging goodbye ) ( while ( holding ( to ( ( go packages ) ( after ( just ( eating lunch ) ) ) ) ) ) ) ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (DT The) (NNS sisters)) (VP (VBP are) (VP (VBG hugging) (NP (UH goodbye)) (PP (IN while) (S (VP (VBG holding) (S (VP (TO to) (VP (VB go) (NP (NNS packages)) (PP (IN after) (S (ADVP (RB just)) (VP (VBG eating) (NP (NN lunch))))))))))))) (. .)))"} -{"annotator_labels": ["entailment", "entailment", "entailment", "entailment", "entailment"], "captionID": "4705552913.jpg#2", "gold_label": "entailment", "pairID": "4705552913.jpg#2r1e", "sentence1": "Two women are embracing while holding to go packages.", "sentence1_binary_parse": "( ( Two women ) ( ( are ( embracing ( while ( holding ( to ( go packages ) ) ) ) ) ) . 
) )", "sentence1_parse": "(ROOT (S (NP (CD Two) (NNS women)) (VP (VBP are) (VP (VBG embracing) (SBAR (IN while) (S (NP (VBG holding)) (VP (TO to) (VP (VB go) (NP (NNS packages)))))))) (. .)))", "sentence2": "Two woman are holding packages.", "sentence2_binary_parse": "( ( Two woman ) ( ( are ( holding packages ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (CD Two) (NN woman)) (VP (VBP are) (VP (VBG holding) (NP (NNS packages)))) (. .)))"} -{"annotator_labels": ["contradiction", "contradiction", "contradiction", "contradiction", "contradiction"], "captionID": "4705552913.jpg#2", "gold_label": "contradiction", "pairID": "4705552913.jpg#2r1c", "sentence1": "Two women are embracing while holding to go packages.", "sentence1_binary_parse": "( ( Two women ) ( ( are ( embracing ( while ( holding ( to ( go packages ) ) ) ) ) ) . ) )", "sentence1_parse": "(ROOT (S (NP (CD Two) (NNS women)) (VP (VBP are) (VP (VBG embracing) (SBAR (IN while) (S (NP (VBG holding)) (VP (TO to) (VP (VB go) (NP (NNS packages)))))))) (. .)))", "sentence2": "The men are fighting outside a deli.", "sentence2_binary_parse": "( ( The men ) ( ( are ( fighting ( outside ( a deli ) ) ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (DT The) (NNS men)) (VP (VBP are) (VP (VBG fighting) (PP (IN outside) (NP (DT a) (NNS deli))))) (. .)))"} -{"annotator_labels": ["entailment", "entailment", "entailment", "entailment", "entailment"], "captionID": "2407214681.jpg#0", "gold_label": "entailment", "pairID": "2407214681.jpg#0r1e", "sentence1": "Two young children in blue jerseys, one with the number 9 and one with the number 2 are standing on wooden steps in a bathroom and washing their hands in a sink.", "sentence1_binary_parse": "( ( ( Two ( young children ) ) ( in ( ( ( ( ( blue jerseys ) , ) ( one ( with ( the ( number 9 ) ) ) ) ) and ) ( one ( with ( the ( number 2 ) ) ) ) ) ) ) ( ( are ( ( ( standing ( on ( ( wooden steps ) ( in ( a bathroom ) ) ) ) ) and ) ( ( washing ( their hands ) ) ( in ( a sink ) ) ) ) ) . ) )", "sentence1_parse": "(ROOT (S (NP (NP (CD Two) (JJ young) (NNS children)) (PP (IN in) (NP (NP (JJ blue) (NNS jerseys)) (, ,) (NP (NP (CD one)) (PP (IN with) (NP (DT the) (NN number) (CD 9)))) (CC and) (NP (NP (CD one)) (PP (IN with) (NP (DT the) (NN number) (CD 2))))))) (VP (VBP are) (VP (VP (VBG standing) (PP (IN on) (NP (NP (JJ wooden) (NNS steps)) (PP (IN in) (NP (DT a) (NN bathroom)))))) (CC and) (VP (VBG washing) (NP (PRP$ their) (NNS hands)) (PP (IN in) (NP (DT a) (NN sink)))))) (. .)))", "sentence2": "Two kids in numbered jerseys wash their hands.", "sentence2_binary_parse": "( ( ( Two kids ) ( in ( numbered jerseys ) ) ) ( ( wash ( their hands ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (NP (CD Two) (NNS kids)) (PP (IN in) (NP (JJ numbered) (NNS jerseys)))) (VP (VBP wash) (NP (PRP$ their) (NNS hands))) (. .)))"} -{"annotator_labels": ["neutral", "neutral", "neutral", "entailment", "entailment"], "captionID": "2407214681.jpg#0", "gold_label": "neutral", "pairID": "2407214681.jpg#0r1n", "sentence1": "Two young children in blue jerseys, one with the number 9 and one with the number 2 are standing on wooden steps in a bathroom and washing their hands in a sink.", "sentence1_binary_parse": "( ( ( Two ( young children ) ) ( in ( ( ( ( ( blue jerseys ) , ) ( one ( with ( the ( number 9 ) ) ) ) ) and ) ( one ( with ( the ( number 2 ) ) ) ) ) ) ) ( ( are ( ( ( standing ( on ( ( wooden steps ) ( in ( a bathroom ) ) ) ) ) and ) ( ( washing ( their hands ) ) ( in ( a sink ) ) ) ) ) . 
) )", "sentence1_parse": "(ROOT (S (NP (NP (CD Two) (JJ young) (NNS children)) (PP (IN in) (NP (NP (JJ blue) (NNS jerseys)) (, ,) (NP (NP (CD one)) (PP (IN with) (NP (DT the) (NN number) (CD 9)))) (CC and) (NP (NP (CD one)) (PP (IN with) (NP (DT the) (NN number) (CD 2))))))) (VP (VBP are) (VP (VP (VBG standing) (PP (IN on) (NP (NP (JJ wooden) (NNS steps)) (PP (IN in) (NP (DT a) (NN bathroom)))))) (CC and) (VP (VBG washing) (NP (PRP$ their) (NNS hands)) (PP (IN in) (NP (DT a) (NN sink)))))) (. .)))", "sentence2": "Two kids at a ballgame wash their hands.", "sentence2_binary_parse": "( ( ( Two kids ) ( at ( a ballgame ) ) ) ( ( wash ( their hands ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (NP (CD Two) (NNS kids)) (PP (IN at) (NP (DT a) (NN ballgame)))) (VP (VBP wash) (NP (PRP$ their) (NNS hands))) (. .)))"} diff --git a/tests/data_for_tests/io/SNLI/snli_1.0_test.jsonl b/tests/data_for_tests/io/SNLI/snli_1.0_test.jsonl deleted file mode 100755 index 49d40720..00000000 --- a/tests/data_for_tests/io/SNLI/snli_1.0_test.jsonl +++ /dev/null @@ -1,5 +0,0 @@ -{"annotator_labels": ["neutral", "contradiction", "contradiction", "neutral", "neutral"], "captionID": "2677109430.jpg#1", "gold_label": "neutral", "pairID": "2677109430.jpg#1r1n", "sentence1": "This church choir sings to the masses as they sing joyous songs from the book at a church.", "sentence1_binary_parse": "( ( This ( church choir ) ) ( ( ( sings ( to ( the masses ) ) ) ( as ( they ( ( sing ( joyous songs ) ) ( from ( ( the book ) ( at ( a church ) ) ) ) ) ) ) ) . ) )", "sentence1_parse": "(ROOT (S (NP (DT This) (NN church) (NN choir)) (VP (VBZ sings) (PP (TO to) (NP (DT the) (NNS masses))) (SBAR (IN as) (S (NP (PRP they)) (VP (VBP sing) (NP (JJ joyous) (NNS songs)) (PP (IN from) (NP (NP (DT the) (NN book)) (PP (IN at) (NP (DT a) (NN church))))))))) (. .)))", "sentence2": "The church has cracks in the ceiling.", "sentence2_binary_parse": "( ( The church ) ( ( has ( cracks ( in ( the ceiling ) ) ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (DT The) (NN church)) (VP (VBZ has) (NP (NP (NNS cracks)) (PP (IN in) (NP (DT the) (NN ceiling))))) (. .)))"} -{"annotator_labels": ["entailment", "entailment", "entailment", "neutral", "entailment"], "captionID": "2677109430.jpg#1", "gold_label": "entailment", "pairID": "2677109430.jpg#1r1e", "sentence1": "This church choir sings to the masses as they sing joyous songs from the book at a church.", "sentence1_binary_parse": "( ( This ( church choir ) ) ( ( ( sings ( to ( the masses ) ) ) ( as ( they ( ( sing ( joyous songs ) ) ( from ( ( the book ) ( at ( a church ) ) ) ) ) ) ) ) . ) )", "sentence1_parse": "(ROOT (S (NP (DT This) (NN church) (NN choir)) (VP (VBZ sings) (PP (TO to) (NP (DT the) (NNS masses))) (SBAR (IN as) (S (NP (PRP they)) (VP (VBP sing) (NP (JJ joyous) (NNS songs)) (PP (IN from) (NP (NP (DT the) (NN book)) (PP (IN at) (NP (DT a) (NN church))))))))) (. .)))", "sentence2": "The church is filled with song.", "sentence2_binary_parse": "( ( The church ) ( ( is ( filled ( with song ) ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (DT The) (NN church)) (VP (VBZ is) (VP (VBN filled) (PP (IN with) (NP (NN song))))) (. 
.)))"} -{"annotator_labels": ["contradiction", "contradiction", "contradiction", "contradiction", "contradiction"], "captionID": "2677109430.jpg#1", "gold_label": "contradiction", "pairID": "2677109430.jpg#1r1c", "sentence1": "This church choir sings to the masses as they sing joyous songs from the book at a church.", "sentence1_binary_parse": "( ( This ( church choir ) ) ( ( ( sings ( to ( the masses ) ) ) ( as ( they ( ( sing ( joyous songs ) ) ( from ( ( the book ) ( at ( a church ) ) ) ) ) ) ) ) . ) )", "sentence1_parse": "(ROOT (S (NP (DT This) (NN church) (NN choir)) (VP (VBZ sings) (PP (TO to) (NP (DT the) (NNS masses))) (SBAR (IN as) (S (NP (PRP they)) (VP (VBP sing) (NP (JJ joyous) (NNS songs)) (PP (IN from) (NP (NP (DT the) (NN book)) (PP (IN at) (NP (DT a) (NN church))))))))) (. .)))", "sentence2": "A choir singing at a baseball game.", "sentence2_binary_parse": "( ( ( A choir ) ( singing ( at ( a ( baseball game ) ) ) ) ) . )", "sentence2_parse": "(ROOT (NP (NP (DT A) (NN choir)) (VP (VBG singing) (PP (IN at) (NP (DT a) (NN baseball) (NN game)))) (. .)))"} -{"annotator_labels": ["neutral", "neutral", "neutral", "neutral", "neutral"], "captionID": "6160193920.jpg#4", "gold_label": "neutral", "pairID": "6160193920.jpg#4r1n", "sentence1": "A woman with a green headscarf, blue shirt and a very big grin.", "sentence1_binary_parse": "( ( ( A woman ) ( with ( ( ( ( ( a ( green headscarf ) ) , ) ( blue shirt ) ) and ) ( a ( ( very big ) grin ) ) ) ) ) . )", "sentence1_parse": "(ROOT (NP (NP (DT A) (NN woman)) (PP (IN with) (NP (NP (DT a) (JJ green) (NN headscarf)) (, ,) (NP (JJ blue) (NN shirt)) (CC and) (NP (DT a) (ADJP (RB very) (JJ big)) (NN grin)))) (. .)))", "sentence2": "The woman is young.", "sentence2_binary_parse": "( ( The woman ) ( ( is young ) . ) )", "sentence2_parse": "(ROOT (S (NP (DT The) (NN woman)) (VP (VBZ is) (ADJP (JJ young))) (. .)))"} -{"annotator_labels": ["entailment", "entailment", "contradiction", "entailment", "neutral"], "captionID": "6160193920.jpg#4", "gold_label": "entailment", "pairID": "6160193920.jpg#4r1e", "sentence1": "A woman with a green headscarf, blue shirt and a very big grin.", "sentence1_binary_parse": "( ( ( A woman ) ( with ( ( ( ( ( a ( green headscarf ) ) , ) ( blue shirt ) ) and ) ( a ( ( very big ) grin ) ) ) ) ) . )", "sentence1_parse": "(ROOT (NP (NP (DT A) (NN woman)) (PP (IN with) (NP (NP (DT a) (JJ green) (NN headscarf)) (, ,) (NP (JJ blue) (NN shirt)) (CC and) (NP (DT a) (ADJP (RB very) (JJ big)) (NN grin)))) (. .)))", "sentence2": "The woman is very happy.", "sentence2_binary_parse": "( ( The woman ) ( ( is ( very happy ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (DT The) (NN woman)) (VP (VBZ is) (ADJP (RB very) (JJ happy))) (. .)))"} diff --git a/tests/data_for_tests/io/SNLI/snli_1.0_train.jsonl b/tests/data_for_tests/io/SNLI/snli_1.0_train.jsonl deleted file mode 100755 index 8be03c11..00000000 --- a/tests/data_for_tests/io/SNLI/snli_1.0_train.jsonl +++ /dev/null @@ -1,5 +0,0 @@ -{"annotator_labels": ["neutral"], "captionID": "3416050480.jpg#4", "gold_label": "neutral", "pairID": "3416050480.jpg#4r1n", "sentence1": "A person on a horse jumps over a broken down airplane.", "sentence1_binary_parse": "( ( ( A person ) ( on ( a horse ) ) ) ( ( jumps ( over ( a ( broken ( down airplane ) ) ) ) ) . ) )", "sentence1_parse": "(ROOT (S (NP (NP (DT A) (NN person)) (PP (IN on) (NP (DT a) (NN horse)))) (VP (VBZ jumps) (PP (IN over) (NP (DT a) (JJ broken) (JJ down) (NN airplane)))) (. 
.)))", "sentence2": "A person is training his horse for a competition.", "sentence2_binary_parse": "( ( A person ) ( ( is ( ( training ( his horse ) ) ( for ( a competition ) ) ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (DT A) (NN person)) (VP (VBZ is) (VP (VBG training) (NP (PRP$ his) (NN horse)) (PP (IN for) (NP (DT a) (NN competition))))) (. .)))"} -{"annotator_labels": ["contradiction"], "captionID": "3416050480.jpg#4", "gold_label": "contradiction", "pairID": "3416050480.jpg#4r1c", "sentence1": "A person on a horse jumps over a broken down airplane.", "sentence1_binary_parse": "( ( ( A person ) ( on ( a horse ) ) ) ( ( jumps ( over ( a ( broken ( down airplane ) ) ) ) ) . ) )", "sentence1_parse": "(ROOT (S (NP (NP (DT A) (NN person)) (PP (IN on) (NP (DT a) (NN horse)))) (VP (VBZ jumps) (PP (IN over) (NP (DT a) (JJ broken) (JJ down) (NN airplane)))) (. .)))", "sentence2": "A person is at a diner, ordering an omelette.", "sentence2_binary_parse": "( ( A person ) ( ( ( ( is ( at ( a diner ) ) ) , ) ( ordering ( an omelette ) ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (DT A) (NN person)) (VP (VBZ is) (PP (IN at) (NP (DT a) (NN diner))) (, ,) (S (VP (VBG ordering) (NP (DT an) (NN omelette))))) (. .)))"} -{"annotator_labels": ["entailment"], "captionID": "3416050480.jpg#4", "gold_label": "entailment", "pairID": "3416050480.jpg#4r1e", "sentence1": "A person on a horse jumps over a broken down airplane.", "sentence1_binary_parse": "( ( ( A person ) ( on ( a horse ) ) ) ( ( jumps ( over ( a ( broken ( down airplane ) ) ) ) ) . ) )", "sentence1_parse": "(ROOT (S (NP (NP (DT A) (NN person)) (PP (IN on) (NP (DT a) (NN horse)))) (VP (VBZ jumps) (PP (IN over) (NP (DT a) (JJ broken) (JJ down) (NN airplane)))) (. .)))", "sentence2": "A person is outdoors, on a horse.", "sentence2_binary_parse": "( ( A person ) ( ( ( ( is outdoors ) , ) ( on ( a horse ) ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (DT A) (NN person)) (VP (VBZ is) (ADVP (RB outdoors)) (, ,) (PP (IN on) (NP (DT a) (NN horse)))) (. 
.)))"} -{"annotator_labels": ["neutral"], "captionID": "2267923837.jpg#2", "gold_label": "neutral", "pairID": "2267923837.jpg#2r1n", "sentence1": "Children smiling and waving at camera", "sentence1_binary_parse": "( Children ( ( ( smiling and ) waving ) ( at camera ) ) )", "sentence1_parse": "(ROOT (NP (S (NP (NNP Children)) (VP (VBG smiling) (CC and) (VBG waving) (PP (IN at) (NP (NN camera)))))))", "sentence2": "They are smiling at their parents", "sentence2_binary_parse": "( They ( are ( smiling ( at ( their parents ) ) ) ) )", "sentence2_parse": "(ROOT (S (NP (PRP They)) (VP (VBP are) (VP (VBG smiling) (PP (IN at) (NP (PRP$ their) (NNS parents)))))))"} -{"annotator_labels": ["entailment"], "captionID": "2267923837.jpg#2", "gold_label": "entailment", "pairID": "2267923837.jpg#2r1e", "sentence1": "Children smiling and waving at camera", "sentence1_binary_parse": "( Children ( ( ( smiling and ) waving ) ( at camera ) ) )", "sentence1_parse": "(ROOT (NP (S (NP (NNP Children)) (VP (VBG smiling) (CC and) (VBG waving) (PP (IN at) (NP (NN camera)))))))", "sentence2": "There are children present", "sentence2_binary_parse": "( There ( ( are children ) present ) )", "sentence2_parse": "(ROOT (S (NP (EX There)) (VP (VBP are) (NP (NNS children)) (ADVP (RB present)))))"} diff --git a/tests/data_for_tests/io/SST-2/dev.tsv b/tests/data_for_tests/io/SST-2/dev.tsv deleted file mode 100755 index 3fec0fa6..00000000 --- a/tests/data_for_tests/io/SST-2/dev.tsv +++ /dev/null @@ -1,6 +0,0 @@ -sentence label -it 's a charming and often affecting journey . 1 -unflinchingly bleak and desperate 0 -allows us to hope that nolan is poised to embark a major career as a commercial yet inventive filmmaker . 1 -the acting , costumes , music , cinematography and sound are all astounding given the production 's austere locales . 1 -it 's slow -- very , very slow . 0 diff --git a/tests/data_for_tests/io/SST-2/test.tsv b/tests/data_for_tests/io/SST-2/test.tsv deleted file mode 100755 index 6ad46368..00000000 --- a/tests/data_for_tests/io/SST-2/test.tsv +++ /dev/null @@ -1,6 +0,0 @@ -index sentence -0 uneasy mishmash of styles and genres . -1 this film 's relationship to actual tension is the same as what christmas-tree flocking in a spray can is to actual snow : a poor -- if durable -- imitation . -2 by the end of no such thing the audience , like beatrice , has a watchful affection for the monster . -3 director rob marshall went out gunning to make a great one . -4 lathan and diggs have considerable personal charm , and their screen rapport makes the old story seem new . 
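Editor's note: the SST-2 fixtures removed above use two header layouts — `sentence<TAB>label` for train/dev, and `index<TAB>sentence` (no gold label) for test; in the fixture rows, `1` marks a positive review and `0` a negative one. A minimal sketch handling both layouts; `read_sst2` is a hypothetical helper named here for illustration, not fastNLP's SST-2 loader.

```python
def read_sst2(path):
    """Illustrative reader for SST-2-style TSV fixtures (sketch only,
    not fastNLP's loader). Dispatches on the header row: labeled
    train/dev files yield (sentence, label) pairs, the unlabeled test
    file yields (index, sentence) pairs."""
    with open(path, encoding="utf-8") as f:
        header = f.readline().rstrip("\n").split("\t")
        for line in f:
            fields = dict(zip(header, line.rstrip("\n").split("\t")))
            if "label" in fields:
                # 1 = positive, 0 = negative, as seen in the fixture rows.
                yield fields["sentence"], int(fields["label"])
            else:
                yield fields["index"], fields["sentence"]
```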
diff --git a/tests/data_for_tests/io/SST-2/train.tsv b/tests/data_for_tests/io/SST-2/train.tsv deleted file mode 100755 index 4d7ea56c..00000000 --- a/tests/data_for_tests/io/SST-2/train.tsv +++ /dev/null @@ -1,6 +0,0 @@ -sentence label -hide new secretions from the parental units 0 -contains no wit , only labored gags 0 -that loves its characters and communicates something rather beautiful about human nature 1 -remains utterly satisfied to remain the same throughout 0 -on the worst revenge-of-the-nerds clichés the filmmakers could dredge up 0 diff --git a/tests/data_for_tests/io/SST/dev.txt b/tests/data_for_tests/io/SST/dev.txt deleted file mode 100755 index 46fca6bf..00000000 --- a/tests/data_for_tests/io/SST/dev.txt +++ /dev/null @@ -1,6 +0,0 @@ -(3 (2 It) (4 (4 (2 's) (4 (3 (2 a) (4 (3 lovely) (2 film))) (3 (2 with) (4 (3 (3 lovely) (2 performances)) (2 (2 by) (2 (2 (2 Buy) (2 and)) (2 Accorsi))))))) (2 .))) -(2 (2 (1 No) (2 one)) (1 (1 (2 goes) (2 (1 (2 (2 unindicted) (2 here)) (2 ,)) (2 (2 which) (3 (2 (2 is) (2 probably)) (3 (2 for) (4 (2 the) (4 best))))))) (2 .))) -(3 (2 And) (4 (3 (2 if) (1 (2 you) (1 (2 (2 (2 're) (1 not)) (2 nearly)) (4 (3 (3 moved) (2 (2 to) (1 tears))) (2 (2 by) (2 (2 (2 a) (2 couple)) (2 (2 of) (2 scenes)))))))) (2 (2 ,) (2 (2 you) (2 (2 (2 've) (1 (2 got) (2 (3 (2 ice) (2 water)) (2 (2 in) (2 (2 your) (2 veins)))))) (2 .)))))) -(4 (4 (2 A) (4 (3 (3 warm) (2 ,)) (3 funny))) (3 (2 ,) (3 (4 (4 engaging) (2 film)) (2 .)))) -(4 (3 (2 Uses) (3 (3 (4 (3 sharp) (4 (3 (4 humor) (2 and)) (2 insight))) (2 (2 into) (3 (2 human) (2 nature)))) (2 (2 to) (2 (2 examine) (2 (2 class) (1 conflict)))))) (2 (2 ,) (2 (2 (2 adolescent) (2 (2 (2 yearning) (2 ,)) (3 (2 (2 the) (2 roots)) (3 (2 of) (2 (2 friendship) (2 (2 and) (2 (2 sexual) (2 identity)))))))) (2 .)))) -(2 (2 (2 Half) (1 (2 (2 (2 (2 (2 Submarine) (2 flick)) (2 ,)) (2 (2 Half) (2 (2 Ghost) (2 Story)))) (2 ,)) (2 (2 All) (2 (2 in) (2 (2 one) (2 criminally)))))) (1 (1 neglected) (2 film))) diff --git a/tests/data_for_tests/io/SST/test.txt b/tests/data_for_tests/io/SST/test.txt deleted file mode 100755 index ebf325d8..00000000 --- a/tests/data_for_tests/io/SST/test.txt +++ /dev/null @@ -1,6 +0,0 @@ -(2 (3 (3 Effective) (2 but)) (1 (1 too-tepid) (2 biopic))) -(3 (3 (2 If) (3 (2 you) (3 (2 sometimes) (2 (2 like) (3 (2 to) (3 (3 (2 go) (2 (2 to) (2 (2 the) (2 movies)))) (3 (2 to) (3 (2 have) (4 fun))))))))) (2 (2 ,) (2 (2 Wasabi) (3 (3 (2 is) (2 (2 a) (2 (3 good) (2 (2 place) (2 (2 to) (2 start)))))) (2 .))))) -(4 (4 (4 (3 (2 Emerges) (3 (2 as) (3 (2 something) (3 rare)))) (2 ,)) (4 (2 (2 an) (2 (2 issue) (2 movie))) (3 (2 that) (3 (3 (2 's) (4 (3 (3 (2 so) (4 honest)) (2 and)) (3 (2 keenly) (2 observed)))) (2 (2 that) (2 (2 it) (2 (1 (2 does) (2 n't)) (2 (2 feel) (2 (2 like) (2 one)))))))))) (2 .)) -(2 (2 (2 The) (2 film)) (3 (3 (3 (3 provides) (2 (2 some) (3 (4 great) (2 insight)))) (3 (2 into) (3 (2 (2 the) (2 (2 neurotic) (2 mindset))) (3 (2 of) (2 (2 (2 (2 (2 all) (2 comics)) (2 --)) (2 even)) (3 (2 those) (4 (2 who) (4 (2 have) (4 (2 reached) (4 (4 (2 the) (3 (2 absolute) (2 top))) (2 (2 of) (2 (2 the) (2 game))))))))))))) (2 .))) -(4 (4 (2 Offers) (3 (3 (2 that) (3 (3 rare) (2 combination))) (2 (2 of) (3 (3 (3 entertainment) (2 and)) (2 education))))) (2 .)) -(3 (2 Perhaps) (4 (2 (1 (1 no) (2 picture)) (2 (2 ever) (2 made))) (3 (3 (2 (2 has) (2 (2 more) (3 literally))) (3 (2 showed) (2 (2 that) (2 (1 (2 (2 the) (1 road)) (1 (2 to) (0 hell))) (3 (2 is) (3 (2 paved) (3 (2 with) (3 (3 good) (2 intentions))))))))) (2 
.)))) diff --git a/tests/data_for_tests/io/SST/train.txt b/tests/data_for_tests/io/SST/train.txt deleted file mode 100755 index d5296ab0..00000000 --- a/tests/data_for_tests/io/SST/train.txt +++ /dev/null @@ -1,6 +0,0 @@ -(3 (2 (2 The) (2 Rock)) (4 (3 (2 is) (4 (2 destined) (2 (2 (2 (2 (2 to) (2 (2 be) (2 (2 the) (2 (2 21st) (2 (2 (2 Century) (2 's)) (2 (3 new) (2 (2 ``) (2 Conan)))))))) (2 '')) (2 and)) (3 (2 that) (3 (2 he) (3 (2 's) (3 (2 going) (3 (2 to) (4 (3 (2 make) (3 (3 (2 a) (3 splash)) (2 (2 even) (3 greater)))) (2 (2 than) (2 (2 (2 (2 (1 (2 Arnold) (2 Schwarzenegger)) (2 ,)) (2 (2 Jean-Claud) (2 (2 Van) (2 Damme)))) (2 or)) (2 (2 Steven) (2 Segal))))))))))))) (2 .))) -(4 (4 (4 (2 The) (4 (3 gorgeously) (3 (2 elaborate) (2 continuation)))) (2 (2 (2 of) (2 ``)) (2 (2 The) (2 (2 (2 Lord) (2 (2 of) (2 (2 the) (2 Rings)))) (2 (2 '') (2 trilogy)))))) (2 (3 (2 (2 is) (2 (2 so) (2 huge))) (2 (2 that) (3 (2 (2 (2 a) (2 column)) (2 (2 of) (2 words))) (2 (2 (2 (2 can) (1 not)) (3 adequately)) (2 (2 describe) (2 (3 (2 (2 co-writer\/director) (2 (2 Peter) (3 (2 Jackson) (2 's)))) (3 (2 expanded) (2 vision))) (2 (2 of) (2 (2 (2 J.R.R.) (2 (2 Tolkien) (2 's))) (2 Middle-earth))))))))) (2 .))) -(3 (3 (2 (2 (2 (2 (2 Singer\/composer) (2 (2 Bryan) (2 Adams))) (2 (2 contributes) (2 (2 (2 a) (2 slew)) (2 (2 of) (2 songs))))) (2 (2 --) (2 (2 (2 (2 a) (2 (2 few) (3 potential))) (2 (2 (2 hits) (2 ,)) (2 (2 (2 a) (2 few)) (1 (1 (2 more) (1 (2 simply) (2 intrusive))) (2 (2 to) (2 (2 the) (2 story))))))) (2 --)))) (2 but)) (3 (4 (2 the) (3 (2 whole) (2 package))) (2 (3 certainly) (3 (2 captures) (2 (1 (2 the) (2 (2 (2 intended) (2 (2 ,) (2 (2 er) (2 ,)))) (3 spirit))) (2 (2 of) (2 (2 the) (2 piece)))))))) (2 .)) -(2 (2 (2 You) (2 (2 'd) (2 (2 think) (2 (2 by) (2 now))))) (2 (2 America) (2 (2 (2 would) (1 (2 have) (2 (2 (2 had) (1 (2 enough) (2 (2 of) (2 (2 plucky) (2 (2 British) (1 eccentrics)))))) (4 (2 with) (4 (3 hearts) (3 (2 of) (3 gold))))))) (2 .)))) -(3 (2 ``) (3 (2 Frailty) (4 (2 '') (3 (4 (3 (2 has) (3 (2 been) (3 (4 (3 (3 written) (3 (2 so) (3 well))) (2 ,)) (2 (2 (2 that) (2 even)) (1 (2 (2 a) (2 simple)) (1 (2 ``) (0 Goddammit))))))) (2 !)) (2 ''))))) -(4 (2 (2 Whether) (2 (2 (2 (2 or) (1 not)) (3 (2 you) (2 (2 're) (3 (3 enlightened) (2 (2 by) (2 (2 any) (2 (2 of) (2 (2 Derrida) (2 's))))))))) (2 (2 lectures) (2 (2 on) (2 (2 ``) (2 (2 (2 (2 (2 (2 the) (2 other)) (2 '')) (2 and)) (2 ``)) (2 (2 the) (2 self)))))))) (3 (2 ,) (3 (2 '') (3 (2 Derrida) (3 (3 (2 is) (4 (2 an) (4 (4 (2 undeniably) (3 (4 (3 fascinating) (2 and)) (4 playful))) (2 fellow)))) (2 .)))))) diff --git a/tests/data_for_tests/io/THUCNews/dev.txt b/tests/data_for_tests/io/THUCNews/dev.txt deleted file mode 100644 index e40ee4a0..00000000 --- a/tests/data_for_tests/io/THUCNews/dev.txt +++ /dev/null @@ -1,9 +0,0 @@ -体育 调查-您如何评价热火客场胜绿军总分3-1夺赛点?新浪体育讯四年了,终于赢球了,热火在凯尔特人的主场经过加时98-90艰难战胜对手,总比分3-1领先,詹姆斯拿下35分14个篮板,韦德28分9篮板,波什20分12个篮板。您如何评价这场比赛? 
-娱乐 盘点好莱坞明星新年目标 布兰妮迪亚兹在列(图)新年伊始,又是制定新一年目标的时候了。大到关注环保、寻找真爱,小到改掉坏毛病、改变生活习惯,这些都是美国演艺明星在2009年中的目标。●告别烟圈好莱坞女星卡梅隆·迪亚兹计划在新的一年戒烟,和她目标相同者还有《实习医生格蕾》中的凯瑟琳·海格尔及《飞跃贝弗利》中的布莱恩·奥斯汀·格林。格林说:“每年我似乎都说要戒烟,看看今年行不行吧。”●不咬指甲女歌手布兰妮( 听歌)希望自己“改掉咬手指甲的毛病”。此外,她还表示:“我希望自己不再焦虑,以前的我无时无刻不在焦虑中,我要学会让自己幸福。”●寻觅真爱凭借《灵魂歌王》一片夺得2005年奥斯卡()奖的杰米·福克斯希望自己能在2009年找到真爱。●回归平静去年刚刚与男友分手的影星安妮·海瑟薇则希望过上平静的生活。●享受滑雪因出演《灵异第六感》而一举成名的影星黑利·乔尔·奥斯门特的最大愿望就是重拾自己滑雪的爱好,并从美国犹他州的某座高山上直冲而下。●致力环保曾主演《异形》和《冰风暴》等片的女演员西戈尼·威弗表示要为环保事业贡献力量。她说:“我不再使用塑料袋,手头现有的这些我也要循环使用。”●亲近素食《绝望主妇》中的伊娃·朗格利亚的目标是努力尝试吃素。●活络筋骨热门电视剧《汉娜·蒙塔娜》的主角麦莉·赛勒斯关心的问题则是“多做运动”。●回馈世界要说计划最为抽象的当数帕丽斯·希尔顿,她说:“我已经长大了,成熟了,我要怀着一颗感恩的心,开始回馈世界。”●计划“计划”1983年出演《战争游戏》的马修·布罗德里克的新年计划最别具一格,他的计划就是在2009年“拟订计划”。○据新华社 -家居 蓝景丽家尹勃乐居思路清晰 创新开拓(图)     新浪家居谢娟讯  10月16日,易居中国与新浪合资公司中国房产信息集团(简称CRIC)在美国纳斯达克成功上市。此消息一出,家居业界大腕在分享喜悦的同时,纷纷来电来函,向中国房产信息集团成功登陆纳斯达克表示祝贺,同时对CRIC在未来发展提出了中肯的建议和期待。新浪家居电话连线业内数位大腕,倾听他们对此事的看法,以及对中国房产信息集团上市寄语。【CRIC(中国房产信息集团)纳斯达克挂牌上市】       采访嘉宾:蓝景丽家总经理 尹勃         新浪家居:您好,尹总,我是新浪乐居家居频道编辑谢娟,感谢您接受本次访谈。   尹勃:您好。       新浪家居:北京时间2009年10月16日,易居中国与新浪合资公司中国房产信息集团在美国纳斯达克成功上市融资2亿美元。您是否知道此事?您对此有怎样的看法?       尹勃:刚刚知道!对家居很好的促进作用,希望能够加大北京市场支持力度,给予北京市场更高的重视。   新浪家居:感谢您的肯定。同时也希望您能给予建设性的意见。       尹勃:在罗总的带领下做的比较有声势,目前的思路更清晰。希望乐居做到较其他媒体更有高度,活动更有所创新。   新浪家居:您有怎样的祝语?             尹勃:祝新浪乐居越办越好,带动北京家居市场更上一层楼!      【嘉宾简介】       尹勃:(蓝景丽家总经理 北京市建筑装饰协会家装委员会副会长 北京市场协会家居分会副会长 北京家具协会常务理事 中国建材市场协会理事会副理事长)家居流通卖场一路走来,从昔日倒爷式的地摊、棚户到今天品牌型的综合、主题式购物广场,经历了多少时代的洗礼。尹勃作为这个行业中翘楚企业的负责人,见证了整个家具行业的变迁。名字后面这一连串的职务介绍足以说明他在整个行业中举足轻重的影响力,也更加肯定了他对“蓝景丽家”这个行业航母的巨大贡献。      【推荐阅读】        蓝景丽家十一精彩促销撼京城       百城万店无假货蓝景丽家启动       乐居装修日首战告捷 蓝景丽家销售额逆势暴涨       【媒体声音】      中国证券报:新浪易居合资公司CRIC登陆纳市       上证报:新浪易居合资公司CRIC逆市登陆纳市       第一财经日报:CRIC上市首日市值20亿美元       新华网:新浪与易居合资公司CRIC登陆纳斯达克       专访丁祖昱:CRIC在做前人没有做过的事情       专访罗军:CRIC具有巨大的商业潜力       专访曹国伟:在某些垂直领域会做更多尝试 【更多】     上市背景资料:      美国东部时间10月16日(北京时间10月16日)消息,易居中国与新浪合资公司中国房产信息集团(以下简称CRIC)在美国纳斯达克挂牌上市,首日开盘价12.28美元,超出发行价0.28美元。CRIC为易居中国与新浪的合资公司,股票代码为CRIC,发行价12美元,共发行美国存托股票(ADS)1800万股,同时承销商有权在未来30天内,行使总额达到270万股的超额配售权,此次IPO共计募集资金约2.16亿美元。作为中国在美国的地产科技第一股,CRIC是中国最大的专业房地产信息服务公司,并且拥有同时覆盖线上线下的房地产综合信息和服务平台。CRIC的成功上市,也创造了两家在美国上市的中国公司,分拆各自极具成长力的业务后进行合并,并进行二次上市的先河。CRIC联席董事长、CEO周忻表示;“我们很高兴看到CRIC成功上市,此次IPO将确立CRIC作为中国房地产信息服务第一品牌的地位,并有利于CRIC继续推进国内最大和最先进的房地产信息系统建设,使CRIC成为同时覆盖线上和线下的强大中国房地产网络信息服务平台,为房地产开发商、供应商、专业机构以及个人用户提供多元化房地产信息服务。CRIC联席董事长、新浪CEO曹国伟表示:“CRIC的成功上市,是易居中国和新浪合作的重要一步,也是我们在垂直领域商业模式探索的有益尝试,我们很高兴有机会发挥双方的协同效应。而进一步拓展和深化互联网垂直领域的商机,建立公司在细分市场的核心竞争力并做大做强,这也是新浪未来长远战略的重要组成部分。     -房产 弘阳大厦骏馆开盘 首日热销1亿昨天,位于南京大桥北路69号的红太阳销售中心人头攒动,当天开盘的弘阳大厦·骏馆取得了开门红,由于产品品质高端、户型精致总价又低,吸引了一拨又一拨看房者,当天销售额突破了一个亿。弘阳大厦·骏馆位于南京市浦口区大桥北路西侧,紧邻已建成的旭日华庭金棕榈园区旁,用地总面积6万多平米,包括一个包含酒店公寓、商业及办公的综合楼,一个酒店式公寓以及8万平方米的居住建筑和15000平方米的商业。弘阳大厦作为这块地块中的综合楼,主楼高99.65米,共28层,是集办公、商业、餐饮、公寓为一体的泛配套复合多功能商住楼。此次推出的弘阳大厦·骏馆,是弘阳大厦其中5-22层的酒店式公寓,主力户型为41-75平米商住先锋小户型。由于项目地处桥北新城的核心位置,离市区仅一桥之隔,规划中的地铁与过江隧道近在咫尺,兼具成熟配套资源优势。公共交通也非常方便,131、132、鼓珍、鼓扬、汉江、中六、汉六等多条公交线路可以直达该项目。除了地处桥北核心地段,具备传统的生活多方面配套以外,弘阳大厦·骏馆还拥有同属弘阳集团旗下的华东MALL完美商业配套。 我要评论 -教育 名师解析标准读音在四级考试中的重要性对于中国学生而言,都知道口语和听力很重要,但就是怎么也不好过关,究其原因就是他们英语发音不标准。一、口语。一口标准而流利的口语可以立即提升你的形象,给人以很好的第一印象。举例1:汤姆汉克斯主演的电影《幸福终点站》中有一个情节,大家应该很熟悉:他将a man of mystery“一个神秘的人”读成了a man of misery“一个痛苦的人”,意思相差了十万八千里,自然造成理解障碍。举例2:中文中v和w没有任何区别,说“我wo”的时候,如果上齿咬着下唇的话,也无所谓,因为不会产生任何歧义。但是英文中不一样,这两个音区别很大。vine表示“葡萄藤”;而wine则表示“葡萄酒”。green wine表示“新酒”;而green vine则表示“绿色的葡萄藤”。读错了音意思差别可就大了去了。举例3:一位外国人在中国马路上迷了路,见到一位姑娘,立即冲上前去,说道:“我想吻(问)你...”吓得姑娘连忙跑掉,就是因为读音的问题,外国人在中国也会遭遇理解障碍。二、听力。听力在四级考试中占35%的份额,如果听力不如意的话,考试想要及格真的是很难。听力过程中学生可能会有以下几种体会:1. 根本听不清楚读音——因为不熟悉英文的读音规则;2. 听清了读音,但对应不出是哪个单词——词汇量不够,没有好好记单词;3. 
听清了读音,也知道是哪个单词,但忘了啥意思了——还是词汇量不够,对于单词不熟悉;4. 对于spot dictation题型而言,听清了,知道是哪个单词,但就是—写就出现拼写错误——还是词汇没记好。第一,注意单词的读音,英式的和美式的。如:It's very hot today. 中hot美语中几乎就读成了hut这个词的读音了。第二,句子一连读、失去爆破等,连单词的影子都找不到了。如:This-is-an ol(d) pi(c)ture-of-a bi(g) car。横线表示连读,连读起来都不知道到底是一个词还是几个词了,括号里是不发音的,所以这个句子一旦读出来就完全走了样了。但听力中这种现象确是很常见的。要想练习好听力,首先要练习好英文的读音,包括词和句的读音规则。尤其对于外地孩子来说,就更重要了。如湖南的孩子说“我来自湖南”,由于方言影响就成了“我来自弗兰”。而这些人都不认为自己的读音是错误的,所以他听别人这样说的时候也认为是正确的。总之,如果我们平时的读音是错误的话,当听到正确读音时反而会不知道是哪个词,所以要想加强听力,首先要加强自己的读音。(党敏) -时尚 组图:10款艳丽泳装熟女穿出少女情怀导语:时下的泳装注重层次和线条感的悠闲设计,流露出自然的气质。 简洁的色彩搭配,甜美感觉凸显少女情怀,抽象概念化的异域花卉,颜色和谐、明快,印花纱裙,感觉轻盈,细致有女人味。 -时政 台“中选会”称12月5日选举时程不变新华网消息 据台联合晚报报道,台“中选会”上午如期召开幕僚选务会议,仍按原定12月5日举办“三合一”选举时程进行相关作业规划。“中选会”将在9月4日发布选举公告。基于考量莫拉克风灾灾后重建,以及H1N1疫情发烧,有部分蓝绿政治人物倡议延后年底“三合一”选举。据了解,到目前为止,年底“三合一”选举的相关选务作业仍如期进行。“中选会”表示,“中选会”是选务机关,是否延选,仍须由政策决定,在政策未改变前,“中选会”将依既定时程,规划年底“三合一”选举的相关选务作业。 -游戏 《天问》国家系统神秘美丽女儿国初探传说在遥远的西域,有一个神秘美丽的国家,上至国王,下至百姓,全国居民都是美丽温婉的女性。唐僧四师徒一路西行,就是来到了这个风光如画的女性之国。粉色帷幔随风飘扬,阳光照耀着的粉色砖墙闪闪发亮;清澈的泉水边,风情万种的女子们悠闲地编制精美的地毯,蝴蝶在花香中起舞……西梁女国就是一位端坐西域的温柔而美丽的少女,带着神秘的微笑注视来来往往的游客。解阳山是全新的练级场景, 山上微风吹拂,仙鹤悠闲地梳理着翎羽,处处透露平和安逸的气氛。但是山顶一座简陋的道观,竟藏着不少金银财宝?西梁女国百姓最珍视的一口泉水,也隐藏在道观山之上,这里到底隐藏着什么秘密?在解阳山上有一个神秘的副本波月洞,里面溶岩密布,石柱高耸,组成了各种美妙的景观。然而,波月洞盘踞着以毒蝎精领导的一群女妖,这帮妖精已与女儿国争战多年。当群侠得知毒蝎精近来甚至企图绑架女儿国太子,以要挟国王就范时,不论是出于怜香惜玉,还是英雄救美,一场的激烈的战争终将不可避免的开始了…… -科技 五彩时尚MP3 三星U5仅售299元 三星YP-U5(2GB)共有蓝、粉、白、红、黑五种时尚漂亮颜色可供选择。色彩感很浓烈。三星YP-U5(2GB)的背面还提供了一个背夹,再加上五颜六色的款式,使它看上去很像一个美发卡。机身很小巧,三围尺寸只有25×88×11.8mm,重量也只有23g,完全可以随身携带。在机身正面可以看到一个OLED冷光屏,显示的字体比较清晰。三星YP-U5(2GB)可以支持mp3、wma、ogg、Flac音频格式文件播放,此外,它支持三星最新的DNSe代3代音效,5种音效,提供自动、正常、工作室、摇滚、节奏及布鲁斯、舞厅、音乐厅7种选择,也可以进行自定义,对EQ和3D进行调节,效果非常好。除了出色的音乐播放功能以外,三星YP-U5(2GB)还支持FM收音机、歌词显示、MIC录音等功能。编辑点评:U系列是三星主打平价市场的产品,主要针对学生、办公室一族。相信这款音质出众、色彩绚丽的时尚MP3,也将为学生和年轻白领一族的个性生活增添亮丽色彩。    三星YP-U5(2GB)      [参考价格] 299元    [联系方式] 13434155009      diff --git a/tests/data_for_tests/io/THUCNews/test.txt b/tests/data_for_tests/io/THUCNews/test.txt deleted file mode 100644 index 81d00e65..00000000 --- a/tests/data_for_tests/io/THUCNews/test.txt +++ /dev/null @@ -1,9 +0,0 @@ -体育 凯尔特人vs尼克斯前瞻III纽约背水战 甜瓜必杀令新浪体育讯北京时间4月23日上午7点,凯尔特人将迎移师纽约城,挑战尼克斯,这是两队首轮的第三次交锋。前两场比赛中,小斯和安东尼轮番打出现象级的表现,可惜都无法为尼克斯带来一场胜利。目前凯尔特人总比分2-0领先,对尼克斯而言,他们没有退路。“第三场在主场击败,这是一场必胜的战争,我们根本输不起,这是本赛季为止将要面临的最艰难的一场比赛。”安东尼说。然而运气却不在纽约这边,他们接连以小分差输掉两场,与此同时,比卢普斯和小斯又接连出现伤病,第三场比赛两人的状态仍旧未知,小斯缺席了球队的训练,他在第二场下半场因为背部痉挛休战,但小斯仍希望能够在第三场出战,比卢普斯则有膝伤在身,能否复出还要等赛前决定。第二场比赛中,比卢普斯休战,小斯下半场未打,比尔-沃克全场11投0中,但是尼克斯凭借安东尼的42分17个篮板6次助攻,顽强的将比赛拖到最后一秒,直到最后时刻杰弗里斯的传球被KG抢断,才遗憾落败。德安东尼说:“很遗憾他们两不能上场,但从积极方面看,下半场球队打出的顽强表现,让我们信心满满。”小斯在第一场拿到28分11个篮板,但是安东尼在那场饱受犯规困扰,18投5中只拿到15分,下半场11投1中,尼克斯最终85-87落败,纽约人相信,如果安东尼和小斯同时发挥,他们有很大机会扳倒绿巨人。“我想这是一种精神折磨,你知道自己打得有多努力,有多棒,但两次我们都距离胜利差之毫厘。”安东尼说。第三战将是尼克斯自从2004年4月25日以来,首次在麦迪逊广场花园首次举办季后赛,这座举世闻名的篮球麦加殿堂已有七年未曾染指季后赛。对凯尔特人而言,他们的进攻出现了不少问题,季后赛前两场分别是靠雷-阿伦和凯文-加内特的关键球才勉强击败对手。里弗斯表示,球队表现需要提高,奥尼尔第三场能否出战还是谜,雷-阿伦连续两场打出不俗表现,隆多则在第二场砍下30分7次助攻,他们将尼克斯的命中率限制到35.6%,但与此同时,他们也丢失了大量的防守篮板,上场比赛尼克斯抢下了20个进攻篮板,而凯尔特人只有9个。小斯曾在这轮系列赛中和格伦-戴维斯大打口水仗,此战重回纽约,尼克斯急需他的发挥,接下来就看小斯带伤出战,能为尼克斯提供多少支援了。两队预计首发:凯尔特人:隆多、阿伦、皮尔斯、加内特、小奥尼尔尼克斯:道格拉斯、菲尔德斯、图里亚夫、安东尼、小斯(木瓜丁) -娱乐 
独家探班李康生蔡明亮短片《自转》(组图)新浪娱乐讯蔡明亮(阿亮)导演、李康生(小康)演出的银幕组合让两人在国际影坛挣出一席地位,如今两人“角色互换”!李康生执导台湾公视《台北异想》影片中的短片──《自转》,请出已20年没站在镜头前的蔡明亮当演员,阿亮为了爱徒再次“下海”演戏,没想到自称对演员施以爱的教育的小康,拍第一场戏就让阿亮吃了18次NG,现场更放催泪音乐,让感情丰富的阿亮流下真情的眼泪。台湾公视的《台北异想》影片,概念将一天从清晨六点起分为八个时段,邀来李康生、郑芬芬、钮承泽、林靖杰等八位导演,拍摄八部十分钟短片,接力诠释24小时的台北故事。小康选了凌晨四时至六时的时段发挥,他说:“2006年,舞蹈家伍国柱、罗曼菲相继过世让我感触很深,蔡明亮拍摄电影《洞》时,罗曼菲担任舞蹈编排,她直率、认真的性格留给大家很深的印象。因此特别选择她凌晨四点多辞世的时段,拍摄《自转》,也希望将这部短片献给她。”蔡明亮自从20年前曾在电视单元剧中饰演乐团主唱后,即不再以演员身分现身萤光幕前,为了挺爱徒再站镜头前,阿亮坦言,剧中虽只需扮演自己,但被拍仍令他紧张,要不是近几年常受访,被媒体训练出减少对镜头的恐惧,不然他不会让自己名列演员名单中。被阿亮指导演戏惯了的小康,如何回过头来对恩师教戏?他虽说:“我让演员自由发挥,采取『爱的教育』!”但光是陆奕静炒咖啡豆,阿亮静坐咖啡厅一隅,这全剧第一个镜头就磨了十八次,现场播放雷光夏广播录音和林怀民舞作《挽歌》音乐,更催出阿亮的男儿泪,阿亮说:“我就是想到了罗曼菲,更感受到美好的事物都会消失,真想再看一次罗曼菲跳舞。”《自转》的最后一场戏,陆奕静衬着音乐转圈跳舞,阿亮也即兴起舞,但连两天熬夜赶戏体力透支,加上不停转圈,她拍到呕吐、阿亮则晕眩不止,小康却满意称赞:“这两人跳得不错嘛!”小康当导演,从第一场戏折腾演员到末场戏,堪称“有始有终”,蔡明亮笑说:“未来我还是选择继续当导演吧。”台湾特派记者郑伟柏/台北报导 声明:新浪网独家稿件,转载请注明出处。 -家居 打好算盘最省钱瓷砖选购法面对导购小姐的微笑更是心中打鼓:人家说的好像挺有道理,但会觉得说得越好,会不会上当啊,是不是有猫腻呢?本文从建筑卫生陶瓷角度来分析,其它建材选购原理也与之相差无几。瓷砖的选购很讲究,要知道瓷砖这玩意儿一旦铺上了要是再发现有问题,后果是很严重的!下面列出的几点问题是在装修前一定要想清楚的,这些问题往往决定了以后选择瓷砖的种类、规格、价位甚至家居的整体风格。1、到底铺什么?这个问题好像问得很白痴,但这却是最基本的,首先你得充分了解哪些空间适合用哪些瓷砖啊!其实这个问题的关键不是用什么铺地,而是各种材料该怎么搭配。比如:有些业主希望在客厅铺瓷砖,同时在卧室选择木地板,这样问题就产生了:如果客厅铺普通玻化砖,卧室铺强化复合地板,那么卧室与客厅就会存在3cm左右的高度差,这主要是由于强化地板下没有打龙骨造成的。那么是不是在卧室选择实木地板就行了呢?当然不是。通常实木地板由厂家安装都会使用3×2cm的龙骨,如果为了和客厅的瓷砖找平最好使用5×4cm规格的龙骨,但是各个地板厂商对于更换龙骨的服务条款可是不同的。所以要充分与业主沟通,毕竟我们的目的是要让业主满意,了解业主的最基本的要求,然后根据业主的原始思路,找出最合适的方案。如果业主希望选择地板与地砖混铺的方式,就一定要规划好,避免不必要的麻烦。下面介绍两种基本搭配方式:瓷砖+强化地板=铺地板的房间用水泥灰浆垫高3cm,瓷砖+实木地板=地板下采用5×4cm规格的龙骨。2、选择什么规格的地砖?是铺600的?800的?还是1000的或是其它规格的?这是一个问题!现在的地砖,尤其是客厅使用的地砖主要是500mm、600mm、 800mm和1000mm(即1米)等规格,其中使用最多的是600mm和800mm两种。那么该如何选择呢?建议根据铺贴的面积及家具的摆放进行选择。由于单位面积中600mm的砖比800mm的砖铺贴数量要多,所以视觉上能产生空间的扩张感,同时在铺贴边角时的废料率要低于800mm的砖,而空间大时铺800甚至1米规格的砖就显得大气。因此建议小于40平米的空间选择600mm规格的地砖;而大于40平米的空间则可以选择800mm或一米的地砖。值得注意的是,如果在房间中家具过多(如卧室),盖住大块地面时,最好也采用600mm的地砖。3、该铺怎样的砖?到底是选择铺怎样的砖呢?是仿古砖还是抛光砖?仿古砖自然、柔务,在复古风格、尤其是拼花上有着玻化砖无法比拟的优势。同时,由于表面釉层的保护,对于茶水、墨水甚至热烟头的抗污能力也优于玻化砖。但是玻化砖也并非一无是处。随着技术的发展,现在玻化砖表面玻化层的密实度、光洁度已经相当的高,不仅能够使居室显得更加亮堂,还决不会像釉面砖由于外力碰撞、摩擦产生釉面破损的现象。所以选择什么样的砖要根据你要体现的风格,要明亮、大气就选抛光砖,要自然、温馨就选仿古砖。建议居室空间、客厅如果采光相对有限选择玻化砖,而光线充足的客厅和和需防滑的厨房和卫生间地面,及阳台等可选择仿古砖或其它釉面砖。4、“微晶玉”、“微晶石”、“微晶钻”都是什么意思?很多人逛建材城最头疼的恐怕就是记录瓷砖的名字了。什么“微晶玉”、“微晶石”、“微晶钻”、“超炫石”、“聚晶玉”等等。其实大家根本没必要记住这些拗口的名字,它们描述的都是同一种东西——玻化砖,这些名字只是厂商为了区分产品的档次,进一步细化市场而使用的代号罢了。在选择时大家只要坚持自己的预算,尽量选择适合自己的产品就行了。微晶石表面很炫,但其硬度只有莫氏五度左右,不耐磨,不适于用在地面,比较适于用在外墙干挂。 -房产 迪拜危机启示录:空中楼阁迟早要倒塌美国拉斯维加斯,又一家奢侈至极的酒店在这个“罪恶之城”绽放。但此次,相较酒店豪华的各种天价服务和开幕典礼上的好莱坞群星璀璨外,似乎其幕后的主人更吸引人们的眼球--迪拜世界。仅仅一周前,迪拜世界这个名词牵动了世界每个角落的神经。11月25日,迪拜主权财富基金迪拜世界宣布,暂缓偿还债务。根据评级机构穆迪的估算,迪拜的债务预计接近1000亿美元。巨大的数额勾起了人们对去年雷曼兄弟倒闭以来那波汹涌澎湃的国际金融危机的回忆。汇丰、渣打、巴克莱、苏格兰皇家银行等在内的多家银行涉及在内。人们开始担心,我国是否也会因此受到波及。庆幸的是,国内几大商业银行随即申明表示,没有涉及迪拜世界、迪拜政府和其他相关迪拜主权基金及机构发行的债权。有所涉及的,比例也相当的小。记者致电多家研究所银行业分析师,均表示认为此事对国内银行业影响不大,目前没有特别关注。因此,公众的目光从银行投向了导致其债务根源的房地产业。迪拜世界的房产项目,现在已经成为了全世界最大的烂尾楼。而就在这债务问题凸显的时刻,其旗下的“重型”项目却闪亮登场。“城市中心”酒店的开幕,似乎使得地产行业最尴尬的一面展现在了公众眼中。反观我国的地产行业,近期拍卖地王频现,房屋交易价格再次飙升,种种迹象也让人们对其产生了许多担忧。有专家对记者表示,在高速成长时期,楼价和地价互相推动的背后,是资金的不断流入。在那些光鲜的大楼后被后默默支撑的是债券、贷款等各种负债工具。一个原本是沙漠中人口只有十几万的小城,在几乎没有任何实业的基础上,居然吸引了世界上各方的资金,建成了一个人口上百万的豪华都市。房地产市场的巨大利益诱惑在其中占据了重大的因素。不断高涨的楼市,加上免税的便利,使得国际游资疯狂涌入。在聚集了巨大资金后,其所投资的项目遍布世界,美国这次的拉斯维加斯“城市中心”项目,迪拜世界就砸了近50亿美元。这种推动与反推动作用,给予了人们一个璀璨的迪拜,但当问题暴露,留下的却是满目疮痍。“迪拜危机对我们而言更多的是警示作用。”中国社科院金融研究所中国经济评价中心主任刘煜辉在接受《证券日报》记者采访时如此表示。他认为,目前为止迪拜危机对我国银行业的影响不多,但由于有过全球金融危机的影响,心理上的波动是会有的。此外,刘煜辉还告诉记者,任何以过度负债支撑起来的价格上涨或资产泡沫都是需要高度警惕。因为一旦泡沫破裂,就会带来破坏性较强的连锁反应。相信通过这次迪拜危机的警示,国内更多的行业会关注本行业内的负债和泡沫,对于投机性行为和高风险项目将会更加冷静。我要评论 -教育 知名美国私立寄宿中学来华招生行程序号 学校 时间 地点 学校情况 1、北野山中学Northfield Mount Hermon School10月26日 星期三PM1:00 美丽园龙都美国教育部认可的示范型学校2、Cranbrook school10月27日 
星期四AM8:40-10:20美丽园龙都每年本校学生的AP考试成绩都位列于全国成绩最好的学校之中3、The Storm King School10月29日 星期六PM4:30上海南京西路1515号嘉里中心1809室纽约州一所私立男女混合精英寄宿中学4、Villanova Preparatory School10月30日 星期日PM1:00-4:00虹桥万豪酒店美国唯一一所的男女混合寄宿制天主教教会学校5、Wyoming Seminary Upper School11月1日 星期二AM10:00香格里拉美国著名的百年贵族名校,也是美国东北部最古老的中学及大学预科学校6、胡桃山音乐学校Walnut Hill School11月2日 星期三PM1:00浦东香格里拉美国最古老的艺术高中7、弗莱堡学校Fryeburg Academy11月3日 星期四PM2:00-6:00上海南京西路1515号嘉里中心1809室一所独特的提供寄宿和走读学习的学校8、St.Johnsbury Academy11月8日 星期二AM9:00-12:00上海南京西路1515号嘉里中心1809室美国中学中拥有最棒校园的男女合校寄宿学校9、波特茅斯教会学校Portsmouth Abbey School11月8日 星期二PM1:00-3:00北京朝阳区建外SOHO,A座9层全国首屈一指的天主教混合住宿学校10、波特茅斯教会学校Portsmouth Abbey School11月15日 星期三PM1:00-4:00上海南京西路1515号嘉里中心1809室全国首屈一指的天主教混合住宿学校11、库欣高中Cushing Academy11月第三周待定美国最悠久男女合校寄宿中学之一12、West NottinghamAcademy11月19日 星期六PM2:00上海南京西路1515号嘉里中心1809室美国最早的学校,245年历史13、格瑞尔女子中学The Grier School11月26日 星期六PM9:45明天广场万豪历史悠久的著名女子寄宿学校14、萨菲尔德学院Suffield Academy11月30日 星期三 待定有170多年历史,是一所男女同校的私立中学15、威利斯顿 • 诺塞普顿中学The Williston Northampton School12月1日 星期四PM2:00-4:00上海南京西路1515号嘉里中心1809室学校以其优质的教学质量而闻名16、菲利普斯埃克塞特Philips Exeter Academy12月2日星期五PM6:30-8:30北京建国饭店牡丹厅(北京建国门外大街5号)“美国高中的哈佛” 、全美国最好的私立寄宿制高中17、菲利普斯埃克塞特Philips Exeter Academy12月3日星期六PM2:30-4:30上海浦东香格里拉浦江楼2层青岛厅“美国高中的哈佛” 、全美国最好的私立寄宿制高中18、菲利普斯埃克塞特Philips Exeter Academy12月5日星期一PM6:30-8:30浙江图书馆1楼文澜厅(杭州西湖区曙光路73号)“美国高中的哈佛” 、全美国最好的私立寄宿制高中19、坎特伯雷中学Canterbury School12月5日  星期一AM9:00-12:00 待定走读与寄宿都有的男女合校20、西城中学/威斯顿中学Westtown School12月5日 星期一AM9:00待定一所拥有205年悠远传统的中学21菲利普斯埃克塞特Philips Exeter Academy12月6日 星期二PM6:30-8:30广州天河区林和中路6号海肮威斯汀酒店5楼蓝厅“美国高中的哈佛” 、全美国最好的私立寄宿制高中22菲利普斯埃克塞特Philips Exeter Academy12月7日 星期三PM6:30-8:30深圳格兰云天酒店26楼云河厅(福田区深南中路3024号)“美国高中的哈佛” 、全美国最好的私立寄宿制高中23Cheshire Academy12月18日 星期日待定美国最早的传统寄宿中学24The Governor’s Academy待定待定美国最古老的寄宿高中之一25Peddie School待定待定著名的具有悠久历史的男女混合寄宿学校26Westover School待定待定美国著名的大学预备女子私立寄宿中学27Rabun Gap-Nacoochee School待定待定一所6-12年级的大学预备住宿走读中学28Ben Lippen School待定待定一所为学生提供大学准备课程的教会学院29George Stevens Academy待定待定一所拥有200多年历史的学校 -时尚 组图:纽约2011时装周 博主编辑街拍自成风景导语:纽约2011春夏时装秀正在如火如荼地进行着,打开任何时尚网站,你都可以看到这RUNWAY秀的图片,所以我不想在这里赘述了,反而我觉得秀场外这些赶赴现场的模特们和时尚博主以及时尚编辑的街拍更有意思。 -时政 台当局开放大陆银联卡在台刷卡消费中国台湾网7月16日消息 据台湾《联合报》报道,台当局“金管会”昨天发布修正“两岸金融业务往来许可办法”,开放大陆银联卡在台刷卡消费。最快9月初大陆民众就可以持银联卡在台刷卡消费,将可提高大陆游客赴台观光、消费意愿,并为台湾每年新增1000亿元(新台币,下同)刷卡商机。岛内银行也将可办理相关收单业务,对收单银行的手续费年收益至少可多出20亿元的贡献。报道称,台当局“金管会银行局副局长”萧长瑞表示,办法发布生效后,“金管会”就可开始受理岛内收单银行、联合信用卡中心等申请,台湾的联合信用卡中心也要跟大陆银联公司签约,估计最快9月初银联卡就可进入台湾。大陆银联卡赴台使用研议多时,消算等技术层面问题一直待克服,昨天“金管会”正式发布相关规定开放银联卡赴台,也代表技术面问题都已解决。根据“金管会”昨天发布的两岸金融业务往来许可办法第二条及第七条之一修正案,明定岛内信用卡业务机构经主管机关许可者,可以与银联公司从事信用卡或转帐卡的业务往来。主要包括银联卡在岛内刷卡消费的收单业务,以及交易授权与清算业务等两项。至于岛内银行发行银联卡的发卡业务则未开放。(高大林) -游戏 腾讯手游在线 《幻想西游》勇创新高根据腾讯QQ游戏中心2009年11月26日显示的在线数据,由腾讯和广州银汉联合运营的《幻想西游》再创新高,同时在线达到54336!54336同时在线一举打破之前的在线记录,创造手机游戏在线新高,这是《幻想西游》的光荣,也是手机游戏界的光荣!罗马不是一天建成的,《幻想西游》运营三年以前,开发组一直注重提升游戏品质和馈玩家,做属于玩家自己的游戏。这次创造在线人数新高,就是对开发组最高的褒奖。11月期间,《幻想西游》举行了“美在西游”系列活动吸引了数千美女玩家报名,6万多玩家参与了本次活动,掀起了11月的活动高潮。11月25日感恩节,开发组成员更是身怀感恩之心,化身GM来到游戏中倾听玩家的心声,并且心甘情愿地被玩家击败后奉上了感恩节礼物。12月将进入“美在西游”决赛阶段,广州银汉笑迎八方客,热情地邀请来自全国各地的美女玩家和跨服帮战优秀代表共聚羊城,共叙三年幻想情,畅谈西游未来路。《幻想西游》是根据名著《西游记》改编的手机网络游戏,具有操作简洁,界面美观,互动性好,娱乐性强的特点,营造出一个充满梦幻的西游世界。进入游戏:手机访问 http://3g.qq.com,选择游戏-网游-幻想手机官网 http://wap.01234.com.cn,选择快速进入 -科技 配18-135mm镜头 佳能7D国庆带票促销中(中关村在线数码影像行情报道)佳能EOS-7D是一款拥有1800万像素成像能力,每秒钟8张连怕性能,并具备高清摄像功能的单反相机。这款单反相机于上周登陆中关村市场,是目前APS-C规格单反中的旗舰机型。今天笔者在市场上了解到,配备有18-135mm防抖镜头的7D套机,价格为13800元带发票。EOS 7D实现了在约1800万有效像素的高画质下,高达约8张/秒的连拍速度。并搭载了高速智能的自动对焦系统等众多新功能。EOS 7D不仅达到了约1800万的有效像素,还实现了低噪点的精细图像表现。其搭载的CMOS图像感应器是佳能自行研发生产的产品。在提高像素感光度的同时,对像素内的晶体管进行了改良实现了更高的S/N(信噪)比。7D的常用ISO感光度为100-6400,扩展ISO感光度最高为12800。图像信号传输是在将单通道序列读取高速化的同时,采用8通道进行高速读取。与EOS 
50D相比要快约1.3倍,实现了约8张/秒的高速连拍。另外,对更换镜头时以及反光镜、快门等动作时产生的感应器灰尘也采用了相应的综合除尘措施;同时还搭载了可从相机硬件和附带软件两方面进行除尘的“EOS综合除尘系统”,在除尘功能上考虑得十分周到。快门单元和机身盖采用了不易产生碎屑的特殊材料;即便是不小心进入了灰尘,也可以通过超声波使图像感应器最前面的低通滤镜产生振动将灰尘抖落。低通滤镜表面进行了氟涂层处理,不论是对难以脱落的具有较高粘度的灰尘还是潮湿的灰尘都有着很好的除尘效果。双DIGIC 4数字影像处理器实现了对通过8个通道从图像感应器中高速读取出的,具有约1800万像素的庞大数据的迅速且高精度处理。搭载了2个高性能数字影像处理器DIGIC 4,能够对各种数据进行并行处理,即使是约1800万有效像素也可以实现最高约8张/秒连拍的高速图像处理。EOS 7D搭载了多达19个的自动对焦点,并且提高了每个对焦点的对焦精度。19个对焦点全部采用对应F5.6光束的十字型自动对焦感应器。将用于检测纵向线条的横向线型自动对焦感应器与用于检测横向线条的纵向线型自动对焦感应器呈十字型排列,从而实现了很高的被摄体捕捉能力。中央对焦点在相对于F5.6光束十字型自动对焦感应器的斜方向上配置了对应F2.8光束精度更高的十字型自动对焦感应器。通过中央八向双十字自动对焦感应器的协同工作,实现了高速且高精度的合焦。追踪被摄体的人工智能伺服自动对焦功能也在EOS 7D上得到了大幅的进化。EOS 7D的光学取景器具有约100%的视野率和约1倍(100%)的放大倍率,同时具有29.4°的视角和22毫米的眼点,其光学性能在历代EOS单反相机中也名列前茅。通过视野率约100%的光学取景器观察到的范围与实际拍摄的范围基本一致,因此能够得到非常精确的构图。此外,EOS 7D还在光学取景器内搭载了具有背透型液晶面板的“智能信息显示光学取景器”,它能够在对焦屏上显示网格线和三维电子水准仪等内容。EOS 7D的机身外壳采用了重量轻,刚性高且具有电磁屏蔽效果的镁合金材料。表面涂层采用了与EOS数码单反相机中顶级的EOS-1D系列相同的涂层材料及工艺。此外,EOS 7D还具有防水滴防尘构造,镁合金的外部部件变为高精度接缝构造,电池仓、存储卡插槽盖以及各操作按钮周围等都采用了密封部件,来保护相机的内部。EOS 7D背面的液晶监视器采用了具有160°的广视角(上下左右方向)及高清晰的92万点新型液晶监视器——“3.0"清晰显示液晶监视器II型”,其内部构造也经过重新研发,采用了新技术。7D机身上分别设置了专用的“实时显示/短片拍摄开关 ”和相应的“开始/停止按钮 ”,并且短片拍摄时能够在手动模式下对曝光进行控制。此外,可实现每秒30/25/24帧,分辨率1920×1080像素的全高清短片拍摄,在使用高清画质(分辨率1280×720像素)及标清画质(分辨率640×480像素)时,能够以每秒60/50帧进行拍摄。编辑观点:佳能7D的出现,再一次丰富了E0S产品系列中APS-C规格单反的阵营。佳能也终于有了可以和尼康D300级别单反正面对抗的产品。而出色的性能表现,不论是摄影爱好者还是专业人士,都会对其青睐有加。而上市价格也比较合理,只是希望7D不要重蹈5D II缺货涨价的覆辙。 diff --git a/tests/data_for_tests/io/THUCNews/train.txt b/tests/data_for_tests/io/THUCNews/train.txt deleted file mode 100644 index 65ca8a36..00000000 --- a/tests/data_for_tests/io/THUCNews/train.txt +++ /dev/null @@ -1,9 +0,0 @@ -体育 火箭这一胜有更多意义 这是联盟最差击败联盟王者根据ESPN记者亨利-艾伯特的报道,对于一支NBA球队来说,在比赛最后24秒落后一两分或者和对方打成平局,这时候得分能力的高下就将决定最后的胜负。根据近五年来的统计,在这样的关键时刻下,联盟里最擅长得分的球队是黄蜂队,而最不擅长得分的球队则是火箭队。今天这两支球队狭路相逢,最后的24秒正是这样的情形。如果根据近5年火箭和黄蜂的表现来开,那火箭输定了。可是,奇迹出现了,火箭在距离比赛还有22秒的时候以88-87领先对手1分,但是他们并未停下得分的脚步,通过马丁和科特尼-李的三次罚球,他们最终让联盟最会把握最后时刻的王者球队黄蜂最终只是在临近终场的时候由大卫-韦斯特投进了无关紧要的一球,而以2分的优势胜出。一向不善于打关键球的火箭队今天却在最后时刻顶住了压力,力挽狂澜,这相当于火箭用自己最差的技能战胜了全联盟此项技能最强的球队。这和我们以往印象中的火箭截然不同。以往火箭总是在最后时刻无人挺身而出。然而马丁的出色发挥保证了火箭在最后时刻对对手篮筐的冲击力,他不断地抢断、造对手犯规,让黄蜂无法跟上火箭的得分脚步。在今天的比赛中,我们没有看到那支曾经缩手缩脚的球队,也许交易截止日期过了之后,所有的球员终于能安心稳定下来打球了吧。所以一度拥有巨大领先优势、穿着庆祝节日盛装队服的黄蜂最后俨然不敢接受这样的现实,我们至少从保罗的眼神中读出了这失望。所以,这场比赛的胜利对于火箭来说有着更深一层的意义。不论火箭是否已经达到脱胎换骨的境界,至少全明星后的四连胜对火箭冲击季后赛这个短期目标来说,是个极好的兆头。(大猩猩) -娱乐 《山楂树》电影比原著还干净 删减情节曝光(图)《山楂树之恋》小说有20万字,要将原著的全部内容压缩到一部110分钟的电影里,实属不易。事实上,电影里删掉了小说原著中的几场吻戏和激情戏的大部分内容,比小说原著还“干净”。张艺谋自己在说到改编的时候也表示,“其实原作中很多情节我都拍了,但是实在是太长了,我希望能将更多的笔墨放在老三和静秋身上,又能让故事平静地娓娓道来,所以剪掉了大半,后来还做了一些字幕将一些年代关系简化掉。 ”删除部分——长林喜欢静秋小说:静秋刚到生产队长家时,队长老婆希望把她说给自己的二儿子长林,而憨厚的长林也确实喜欢静秋。于是他偷偷地以自己的方式表达着他的爱,然而当他知道老三喜欢静秋时,也觉得自己配不上静秋,默默地就收回了自己的这份感情。影片:影片中这个分支被彻底删掉了,长林到静秋家送过一次核桃和冰糖,但都是老三让他去的。不过静秋在队长家吃饭时,队长一一介绍大哥二哥三哥的时候,长林突然间站起来的反常表现,还是可以看出他面对静秋时候的紧张。很显然,张艺谋其实拍了长林这段,但后来剪掉了。大量枝杈人物小说:为了让故事更丰满,小说中有很多配角在不同的阶段出现。例如,为了表现静秋被欺负,安排了王长生、万驼子这样的反面角色,也安排了成医生一家的出场,静秋对于白血病的一些知识都是从成医生那儿得来的。书中的静秋有个哥哥,为了能让哥哥顺利娶媳妇,这一家人也是做了不少牺牲和努力。影片:这些人物不复存在,张艺谋明确表示,为了有充分空间描述静秋和老三的爱情,不得不舍弃。老三的告别信小说:静秋无意中得知老三得了白血病。两人在医院度过了难忘的一夜,静秋向老三表示:“如果你死了,我也去死。 ”因此,老三选择了离开,并留下一封告别信,表示自己根本没得白血病,只是感冒了,而他不打算要静秋了。影片:老三早早就就澄清自己只是感冒,而之后又不告而别,令静秋既迷惑又伤心,那封告别信并没有出现。更多亲密片段小说:虽然号称“史上最干净的爱情”,小说中也有老三亲吻静秋的描写,包括二人在医院度过难忘一夜中“床戏”的描写。影片:张艺谋拍得比作者写得更干净,能算得上亲密的只有老三用军大衣拥静秋入怀,在医院难忘一夜里,老三和静秋手握着手和衣而眠。对此,张艺谋的解释是,对于影片来说,小说中某些场面还是较为“露骨”,毕竟要考虑到国内电影的审查制度,而且两张清纯的面庞经不起附加太多的“性”。作者有话——改编忠实度把握不好而小说《山楂树之恋》的作者艾米,在接受专访时曾表示,电影删掉的原著中的几场吻戏,没什么道理。《山楂树之恋》的主线就是静秋由惧怕“失足”到主动要求“失足”的转变过程,每场吻戏都是这个过程不可或缺的部分。如果去掉,就等于去掉了故事的主线,静秋后来的要求“失足”就会显得突兀。艾米同时指出:“我以为,这两位导演改编的忠实度把握得不好。仅从现在已经透露出的信息来看,就做了几个很没水平的改编。 ”记者 王琳娜 陈妍妮 -家居 物业交地产公司 以月租10万英镑放盘一年(图)   
丹尼尔明年9月担纲演百老汇剧《恋马狂》时,正好方便落脚,但他似乎并非如此打算,因为他已把物业交地产公司,以月租10万英镑(150万人民币)放盘一年。租客将可享用会所设施,包括泳池和蒸气浴室,以及酒店公寓服务。 -房产 开发商频频拿地 市场复苏谨防再起炒作风10日,经过50次举牌,广州市城市建设有限公司以总价34500万元夺得广州天河区珠江新城一地块,折合楼面地价15324元/平方米,而此前珠江新城最高楼面地价为11912元/平方米。 今年2月份以来,随着楼市“小阳春”的到来,沉寂了多个月的土地交易市场再起波澜,开发商们在土地收储上的集体爆发引人关注。再露繁荣景象的土地市场反映出房地产企业充足的资本和对后市的信心,同时,随之高涨的地价、房价也让人们担心,新一轮炒地提价的闸门是否已经悄然打开。 信心加资本撬动土地市场全面复苏 从绿地集团(企业专区,旗下楼盘)分别以9.57亿元和12亿元的价格接连拿下上海松江区辰花路15号B地块和徐汇区斜土街道107街坊,创今年上海土地出让价格的新高,到富力地产(企业专区,旗下楼盘)10.22亿元拿下北京广渠门外10号地,再到中洲宝城26.1亿元拿下深圳3宗捆绑商住地块,雅戈尔10.28亿元拿下宁波“地王”。一个多月的时间内,国内“地王”频现。 中国指数研究院最新的统计数据显示,6月1日至7日,全国20个重点城市共推出土地124宗,环比增加25%,推出土地面积608万平方米,环比增加25%,成交土地面积173万平方米,环比增加14%。 “优质地块一直是开发商们收储的对象,只不过去年楼市的低迷抑制了开发商的热情。”易居中国房地产研究院综合部部长杨红旭在接受采访时指出,目前的情况表明冷落已久的土地市场开始复苏,地产商对后市的预期正在转好,信心正在增强。 国内地产巨头万科近日发布的公告显示,在过去的一个多月中,公司已斥资23亿元多处拿地。这与其两个月前对于国内楼市“尚需进一步观察”的谨慎表态形成了鲜明的对比。 万科能在短时间内连连出手,表明公司“不差钱”。上述公告显示,5月份万科实现销售面积69.7万平方米,销售金额64.1亿元,同比分别增长19.3%和19.7%。这一销售额已经接近2007年高峰时期的单月最高纪录。而今年1至5月,万科的销售总额已达238.9亿元,较2008年同期大涨20.9%。 嘉华(中国)投资有限公司总经理助理谷文胜表示,近期国内楼市十分活跃,开发商在短时间内回笼了大量资金,而开发项目资本金比例也降低了15个百分点,这都使开发商的财务状况大大改善,现金流增加,出于持续发展的需要,买地是很自然的。 地价楼价再入上升通道引发担忧 然而伴随着土地市场的不断回暖,房地产市场成交价格的不断冲高也越来越成为人们关心的问题。 根据国家发展改革委、国家统计局调查显示,5月份,全国70个大中城市房屋销售价格同比下降0.6%,降幅比上月缩小0.5个百分点;环比上涨0.6%,涨幅比上月扩大0.2个百分点。 北京、上海、深圳等地不断传出各类楼市涨价新闻,其中北京朝阳区一处楼盘一个月内每平方米房价上涨5000元的消息更是加重了购房者对后市的担忧。就在富力集团高价拿下广渠门外10号地之后,周边的二手房价格就开始跟风上涨,虽然尚无准确的统计数据,但据业内人士透露,部分业主跟风涨价的行为已经在京城房地产市场上营造出了浓浓的涨价氛围。 “现在开发商又在大量买地,土地市场和楼市会不会再像2007年一样被炒出一波高涨的行情?”正准备买房的丁先生向记者表达了自己的担忧。 丁先生的担忧不无道理,一边是高调拿地,一边是悄悄涨价。虽然综合全国土地收储和开发的情况看,开发商先前收储的土地并没有完全消化,市场供求关系也没有发生根本性的变化。但主要开发商在土地市场上的频频出手,还是很容易让人联想起2007年地价、房价交替上涨的火暴局面。 市场复苏谨防再起炒作之风 “目前的土地市场仍处于恢复性增长阶段,尚未到达繁荣期。”面对地产商纷纷布局土地市场的现状,杨红旭表示,现在还处于宏观经济的低谷期,很多开发商仍旧不敢对后市过于乐观。开发商们在土地市场上频频出手、高价成交,虽然客观上会使楼市预期升温。但土地市场的回暖和楼市的回暖毕竟还是两回事。在宏观经济形势没有发生根本性变化之前,盲目看高后市的地产商有可能碰壁。 北京我爱我家市场研究部高级研究员秦瑞表示,开发商高价拿地之后,地块周边二手房的业主常常会盲目跟风追涨,但从目前的市场环境来看,较高的房价只可能吓退对价格特别敏感的刚性需求,进而导致成交量的萎缩,加重市场的观望情绪。 对于一季度的楼市暖春,再次走上炒地涨价之路,无论是对开发商还是中小业主都不一定是件好事。机构分析人士认为,造成目前房价普涨、开发商收地加快的原因,一方面是市场回暖,另一方面是开发商的去库存已接近尾声,开发商注意力将转向购地、新开工面积和涨价上。 不过“去年以来的经验让购房者变聪明了”,秦瑞告诉记者,如果现在开发商或是中小业主盲目利用市场回暖的时机涨价,那么购房者很可能会再次持币观望,交易量的回落不可避免,房价的持续上涨也不会有市场的依托。 把握推地节奏警惕泡沫出现 谷文胜表示,企业决定买地与否的主要根据是对宏观经济形势的判断和对未来的预期,但“也可能是在全球性通胀预期的驱动下进行资产保值的一种选择,毕竟,持有土地的风险要小于持有现金的风险”。 尽管对购买土地是否真能规避通胀风险存有不同意见。但业内人士还是普遍认为,当土地交易市场成为投资市场,泡沫就随时可能浮现。在全球经济尚未好转、国内信贷相对宽松的背景下,如果将土地进行资本化杠杆运作,频频制造高价抢地的现象,泡沫便会被迅速吹大。 目前看来,地方政府较好地掌握了推地节奏,企业也还比较理性,没有盲目抢地的现象。不少房地产企业判断,“只要政府调控得当,今年应该不会出现像2007年那么多的‘地王’”。 长期调研楼市的上海市政协人资环建委员会专职副主任孙钟炬认为,要让房地产业回归理性、减少泡沫,就需要降低房产成本,而地价成本是房价成本的一个重要组成部分。 “拿地还是要谨慎,现在把地价抬得过高,未来可能心生悔意,就如2007年很多高价拿地企业一样。”杨红旭说。(记者 罗宇凡 叶锋) 我要评论 -教育 澳驻华使馆:政府公布多项国际教育新规澳大利亚驻华使领馆教育处17日通报称,澳大利亚移民与公民事务部长克里斯·鲍恩(Chris Bowen)议员及教育、技能、工作和劳资关系部长克里斯·埃文斯(Chris Evans)参议员今日宣布将对学生签证项目进行复审以及为国际教育行业制订的多项具体措施。埃文斯表示,澳币升值,全球金融危机在一些国家的持续影响,以及逐步加剧的来自美国、新西兰和加拿大等国为吸引国际学生而形成的竞争,给澳大利亚国际教育行业带来的压力在不断增加。他说,国际教育行业的规模和性质在过去十年中也发生了剧大的变化,因此我们采取政府各部门间通力合作的方式来应对这些变化是至关重要的。复审担负着提高国际教育行业的持续竞争力和加强优化学生签证项目两项任务,将为教育机构和各利益相关方提供机会,阐述他们对国际教育行业未来的远见卓识。据介绍,吉拉德政府已任命了澳大利亚勋章获得者迈克尔(Michael 
Knight)负责复审工作,并于2011年中旬向鲍恩和埃文斯提交复审报告。鲍恩指出,复审工作将考察主要利益相关方与学生签证申请要求之间所建立起来的合作伙伴框架,并将就如何建立一个更加有效的合作伙伴框架提出建议。同时还将审视各种更好的针对学生签证案例中移民风险的管理方法,遏制违规及滥用学生签证项目的行为,并考虑各类学生签证对不同教育类别的适宜性。他介绍说,政府还将采取多项措施,在继续坚持优化学生签证项目的同时,精简低风险人群的签证申请审理程序。这些措施有力支撑了政府近期为优化学生签证项目而采取的改革措施,并再次强调技术移民项目应为澳大利亚中长期经济发展提供所需的高端技能。这些措施包括:——按照近期澳大利亚移民与公民事务部进行的评估等级复审的建议,从2011年4月起,降低一些学生签证评估等级。作为这项决策的一部分,来自中国和印度的高等教育类别的学生签证申请评估等级将会被降低;——调整规定使预付的寄宿学校住宿费可以从签证申请所要求的生活费中扣除;——促进政府和国际教育行业间的信息交流,这包括即将在移民部网站上公布学生签证季度统计数据,以便院校跟踪了解学生签证新趋势;——使职业教育与培训(VET)学生签证评估等级(AL)4的签证申请人能够就读证书级别的配套课程,并能满足获得学生签证的要求。使馆介绍说,今天的这项宣布是对最近澳大利亚政府为加强国际教育行业而实施的多项措施的补充。这些措施包括:针对《2000年海外学生教育服务(ESOS)法案》的贝尔德复审(BairdReview),要求所有提供国际教育的院校于2010年底前重新注册的《海外学生教育服务(ESOS)法案》修正案,以及发布由澳大利亚政府理事会(Councilof Australian Government)制订的《澳大利亚国际学生战略》。埃文斯说:“保持澳大利亚教育继续被高度公认为能够为赴澳留学的国际学生提供高质量课程是十分重要的。”即将于明年成立的国家职业教育与培训规范局(National VET Regulator)和高等教育质量和标准署(Tertiary Education Quality Standards Agency)将保障职业教育与培训和高等教育行业继续保持高质量。 -时尚 组图:香肩美锁骨 性感不张扬女人哪个部位最美最性感?不是红唇,不是翘臀,更不是波胸,而是肩膀。锁骨,是你身着斜肩上装引来同性羡慕的地方,是被抹胸曳地长礼服衬托得最耀眼的地方,它的美充满灵性,让女人立刻有了一种轻盈的气质。它堪称女人身上一道最美的风景线。今夏,单肩装将低调并一枝独秀地流行着,一抹香肩半边锁骨的靓丽,同时造就了几个层次的美感,不对称、错落感、优雅、性感……一切都在那微微倾斜的一道色彩。单肩休闲衫 搭配牛仔最IN如果你认为,单肩风潮仅仅适用于相对正式的礼服或小洋装,那你就大错特错了,一款棉质的普通T恤,只需在剪裁上作一些调整,同时将领口开大,就能轻松呈现出当季最In的单肩感觉,在斜肩处露出细细的肩带,搭配牛仔裤就很好看。时尚女王凯特-摩丝永远懂得美的定义,就连最普通的T恤,一样可以穿出最Fashion的感觉。单肩小洋装 呈现多样风格短款单肩连衣裙根据面料、剪裁的不同,往往可以展现出多样、多变的风格。礼服型的单肩连衣裙充满野性;而缎面、丝绸材质的连衣裙则散发着迷人的青春气息。“绯闻女孩”布莱克-莱弗利一袭玫红色缎面单肩小洋装,玲珑曲线凸显无遗。 -时政 全国95%以上地市建立特邀监察员制度新华网北京3月13日电(记者李亚杰)记者日前从监察部获悉,自1989年以来,监察部已聘请了四批特邀监察员,共计130人次。目前,全国31个省、自治区、直辖市,95%以上的地(市)、65%以上的县和中央国家机关的十多个部委,建立了特邀监察员制度。特邀监察员制度是中国共产党领导的多党合作和政治协商制度的重要组成部分,也是民主监督、参政议政在反腐败领域的成功实践。监察部有关负责人表示,自1989年建立特邀监察员制度以来,监察部一直高度重视,把这项工作作为监察机关的一项重要工作来抓,明确把专门监督与群众监督相结合的制度坚持得如何、特邀监察员工作是加强了还是削弱了,作为衡量和判断在纪检监察机关合署办公体制下行政监察职能是否得到加强的六条标准之一。特邀监察员工作开展近20年来,特邀监察员制度在实践中进一步得到完善和发展,特邀监察员队伍不断壮大,工作领域逐步拓宽,在党风廉政建设和反腐败工作中的作用也越来越明显。1989年5月,经过充分酝酿并经中央同意,监察部作出建立特邀监察员制度的决定。同年12月,监察部从民革、民盟、民建、民进、农工党、致公党、九三学社、台盟8个民主党派和全国工商联聘请了21位专家、学者为监察部首批特邀监察员。之后,特邀监察员工作在全国各级纪检监察机关逐步推开。1996年11月,监察部召开了全国纪检监察机关特邀监察员工作座谈会,这是特邀监察员制度建立以来召开的第一次全国性会议,总结交流了全国纪检监察机关开展特邀监察员工作的经验和做法,有力地推动了特邀监察员工作的深入开展。2004年10月颁布实施的《中华人民共和国行政监察法实施条例》进一步明确:监察机关根据工作需要,可以在国家行政机关、企业、事业单位、社会团体中聘请特邀监察员。聘请特邀监察员的具体办法由国务院监察机关规定。之后,监察部先后制定颁布了《监察部关于聘请特邀监察员的几点意见》、《关于改进特邀监察员工作的几点意见》、《中央纪委监察部关于加强和改进行政监察工作的意见》等一系列法规、文件和规定,明确了特邀监察员工作的总体要求和主要内容。即将颁布施行的《中国人民共和国行政监察法》,将进一步明确特邀监察员选聘程序、职责权限等,为特邀监察员全面履行职责提供法律依据。各地也结合工作实际,纷纷制定颁布了切实可行的工作制度。北京、上海、河南、广东、广西、山东、福建、四川、深圳等地还根据实践发展不断修订、完善特邀监察员工作办法等制度规定,特邀监察员工作的规范化、制度化水平不断提高。 -游戏 经典无法复制!《神鬼寓言3》PC版评析《神鬼寓言3》在一个异彩纷呈的虚拟世界,人类在电脑治下民主共存 -- 再没有什么比这更能激发想象的火花了。我的一个小巧玲珑的世界,我可以予取予求。力量感在我周身涌起,因为这结果完全由我来主宰。若是不想眼看着那帮狼人们凌虐镇子,我或者施法送出火球,或者挥舞宝剑,怎样都能拯救世界。我也可以将镇子寻求保护的一丝光芒熄灭干净,看着怪物们把尖叫的无辜百姓给撕成碎片。这些方面,《神鬼寓言3》做得可圈可点,但是 -- 太罕见了。在阿尔比昂大陆最新的故事里,纵然Lionhead工作室用令人荡气回肠的道德抉择设置了无数奇思妙想和激动时刻,它们却被深埋在了一堆毫不丰满的人物形象、冗长的故事和狗血情节里。如果你从来没玩儿过《神鬼寓言》,Xbox-360独占的《神鬼寓言2》也错过了 -- 没关系的,别担心为了了解《神鬼寓言3》而做好功课的事儿。所有需要你知道的,开篇全交代了:国王是个恶棍,需要被干掉。并不是遵循着最初的故事,总之我 -- 就是主角,从城堡里跑了,混迹市井之中,在阿尔比昂这个奇妙的大陆中徘徊,以期攒足人气资本,把国王搞下来,我自己坐这把交椅。《神鬼寓言3》所耍的手段在于,并不是我戴上王冠就终章了。那些我帮过的人,我还得给出承诺来;一旦取得王位,我得决定是旧账一律不认,还是一律兑现。这事儿让我真的很不舒服。我费大力气拯救出的那些人,敢情谁都不是跑龙套的,都等着最后来向我讨债,都等着我登基之后捎只胳膊带把手儿去拉他们一把。而且大多数的这种事儿都跟王国的安全这种更高层次的要求是冲突的。我不得不在践行诺言与保证阿尔比昂的安全之间竭力求取平衡,小心翼翼如履薄冰。这种构思其实挺不错,但是本来挺好的一件事儿,感觉怎么就这么恶心呢。首先这些人物就有问题。绝大多数的这些角色都同样地逡巡。相比行动来说,还是口音和衣着能有些区分。置他们的吁求不顾而去推广童工或者把妓院夷为平地,我这是多么撕心裂肺的抉择啊!除了我的导师与伙伴沃特,以及暴君洛根之外,剩下的角色全都一个心眼儿,根本就不比普通的三维物件强到哪里去。作为国王而背弃承诺之时,我真是毫无任何感觉,仅仅按下键盘命令,让他们滚,如是而已。穿插在《神鬼寓言3》的主线故事之中,有很多招募的任务 -- 
几乎就没有哪个有意思。也有分支任务,可大部分都是教科书一般的护送或者刺杀任务。我可以购置实业,但是只有最基本的项目可供自定义。一个饶有趣味的帝国管理游戏就这样被剥夺了,成了一个单调、乏味的流程,仅仅在金钱进入游戏里钱包的那轻轻一声响更是放大了这一点。我可以杀死或者审判阿尔比昂的百姓,但是与此一道的各种冷笑话和莫名其妙的大打出手,完全把这种感受给毁了。哪怕是黎民们当面儿大喊大叫说我是“刽子手”,我也照旧可以傻乎乎地跳舞、做支线任务、去约会,搞不好就结婚了,还拖家带口的。游戏中的形成、发展和关系的维系,全因为这个设定被束缚住了。就算是《神鬼寓言3》在某些方面引入了阴谋和神秘的元素,例如我被丢到一个黑暗荒芜的洞穴之后,我不得不面对各种恐惧,这使得我无法探索每一个角落。恐惧在这个大陆上是最强大的邪恶,而且大约会在游戏进程的三分之二处出现,而且仅仅会遭遇几次而已。游戏给人的感觉就是完成度不高,而且赶工迹象明显。寻找游戏中的收集元素、参与小鸡快跑比赛、镇压地精等等事情都让人很难一直保持兴趣。而当我最终坐上王座之后,《神鬼寓言3》所能提供的选择少得可怜。还好《神鬼寓言3》有一些时尚和幽默。有些台词写得还是非常有意思的。虽然这样的台词对塑造人物没有任何意义,但是会让你一直一直笑。阿尔比昂仍然是个美丽的地方,而且角色模型、动画和环境光跟随构造除了这个美丽的世界。从墓地的薄雾到荒漠的午后阳光,这样一个充满生机的地方非常令人赞叹。配音做的很专业。任务繁多,讲述了一个宏大的故事,而且还有很多娱乐元素,不过所有这些都相互孤立,让本该成为一款佳作的《神鬼寓言3》就这样沦为了一款毫不出彩的作品。战斗过程令人兴奋,但是缺乏打击感。由于战斗过程的乏味,所以战斗无法使玩家的注意力从游戏剧情和肤浅的人物问题上转移开。格斗武器,枪支和魔法本质上来说都是一样的。基本上都是闪躲和攻击,这样的方法可以用来对付所有遇到的敌人。说实话,这样的战斗系统着实令人失望。武器升级所带来的外观和属性上的改变让我切实感受到了游戏的进程,不过由于战斗系统的失败,这样的设定也让人感到无聊。整体感觉6.5分:漂亮的界面,不过与PC平台毫不相称。杂乱无章的故事与游戏节奏画面表现7.5分:一些很棒的动画和特效,还有多彩和谐的艺术风格声效表现8.0分:令人振奋的音乐,配音表演相当完美上手体验6.0分:有很多可以做的内容,但只有很小部分令人兴奋。单调的战斗,重复的任务,只有很小部分值得情感投入耐玩性5.5分:你或许从合作游戏和大量的收集感到愉悦,但这也无法更改核心游戏体验总评6.0分:还行吧 -科技 摩托罗拉:GPON在FTTH中比EPON更有优势作 者:鲁义轩2009年,在国内光进铜退的火热趋势下,摩托罗拉携其在国际市场上已经获得丰富运营经验的GPON解决方案,大举进入中国的光通信市场。对于这一个时间点的选择,摩托罗拉宽带及移动网络事业部网络接入解决方案部全球营销与传播总监FloydWagoner的解释是:中国利用GPON推进光线到户的时机正在趋于成熟,而摩托罗拉在国际上的GPON研发和运营经验,可以更好地提升国内运营商推进FTTH的效率。GPON的国际性优势在亚洲地区,推进光线到户的多种技术中,EPON一直是非常强大并且主流的技术。而在亚洲以外的国际很多地区,运营商都开始越来越多地关注GPON,今年GPON预计占到全球光纤到户市场的40%。在FloydWagoner看来,EPON虽然仍然强大,而GPON的实力在显著加强。在带宽方面,GPON比EPON上下行带宽都加强了至少一倍。因为EPON利用率相对于GPON要低一些,在相同的用户部署、相同终端情况下,统计数据表明EPON支持上、下行29Mbit/s的带宽,而GPON可以达到下行79Mbit/s上行37Mbit/s的实际带宽,从根本上提升了对数据业务的支持。在服务的质量保证(QoS)上,目前EPON的业务主要是数据业务,而运营商要推广三网融合等复杂的业务,服务质量保证要求会更高。在这方面,GPON有了更好的机制来保证多业务服务质量的实现。此外,在部署的方便性上,光线路中的光功率意味着传输距离的长短。EPON的速率是24dB,而GPON是28dB,在相同的条件下,GPON的传输距离更远。运营商可以把ONT布置在更远的位置,节省线路的成本,将来可以覆盖更多、更远的终端单元。综合比较,无论在技术方面还是在业务保障方面以及在材料方面,GPON到现在为止所体现的趋势更加地优于EPON。而且GPON的成本价格已经下降很多,得到越来越多的运营商的青睐。目前国内中国电信、中国联通以及中国移动都已经表示过把GPON作为下一步光网络发展的优选。创新性的GPONONT和OLT据FloydWagoner介绍,凭借在全球FTTH领域积累的经验,摩托罗拉开发了创新产品,以满足服务供应商提供更低密度的OLT、满足更高密度的 MDU环境以及具集成功能的室内ONT等方面的需求。创新性的GPONONT和OLT,可以将光纤延伸至服务供应商网络的边缘,从而保证用户在任何地方都能享用端到端的超宽带服务。同时,摩托罗拉的FTTH网元管理系统AXSvision,还能简化网管界面,并帮助运营商加速新型、丰富的个性化娱乐业务推出速度。 diff --git a/tests/data_for_tests/io/WeiboSenti100k/dev.txt b/tests/data_for_tests/io/WeiboSenti100k/dev.txt deleted file mode 100644 index fdca0212..00000000 --- a/tests/data_for_tests/io/WeiboSenti100k/dev.txt +++ /dev/null @@ -1,7 +0,0 @@ -label text -1 多谢小莲,好运满满[爱你] -1 能在他乡遇老友真不赖,哈哈,珠儿,我也要用这个拼图软件!BTW,小飞人儿终于要飞回家啦,深圳行,谢谢每位工作人员的照顾![爱你] -0 [衰]补鞋的说鞋子是进口的,质量太好,刀子都切不进去!所以说大家以后别买进口,到时补都没的补![爱你] -0 第五季都没看了[泪]要补起来 -1 美图好诗![鼓掌] //@言家楼:回复@俺叫老鬼:【七律。感时】 叶随风舞身何处, 鸟逆风行觅树梢。 岁月风来无退路, 激流风助有波涛。 寒微风动曾言志, 富贵风骚似不牢。 雪竹风梅诗未尽, 休云风雨剪春刀。//鸢肩格:藏珠“风”。 -0 没敢问,她男朋友在旁边呢。。[泪]//@好饭换坏饭: 你问问她能不能调成静音模式 diff --git a/tests/data_for_tests/io/WeiboSenti100k/test.txt b/tests/data_for_tests/io/WeiboSenti100k/test.txt deleted file mode 100644 index 3d071fb2..00000000 --- a/tests/data_for_tests/io/WeiboSenti100k/test.txt +++ /dev/null @@ -1,8 +0,0 @@ -label text -1 钟爱大粉的亲们,这一茬我们又种大粉了,座果也不错,能吃上了[嘻嘻] -0 //@北京全攻略: 我擦。。。牛逼~果断收藏[衰] -1 都有我都有我~~~我的2012注定是美美的精彩的不得了啊~哈哈哈[太开心]//@哆啦胖兔梦: 转发微博。 -1 这周的成果就是这样 刻的好累但是很喜欢[嘻嘻]#我的橡皮章# -1 你把我整?了。[抓狂] //@窦智耀:开 往大稿艺术区店开 带上祝贺的花篮。。。昨夜 杨家火锅 你把我灌醉。。。今夜 我要学会排队等位。再贺开业大吉![鼓掌][鼓掌][鼓掌] -1 [爱你]亲们,我刚刚发表了一篇文章,有图有真相,速来围观![围观]||#蚂蜂窝游记#《新疆,雨中的野核桃沟》,查看更多精彩>>> http://t.cn/zR4BMN3 (分享自 @蚂蜂窝旅游攻略) -0 [泪]//@平安北京: 珍爱生命,小心驾驶,驾车时请勿接打电话! 
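The WeiboSenti100k fixtures deleted in this change (dev and test above, train below) share one layout: a header row reading "label text", then one example per line consisting of a 0/1 sentiment polarity and the raw weibo text. A minimal reader sketch in Python, assuming the two columns are tab-separated (the separator itself is not recoverable from the diff) and using a hypothetical helper name rather than fastNLP's actual loader:

    def read_weibo_senti(path):
        # Sketch only: parses the two-column layout shown in the
        # deleted fixtures; it is not fastNLP's own WeiboSenti100k loader.
        examples = []
        with open(path, encoding="utf-8") as f:
            next(f)  # skip the "label\ttext" header row
            for line in f:
                line = line.rstrip("\n")
                if not line:
                    continue
                # Split on the first tab only: the weibo text may itself
                # contain spaces, @-mentions, and emoticon tags like [泪].
                label, text = line.split("\t", 1)
                examples.append({"label": int(label), "text": text})
        return examples
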
diff --git a/tests/data_for_tests/io/WeiboSenti100k/train.txt b/tests/data_for_tests/io/WeiboSenti100k/train.txt deleted file mode 100644 index 4f0adf27..00000000 --- a/tests/data_for_tests/io/WeiboSenti100k/train.txt +++ /dev/null @@ -1,7 +0,0 @@ -label text -1 //@实用小百科:这才是吃货本色[哈哈] -0 回复@邋遢大王诗文:好的[ok] //@邋遢大王诗文:回复@静冈叔叔:[ok]木有问题!回来了和我联系 //@静冈叔叔:回复@西瓜叫高荔蜒啊:在富士山静冈机场有很多小丸子的土产啊[嘻嘻] //@西瓜叫高荔蜒啊:祝你一路顺风~ 想要小丸子的お土?~[泪] -1 我花了两年最后被抢的只剩下一枚,情何以堪! //@自由橙的小窝:@程诗然 同学集卡速度最快,我花了两年时间才集全 //@怯弱的狮子Susan: 回复@阮导:@墙墙-墙根俱乐部 看你多抢手!快给我们各发一套吧![嘻嘻] //@阮导:回复@怯弱的狮子Susan:所以。。。。你要给我找一套撒。。哈哈哈哈哈!!! -1 KIMSCLOSET的年会,海鲜自助餐,太丰盛了!大家吃的HIGH,喝的HIGH,聊的HIGH!太开心了![哈哈][爱你] -1 在iPhone的便携鱼眼镜头之下,扣肉蝴蝶饱子显得多诱人呀![围观][馋嘴][嘻嘻] -0 英织,你知道不知道,他是我最最最爱的大叔,你跟他靠这么近,我的心都碎了!!!你说你说你说,你有没有他的签名![泪] diff --git a/tests/data_for_tests/io/XNLI/dev.txt b/tests/data_for_tests/io/XNLI/dev.txt deleted file mode 100644 index eced8fac..00000000 --- a/tests/data_for_tests/io/XNLI/dev.txt +++ /dev/null @@ -1,7 +0,0 @@ -language gold_label sentence1_binary_parse sentence2_binary_parse sentence1_parse sentence2_parse sentence1 sentence2 promptID pairID genre label1 label2 label3 label4 label5 sentence1_tokenized sentence2_tokenized match -zh neutral 他说,妈妈,我回来了。 校车把他放下后,他立即给他妈妈打了电话。 1 1 facetoface neutral contradiction neutral neutral neutral 他 说 , 妈妈 , 我 回来 了 。 校车 把 他 放下 后 , 他 立即 给 他 妈妈 打 了 电话 。 True -zh contradiction 他说,妈妈,我回来了。 他没说一句话。 1 2 facetoface contradiction contradiction contradiction contradiction contradiction 他 说 , 妈妈 , 我 回来 了 。 他 没 说 一 句 话 。 True -zh entailment 他说,妈妈,我回来了。 他告诉他的妈妈他已经回到家了。 1 3 facetoface entailment entailment neutral entailment entailment 他 说 , 妈妈 , 我 回来 了 。 他 告诉 他 的 妈妈 他 已经 回到家 了 。 True -zh neutral 他们停止了跟这家交朋友,因为他们决定了当白人。 种族紧张局势开始时,他们不再探望这家人。 13 39 facetoface neutral entailment entailment entailment entailment 他们 停止 了 跟 这家 交朋友 , 因为 他们 决定 了 当 白人 。 种族 紧张 局势 开始 时 , 他们 不再 探望 这家 人 。 False -zh contradiction 老太太以前常说她姐姐和姐丈是如何决定要搬到奥古斯塔城里去,并且被当做白人看待。 奶奶的妹妹是白人,搬到了德克萨斯州。 17 49 facetoface contradiction contradiction contradiction contradiction neutral 老太太 以前 常 说 她 姐姐 和 姐丈 是 如何 决定 要 搬 到 奥古斯塔 城里 去 , 并且 被 当做 白人 看待 。 奶奶 的 妹妹 是 白人 , 搬 到 了 德克萨斯州 。 True -zh entailment 老太太以前常说她姐姐和姐丈是如何决定要搬到奥古斯塔城里去,并且被当做白人看待。 奶奶的姐姐不是白人。 17 50 facetoface entailment entailment contradiction neutral entailment 老太太 以前 常 说 她 姐姐 和 姐丈 是 如何 决定 要 搬 到 奥古斯塔 城里 去 , 并且 被 当做 白人 看待 。 奶奶 的 姐姐 不 是 白人 。 True diff --git a/tests/data_for_tests/io/XNLI/test.txt b/tests/data_for_tests/io/XNLI/test.txt deleted file mode 100644 index d5ff4c24..00000000 --- a/tests/data_for_tests/io/XNLI/test.txt +++ /dev/null @@ -1,7 +0,0 @@ -language gold_label sentence1_binary_parse sentence2_binary_parse sentence1_parse sentence2_parse sentence1 sentence2 promptID pairID genre label1 label2 label3 label4 label5 sentence1_tokenized sentence2_tokenized match -zh contradiction 嗯,我根本没想过,但是我很沮丧,最后我又和他说话了。 我还没有和他再次谈论。 2 4 facetoface contradiction contradiction contradiction contradiction contradiction 嗯 , 我 根本 没 想 过 , 但是 我 很 沮丧 , 最后 我 又 和 他 说话 了 。 我 还 没有 和 他 再次 谈论 。 True -zh entailment 嗯,我根本没想过,但是我很沮丧,最后我又和他说话了。 我非常沮丧,我刚刚开始跟他说话。 2 5 facetoface entailment entailment entailment entailment entailment 嗯 , 我 根本 没 想 过 , 但是 我 很 沮丧 , 最后 我 又 和 他 说话 了 。 我 非常 沮丧 , 我 刚刚 开始 跟 他 说话 。 True -zh neutral 嗯,我根本没想过,但是我很沮丧,最后我又和他说话了。 我们谈得很好。 2 6 facetoface neutral neutral neutral neutral neutral 嗯 , 我 根本 没 想 过 , 但是 我 很 沮丧 , 最后 我 又 和 他 说话 了 。 我们 谈 得 很 好 。 True -zh neutral 而我当初认为这是一个特权,我现在仍然这样想,我是唯一的922 Ex-O,也是我的AFFC空军职业生涯。 我不知道那天我不是唯一一个在场的人。 3 7 facetoface neutral contradiction contradiction contradiction contradiction 而 我 当初 认为 这 是 一个 特权 , 我 
现在 仍然 这样 想 , 我 是 唯一 的 922 Ex-O , 也 是 我 的 AFFC 空军 职业生涯 。 我 不 知道 那天 我 不 是 唯一 一个 在场 的 人 。 False -zh contradiction 而我当初认为这是一个特权,我现在仍然这样想,我是唯一的922 Ex-O,也是我的AFFC空军职业生涯。 我们都被赋予了相同的确切数字,无论我们被许诺了何种特权,都是谎言。 3 9 facetoface contradiction contradiction entailment contradiction contradiction 而 我 当初 认为 这 是 一个 特权 , 我 现在 仍然 这样 想 , 我 是 唯一 的 922 Ex-O , 也 是 我 的 AFFC 空军 职业生涯 。 我们 都 被 赋予 了 相同 的 确切 数字 , 无论 我们 被 许诺 了 何种 特权 , 都 是 谎言 。 True -zh entailment 这是Fannie Flono,她在佐治亚州奥古斯塔长大,她会讲述她童年时的一些故事。 Fannie Flono就在这里,她将与我们分享她在奥古斯塔成长的童年故事。 12 35 facetoface entailment entailment entailment entailment entailment 这 是 Fannie Flono , 她 在 佐治亚州 奥古斯塔 长大 , 她 会讲 述 她 童年 时 的 一些 故事 。 Fannie Flono 就 在 这里 , 她 将 与 我们 分享 她 在 奥古斯塔 成 长 的 童年 故事 。 True diff --git a/tests/data_for_tests/io/XNLI/train.txt b/tests/data_for_tests/io/XNLI/train.txt deleted file mode 100644 index 8a2fd3a3..00000000 --- a/tests/data_for_tests/io/XNLI/train.txt +++ /dev/null @@ -1,9 +0,0 @@ -premise hypo label -我们 家里 有 一个 但 我 没 找到 我 可以 用 的 时间 我们 家里 有 一个 但 我 从来 没有 时间 使用 它 . entailment -该镇 仍然 充满 雕塑家 , piazza alberica 是 一个 夏季 雕塑 比赛 的 现场 14 天 来 制作 一个 杰作 . 几乎 所有 的 雕塑家 都 离开 了 piazza alberica 为 其他 城市 . contradictory -土耳其 的 面包车 是 自己 坐 下 来 的 , 但 他们 喜欢 玩和呃 , 他们 喜欢 和 他们 一起 玩 , 他们 把 他们 的 社会 从 它 . neutral -好 吗 ? 我 问 benignantly , 因为 她 犹豫 了 . 我 抓住 她 的 胳膊 和 她 愤怒地 , 问 , 好 吗 ? contradictory -一 段 时间 来 看 , 这 一 运动 似乎 要 取得 成功 , 但 政治 事件 , 加 上 帕内尔 在 一个 令 人 愤慨 的 离婚案 中 被 称为 共同 答辩人 , 导致 许多 人 撤回 他们 的 支持 . 帕内尔 在 一个 令 人 愤慨 的 离婚 问题 上 的 法律 问题 使 这 场 运动 受到 了 影响 . entailment -看 在 这里 , 他 说 我们 不 希望 任何 律师 混在 这 一 点 . 他 说 看看 那 张 纸 neutral -Soderstrom 在 创伤 中心 进行 了 多次 筛选 测试 . 测试 必须 在 创伤 中心 进行 比较 , 否则 就 会 无效 . neutral -嗯 , 这 是 一 种 明显 的 我 的 意思 是 , 他们 甚至 把 它 带 到 现在 呢 , 他们 在 电视 上 做 广告 , 你 知道 如果 你 知道 , 如果 你 知道 这样 做 , 或者 如果 你 需要 这 个呃 , 我们 会 告 你 和 你 你 不用 给 我们 钱 , 但 他们 不 告诉 你 的 是 如果 他们 赢 了 你 给 他们 至少 三分之一 他们 赢 的 东西 , 所以 我 不 知道 它 是呃 , 它 得到 了 现在 做 更 多 的 生意 , 而 不 是呃 实际上 是 在 处理 犯罪 而 不 是 与 呃嗯 他们 的 律师 只 是 为了 钱 , 我 相信 , 我 知道 我 同意 你 , 我 认为 你 是 真实 的 你. 非常 正确 的 是 , 我 认为 他们 应该 有 同等 数量 的 你 知道 也许 他们 可以 有 几 个 , 但 我 认为 大多数 他们 应该 不 是 律师 在 事实 , 这 是 方式 他们 已经 进入 政治 , 这 是 因为 在 法律 上 , 你 知道 的 循环 和 一切 , 但 我 不 知道 我们 是 在 马里兰州 和呃 , 我们 有 同样 的 东西 人满为患 , 和呃 他们 让 他们 出来 我 的 意思 是 只 是 普通 的 监狱 判决 的 事情 , 他们 让. 他们 是 因为 他们 没有 任何 地方 可以 留住 他们 所以 你 可以 知道呃 , 除非 是 一个 重大 的 罪行 , 但呃 , 即使 是 小小的 东西 , 我 的 意思 是 那些 在 美国 失去 的 人 是 受害者 和 谁 可能 是 抢劫 或 毒品 , 或者 其他 什么 , 他们 是 谁 要 支付 , 他们 是 一个 会 受苦 , 另 一个 你 知道 的 人 , 如果 他们 被 逮捕 , 如果 他们 逮捕 他们嗯 , 然后 呢 , 你 知道 的 时间 法律 接管 了 一 半 时间 呃 他们 要么 让 他们 走 , 或者 他们 下 了 一个 句子 , 因为 他们 有 一个 律师 , 你 知道 的 感觉 他们 是 不 是 所有 在 一起 当 他们 做到 了 .它 我 不 知道 我们 怎么 到 这 一 点 , 虽然 . neutral diff --git a/tests/data_for_tests/io/ag/test.csv b/tests/data_for_tests/io/ag/test.csv deleted file mode 100644 index 3a4cc0ae..00000000 --- a/tests/data_for_tests/io/ag/test.csv +++ /dev/null @@ -1,5 +0,0 @@ -"3","Fears for T N pension after talks","Unions representing workers at Turner Newall say they are 'disappointed' after talks with stricken parent firm Federal Mogul." -"4","The Race is On: Second Private Team Sets Launch Date for Human Spaceflight (SPACE.com)","SPACE.com - TORONTO, Canada -- A second\team of rocketeers competing for the #36;10 million Ansari X Prize, a contest for\privately funded suborbital space flight, has officially announced the first\launch date for its manned rocket." -"4","Ky. Company Wins Grant to Study Peptides (AP)","AP - A company founded by a chemistry researcher at the University of Louisville won a grant to develop a method of producing better peptides, which are short chains of amino acids, the building blocks of proteins." 
-"4","Prediction Unit Helps Forecast Wildfires (AP)","AP - It's barely dawn when Mike Fitzpatrick starts his shift with a blur of colorful maps, figures and endless charts, but already he knows what the day will bring. Lightning will strike in places he expects. Winds will pick up, moist places will dry and flames will roar." -"4","Calif. Aims to Limit Farm-Related Smog (AP)","AP - Southern California's smog-fighting agency went after emissions of the bovine variety Friday, adopting the nation's first rules to reduce air pollution from dairy cow manure." diff --git a/tests/data_for_tests/io/ag/train.csv b/tests/data_for_tests/io/ag/train.csv deleted file mode 100644 index e766a481..00000000 --- a/tests/data_for_tests/io/ag/train.csv +++ /dev/null @@ -1,4 +0,0 @@ -"3","Wall St. Bears Claw Back Into the Black (Reuters)","Reuters - Short-sellers, Wall Street's dwindling\band of ultra-cynics, are seeing green again." -"4","Building Dedicated to Columbia Astronauts (AP)","AP - A former dormitory converted to classrooms at the Pensacola Naval Air Station was dedicated Friday to two Columbia astronauts who were among the seven who died in the shuttle disaster Feb. 1, 2003." -"2","Phelps On Relay Team","Michael Phelps is named to the 4x100-meter freestyle relay team that will compete in Sunday's final, keeping alive his quest for a possible eight Olympic gold medals." -"1","Venezuelans Vote Early in Referendum on Chavez Rule (Reuters)","Reuters - Venezuelans turned out early\and in large numbers on Sunday to vote in a historic referendum\that will either remove left-wing President Hugo Chavez from\office or give him a new mandate to govern for the next two\years." diff --git a/tests/data_for_tests/io/cmrc/dev.json b/tests/data_for_tests/io/cmrc/dev.json deleted file mode 100644 index c9069efe..00000000 --- a/tests/data_for_tests/io/cmrc/dev.json +++ /dev/null @@ -1,155 +0,0 @@ -{ - "version": "v1.0", - "data": [ - { - "paragraphs": [ - { - "id": "DEV_0", - "context": "《战国无双3》()是由光荣和ω-force开发的战国无双系列的正统第三续作。本作以三大故事为主轴,分别是以武田信玄等人为主的《关东三国志》,织田信长等人为主的《战国三杰》,石田三成等人为主的《关原的年轻武者》,丰富游戏内的剧情。此部份专门介绍角色,欲知武器情报、奥义字或擅长攻击类型等,请至战国无双系列1.由于乡里大辅先生因故去世,不得不寻找其他声优接手。从猛将传 and Z开始。2.战国无双 编年史的原创男女主角亦有专属声优。此模式是任天堂游戏谜之村雨城改编的新增模式。本作中共有20张战场地图(不含村雨城),后来发行的猛将传再新增3张战场地图。但游戏内战役数量繁多,部分地图会有兼用的状况,战役虚实则是以光荣发行的2本「战国无双3 人物真书」内容为主,以下是相关介绍。(注:前方加☆者为猛将传新增关卡及地图。)合并本篇和猛将传的内容,村雨城模式剔除,战国史模式可直接游玩。主打两大模式「战史演武」&「争霸演武」。系列作品外传作品", - "qas": [ - { - "question": "《战国无双3》是由哪两个公司合作开发的?", - "id": "DEV_0_QUERY_0", - "answers": [ - { - "text": "光荣和ω-force", - "answer_start": 11 - }, - { - "text": "光荣和ω-force", - "answer_start": 11 - }, - { - "text": "光荣和ω-force", - "answer_start": 11 - } - ] - }, - { - "question": "男女主角亦有专属声优这一模式是由谁改编的?", - "id": "DEV_0_QUERY_1", - "answers": [ - { - "text": "村雨城", - "answer_start": 226 - }, - { - "text": "村雨城", - "answer_start": 226 - }, - { - "text": "任天堂游戏谜之村雨城", - "answer_start": 219 - } - ] - }, - { - "question": "战国史模式主打哪两个模式?", - "id": "DEV_0_QUERY_2", - "answers": [ - { - "text": "「战史演武」&「争霸演武」", - "answer_start": 395 - }, - { - "text": "「战史演武」&「争霸演武」", - "answer_start": 395 - }, - { - "text": "「战史演武」&「争霸演武」", - "answer_start": 395 - } - ] - } - ] - } - ], - "id": "DEV_0", - "title": "战国无双3" - }, - { - "paragraphs": [ - { - "id": "DEV_1", - "context": 
"锣鼓经是大陆传统器乐及戏曲里面常用的打击乐记谱方法,以中文字的声音模拟敲击乐的声音,纪录打击乐的各种不同的演奏方法。常用的节奏型称为「锣鼓点」。而锣鼓是戏曲节奏的支柱,除了加强演员身段动作的节奏感,也作为音乐的引子和尾声,提示音乐的板式和速度,以及作为唱腔和念白的伴奏,令诗句的韵律更加抑扬顿锉,段落分明。锣鼓的运用有约定俗成的程式,依照角色行当的身份、性格、情绪以及环境,配合相应的锣鼓点。锣鼓亦可以模仿大自然的音响效果,如雷电、波浪等等。戏曲锣鼓所运用的敲击乐器主要分为鼓、锣、钹和板四类型:鼓类包括有单皮鼓(板鼓)、大鼓、大堂鼓(唐鼓)、小堂鼓、怀鼓、花盆鼓等;锣类有大锣、小锣(手锣)、钲锣、筛锣、马锣、镗锣、云锣;钹类有铙钹、大钹、小钹、水钹、齐钹、镲钹、铰子、碰钟等;打拍子用的檀板、木鱼、梆子等。因为京剧的锣鼓通常由四位乐师负责,又称为四大件,领奏的师傅称为:「鼓佬」,其职责有如西方乐队的指挥,负责控制速度以及利用各种手势提示乐师演奏不同的锣鼓点。粤剧吸收了部份京剧的锣鼓,但以木鱼和沙的代替了京剧的板和鼓,作为打拍子的主要乐器。以下是京剧、昆剧和粤剧锣鼓中乐器对应的口诀用字:", - "qas": [ - { - "question": "锣鼓经是什么?", - "id": "DEV_1_QUERY_0", - "answers": [ - { - "text": "大陆传统器乐及戏曲里面常用的打击乐记谱方法", - "answer_start": 4 - }, - { - "text": "大陆传统器乐及戏曲里面常用的打击乐记谱方法", - "answer_start": 4 - }, - { - "text": "大陆传统器乐及戏曲里面常用的打击乐记谱方法", - "answer_start": 4 - } - ] - }, - { - "question": "锣鼓经常用的节奏型称为什么?", - "id": "DEV_1_QUERY_1", - "answers": [ - { - "text": "锣鼓点", - "answer_start": 67 - }, - { - "text": "锣鼓点", - "answer_start": 67 - }, - { - "text": "锣鼓点", - "answer_start": 67 - } - ] - }, - { - "question": "锣鼓经运用的程式是什么?", - "id": "DEV_1_QUERY_2", - "answers": [ - { - "text": "依照角色行当的身份、性格、情绪以及环境,配合相应的锣鼓点。", - "answer_start": 167 - }, - { - "text": "依照角色行当的身份、性格、情绪以及环境,配合相应的锣鼓点。", - "answer_start": 167 - }, - { - "text": "依照角色行当的身份、性格、情绪以及环境,配合相应的锣鼓点", - "answer_start": 167 - } - ] - }, - { - "question": "戏曲锣鼓所运用的敲击乐器主要有什么类型?", - "id": "DEV_1_QUERY_3", - "answers": [ - { - "text": "鼓、锣、钹和板", - "answer_start": 237 - }, - { - "text": "鼓、锣、钹和板", - "answer_start": 237 - }, - { - "text": "鼓、锣、钹和板", - "answer_start": 237 - } - ] - } - ] - } - ], - "id": "DEV_1", - "title": "锣鼓经" - } - ] -} \ No newline at end of file diff --git a/tests/data_for_tests/io/cmrc/train.json b/tests/data_for_tests/io/cmrc/train.json deleted file mode 100644 index 823b9c80..00000000 --- a/tests/data_for_tests/io/cmrc/train.json +++ /dev/null @@ -1,161 +0,0 @@ -{ - "version": "v1.0", - "data": [ - { - "paragraphs": [ - { - "id": "TRAIN_186", - "context": "范廷颂枢机(,),圣名保禄·若瑟(),是越南罗马天主教枢机。1963年被任为主教;1990年被擢升为天主教河内总教区宗座署理;1994年被擢升为总主教,同年年底被擢升为枢机;2009年2月离世。范廷颂于1919年6月15日在越南宁平省天主教发艳教区出生;童年时接受良好教育后,被一位越南神父带到河内继续其学业。范廷颂于1940年在河内大修道院完成神学学业。范廷颂于1949年6月6日在河内的主教座堂晋铎;及后被派到圣女小德兰孤儿院服务。1950年代,范廷颂在河内堂区创建移民接待中心以收容到河内避战的难民。1954年,法越战争结束,越南民主共和国建都河内,当时很多天主教神职人员逃至越南的南方,但范廷颂仍然留在河内。翌年管理圣若望小修院;惟在1960年因捍卫修院的自由、自治及拒绝政府在修院设政治课的要求而被捕。1963年4月5日,教宗任命范廷颂为天主教北宁教区主教,同年8月15日就任;其牧铭为「我信天主的爱」。由于范廷颂被越南政府软禁差不多30年,因此他无法到所属堂区进行牧灵工作而专注研读等工作。范廷颂除了面对战争、贫困、被当局迫害天主教会等问题外,也秘密恢复修院、创建女修会团体等。1990年,教宗若望保禄二世在同年6月18日擢升范廷颂为天主教河内总教区宗座署理以填补该教区总主教的空缺。1994年3月23日,范廷颂被教宗若望保禄二世擢升为天主教河内总教区总主教并兼天主教谅山教区宗座署理;同年11月26日,若望保禄二世擢升范廷颂为枢机。范廷颂在1995年至2001年期间出任天主教越南主教团主席。2003年4月26日,教宗若望保禄二世任命天主教谅山教区兼天主教高平教区吴光杰主教为天主教河内总教区署理主教;及至2005年2月19日,范廷颂因获批辞去总主教职务而荣休;吴光杰同日真除天主教河内总教区总主教职务。范廷颂于2009年2月22日清晨在河内离世,享年89岁;其葬礼于同月26日上午在天主教河内总教区总主教座堂举行。", - "qas": [ - { - "question": "范廷颂是什么时候被任为主教的?", - "id": "TRAIN_186_QUERY_0", - "answers": [ - { - "text": "1963年", - "answer_start": 30 - } - ] - }, - { - "question": "1990年,范廷颂担任什么职务?", - "id": "TRAIN_186_QUERY_1", - "answers": [ - { - "text": "1990年被擢升为天主教河内总教区宗座署理", - "answer_start": 41 - } - ] - }, - { - "question": "范廷颂是于何时何地出生的?", - "id": "TRAIN_186_QUERY_2", - "answers": [ - { - "text": "范廷颂于1919年6月15日在越南宁平省天主教发艳教区出生", - "answer_start": 97 - } - ] - }, - { - "question": "1994年3月,范廷颂担任什么职务?", - "id": "TRAIN_186_QUERY_3", - "answers": [ - { - "text": "1994年3月23日,范廷颂被教宗若望保禄二世擢升为天主教河内总教区总主教并兼天主教谅山教区宗座署理", - "answer_start": 548 - } - ] - }, - { - "question": "范廷颂是何时去世的?", - "id": "TRAIN_186_QUERY_4", - "answers": [ - { - "text": "范廷颂于2009年2月22日清晨在河内离世", - 
"answer_start": 759 - } - ] - } - ] - } - ], - "id": "TRAIN_186", - "title": "范廷颂" - }, - { - "paragraphs": [ - { - "id": "TRAIN_54", - "context": "安雅·罗素法(,),来自俄罗斯圣彼得堡的模特儿。她是《全美超级模特儿新秀大赛》第十季的亚军。2008年,安雅宣布改回出生时的名字:安雅·罗素法(Anya Rozova),在此之前是使用安雅·冈()。安雅于俄罗斯出生,后来被一个居住在美国夏威夷群岛欧胡岛檀香山的家庭领养。安雅十七岁时曾参与香奈儿、路易·威登及芬迪(Fendi)等品牌的非正式时装秀。2007年,她于瓦伊帕胡高级中学毕业。毕业后,她当了一名售货员。她曾为Russell Tanoue拍摄照片,Russell Tanoue称赞她是「有前途的新面孔」。安雅在半准决赛面试时说她对模特儿行业充满热诚,所以参加全美超级模特儿新秀大赛。她于比赛中表现出色,曾五次首名入围,平均入围顺序更拿下历届以来最优异的成绩(2.64),另外胜出三次小挑战,分别获得与评判尼祖·百克拍照、为柠檬味道的七喜拍摄广告的机会及十万美元、和盖马蒂洛(Gai Mattiolo)设计的晚装。在最后两强中,安雅与另一名参赛者惠妮·汤姆森为范思哲走秀,但评判认为她在台上不够惠妮突出,所以选了惠妮当冠军,安雅屈居亚军(但就整体表现来说,部份网友认为安雅才是第十季名副其实的冠军。)安雅在比赛拿五次第一,也胜出多次小挑战。安雅赛后再次与Russell Tanoue合作,为2008年4月30日出版的MidWeek杂志拍摄封面及内页照。其后她参加了V杂志与Supreme模特儿公司合办的模特儿选拔赛2008。她其后更与Elite签约。最近她与香港的模特儿公司 Style International Management 签约,并在香港发展其模特儿事业。她曾在很多香港的时装杂志中任模特儿,《Jet》、《东方日报》、《Elle》等。", - "qas": [ - { - "question": "安雅·罗素法参加了什么比赛获得了亚军?", - "id": "TRAIN_54_QUERY_0", - "answers": [ - { - "text": "《全美超级模特儿新秀大赛》第十季", - "answer_start": 26 - } - ] - }, - { - "question": "Russell Tanoue对安雅·罗素法的评价是什么?", - "id": "TRAIN_54_QUERY_1", - "answers": [ - { - "text": "有前途的新面孔", - "answer_start": 247 - } - ] - }, - { - "question": "安雅·罗素法合作过的香港杂志有哪些?", - "id": "TRAIN_54_QUERY_2", - "answers": [ - { - "text": "《Jet》、《东方日报》、《Elle》等", - "answer_start": 706 - } - ] - }, - { - "question": "毕业后的安雅·罗素法职业是什么?", - "id": "TRAIN_54_QUERY_3", - "answers": [ - { - "text": "售货员", - "answer_start": 202 - } - ] - } - ] - } - ], - "id": "TRAIN_54", - "title": "安雅·罗素法" - }, - { - "paragraphs": [ - { - "id": "TRAIN_756", - "context": "为日本漫画足球小将翼的一个角色,自小父母离异,与父亲一起四处为家,每个地方也是待一会便离开,但他仍然能够保持优秀的学业成绩。在第一次南葛市生活时,与同样就读于南葛小学的大空翼为黄金拍档,曾效力球队包括南葛小学、南葛高中、日本少年队、日本青年军、日本奥运队。效力日本青年军期间,因救同母异父的妹妹导致被车撞至断脚,在决赛周只在决赛的下半场十五分钟开始上场,成为日本队夺得世青冠军的其中一名功臣。基本资料绰号:球场上的艺术家出身地:日本南葛市诞生日:5月5日星座:金牛座球衣号码:11担任位置:中场、攻击中场、右中场擅长脚:右脚所属队伍:盘田山叶故事发展岬太郎在小学期间不断转换学校,在南葛小学就读时在全国大赛中夺得冠军;国中三年随父亲孤单地在法国留学;回国后三年的高中生涯一直输给日本王牌射手日向小次郎率领的东邦学院。在【Golden 23】年代,大空翼、日向小次郎等名将均转战海外,他与松山光、三杉淳组成了「3M」组合(松山光Hikaru Matsuyama、岬太郎Taro Misaki、三杉淳Jyun Misugi)。必杀技1. 回力刀射门2. S. S. S. 射门3. 双人射门(与大空翼合作)", - "qas": [ - { - "question": "岬太郎在第一次南葛市生活时的搭档是谁?", - "id": "TRAIN_756_QUERY_0", - "answers": [ - { - "text": "大空翼", - "answer_start": 84 - } - ] - }, - { - "question": "日本队夺得世青冠军,岬太郎发挥了什么作用?", - "id": "TRAIN_756_QUERY_1", - "answers": [ - { - "text": "在决赛周只在决赛的下半场十五分钟开始上场,成为日本队夺得世青冠军的其中一名功臣。", - "answer_start": 156 - } - ] - }, - { - "question": "岬太郎与谁一起组成了「3M」组合?", - "id": "TRAIN_756_QUERY_2", - "answers": [ - { - "text": "他与松山光、三杉淳组成了「3M」组合(松山光Hikaru Matsuyama、岬太郎Taro Misaki、三杉淳Jyun Misugi)。", - "answer_start": 391 - } - ] - } - ] - } - ], - "id": "TRAIN_756", - "title": "岬太郎" - } - ] -} \ No newline at end of file diff --git a/tests/data_for_tests/io/cnndm/dev.label.jsonl b/tests/data_for_tests/io/cnndm/dev.label.jsonl deleted file mode 100644 index 52a56ab0..00000000 --- a/tests/data_for_tests/io/cnndm/dev.label.jsonl +++ /dev/null @@ -1,4 +0,0 @@ -{"label": [1, 19, 25], "text": ["marseille , france -lrb- cnn -rrb- the french prosecutor leading an investigation into the crash of germanwings flight 9525 insisted wednesday that he was not aware of any video footage from on board the plane .", "marseille prosecutor brice robin told cnn that `` so far no videos were used in the crash investigation . ''", "he added , `` a person who has such a video needs to immediately give it to the investigators . 
''", "robin 's comments follow claims by two magazines , german daily bild and french paris match , of a cell phone video showing the harrowing final seconds from on board germanwings flight 9525 as it crashed into the french alps .", "all 150 on board were killed .", "paris match and bild reported that the video was recovered from a phone at the wreckage site .", "the two publications described the supposed video , but did not post it on their websites .", "the publications said that they watched the video , which was found by a source close to the investigation .", "`` one can hear cries of ` my god ' in several languages , '' paris match reported .", "`` metallic banging can also be heard more than three times , perhaps of the pilot trying to open the cockpit door with a heavy object .", "towards the end , after a heavy shake , stronger than the others , the screaming intensifies .", "then nothing . ''", "`` it is a very disturbing scene , '' said julian reichelt , editor-in-chief of bild online .", "an official with france 's accident investigation agency , the bea , said the agency is not aware of any such video .", "lt. col. jean-marc menichini , a french gendarmerie spokesman in charge of communications on rescue efforts around the germanwings crash site , told cnn that the reports were `` completely wrong '' and `` unwarranted . ''", "cell phones have been collected at the site , he said , but that they `` had n't been exploited yet . ''", "menichini said he believed the cell phones would need to be sent to the criminal research institute in rosny sous-bois , near paris , in order to be analyzed by specialized technicians working hand-in-hand with investigators .", "but none of the cell phones found so far have been sent to the institute , menichini said .", "asked whether staff involved in the search could have leaked a memory card to the media , menichini answered with a categorical `` no . ''", "reichelt told `` erin burnett : outfront '' that he had watched the video and stood by the report , saying bild and paris match are `` very confident '' that the clip is real .", "he noted that investigators only revealed they 'd recovered cell phones from the crash site after bild and paris match published their reports .", "`` that is something we did not know before .", "... 
overall we can say many things of the investigation were n't revealed by the investigation at the beginning , '' he said .", "what was mental state of germanwings co-pilot ?", "german airline lufthansa confirmed tuesday that co-pilot andreas lubitz had battled depression years before he took the controls of germanwings flight 9525 , which he 's accused of deliberately crashing last week in the french alps .", "lubitz told his lufthansa flight training school in 2009 that he had a `` previous episode of severe depression , '' the airline said tuesday .", "email correspondence between lubitz and the school discovered in an internal investigation , lufthansa said , included medical documents he submitted in connection with resuming his flight training .", "the announcement indicates that lufthansa , the parent company of germanwings , knew of lubitz 's battle with depression , allowed him to continue training and ultimately put him in the cockpit .", "lufthansa , whose ceo carsten spohr previously said lubitz was 100 % fit to fly , described its statement tuesday as a `` swift and seamless clarification '' and said it was sharing the information and documents -- including training and medical records -- with public prosecutors .", "spohr traveled to the crash site wednesday , where recovery teams have been working for the past week to recover human remains and plane debris scattered across a steep mountainside .", "he saw the crisis center set up in seyne-les-alpes , laid a wreath in the village of le vernet , closer to the crash site , where grieving families have left flowers at a simple stone memorial .", "menichini told cnn late tuesday that no visible human remains were left at the site but recovery teams would keep searching .", "french president francois hollande , speaking tuesday , said that it should be possible to identify all the victims using dna analysis by the end of the week , sooner than authorities had previously suggested .", "in the meantime , the recovery of the victims ' personal belongings will start wednesday , menichini said .", "among those personal belongings could be more cell phones belonging to the 144 passengers and six crew on board .", "check out the latest from our correspondents .", "the details about lubitz 's correspondence with the flight school during his training were among several developments as investigators continued to delve into what caused the crash and lubitz 's possible motive for downing the jet .", "a lufthansa spokesperson told cnn on tuesday that lubitz had a valid medical certificate , had passed all his examinations and `` held all the licenses required . 
''", "earlier , a spokesman for the prosecutor 's office in dusseldorf , christoph kumpa , said medical records reveal lubitz suffered from suicidal tendencies at some point before his aviation career and underwent psychotherapy before he got his pilot 's license .", "kumpa emphasized there 's no evidence suggesting lubitz was suicidal or acting aggressively before the crash .", "investigators are looking into whether lubitz feared his medical condition would cause him to lose his pilot 's license , a european government official briefed on the investigation told cnn on tuesday .", "while flying was `` a big part of his life , '' the source said , it 's only one theory being considered .", "another source , a law enforcement official briefed on the investigation , also told cnn that authorities believe the primary motive for lubitz to bring down the plane was that he feared he would not be allowed to fly because of his medical problems .", "lubitz 's girlfriend told investigators he had seen an eye doctor and a neuropsychologist , both of whom deemed him unfit to work recently and concluded he had psychological issues , the european government official said .", "but no matter what details emerge about his previous mental health struggles , there 's more to the story , said brian russell , a forensic psychologist .", "`` psychology can explain why somebody would turn rage inward on themselves about the fact that maybe they were n't going to keep doing their job and they 're upset about that and so they 're suicidal , '' he said .", "`` but there is no mental illness that explains why somebody then feels entitled to also take that rage and turn it outward on 149 other people who had nothing to do with the person 's problems . ''", "germanwings crash compensation : what we know .", "who was the captain of germanwings flight 9525 ?", "cnn 's margot haddad reported from marseille and pamela brown from dusseldorf , while laura smith-spark wrote from london .", "cnn 's frederik pleitgen , pamela boykoff , antonia mortensen , sandrine amiel and anna-maja rappard contributed to this report ."], "summary": ["marseille prosecutor says `` so far no videos were used in the crash investigation '' despite media reports .", "journalists at bild and paris match are `` very confident '' the video clip is real , an editor says .", "andreas lubitz had informed his lufthansa training school of an episode of severe depression , airline says ."], "publication": "cnndm", "compression": 22.283333333333335, "coverage": 0.8666666666666667, "density": 4.6} -{"label": [3, 5, 24], "text": ["-lrb- cnn -rrb- the palestinian authority officially became the 123rd member of the international criminal court on wednesday , a step that gives the court jurisdiction over alleged crimes in palestinian territories .", "the formal accession was marked with a ceremony at the hague , in the netherlands , where the court is based .", "the palestinians signed the icc 's founding rome statute in january , when they also accepted its jurisdiction over alleged crimes committed `` in the occupied palestinian territory , including east jerusalem , since june 13 , 2014 . 
''", "later that month , the icc opened a preliminary examination into the situation in palestinian territories , paving the way for possible war crimes investigations against israelis .", "as members of the court , palestinians may be subject to counter-charges as well .", "israel and the united states , neither of which is an icc member , opposed the palestinians ' efforts to join the body .", "but palestinian foreign minister riad al-malki , speaking at wednesday 's ceremony , said it was a move toward greater justice .", "`` as palestine formally becomes a state party to the rome statute today , the world is also a step closer to ending a long era of impunity and injustice , '' he said , according to an icc news release .", "`` indeed , today brings us closer to our shared goals of justice and peace . ''", "judge kuniko ozaki , a vice president of the icc , said acceding to the treaty was just the first step for the palestinians .", "`` as the rome statute today enters into force for the state of palestine , palestine acquires all the rights as well as responsibilities that come with being a state party to the statute .", "these are substantive commitments , which can not be taken lightly , '' she said .", "rights group human rights watch welcomed the development .", "`` governments seeking to penalize palestine for joining the icc should immediately end their pressure , and countries that support universal acceptance of the court 's treaty should speak out to welcome its membership , '' said balkees jarrah , international justice counsel for the group .", "`` what 's objectionable is the attempts to undermine international justice , not palestine 's decision to join a treaty to which over 100 countries around the world are members . ''", "in january , when the preliminary icc examination was opened , israeli prime minister benjamin netanyahu described it as an outrage , saying the court was overstepping its boundaries .", "the united states also said it `` strongly '' disagreed with the court 's decision .", "`` as we have said repeatedly , we do not believe that palestine is a state and therefore we do not believe that it is eligible to join the icc , '' the state department said in a statement .", "it urged the warring sides to resolve their differences through direct negotiations .", "`` we will continue to oppose actions against israel at the icc as counterproductive to the cause of peace , '' it said .", "but the icc begs to differ with the definition of a state for its purposes and refers to the territories as `` palestine . ''", "while a preliminary examination is not a formal investigation , it allows the court to review evidence and determine whether to investigate suspects on both sides .", "prosecutor fatou bensouda said her office would `` conduct its analysis in full independence and impartiality . 
''", "the war between israel and hamas militants in gaza last summer left more than 2,000 people dead .", "the inquiry will include alleged war crimes committed since june .", "the international criminal court was set up in 2002 to prosecute genocide , crimes against humanity and war crimes .", "cnn 's vasco cotovio , kareem khadder and faith karimi contributed to this report ."], "summary": ["membership gives the icc jurisdiction over alleged crimes committed in palestinian territories since last june .", "israel and the united states opposed the move , which could open the door to war crimes investigations against israelis ."], "publication": "cnndm", "compression": 17.57894736842105, "coverage": 0.8947368421052632, "density": 3.1052631578947367} -{"label": [0, 6], "text": ["-lrb- cnn -rrb- governments around the world are using the threat of terrorism -- real or perceived -- to advance executions , amnesty international alleges in its annual report on the death penalty .", "`` the dark trend of governments using the death penalty in a futile attempt to tackle real or imaginary threats to state security and public safety was stark last year , '' said salil shetty , amnesty 's secretary general in a release .", "`` it is shameful that so many states around the world are essentially playing with people 's lives -- putting people to death for ` terrorism ' or to quell internal instability on the ill-conceived premise of deterrence . ''", "the report , `` death sentences and executions 2014 , '' cites the example of pakistan lifting a six-year moratorium on the execution of civilians following the horrific attack on a school in peshawar in december .", "china is also mentioned , as having used the death penalty as a tool in its `` strike hard '' campaign against terrorism in the restive far-western province of xinjiang .", "the annual report catalogs the use of state-sanctioned killing as a punitive measure across the globe , and this year 's edition contains some mixed findings .", "on one hand , the number of executions worldwide has gone down by almost 22 % on the previous year .", "at least 607 people were executed around the world in 2014 , compared to 778 in 2013 .", "amnesty 's figures do not include statistics on executions carried out in china , where information on the practice is regarded as a state secret .", "belarus and vietnam , too , do not release data on death penalty cases .", "`` the long-term trend is definitely positive -- we are seeing a decrease in the number of executions -lrb- worldwide -rrb- , '' audrey gaughran , amnesty 's director of global issues , told cnn .", "`` a number of countries are closer to abolition , and there are some signs that some countries will be abolitionist by 2015 .", "-lrb- there are -rrb- signals of a world that is nearing abolition . ''", "while the report notes some encouraging signs , it also highlights a marked increase in the number of people sentenced to death in 2014 .", "at least 2,466 people globally are confirmed to have been handed the sentence last year , an increase of 28 % compared with 2013 .", "the report notes that the spike in sentencing is attributable to mass-sentencing in countries including egypt and nigeria , `` against scores of people in some cases . 
''", "the organization found `` positive developments '' worldwide , with most regions seeming to show reductions in the number of executions .", "opinion : sharp spike in death sentences .", "sub-saharan africa , for example , saw a 28 % fall in reported cases , and executions recorded in the middle east and north africa were down 23 % compared to 2013 .", "`` even though we 've highlighted some of the negative developments ... i think we would always highlight that there are positive developments , '' gaughran said .", "`` across the board , with the exception of europe and central asia there were fewer reports of executions in every region . ''", "the resumption of the use of capital punishment in belarus -- the only country in europe and central asia to execute people -- after a two year hiatus spoiled an near-universal decrease in countries using the death penalty by region .", "the united states has the dubious distinction of being the only country in the americas to conduct executions , but the number of convicts put to death here fell slightly , from 39 in 2013 to 35 in 2014 .", "the state of washington also imposed a moratorium on executions last year .", "the u.s. remains one of the worst offenders for imposing capital punishment , with only iran -lrb- 289 + -rrb- , iraq -lrb- 61 + -rrb- , and saudi arabia -lrb- 90 + -rrb- executing more people in 2014 .", "while figures are not available , amnesty estimates that china also executes `` thousands '' of prisoners each year , `` more than the rest of the world put together . ''", "the report also highlights the imperfections in the judiciary processes that lead to many sentenced to death .", "`` in the majority of countries where people were sentenced to death or executed , the death penalty was imposed after proceedings that did not meet international fair trial standards , '' the report stated .", "`` in 2014 amnesty international raised particular concerns in relation to court proceedings in afghanistan , bangladesh , china , egypt , iran , iraq , north korea , pakistan , saudi arabia and sri lanka . ''", "the united nations secretary-general , ban ki-moon , last year stressed the need to move toward abolition of capital punishment .", "`` the taking of life is too irreversible for one human being to inflict it on another , '' he said , in marking world day against death penalty in october .", "`` we must continue to argue strongly that the death penalty is unjust and incompatible with fundamental human rights . 
''", "amnesty estimates that at least 19,094 people were believed to be on death row at the end of 2014 ."], "summary": ["amnesty 's annual death penalty report catalogs encouraging signs , but setbacks in numbers of those sentenced to death .", "organization claims that governments around the world are using the threat of terrorism to advance executions .", "the number of executions worldwide has gone down by almost 22 % compared with 2013 , but death sentences up by 28 % ."], "publication": "cnndm", "compression": 14.841269841269842, "coverage": 0.8888888888888888, "density": 5.079365079365079} -{"label": [8, 9, 34], "text": ["-lrb- cnn -rrb- on may 28 , 2014 , some 7,000 people gathered in a stadium in china 's northwestern xinjiang region .", "but they had not come to watch the local football team or any other grand sporting event .", "instead , the authorities paraded scores of prisoners dressed in orange jumpsuits .", "armed soldiers guarded the exits .", "in the patently unfair , open air trial that followed , 55 people were found guilty of a range of offenses linked to violent attacks in the region and jailed .", "three were sentenced to death .", "the public mass sentencing was part a china 's `` strike hard '' campaign against unrest in xinjiang , a campaign the government claims was launched to combat `` terrorism '' and `` separatism . ''", "but it was also indicative of a trend that was starkly evident last year around the world -- governments using the death penalty in a misguided , and often cynical , attempt to tackle crime and terrorism .", "today , amnesty international releases its annual review of the death penalty worldwide .", "much of it makes for grim reading .", "in pakistan , the government lifted a six-year moratorium on the execution of civilians in the wake of the horrific taliban attack on a school in peshawar in december .", "more than 60 people have been put to death since , and the government has threatened to send thousands more death row prisoners to the gallows .", "iran and iraq executed people for `` terrorism , '' and other countries expanded the scope of capital crimes in their penal codes .", "in a year when abhorrent summary executions by armed groups were branded on the global consciousness as never before , governments are themselves resorting to more executions in a knee-jerk reaction to terrorism .", "other countries made use of executions in similarly flawed attempts to address -- or appear to address -- crime rates .", "jordan ended an eight-year moratorium in december , putting 11 murder convicts to death , with the government saying it was a move to end a surge in violent crime .", "in indonesia , authorities announced plans to execute mainly drug traffickers to tackle a public safety `` national emergency . 
''", "six people have already been executed this year .", "a sharp spike in death sentences recorded in 2014 -- up more than 500 on the previous year -- can also be attributed to governments using the death penalty as a political tool .", "the rise was largely because of developments in egypt and nigeria , where courts imposed hundreds of death sentences in the context of internal political instability or crime and armed conflict .", "the simple fact is that governments using the death penalty to tackle crime and security threats are deceiving themselves or the public or both .", "there is no evidence that the threat of execution is more of a deterrent to crime than a prison sentence , as united nations and other studies have repeatedly confirmed .", "it is high time that world leaders stop using the death penalty as an easy way out when times get tough .", "at amnesty international , we have campaigned for an end to the death penalty for decades .", "thankfully , most of the world now appears to agree with us .", "the numbers speak for themselves .", "in 1945 when the united nations was founded , only eight countries had abolished the death penalty .", "today , 140 states are abolitionist in law or practice .", "last year , we recorded executions in 22 countries , down by almost a half from 20 years ago .", "despite the troubling developments we recorded last year , there was still much good news to be found .", "the number of executions recorded around the world dropped significantly in 2014 compared with the previous year , from 778 to 607 .", "this number does not include china , where more people are put to death than the rest of the world put together , but with death penalty statistics treated as a state secret , the true figure is impossible to determine .", "executions were recorded in only three countries in sub-saharan africa -- equatorial guinea , somalia and sudan -- and the number of people put to death went down by more than a quarter .", "the americas continued to be execution-free , apart from the united states .", "those governments that still execute need to realize that they are on the wrong side of history .", "they must join the vast majority of countries which have dropped the ultimate cruel punishment .", "fighting for an end to the death penalty remains an uphill task , but all of us must try to make the world free of this punishment .", "with determination , i know that we can achieve this goal ."], "summary": ["amnesty international releases its annual review of the death penalty worldwide ; much of it makes for grim reading .", "salil shetty : countries that use executions to deal with problems are on the wrong side of history ."], "publication": "cnndm", "compression": 20.85, "coverage": 0.825, "density": 6.375} diff --git a/tests/data_for_tests/io/cnndm/test.label.jsonl b/tests/data_for_tests/io/cnndm/test.label.jsonl deleted file mode 100644 index d74ebd9f..00000000 --- a/tests/data_for_tests/io/cnndm/test.label.jsonl +++ /dev/null @@ -1,4 +0,0 @@ -{"label": [2, 3], "text": ["-lrb- cnn -rrb- the rev.", "robert h. 
schuller , california televangelist and founder of the television ministry `` hour of power , '' died thursday , according to his family .", "he was 88 years old .", "schuller , also the founder of crystal cathedral megachurch , had been diagnosed with esophageal cancer in august 2013 , a release from `` hour of power '' said .", "`` my father-in-law passed away peacefully early this morning .", "he was a great dad and a great man of god , '' said schuller 's daughter-in-law , donna schuller , in a twitter message .", "schuller 's life followed an almost shakespearean arc .", "he was born in a iowa farmhouse without running water and longed to preach from his earliest days .", "in his autobiography , `` prayer : my soul 's adventure with god , '' he described standing alone by a river and picturing himself delivering sermons to a rapt congregation .", "after attending a hope college and western theological seminary in michigan , he met his wife of more than 60 years , arvella , while preaching at her church -lrb- she was the organist -rrb- .", "with their young family in tow , the schullers caravanned west to california , where he rented a drive-in theater and preached from the roof of the snack bar .", "it was beneath the dignity of christian ministry , some local pastors huffed .", "the `` passion pits '' where teenagers necked was no place for the gospel .", "schuller was undeterred , and he quickly outgrew the drive-in .", "he called the explosive growth of his tiny congregation a `` miracle , '' though his many mainstream critics had other names for it .", "his confident , breezy version of christianity -- too breezy , by some estimations -- drew hordes of seekers and lapsed christians who were put off by the hellfire fulminations of many post-war american preachers .", "schuller sold a softer , gentler message , which borrowed heavily , he acknowledged , from the father of the feel-good gospel , norman vincent peale .", "he preached not to convert or condemn people , but to encourage them , a sentiment he called `` possibility thinking . ''", "people loved it .", "`` evangelicalism at its best wants to be innovative and reach people , '' said timothy larsen , a professor of christian thought at wheaton college in illinois .", "`` and schuller was a master at that . ''", "`` what he got right is that the gospel is good news , '' larsen continued .", "`` and he preached an uplifting message about personal transformation and uplift and hope . ''", "some of schuller 's favored phrases , though , struck others as cornpone christianity .", "`` turn your hurt into a halo ? ''", "said randall balmer , a professor of american religious history at dartmouth college , citing one such phrase .", "`` that 's pretty weak tea . ''", "still , balmer gives schuller some credit .", "`` it may be bad theology , but it 's brilliant marketing . 
''", "in 1970 , schuller began broadcasting `` hour of power , '' believed to be one of the first , if not the very first , sunday service to be shown regularly on television .", "with his genial smile , priestly robes and gray hair , he looked and talked like a guy who wanted nothing more than to see his flock succeed .", "the show , which ran for decades , reached millions , making schuller a televangelist before the term became tarnished by the sins of his many successors .", "schuller 's crowning achievement , at least architecturally , still stands in orange county , california , though it is now owned by the roman catholic church .", "the crystal cathedral , a great gleaming edifice with 10,000 glass panels , gave worshipers a look at the clouds that house the heavens , while schuller preached in the pulpit below .", "the message was clear to many : the road to the former ran through the latter .", "during the 1980s and 1990s , schuller 's star continued to rise , with presidents stopping by the crystal cathedral -- often during campaigns , it should be said -- and future megachurch pastors like rick warren and bill hybels seeking his advice .", "as schuller aged , though , his family was beset by a succession scandal straight from the pages of `` king lear . ''", "he tried to install his only son , bobby jr. , as pastor of crystal cathedral .", "but the preaching styles of father and son were too different for the congregation -- measured at times at 10,000 strong -- to countenance .", "bobby schuller jr. left `` hour of power '' and the pulpit at crystal cathedral after a short time .", "as the family searched for a new successor and tussled over finances , viewers and donations to the church and its television show dropped precipitously .", "crystal cathedral ministries filed for bankruptcy in 2010 , citing debts of more than $ 43 million , according to the associated press .", "schuller 's empire , which once soared as high as his glassy cathedral , had fallen to dust .", "eventually , schuller 's grandson , also named bobby , took over `` hour of power , '' though at a different church .", "in a statement on thursday , the younger schuller recalled standing atop crystal cathedral 's 12-story tower of hope with his grandfather as they surveyed the surrounding landscape .", "`` you could see the whole world from there , '' he said .", "people we 've lost in 2015 .", "cnn 's stella chan reported from los angeles ."], "summary": ["the rev.", "robert schuller , 88 , had been diagnosed with esophageal cancer in 2013 .", "his tv show , `` hour of power , '' was enormously popular in the 1970s and 1980s ."], "publication": "cnndm", "compression": 26.342105263157894, "coverage": 0.8421052631578947, "density": 3.4210526315789473} -{"label": [4, 6], "text": ["-lrb- cnn -rrb- never mind cats having nine lives .", "a stray pooch in washington state has used up at least three of her own after being hit by a car , apparently whacked on the head with a hammer in a misguided mercy killing and then buried in a field -- only to survive .", "that 's according to washington state university , where the dog -- a friendly white-and-black bully breed mix now named theia -- has been receiving care at the veterinary teaching hospital .", "four days after her apparent death , the dog managed to stagger to a nearby farm , dirt-covered and emaciated , where she was found by a worker who took her to a vet for help .", "she was taken in by moses lake , washington , resident sara mellado .", "`` considering 
everything that she 's been through , she 's incredibly gentle and loving , '' mellado said , according to wsu news .", "`` she 's a true miracle dog and she deserves a good life . ''", "theia is only one year old but the dog 's brush with death did not leave her unscathed .", "she suffered a dislocated jaw , leg injuries and a caved-in sinus cavity -- and still requires surgery to help her breathe .", "the veterinary hospital 's good samaritan fund committee awarded some money to help pay for the dog 's treatment , but mellado has set up a fundraising page to help meet the remaining cost of the dog 's care .", "she 's also created a facebook page to keep supporters updated .", "donors have already surpassed the $ 10,000 target , inspired by theia 's tale of survival against the odds .", "on the fundraising page , mellado writes , `` she is in desperate need of extensive medical procedures to fix her nasal damage and reset her jaw .", "i agreed to foster her until she finally found a loving home . ''", "she is dedicated to making sure theia gets the medical attention she needs , mellado adds , and wants to `` make sure she gets placed in a family where this will never happen to her again ! ''", "any additional funds raised will be `` paid forward '' to help other animals .", "theia is not the only animal to apparently rise from the grave in recent weeks .", "a cat in tampa , florida , found seemingly dead after he was hit by a car in january , showed up alive in a neighbor 's yard five days after he was buried by his owner .", "the cat was in bad shape , with maggots covering open wounds on his body and a ruined left eye , but remarkably survived with the help of treatment from the humane society ."], "summary": ["theia , a bully breed mix , was apparently hit by a car , whacked with a hammer and buried in a field .", "`` she 's a true miracle dog and she deserves a good life , '' says sara mellado , who is looking for a home for theia ."], "publication": "cnndm", "compression": 9.150943396226415, "coverage": 0.9433962264150944, "density": 4.7924528301886795} -{"label": [32, 36], "text": ["-lrb- cnn -rrb- if you 've been following the news lately , there are certain things you doubtless know about mohammad javad zarif .", "he is , of course , the iranian foreign minister .", "he has been u.s. secretary of state john kerry 's opposite number in securing a breakthrough in nuclear discussions that could lead to an end to sanctions against iran -- if the details can be worked out in the coming weeks .", "and he received a hero 's welcome as he arrived in iran on a sunny friday morning .", "`` long live zarif , '' crowds chanted as his car rolled slowly down the packed street .", "you may well have read that he is `` polished '' and , unusually for one burdened with such weighty issues , `` jovial . ''", "an internet search for `` mohammad javad zarif '' and `` jovial '' yields thousands of results .", "he certainly has gone a long way to bring iran in from the cold and allow it to rejoin the international community .", "but there are some facts about zarif that are less well-known .", "here are six : .", "in september 2013 , zarif tweeted `` happy rosh hashanah , '' referring to the jewish new year .", "that prompted christine pelosi , the daughter of house minority leader nancy pelosi , to respond with a tweet of her own : `` thanks .", "the new year would be even sweeter if you would end iran 's holocaust denial , sir . 
''", "and , perhaps to her surprise , pelosi got a response .", "`` iran never denied it , '' zarif tweeted back .", "`` the man who was perceived to be denying it is now gone .", "happy new year . ''", "the reference was likely to former iranian president mahmoud ahmadinejad , who had left office the previous month .", "zarif was nominated to be foreign minister by ahmadinejad 's successor , hassan rouhami .", "his foreign ministry notes , perhaps defensively , that `` due to the political and security conditions of the time , he decided to continue his education in the united states . ''", "that is another way of saying that he was outside the country during the demonstrations against the shah of iran , which began in 1977 , and during the iranian revolution , which drove the shah from power in 1979 .", "zarif left the country in 1977 , received his undergraduate degree from san francisco state university in 1981 , his master 's in international relations from the university of denver in 1984 and his doctorate from the university of denver in 1988 .", "both of his children were born in the united states .", "the website of the iranian foreign ministry , which zarif runs , can not even agree with itself on when he was born .", "the first sentence of his official biography , perhaps in a nod to the powers that be in tehran , says zarif was `` born to a religious traditional family in tehran in 1959 . ''", "later on the same page , however , his date of birth is listed as january 8 , 1960 .", "and the iranian diplomacy website says he was born in in 1961 .", "so he is 54 , 55 or maybe even 56 .", "whichever , he is still considerably younger than his opposite number , kerry , who is 71 .", "the feds investigated him over his alleged role in controlling the alavi foundation , a charitable organization .", "the u.s. justice department said the organization was secretly run on behalf of the iranian government to launder money and get around u.s. sanctions .", "but last year , a settlement in the case , under which the foundation agreed to give a 36-story building in manhattan along with other properties to the u.s. government , did not mention zarif 's name .", "early in the iranian revolution , zarif was among the students who took over the iranian consulate in san francisco .", "the aim , says the website iranian.com -- which cites zarif 's memoirs , titled `` mr. ambassador '' -- was to expel from the consulate people who were not sufficiently islamic .", "later , the website says , zarif went to make a similar protest at the iranian mission to the united nations .", "in response , the iranian ambassador to the united nations offered him a job .", "in fact , he has now spent more time with kerry than any other foreign minister in the world .", "and that amount of quality time will only increase as the two men , with help from other foreign ministers as well , try to meet a june 30 deadline for nailing down the details of the agreement they managed to outline this week in switzerland ."], "summary": ["mohammad javad zarif has spent more time with john kerry than any other foreign minister .", "he once participated in a takeover of the iranian consulate in san francisco .", "the iranian foreign minister tweets in english ."], "publication": "cnndm", "compression": 20.85, "coverage": 0.825, "density": 2.825} -{"label": [2], "text": ["-lrb- cnn -rrb- for the first time in eight years , a tv legend returned to doing what he does best .", "contestants told to `` come on down ! 
''", "on the april 1 edition of `` the price is right '' encountered not host drew carey but another familiar face in charge of the proceedings .", "instead , there was bob barker , who hosted the tv game show for 35 years before stepping down in 2007 .", "looking spry at 91 , barker handled the first price-guessing game of the show , the classic `` lucky seven , '' before turning hosting duties over to carey , who finished up .", "despite being away from the show for most of the past eight years , barker did n't seem to miss a beat ."], "summary": ["bob barker returned to host `` the price is right '' on wednesday .", "barker , 91 , had retired as host in 2007 ."], "publication": "cnndm", "compression": 5.346153846153846, "coverage": 0.8076923076923077, "density": 2.5} diff --git a/tests/data_for_tests/io/cnndm/train.cnndm.jsonl b/tests/data_for_tests/io/cnndm/train.cnndm.jsonl deleted file mode 100644 index 97719a61..00000000 --- a/tests/data_for_tests/io/cnndm/train.cnndm.jsonl +++ /dev/null @@ -1,10 +0,0 @@ -{"label": [1, 19, 25], "text": ["marseille , france -lrb- cnn -rrb- the french prosecutor leading an investigation into the crash of germanwings flight 9525 insisted wednesday that he was not aware of any video footage from on board the plane .", "marseille prosecutor brice robin told cnn that `` so far no videos were used in the crash investigation . ''", "he added , `` a person who has such a video needs to immediately give it to the investigators . ''", "robin 's comments follow claims by two magazines , german daily bild and french paris match , of a cell phone video showing the harrowing final seconds from on board germanwings flight 9525 as it crashed into the french alps .", "all 150 on board were killed .", "paris match and bild reported that the video was recovered from a phone at the wreckage site .", "the two publications described the supposed video , but did not post it on their websites .", "the publications said that they watched the video , which was found by a source close to the investigation .", "`` one can hear cries of ` my god ' in several languages , '' paris match reported .", "`` metallic banging can also be heard more than three times , perhaps of the pilot trying to open the cockpit door with a heavy object .", "towards the end , after a heavy shake , stronger than the others , the screaming intensifies .", "then nothing . ''", "`` it is a very disturbing scene , '' said julian reichelt , editor-in-chief of bild online .", "an official with france 's accident investigation agency , the bea , said the agency is not aware of any such video .", "lt. col. jean-marc menichini , a french gendarmerie spokesman in charge of communications on rescue efforts around the germanwings crash site , told cnn that the reports were `` completely wrong '' and `` unwarranted . ''", "cell phones have been collected at the site , he said , but that they `` had n't been exploited yet . ''", "menichini said he believed the cell phones would need to be sent to the criminal research institute in rosny sous-bois , near paris , in order to be analyzed by specialized technicians working hand-in-hand with investigators .", "but none of the cell phones found so far have been sent to the institute , menichini said .", "asked whether staff involved in the search could have leaked a memory card to the media , menichini answered with a categorical `` no . 
''", "reichelt told `` erin burnett : outfront '' that he had watched the video and stood by the report , saying bild and paris match are `` very confident '' that the clip is real .", "he noted that investigators only revealed they 'd recovered cell phones from the crash site after bild and paris match published their reports .", "`` that is something we did not know before .", "... overall we can say many things of the investigation were n't revealed by the investigation at the beginning , '' he said .", "what was mental state of germanwings co-pilot ?", "german airline lufthansa confirmed tuesday that co-pilot andreas lubitz had battled depression years before he took the controls of germanwings flight 9525 , which he 's accused of deliberately crashing last week in the french alps .", "lubitz told his lufthansa flight training school in 2009 that he had a `` previous episode of severe depression , '' the airline said tuesday .", "email correspondence between lubitz and the school discovered in an internal investigation , lufthansa said , included medical documents he submitted in connection with resuming his flight training .", "the announcement indicates that lufthansa , the parent company of germanwings , knew of lubitz 's battle with depression , allowed him to continue training and ultimately put him in the cockpit .", "lufthansa , whose ceo carsten spohr previously said lubitz was 100 % fit to fly , described its statement tuesday as a `` swift and seamless clarification '' and said it was sharing the information and documents -- including training and medical records -- with public prosecutors .", "spohr traveled to the crash site wednesday , where recovery teams have been working for the past week to recover human remains and plane debris scattered across a steep mountainside .", "he saw the crisis center set up in seyne-les-alpes , laid a wreath in the village of le vernet , closer to the crash site , where grieving families have left flowers at a simple stone memorial .", "menichini told cnn late tuesday that no visible human remains were left at the site but recovery teams would keep searching .", "french president francois hollande , speaking tuesday , said that it should be possible to identify all the victims using dna analysis by the end of the week , sooner than authorities had previously suggested .", "in the meantime , the recovery of the victims ' personal belongings will start wednesday , menichini said .", "among those personal belongings could be more cell phones belonging to the 144 passengers and six crew on board .", "check out the latest from our correspondents .", "the details about lubitz 's correspondence with the flight school during his training were among several developments as investigators continued to delve into what caused the crash and lubitz 's possible motive for downing the jet .", "a lufthansa spokesperson told cnn on tuesday that lubitz had a valid medical certificate , had passed all his examinations and `` held all the licenses required . 
''", "earlier , a spokesman for the prosecutor 's office in dusseldorf , christoph kumpa , said medical records reveal lubitz suffered from suicidal tendencies at some point before his aviation career and underwent psychotherapy before he got his pilot 's license .", "kumpa emphasized there 's no evidence suggesting lubitz was suicidal or acting aggressively before the crash .", "investigators are looking into whether lubitz feared his medical condition would cause him to lose his pilot 's license , a european government official briefed on the investigation told cnn on tuesday .", "while flying was `` a big part of his life , '' the source said , it 's only one theory being considered .", "another source , a law enforcement official briefed on the investigation , also told cnn that authorities believe the primary motive for lubitz to bring down the plane was that he feared he would not be allowed to fly because of his medical problems .", "lubitz 's girlfriend told investigators he had seen an eye doctor and a neuropsychologist , both of whom deemed him unfit to work recently and concluded he had psychological issues , the european government official said .", "but no matter what details emerge about his previous mental health struggles , there 's more to the story , said brian russell , a forensic psychologist .", "`` psychology can explain why somebody would turn rage inward on themselves about the fact that maybe they were n't going to keep doing their job and they 're upset about that and so they 're suicidal , '' he said .", "`` but there is no mental illness that explains why somebody then feels entitled to also take that rage and turn it outward on 149 other people who had nothing to do with the person 's problems . ''", "germanwings crash compensation : what we know .", "who was the captain of germanwings flight 9525 ?", "cnn 's margot haddad reported from marseille and pamela brown from dusseldorf , while laura smith-spark wrote from london .", "cnn 's frederik pleitgen , pamela boykoff , antonia mortensen , sandrine amiel and anna-maja rappard contributed to this report ."], "summary": ["marseille prosecutor says `` so far no videos were used in the crash investigation '' despite media reports .", "journalists at bild and paris match are `` very confident '' the video clip is real , an editor says .", "andreas lubitz had informed his lufthansa training school of an episode of severe depression , airline says ."], "publication": "CNN", "compression": 22.283333333333335, "coverage": 0.8666666666666667, "density": 4.6} -{"label": [3, 5, 24], "text": ["-lrb- cnn -rrb- the palestinian authority officially became the 123rd member of the international criminal court on wednesday , a step that gives the court jurisdiction over alleged crimes in palestinian territories .", "the formal accession was marked with a ceremony at the hague , in the netherlands , where the court is based .", "the palestinians signed the icc 's founding rome statute in january , when they also accepted its jurisdiction over alleged crimes committed `` in the occupied palestinian territory , including east jerusalem , since june 13 , 2014 . 
''", "later that month , the icc opened a preliminary examination into the situation in palestinian territories , paving the way for possible war crimes investigations against israelis .", "as members of the court , palestinians may be subject to counter-charges as well .", "israel and the united states , neither of which is an icc member , opposed the palestinians ' efforts to join the body .", "but palestinian foreign minister riad al-malki , speaking at wednesday 's ceremony , said it was a move toward greater justice .", "`` as palestine formally becomes a state party to the rome statute today , the world is also a step closer to ending a long era of impunity and injustice , '' he said , according to an icc news release .", "`` indeed , today brings us closer to our shared goals of justice and peace . ''", "judge kuniko ozaki , a vice president of the icc , said acceding to the treaty was just the first step for the palestinians .", "`` as the rome statute today enters into force for the state of palestine , palestine acquires all the rights as well as responsibilities that come with being a state party to the statute .", "these are substantive commitments , which can not be taken lightly , '' she said .", "rights group human rights watch welcomed the development .", "`` governments seeking to penalize palestine for joining the icc should immediately end their pressure , and countries that support universal acceptance of the court 's treaty should speak out to welcome its membership , '' said balkees jarrah , international justice counsel for the group .", "`` what 's objectionable is the attempts to undermine international justice , not palestine 's decision to join a treaty to which over 100 countries around the world are members . ''", "in january , when the preliminary icc examination was opened , israeli prime minister benjamin netanyahu described it as an outrage , saying the court was overstepping its boundaries .", "the united states also said it `` strongly '' disagreed with the court 's decision .", "`` as we have said repeatedly , we do not believe that palestine is a state and therefore we do not believe that it is eligible to join the icc , '' the state department said in a statement .", "it urged the warring sides to resolve their differences through direct negotiations .", "`` we will continue to oppose actions against israel at the icc as counterproductive to the cause of peace , '' it said .", "but the icc begs to differ with the definition of a state for its purposes and refers to the territories as `` palestine . ''", "while a preliminary examination is not a formal investigation , it allows the court to review evidence and determine whether to investigate suspects on both sides .", "prosecutor fatou bensouda said her office would `` conduct its analysis in full independence and impartiality . 
''", "the war between israel and hamas militants in gaza last summer left more than 2,000 people dead .", "the inquiry will include alleged war crimes committed since june .", "the international criminal court was set up in 2002 to prosecute genocide , crimes against humanity and war crimes .", "cnn 's vasco cotovio , kareem khadder and faith karimi contributed to this report ."], "summary": ["membership gives the icc jurisdiction over alleged crimes committed in palestinian territories since last june .", "israel and the united states opposed the move , which could open the door to war crimes investigations against israelis ."], "publication": "CNN", "compression": 17.57894736842105, "coverage": 0.8947368421052632, "density": 3.1052631578947367} -{"label": [0, 6], "text": ["-lrb- cnn -rrb- governments around the world are using the threat of terrorism -- real or perceived -- to advance executions , amnesty international alleges in its annual report on the death penalty .", "`` the dark trend of governments using the death penalty in a futile attempt to tackle real or imaginary threats to state security and public safety was stark last year , '' said salil shetty , amnesty 's secretary general in a release .", "`` it is shameful that so many states around the world are essentially playing with people 's lives -- putting people to death for ` terrorism ' or to quell internal instability on the ill-conceived premise of deterrence . ''", "the report , `` death sentences and executions 2014 , '' cites the example of pakistan lifting a six-year moratorium on the execution of civilians following the horrific attack on a school in peshawar in december .", "china is also mentioned , as having used the death penalty as a tool in its `` strike hard '' campaign against terrorism in the restive far-western province of xinjiang .", "the annual report catalogs the use of state-sanctioned killing as a punitive measure across the globe , and this year 's edition contains some mixed findings .", "on one hand , the number of executions worldwide has gone down by almost 22 % on the previous year .", "at least 607 people were executed around the world in 2014 , compared to 778 in 2013 .", "amnesty 's figures do not include statistics on executions carried out in china , where information on the practice is regarded as a state secret .", "belarus and vietnam , too , do not release data on death penalty cases .", "`` the long-term trend is definitely positive -- we are seeing a decrease in the number of executions -lrb- worldwide -rrb- , '' audrey gaughran , amnesty 's director of global issues , told cnn .", "`` a number of countries are closer to abolition , and there are some signs that some countries will be abolitionist by 2015 .", "-lrb- there are -rrb- signals of a world that is nearing abolition . ''", "while the report notes some encouraging signs , it also highlights a marked increase in the number of people sentenced to death in 2014 .", "at least 2,466 people globally are confirmed to have been handed the sentence last year , an increase of 28 % compared with 2013 .", "the report notes that the spike in sentencing is attributable to mass-sentencing in countries including egypt and nigeria , `` against scores of people in some cases . 
''", "the organization found `` positive developments '' worldwide , with most regions seeming to show reductions in the number of executions .", "opinion : sharp spike in death sentences .", "sub-saharan africa , for example , saw a 28 % fall in reported cases , and executions recorded in the middle east and north africa were down 23 % compared to 2013 .", "`` even though we 've highlighted some of the negative developments ... i think we would always highlight that there are positive developments , '' gaughran said .", "`` across the board , with the exception of europe and central asia there were fewer reports of executions in every region . ''", "the resumption of the use of capital punishment in belarus -- the only country in europe and central asia to execute people -- after a two year hiatus spoiled an near-universal decrease in countries using the death penalty by region .", "the united states has the dubious distinction of being the only country in the americas to conduct executions , but the number of convicts put to death here fell slightly , from 39 in 2013 to 35 in 2014 .", "the state of washington also imposed a moratorium on executions last year .", "the u.s. remains one of the worst offenders for imposing capital punishment , with only iran -lrb- 289 + -rrb- , iraq -lrb- 61 + -rrb- , and saudi arabia -lrb- 90 + -rrb- executing more people in 2014 .", "while figures are not available , amnesty estimates that china also executes `` thousands '' of prisoners each year , `` more than the rest of the world put together . ''", "the report also highlights the imperfections in the judiciary processes that lead to many sentenced to death .", "`` in the majority of countries where people were sentenced to death or executed , the death penalty was imposed after proceedings that did not meet international fair trial standards , '' the report stated .", "`` in 2014 amnesty international raised particular concerns in relation to court proceedings in afghanistan , bangladesh , china , egypt , iran , iraq , north korea , pakistan , saudi arabia and sri lanka . ''", "the united nations secretary-general , ban ki-moon , last year stressed the need to move toward abolition of capital punishment .", "`` the taking of life is too irreversible for one human being to inflict it on another , '' he said , in marking world day against death penalty in october .", "`` we must continue to argue strongly that the death penalty is unjust and incompatible with fundamental human rights . 
''", "amnesty estimates that at least 19,094 people were believed to be on death row at the end of 2014 ."], "summary": ["amnesty 's annual death penalty report catalogs encouraging signs , but setbacks in numbers of those sentenced to death .", "organization claims that governments around the world are using the threat of terrorism to advance executions .", "the number of executions worldwide has gone down by almost 22 % compared with 2013 , but death sentences up by 28 % ."], "publication": "CNN", "compression": 14.841269841269842, "coverage": 0.8888888888888888, "density": 5.079365079365079} -{"label": [8, 9, 34], "text": ["-lrb- cnn -rrb- on may 28 , 2014 , some 7,000 people gathered in a stadium in china 's northwestern xinjiang region .", "but they had not come to watch the local football team or any other grand sporting event .", "instead , the authorities paraded scores of prisoners dressed in orange jumpsuits .", "armed soldiers guarded the exits .", "in the patently unfair , open air trial that followed , 55 people were found guilty of a range of offenses linked to violent attacks in the region and jailed .", "three were sentenced to death .", "the public mass sentencing was part a china 's `` strike hard '' campaign against unrest in xinjiang , a campaign the government claims was launched to combat `` terrorism '' and `` separatism . ''", "but it was also indicative of a trend that was starkly evident last year around the world -- governments using the death penalty in a misguided , and often cynical , attempt to tackle crime and terrorism .", "today , amnesty international releases its annual review of the death penalty worldwide .", "much of it makes for grim reading .", "in pakistan , the government lifted a six-year moratorium on the execution of civilians in the wake of the horrific taliban attack on a school in peshawar in december .", "more than 60 people have been put to death since , and the government has threatened to send thousands more death row prisoners to the gallows .", "iran and iraq executed people for `` terrorism , '' and other countries expanded the scope of capital crimes in their penal codes .", "in a year when abhorrent summary executions by armed groups were branded on the global consciousness as never before , governments are themselves resorting to more executions in a knee-jerk reaction to terrorism .", "other countries made use of executions in similarly flawed attempts to address -- or appear to address -- crime rates .", "jordan ended an eight-year moratorium in december , putting 11 murder convicts to death , with the government saying it was a move to end a surge in violent crime .", "in indonesia , authorities announced plans to execute mainly drug traffickers to tackle a public safety `` national emergency . 
''", "six people have already been executed this year .", "a sharp spike in death sentences recorded in 2014 -- up more than 500 on the previous year -- can also be attributed to governments using the death penalty as a political tool .", "the rise was largely because of developments in egypt and nigeria , where courts imposed hundreds of death sentences in the context of internal political instability or crime and armed conflict .", "the simple fact is that governments using the death penalty to tackle crime and security threats are deceiving themselves or the public or both .", "there is no evidence that the threat of execution is more of a deterrent to crime than a prison sentence , as united nations and other studies have repeatedly confirmed .", "it is high time that world leaders stop using the death penalty as an easy way out when times get tough .", "at amnesty international , we have campaigned for an end to the death penalty for decades .", "thankfully , most of the world now appears to agree with us .", "the numbers speak for themselves .", "in 1945 when the united nations was founded , only eight countries had abolished the death penalty .", "today , 140 states are abolitionist in law or practice .", "last year , we recorded executions in 22 countries , down by almost a half from 20 years ago .", "despite the troubling developments we recorded last year , there was still much good news to be found .", "the number of executions recorded around the world dropped significantly in 2014 compared with the previous year , from 778 to 607 .", "this number does not include china , where more people are put to death than the rest of the world put together , but with death penalty statistics treated as a state secret , the true figure is impossible to determine .", "executions were recorded in only three countries in sub-saharan africa -- equatorial guinea , somalia and sudan -- and the number of people put to death went down by more than a quarter .", "the americas continued to be execution-free , apart from the united states .", "those governments that still execute need to realize that they are on the wrong side of history .", "they must join the vast majority of countries which have dropped the ultimate cruel punishment .", "fighting for an end to the death penalty remains an uphill task , but all of us must try to make the world free of this punishment .", "with determination , i know that we can achieve this goal ."], "summary": ["amnesty international releases its annual review of the death penalty worldwide ; much of it makes for grim reading .", "salil shetty : countries that use executions to deal with problems are on the wrong side of history ."], "publication": "CNN", "compression": 20.85, "coverage": 0.825, "density": 6.375} -{"label": [2, 3], "text": ["-lrb- cnn -rrb- seventy years ago , anne frank died of typhus in a nazi concentration camp at the age of 15 .", "just two weeks after her supposed death on march 31 , 1945 , the bergen-belsen concentration camp where she had been imprisoned was liberated -- timing that showed how close the jewish diarist had been to surviving the holocaust .", "but new research released by the anne frank house shows that anne and her older sister , margot frank , died at least a month earlier than previously thought .", "researchers re-examined archives of the red cross , the international training service and the bergen-belsen memorial , along with testimonies of survivors .", "they concluded that anne and margot probably did not survive to 
march 1945 -- contradicting the date of death which had previously been determined by dutch authorities .", "in 1944 , anne and seven others hiding in the amsterdam secret annex were arrested and sent to the auschwitz-birkenau concentration camp .", "anne frank 's final entry .", "that same year , anne and margot were separated from their mother and sent away to work as slave labor at the bergen-belsen camp in germany .", "days at the camp were filled with terror and dread , witnesses said .", "the sisters stayed in a section of the overcrowded camp with no lighting , little water and no latrine .", "they slept on lice-ridden straw and violent storms shredded the tents , according to the researchers .", "like the other prisoners , the sisters endured long hours at roll call .", "her classmate , nannette blitz , recalled seeing anne there in december 1944 : `` she was no more than a skeleton by then .", "she was wrapped in a blanket ; she could n't bear to wear her clothes anymore because they were crawling with lice . ''", "listen to anne frank 's friends describe her concentration camp experience .", "as the russians advanced further , the bergen-belsen concentration camp became even more crowded , bringing more disease .", "a deadly typhus outbreak caused thousands to die each day .", "typhus is an infectious disease caused by lice that breaks out in places with poor hygiene .", "the disease causes high fever , chills and skin eruptions .", "`` because of the lice infesting the bedstraw and her clothes , anne was exposed to the main carrier of epidemic typhus for an extended period , '' museum researchers wrote .", "they concluded that it 's unlikely the sisters survived until march , because witnesses at the camp said the sisters both had symptoms before february 7 .", "`` most deaths caused by typhus occur around twelve days after the first symptoms appear , '' wrote authors erika prins and gertjan broek .", "the exact dates of death for anne and margot remain unclear .", "margot died before anne .", "`` anne never gave up hope , '' said blitz , her friend .", "`` she was absolutely convinced she would survive . 
''", "her diary endures as one of the world 's most popular books .", "read more about anne frank 's cousin , a keeper of her legacy ."], "summary": ["museum : anne frank died earlier than previously believed .", "researchers re-examined archives and testimonies of survivors .", "anne and older sister margot frank are believed to have died in february 1945 ."], "publication": "CNN", "compression": 14.864864864864865, "coverage": 0.8378378378378378, "density": 2.189189189189189} -{"label": [1, 2, 10, 14, 19], "text": ["it is a week which has seen him in deep water - both on and off the pitch .", "just days after dallas cowboys ' greg hardy was suspended from 10 nfl games he appeared to get into trouble when he drove his luxury car through flash floods in dallas , getting stuck when the car could not make it through the rising , fast flowing waters .", "the 25-year-old was forced to abandon his bentley , leaving it stranded until the waters receded and the car could be towed away .", "it took the tow truck several hours to successfully remove the car and hardy was later seen returning to the vehicle to collect some of his possessions .", "he left in another luxury car , a white ferrari .", "scroll down for video .", "greg hardy found himself in more deep water when he was forced to abandon his bentley in flash floods .", "the problem with his car comes as more bad news for hardy who was suspended by the nfl just days ago after an incident of domestic abuse that allegedly occurred last year .", "hardy , who signed with the dallas cowboys last month , will be forced to sit out the first 10 games of the season and will not receive his salary for these games .", "last year hardy , 25 , was convicted by a judge in charlotte , north carolina of beating , strangling and threatening to kill his ex-girlfriend , nicki holder .", "those charges were later dropped on an appeal when holder could not be located to testify .", "a two month investigation by the nfl followed and officials decided he had to be suspended .", "hardy was informed in a letter from nfl commissioner roger goodell that the probe determined there was ` sufficient credible evidence that hardy engaged in conduct that violated nfl policies in multiple respects . '", "hardy was dropped by his previous team , the carolina panthers , because of these charges last season , but was still able to collect his salary during that time , which was roughly $ 770,000 a week .", "hardy previously played for the carolina panthers but was dropped after allegations of domestic abuse emerged and was then signed by dallas cowboys and suspended for 10 games by the nfl .", "hardy is seen talking to officials after his bentley got stuck in flash floods in dallas this week . 
'", "i understand that i need to step away from football right now and take care of this legal matter , ' hardy said in a statement after he was cut from the panthers .", "the panthers had originally agreed to wait to take action until hardy had a jury trial regarding the incident in may .", "his previous conviction was the result of a bench trial .", "a jury trial ultimately led to all charges being dropped .", "holder told police that hardy choked her , slammed her against a bathtub , threw her to the floor and threatened to kill her after a fight at his charlotte condo .", "the dallas cowboys star was seen attempting to drive his bentley during the floods , but had to abandon it .", "it took officials and a tow truck several hours to pull the luxury bentley free from dallas flood waters .", "this all came at a time when the league was under heavy scrutiny in the wake of two abuse scandals involving stars ray rice and adrian peterson .", "many were upset with the punishments those two received , feeling the nfl was too lenient .", "video of rice punching then-fianc\u00e9e janay palmer went public last monday , and peterson was indicted on charges of reckless or negligent injury to a child on friday for an incident in which he hit his son with a switch back in may .", "hardy -lrb- above -rrb- was convicted by a judge last july of beating , strangling and threatening to kill ex-girlfriend nicki holder .", "the nfl announced that hardy would be suspended without pay for 10 games at the start of the 2015 season .", "holder -lrb- above with hardy -rrb- told police that he choked her , slammed her against a bathtub , threw her to the floor and threatened to kill her after a fight at his condo .", "rice was definitely suspended from the nfl and had his contract terminated by the baltimore ravens , while peterson , who was sidelined by the minnesota vikings last sunday , has now been suspended by the team .", "both men are expected by many to return to play in the 2015 , with peterson back on the vikings after an nfl decision and rice winning a wrongful termination suit during the off-season .", "rice even pocketed roughly $ 1.6 million in back pay ."], "summary": ["hardy was convicted of domestic abuse against ex-girlfriend nicki holder and was suspended from the dallas cowboys for 10 days by the nfl .", "charges were eventually dropped after holder could not be located when hardy 's lawyers appealed the decision and asked for a jury trial .", "this week he got stuck in his bentley in deep flash flood waters in dallas .", "hardy was forced to abandon his car and it was towed away hours later ."], "publication": "DailyMail", "compression": 9.845238095238095, "coverage": 0.9047619047619048, "density": 2.3333333333333335} -{"label": [1, 2], "text": ["an hiv self-testing kit is on sale for the first time in the uk .", "the 99.7 per cent accurate biosure hiv self test enables people to test themselves when and where they like .", "an estimated 26,000 people in the uk have hiv but are unaware of it and may be transmitting the disease to others .", "the 99.7 per cent accurate biosure hiv self test enables people to test themselves when and where they like .", "the testing kit , on sale online , uses a small amount of blood from a finger-prick sample to detect the presence of hiv antibodies , giving a result in just 15 minutes .", "treatments available mean hiv is now a manageable disease -- but late diagnosis can have a devastating impact on health and life expectancy .", "the national aids trust 
warns that 40 per cent of those living with hiv remain undiagnosed for at least four years , with those diagnosed late 11 times more likely to die in the first year after diagnosis .", "the testing kit , on sale online , uses a small amount of blood from a finger-prick sample to detect the presence of hiv antibodies , giving a result in just 15 minutes .", "biosure founder brigette bard said it is a significant step towards normalising hiv testing , adding : ` knowing your hiv status is critical and the launch of this product will empower people to discreetly test themselves when it is convenient to them and in a place where they feel comfortable . '", "positive test results need to be confirmed by a healthcare professional and those in high-risk groups are recommended to be tested every three months .", "the only alternative currently available is ` home sampling ' , which involves collecting a blood sample 160 times larger than that for the self-test and posting it to a laboratory , with results given five days later .", "biosure founder brigette bard said it is a significant step towards normalising hiv testing ."], "summary": ["the 99.7 per cent accurate biosure hiv self test enables people to test themselves when and where they like .", "an estimated 26,000 people in the uk have hiv but are unaware of it .", "treatments available mean hiv is now a manageable disease ."], "publication": "DailyMail", "compression": 7.468085106382978, "coverage": 0.9574468085106383, "density": 14.446808510638299} -{"label": [4, 10, 15], "text": ["everyone knows the tortoise beat the hare , but this little fellow has gone one better and beaten two cheetahs .", "these pictures capture the amazing moment when one of the notoriously slow-moving reptiles escaped becoming big cat fast food by retreating into its shell before scuttling off across desert sands .", "the baffled cheetahs surrounded the tortoise and attempted to scare it out of its shell with snarls but the reptile kept well tucked up inside its tough exterior forcing the big cats to wander off in search of another snack .", "hard target : the tortoise attempts a quick getaway under the watchful eye of one of the curious cheetahs .", "confused : the two cheetahs exchange glances as they move in to size up their potential meal .", "the intriguing scene was captured by john mullineux , a chemical engineer from secunda , south africa .", "he said : ` while driving on the sandy tracks of the kalahari desert in south africa , i came across two cheetahs lying in the shade near the road .", "` shortly after i stopped , they got up and slowly headed to the dunes .", "` halfway up the red sandy dune the younger one stopped to inspect a tortoise , the older one also stopped and tried to bite the shell but could n't manage it .", "now you see me : the tortoise retreats into its shell as the big cats get too close for comfort .", "snarl : one of the cheetahs gets up close and personal to the little reptile and tries to scare it out of its shell .", "` by the time the older cheetah had made it to the top of the dune , the younger one decided to run off and follow rather than spend more time at the hard meal .", "` the tortoise then casually moved on as if nothing unusual had happened .", "from a young age i have loved cheetahs for their elegance and speed - seeing two so close was dream but seeing them size up their lunch was unique .", "` it was something that was both exciting and naturally beautiful at the same time . 
'", "slow and steady : the tortoise continues his escape across the sands of the kalahari desert in south africa .", "john mullineux , a chemical engineer from secunda , south africa , spotted the scene while driving along a desert track .", "one of the cheetahs appears to admit defeat and wander off throwing a last glance of its shoulder at the lucky tortoise ."], "summary": ["amazing scene captured on film in south africa 's kalahari desert .", "two of the big cats approach the little reptile as it scuttled across the sands .", "but they were denied their meal and forced to wander off disappointed ."], "publication": "DailyMail", "compression": 10.209302325581396, "coverage": 0.7674418604651163, "density": 1.4651162790697674} -{"label": [4, 9, 33], "text": ["angus hawley 's brother has spoken of his shock after his brother , the ex-husband of antonia kidman , died of a suspected heart attack , age 46 , in new york on saturday .", "speaking to daily mail australia on monday , david hawley said : ` it 's a real shock , he was one of the fittest men i 've ever met -- he 's swimming everyday . '", "responding to a question about whether angus had a history of heart problems , david answered : ` no , no , not that we know of ' , adding : ` he 's so fit , i do n't understand . '", "scroll down for video .", "` he did n't have heart problems ' angus hawley 's brother reveals shock after ex-husband of antonia kidman dies from a suspected heart attack in new york after ` returning from a swim ' .", "angus and antonia pictured together in 2005 at the chuan spa opening in the langham hotel .", "mr hawley , who was in new york attending a business conference at the time , collapsed after returning from a swim .", "` he did go for a big swim in the morning , he trains very hard , ' david said of his brother , who he described as a ` bit of a fitness fanatic ' and was known to lead a healthy and active lifestyle . '", "i think his body clock was round the wrong way and it just got everything round the wrong way and he 's over done it . '", "mr hawley was a father to four children , lucia , 16 , hamish , 14 , james , 12 , and sybella , eight , all of whom he shared with nicole kidman 's sister antonia before their 2007 split .", "the children are reportedly set to join the family in sydney as they rally around david 's second wife prue fisher , who he married in palm beach in 2011 .", "sad news : antonia kidman 's former husband angus hawley has died of a suspected heart attack aged 46 in new york .", "the pair are seen here in 2003 .", "fitness fanatic : mr hawley 's brother says he does n't ` understand ' the death of his fit and healthy brother , pictured with his wife prue fisher in 2011 .", "led an active lifestyle : mr hawley , 46 , is believed to have suffered a heart attack after returning from a swim .", "the former couple are pictured above with antonia 's parents janelle and the late dr. antony kidman .", "david described his brother , a business development manager at valor private wealth , as ` one of the most beautiful men that i have ever known .", "` he is absolutely adored by everybody , he made everybody feel like he 's their best friend and that 's why everybody loved him .", "and he loved everybody else , it 's just a really emotional time . 
'", "prue is being comforted by her family in sydney , after they traveled from orange in new south wales to be by her side .", "she was reportedly seen at the bondi icebergs public pool , a place her late husband often frequented , on sunday .", "moved on : both antonia and mr hawley remarried following their divorce in 2007 - she to businessman craig marran -lrb- l -rrb- in 2010 , and he to sydney fashion boutique manager prue the following year -lrb- r -rrb- .", "david described prue as ` devastated ' saying she 's ` terrible , terrible ' , adding , ` it 's a huge hole in our lives .", "` they were absolutely devoted to each other and prue 's relationship with angus 's children was fantastic , ' said david of his late brother 's wife .", "` his wife adores him , and he adored her , his four children , it 's just so sad .", "it 's a tragic loss to our family and to his family , it 's just a nightmare .", "` no matter what happens for the rest of her life , she 'll still be my sister-in-law . '", "on saturday another of angus 's brothers phillip released a statement , describing his death as ` sudden ' and ` very unexpected ' to news.com.au .", "wedding day : antonia and angus wed in 1996 , they were together for 11 years before their divorced was finalised in 2007 .", "legacy : the 46-year-old was a father to four children in lucia , 16 , hamish , 14 , james , 12 , and sybella , eight , all of whom he shared with nicole kidman 's sister antonia , pictured .", "` there are no further details at this time as it only occurred last night , our time , ' the statement read .", "reports about his death have as yet been mixed , with news.com.au saying that mr hawley went to dinner with a friend in new york and then went into cardiac arrest .", "he is said to have later passed away in the ambulance on the way to hospital .", "mr hawley 's death comes less than seven months after the sudden passing of nicole and antonia 's father dr. antony kidman , who also suffered a suspected heart attack , in singapore .", "family tragedy : mr hawley 's death comes less than seven months after the sudden passing of nicole and antonia 's father dr. antony , who also suffered a heart attack , in singapore .", "both 44-years-old antonia and her ex husband both remarried following their divorce in 2007 - she to businessman craig marran in 2010 , and he to sydney fashion boutique manager prue , the following year .", "he has kept himself largely out of the spotlight following his split from antonia and a battle with depression .", "the father of four checked himself into a sydney rehab clinic in 2007 following a period of mental health issues .", "tragic : antonia 's second husband craig marran accompanied her , her sister nicole and husband keith urban to dr. antony 's funeral in september last year .", "he told woman 's day in 2009 : ' i was depressed , out of control and full of self-loathing , and i resorted to drugs to get through it . '", "i was n't in a happy place and it was an appalling thing , but i was sick , and at least i was big enough to do something about it . '", "merivale hotel founder justin hemmes , has paid tribute to his good friend angus , explaining to the daily telegraph that the pair became friends at just four years old .", "family man : dr. 
antony kidman was visiting antonia and her family in singapore when he passed away .", "day of mourning : antonia 's six children lucia , hamish , james , sybella , nicholas , two , and alexander , one , attended the funeral along with nicole 's daughters sunday rose and faith .", "support : keith and craig acted as pallbearers at the funeral , as did family friends russell crowe and channel nine newsreader peter overton .", "` he was my next door neighbour but quickly became a best friend , one i was fortunate enough to have by my side ever since , ' he said , describing mr hawley as ` the most caring , thoughtful and loving man . '", "` the most loving father to his four wonderful children and adoring wife .", "his family was his treasure .", "his kids were his life , ' he continued .", "mr hawley 's death is the second devastating loss the kidman family has suffered in the past seven months , after dr. antony kidman sadly collapsed and died in a singapore hotel last september at the age of 75 .", "family photo : antonia , janelle , dr. antony and nicole are seen here in 1990 .", "nicole said at his funeral she was ` so lucky ' to be her father 's daughter .", "close knit : nicole and antonia are pictured here with their late father in 1990 .", "a respected sydney psychologist , dr. antony was in the country visiting antonia and his six grandchildren .", "antonia , a journalist and writer , is currently based in singapore with her second husband with whom she shares two sons , nicholas , two , and alexander , one .", "she remembered the close relationship she had with her father at his funeral last year and said they were ` similar in many ways ' .", "new home : antonia resides in singapore with second husband craig .", "she 's pictured here with nicole , who lives in nashville with keith urban , in 2005 .", "` i 'm so lucky to be his daughter , ' 47-year-old nicole said , ` and that he chose my mother to make me with . '", "appearing on ellen last october , nicole said husband keith urban had to carry her , sometimes literally , because she was ` so devastated ' by the loss .", "daily mail australia has contacted the kidman family 's management .", "tribute : a good friend of mr hawley , merivale founder justin hemmes has described him as ` the most caring , thoughtful and loving man '"], "summary": ["angus hawley 's brother said his late sibling ` did n't have heart problems ' he is reported to have had a suspected heart attack in new york .", "angus was a father of four children - lucia , hamish , james and sybella .", "he had all four with nicole kidman 's sister antonia before their 2007 split .", "both 44-year-old antonia and angus , 46 , remarried following their divorce .", "angus ' death comes seven months after dr. 
antony kidman 's death .", "nicole and antonia 's father also died of a heart attack in singapore ."], "publication": "DailyMail", "compression": 15.157407407407407, "coverage": 0.9259259259259259, "density": 3.740740740740741} -{"label": [7, 17], "text": ["despite the hype surrounding its first watch , the iphone is still the engine behind apple 's phenomenal success , its latest figures have revealed .", "the results far surpassed most analysts ' expectations for the first three months of the year , when sales traditionally fall from their holiday-season peak .", "apple sold more than 61 million iphones in the quarter , accounting for more than two-thirds of its $ 58 billion in revenue for the quarter and the lion 's share of its $ 13.6 billion in profit - and up 40 % from a year ago .", "sales of iphones in china were also revealed to have outstripped those in the us .", "apple sold more than 61 million iphones in the quarter , accounting for more than two-thirds of its $ 58 billion in revenue for the quarter and the lion 's share of its $ 13.6 billion in profit .", "$ 58 billion in revenue , $ 13.6 billion in profit .", "$ 200 billion in cash , up from around $ 150 billion a year ago .", "more than 61 million iphones sole .", "ipad revenue fell 29 % to $ 5.4 billion .", "revenue from mac computers rose 2 % from a year earlier , to $ 5.6 billion .", "` we are thrilled by the continued strength of iphone , mac and the app store , which drove our best march quarter results ever , ' said tim cook , apple 's ceo .", "` we 're seeing a higher rate of people switching to iphone than we 've experienced in previous cycles , and we 're off to an exciting start to the june quarter with the launch of apple watch . '", "as expected , the numbers were down from the previous quarter , when holiday shoppers snapped up a record 74 million of apple 's new iphone 6 , 6 plus and older models .", "but it was a 40 percent increase over the number of iphones sold in the first three months of 2014 .", "` we 're seeing great results all over the world , ' apple chief financial officer luca maestri told the associated press , adding that iphone sales grew 72 percent in china , where the company has big hopes for expansion .", "other products played a much smaller role .", "revenue from mac computers rose 2 percent from a year earlier , to $ 5.6 billion , while ipad revenue fell 29 percent , to $ 5.4 billion -- continuing a steady decline in tablet sales .", "apple did n't report any results for the new apple watch , which it began selling this month , after the quarter ended .", "maestri said customer response had been ` positive . '", "analysts estimate about 2 million have sold to date , suggesting early demand is healthy but not of blockbuster proportions .", "apple shares have gained more than 50 percent over the last year , making it the world 's most valuable company .", "` it 's been really great to see the reaction of customers , ' said cook .", "` the response has been overwhelming .", "we ca n't wait to see more of the inspiring apps developers dream up . 
'", "the iphone is another story .", "since it began offering models with bigger screens last fall , apple has vied with south korea 's samsung for the no.", "1 position in the global smartphone market .", "by some estimates , apple outsold samsung in the quarter that ended in december , and analysts will be watching closely when samsung reports its latest results this week .", "apple also announced an expansion of its effort to return more of its sizable cash war chest to investors .", "the company said it will raise its quarterly dividend by 11 percent , to 52 cents a share , and has increased a $ 90 billion stock buyback program to $ 140 billion .", "apple did n't report any results for the new apple watch , which it began selling this month , after the quarter ended .", "in total , the company said the program will return $ 200 billion to investors by the end of march 2017 .", "as iphone sales have surged , so has apple 's stock .", "apple shares have gained more than 50 percent over the last year , making it the world 's most valuable company .", "the stock closed monday at $ 132.65 , up 1.8 percent for the day , and was rising in late trading .", "the iphone is n't just apple 's ` dominant product , ' said frank gillett , a tech industry analyst at forrester research .", "` it 's more than anything else what 's driving the success of their company . '", "market researchers , however , expect growth in the world smartphone market will slow this year , particularly at the higher price range where apple competes , as most consumers in developed countries have already bought one .", "that could make it difficult for apple to maintain its recent pace .", "` they 're extremely dependent on the iphone , ' said investment colin gillis at bgc partners .", "` at some point , the market dynamics change , ' he said , adding that ` the question is what could replace the iphone ' if sales begin to slow .", "customers looking at apple iphones in an apple store in shanghai , china , on january 14 , 2014 .", "apple ceo tim cook has said he 's optimistic about new markets such as china , where apple has made a strong showing against samsung and china 's xiaomi .", "and even if apple is increasingly selling new iphones to people who are simply upgrading older models , ` that 's still a pretty healthy market , ' said gartner analyst van baker , noting that more than 700 million iphones have been sold since the first model was introduced in 2007 .", "maestri also stressed the potential for new products like apple watch and apple pay , the company 's mobile payment service .", "while these currently provide minimal revenue , analysts say they have big potential .", "and they are designed to work closely with the iphone , which means each may bolster the other 's popularity in the future , gillett said ."], "summary": ["apple sold more than 61 million iphones in the quarter .", "apple did n't report any results for the new apple watch .", "believed around 2 million watches have been sold , according to estimates ."], "publication": "DailyMail", "compression": 28.657894736842106, "coverage": 0.868421052631579, "density": 6.342105263157895} diff --git a/tests/data_for_tests/io/cnndm/vocab b/tests/data_for_tests/io/cnndm/vocab deleted file mode 100644 index 26e83ade..00000000 --- a/tests/data_for_tests/io/cnndm/vocab +++ /dev/null @@ -1,100 +0,0 @@ -. 
12172211 -the 11896296 -, 9609022 -to 5751102 -a 5100569 -and 4892246 -of 4867879 -in 4431149 -'s 2202754 -was 2086001 -for 1995054 -that 1944328 -' 1880335 -on 1858606 -` 1821696 -is 1797908 -he 1678396 -it 1603145 -with 1497568 -said 1348297 -: 1344327 -his 1302056 -at 1260578 -as 1230256 -i 1089458 -by 1064355 -have 1016505 -from 1015625 -has 969042 -her 935151 -be 932950 -'' 904149 -`` 898933 -but 884494 -are 865728 -she 850971 -they 816011 -an 766001 -not 738121 -had 725375 -who 722127 -this 721027 -after 669231 -were 655187 -been 647432 -their 645014 -we 625684 -will 577581 -when 506811 --rrb- 501827 -n't 499765 --lrb- 497508 -one 490666 -which 465040 -you 461359 --- 460450 -up 437177 -more 433177 -out 432343 -about 428037 -would 400420 -- 399113 -or 399001 -there 389590 -people 386121 -new 380970 -also 380041 -all 350670 -two 343787 -can 341110 -him 338345 -do 330166 -into 319067 -last 315857 -so 308507 -than 306701 -just 305759 -time 302071 -police 301341 -could 298919 -told 298384 -over 297568 -if 297292 -what 293759 -years 288999 -first 283683 -no 274488 -my 273829 -year 272392 -them 270715 -its 269566 -now 262011 -before 260991 -mr 250970 -other 247663 -some 245191 -being 243458 -home 229570 -like 229425 -did 227833 diff --git a/tests/data_for_tests/io/conll2003/dev.txt b/tests/data_for_tests/io/conll2003/dev.txt deleted file mode 100644 index 90834721..00000000 --- a/tests/data_for_tests/io/conll2003/dev.txt +++ /dev/null @@ -1,49 +0,0 @@ --DOCSTART- -X- -X- O - -CRICKET NNP B-NP O -- : O O -LEICESTERSHIRE NNP B-NP B-ORG -TAKE NNP I-NP O -OVER IN B-PP O -AT NNP B-NP O -TOP NNP I-NP O -AFTER NNP I-NP O -INNINGS NNP I-NP O -VICTORY NN I-NP O -. . O O - -LONDON NNP B-NP B-LOC -1996-08-30 CD I-NP O - -Phil NNP B-NP B-PER -Simmons NNP I-NP I-PER -took VBD B-VP O -four CD B-NP O -for IN B-PP O -38 CD B-NP O -on IN B-PP O -Friday NNP B-NP O -as IN B-PP O -Leicestershire NNP B-NP B-ORG -beat VBD B-VP O -Somerset NNP B-NP B-ORG -by IN B-PP O -an DT B-NP O -innings NN I-NP O -and CC O O -39 CD B-NP O -runs NNS I-NP O -in IN B-PP O -two CD B-NP O -days NNS I-NP O -to TO B-VP O -take VB I-VP O -over IN B-PP O -at IN B-PP O -the DT B-NP O -head NN I-NP O -of IN B-PP O -the DT B-NP O -county NN I-NP O -championship NN I-NP O -. . O O diff --git a/tests/data_for_tests/io/conll2003/test.txt b/tests/data_for_tests/io/conll2003/test.txt deleted file mode 100644 index b5b3aef0..00000000 --- a/tests/data_for_tests/io/conll2003/test.txt +++ /dev/null @@ -1,51 +0,0 @@ --DOCSTART- -X- -X- O - -SOCCER NN B-NP O -- : O O -JAPAN NNP B-NP B-LOC -GET VB B-VP O -LUCKY NNP B-NP O -WIN NNP I-NP O -, , O O -THE NP B-NP B-PER -CHINA NNP I-NP I-PER -IN IN B-PP O -SURPRISE DT B-NP O -DEFEAT NN I-NP O -. . O O - -Nadim NNP B-NP B-PER -Ladki NNP I-NP I-PER - -AL-AIN NNP B-NP B-LOC -, , O O -United NNP B-NP B-LOC -Arab NNP I-NP I-LOC -Emirates NNPS I-NP I-LOC -1996-12-06 CD I-NP O - -Japan NNP B-NP B-LOC -began VBD B-VP O -the DT B-NP O -defence NN I-NP O -of IN B-PP O -their PRP$ B-NP O -Asian JJ I-NP B-MISC -Cup NNP I-NP I-MISC -title NN I-NP O -with IN B-PP O -a DT B-NP O -lucky JJ I-NP O -2-1 CD I-NP O -win VBP B-VP O -against IN B-PP O -Syria NNP B-NP B-LOC -in IN B-PP O -a DT B-NP O -Group NNP I-NP O -C NNP I-NP O -championship NN I-NP O -match NN I-NP O -on IN B-PP O -Friday NNP B-NP O -. . 
O O diff --git a/tests/data_for_tests/io/conll2003/train.txt b/tests/data_for_tests/io/conll2003/train.txt deleted file mode 100644 index 4f0c4bf2..00000000 --- a/tests/data_for_tests/io/conll2003/train.txt +++ /dev/null @@ -1,48 +0,0 @@ --DOCSTART- -X- -X- O - -EU NNP B-NP B-ORG -rejects VBZ B-VP O -German JJ B-NP B-MISC -call NN I-NP O -to TO B-VP O -boycott VB I-VP O -British JJ B-NP B-MISC -lamb NN I-NP O -. . O O - -Peter NNP B-NP B-PER -Blackburn NNP I-NP I-PER - -BRUSSELS NNP B-NP B-LOC -1996-08-22 CD I-NP O - -The DT B-NP O -European NNP I-NP B-ORG -Commission NNP I-NP I-ORG -said VBD B-VP O -on IN B-PP O -Thursday NNP B-NP O -it PRP B-NP O -disagreed VBD B-VP O -with IN B-PP O -German JJ B-NP B-MISC -advice NN I-NP O -to TO B-PP O -consumers NNS B-NP O -to TO B-VP O -shun VB I-VP O -British JJ B-NP B-MISC -lamb NN I-NP O -until IN B-SBAR O -scientists NNS B-NP O -determine VBP B-VP O -whether IN B-SBAR O -mad JJ B-NP O -cow NN I-NP O -disease NN I-NP O -can MD B-VP O -be VB I-VP O -transmitted VBN I-VP O -to TO B-PP O -sheep NN B-NP O -. . O O diff --git a/tests/data_for_tests/io/coreference/coreference_dev.json b/tests/data_for_tests/io/coreference/coreference_dev.json deleted file mode 100644 index bb6592d3..00000000 --- a/tests/data_for_tests/io/coreference/coreference_dev.json +++ /dev/null @@ -1 +0,0 @@ -{"doc_key": "bc/cctv/00/cctv_0000_0", "speakers": [["Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1"], ["Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1"], ["Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1"]], "clusters": [[[70, 70], [485, 486], [500, 500], [73, 73], [55, 55], [153, 154], [366, 366]]], "sentences": [["In", "the", "summer", "of", "2005", ",", "a", "picture", "that", "people", "have", "long", "been", "looking", "forward", "to", "started", "emerging", "with", "frequency", "in", "various", "major", "Hong", "Kong", "media", "."], ["With", "their", "unique", "charm", ",", "these", "well", "-", "known", "cartoon", "images", "once", "again", "caused", "Hong", "Kong", "to", "be", "a", "focus", "of", "worldwide", "attention", "."]]} diff --git a/tests/data_for_tests/io/coreference/coreference_test.json b/tests/data_for_tests/io/coreference/coreference_test.json deleted file mode 100644 index 9577da0e..00000000 --- a/tests/data_for_tests/io/coreference/coreference_test.json +++ /dev/null @@ -1 +0,0 @@ -{"doc_key": "bc/cctv/00/cctv_0005_0", "speakers": [["speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1"], ["speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", "speaker#1", 
"speaker#1", "speaker#1"]], "clusters": [[[57, 59], [25, 27], [42, 44]]], "sentences": [["--", "basically", ",", "it", "was", "unanimously", "agreed", "upon", "by", "the", "various", "relevant", "parties", "."], ["To", "express", "its", "determination", ",", "the", "Chinese", "securities", "regulatory", "department", "compares", "this", "stock", "reform", "to", "a", "die", "that", "has", "been", "cast", "."]]} \ No newline at end of file diff --git a/tests/data_for_tests/io/coreference/coreference_train.json b/tests/data_for_tests/io/coreference/coreference_train.json deleted file mode 100644 index 0c2940df..00000000 --- a/tests/data_for_tests/io/coreference/coreference_train.json +++ /dev/null @@ -1 +0,0 @@ -{"doc_key": "bc/cctv/00/cctv_0001_0", "speakers": [["Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1"], ["Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1", "Speaker#1"]], "clusters": [[[113, 114], [42, 45], [88, 91]]], "sentences": [["What", "kind", "of", "memory", "?"], ["We", "respectfully", "invite", "you", "to", "watch", "a", "special", "edition", "of", "Across", "China", "."]]} diff --git a/tests/data_for_tests/io/cws_as/dev.txt b/tests/data_for_tests/io/cws_as/dev.txt deleted file mode 100755 index f4c96e9e..00000000 --- a/tests/data_for_tests/io/cws_as/dev.txt +++ /dev/null @@ -1,6 +0,0 @@ -時間 : -三月 十日 ( 星期四 ) 上午 十時 。 -並 辦理 加州 大學 退休 等 手續 。 -包括 一九七八年 獲有 數學 諾貝爾 之 稱 的 費爾茲獎 , -在 台大 的 四 年 裡 , -他 語重心長 的 勉勵 同學 們 一 番 話 , diff --git a/tests/data_for_tests/io/cws_as/test.txt b/tests/data_for_tests/io/cws_as/test.txt deleted file mode 100755 index a61009b2..00000000 --- a/tests/data_for_tests/io/cws_as/test.txt +++ /dev/null @@ -1,6 +0,0 @@ -許多 社區 長青 學苑 多 開設 有 書法 、 插花 、 土風舞班 , -文山區 長青 學苑 則 有 個 十分 特別 的 「 英文 歌唱班 」 , -成員 年齡 均 超過 六十 歲 , -這 群 白髮蒼蒼 , -爺爺 、 奶奶級 的 學員 唱起 英文 歌 來 字正腔圓 , -有模有樣 。 diff --git a/tests/data_for_tests/io/cws_as/train.txt b/tests/data_for_tests/io/cws_as/train.txt deleted file mode 100755 index b6eab6a3..00000000 --- a/tests/data_for_tests/io/cws_as/train.txt +++ /dev/null @@ -1,6 +0,0 @@ -地點 : -學術 活動 中心 一樓 簡報室 。 -主講 : -民族所 所長 莊英章 先生 。 -講題 : -閩 、 台 漢人 社會 研究 的 若干 考察 。 diff --git a/tests/data_for_tests/io/cws_cityu/dev.txt b/tests/data_for_tests/io/cws_cityu/dev.txt deleted file mode 100755 index eac550f2..00000000 --- a/tests/data_for_tests/io/cws_cityu/dev.txt +++ /dev/null @@ -1,6 +0,0 @@ -立會 選情 告一段落 民主 進程 還 看 明天 -所謂 「 左 」 的 勢力 , 是 指 以 鄭經翰 、 梁國雄 ( 長毛 ) 為 代表 的 激進 民主 勢力 , 他們 尖銳 批評 中央 和 特區 政府 , 積極 為 基層 勞工 爭取 福利 , 可能 會 為 民主派 與 中央 和解 增加 困難 , 牽制 民主黨 走 中產 溫和 路線 。 -特區 政府 應該 積極 與 民主派 改善 關係 , 尤其 要 爭取 中間 及 「 右 」 翼 的 民主 勢力 , 因為 這些 人 背後 反映 的 是 香港 的 主流 民意 , 除了 民主 步伐 和 涉及 中央 的 敏感 政治 議題 , 他們 和 建制派 的 溫和 力量 沒有 基本 不同 , 很 容易 達成 跨 黨派 的 共識 , 令 特區 政府 處於 不得不 從 的 被動 位置 , 23 條 立法 撤回 、 追究 SARS 責任 等 , 都 是 記憶猶新 的 例子 。 -為 何秀蘭 喝彩 為 香港 人 神傷 -單說 立法會 , 自 91 年 以來 , 經歷 5 次 類似 的 地區 直選 。 -點票 過程 出現 的 笑話 更 多 。 diff --git a/tests/data_for_tests/io/cws_cityu/test.txt b/tests/data_for_tests/io/cws_cityu/test.txt deleted file mode 100755 index aa838fe2..00000000 --- a/tests/data_for_tests/io/cws_cityu/test.txt +++ /dev/null @@ -1,6 +0,0 @@ -「 練 得 銅皮鐵骨 」 露宿 早 慣 蚊叮 -本 港 約 有 450 至 600 名 露宿者 , 其中 近 四分之一 , 即 約 150 人 露宿 在 深水埗 。 -有 外展 社工 稱 , 露宿者 日間 多 到 商場 等 冷氣 場所 避暑 , 流連 至 晚上 11 、 12 時 , 才 用 紙皮 在 公園 外 「 打地鋪 」 , 他們 早已 「 練 得 一 身 銅皮鐵骨 」 , 徹夜 被 蚊 叮 也 習以為常 , 但 社工 在 炎夏 仍 會 頻頻 給 他們 派發 蚊香 。 -基督教 關懷 無家者 協會 的 外展 社工 , 過去 一直 有 探訪 李鄭屋 遊樂場 外 的 露宿者 , 該 會 總幹事 賴淑芬 說 , 該 處 的 露宿者 只 有 數 人 , 且 流動性 很 大 。 -不管 被 多少 蚊 叮 也 沒 什 感覺 
-她 指 這些 露宿者 日間 都 會 流連 於 冷氣 場所 , 晚上 才 到 遊樂場 露宿 , 但 礙於 遊樂場 晚上 關門 , 他們 只 可 在 外圍 「 打地鋪 」 。 diff --git a/tests/data_for_tests/io/cws_cityu/train.txt b/tests/data_for_tests/io/cws_cityu/train.txt deleted file mode 100755 index 6338621c..00000000 --- a/tests/data_for_tests/io/cws_cityu/train.txt +++ /dev/null @@ -1,6 +0,0 @@ -立法會 選舉 出現 了 戲劇性 的 結果 , 儘管 投票率 創下 新高 , 而 過去 經驗 顯示 高 投票率 對 民主派 較 有利 , 但 由於 名單 協調 不當 及 配票 策略 失誤 , 加上 醜聞 影響 選情 , 民主黨 的 議席 比 上 一 屆 減少 , 由 第 一 大 黨 跌 至 第 三 ; -而 泛民主派 在 30 席 普選 中 亦 只能 取得 18 席 , 比 選前 預期 的 20 席 少 ; -但 在 功能 組別 選舉 卻 有 意外 收穫 , 除 保住 原有 的 5 個 議席 , 還 搶佔 了 醫學 和 會計 兩 個 專業 界別 , 令 議席 總數 達到 25 席 , 比 上 一 屆 多 了 3 席 。 -更 值得 注意 的 是 , 泛民主派 候選人 在 普選 中 合共 取得 110萬 張 選票 , 佔 178萬 選票 總數 的 62 % , 顯示 多數 市民 認同 早日 實現 全面 普選 的 民主 訴求 , 這 一 點 應 為 政府 及 各 黨派 人士 所 尊重 。 -須 為 2012 全面 普選 創造 條件 -親 建制 陣營 方面 , 民建聯 和 自由黨 都 取得 佳績 , 分別 取得 12 席 和 11 席 , 成為 立法會 內 的 第 一 及 第 二 大 黨 。 diff --git a/tests/data_for_tests/io/cws_msra/dev.txt b/tests/data_for_tests/io/cws_msra/dev.txt deleted file mode 100644 index 9c6b34ee..00000000 --- a/tests/data_for_tests/io/cws_msra/dev.txt +++ /dev/null @@ -1,2 +0,0 @@ -“ 人们 常 说 生活 是 一 部 教科书 , 而 血 与 火 的 战争 更 是 不可多得 的 教科书 , 她 确实 是 名副其实 的 ‘ 我 的 大学 ’ 。 -他 “ 严格要求 自己 , 从 一个 科举 出身 的 进士 成为 一个 伟大 的 民主主义 者 , 进而 成为 一 位 杰出 的 党外 共产主义 战士 , 献身 于 崇高 的 共产主义 事业 。 diff --git a/tests/data_for_tests/io/cws_msra/test.txt b/tests/data_for_tests/io/cws_msra/test.txt deleted file mode 100644 index 8d5c6b3c..00000000 --- a/tests/data_for_tests/io/cws_msra/test.txt +++ /dev/null @@ -1,2 +0,0 @@ -扬帆 远东 做 与 中国 合作 的 先行 -希腊 的 经济 结构 较 特殊 。 diff --git a/tests/data_for_tests/io/cws_msra/train.txt b/tests/data_for_tests/io/cws_msra/train.txt deleted file mode 100644 index 35c2cad0..00000000 --- a/tests/data_for_tests/io/cws_msra/train.txt +++ /dev/null @@ -1,3 +0,0 @@ -“ 心 静 渐 知 春 似 海 , 花 深 每 觉 影 生 香 。 -“ 吃 屎 的 东西 , 连 一 捆 麦 也 铡 不 动 呀 ? -复旦大学 百年 校庆 。 \ No newline at end of file diff --git a/tests/data_for_tests/io/cws_pku/dev.txt b/tests/data_for_tests/io/cws_pku/dev.txt deleted file mode 100755 index df77c5ca..00000000 --- a/tests/data_for_tests/io/cws_pku/dev.txt +++ /dev/null @@ -1,6 +0,0 @@ -在 十五大 精神 指引 下 胜利 前进 —— 元旦 献辞 -我们 即将 以 丰收 的 喜悦 送 走 牛年 , 以 昂扬 的 斗志 迎来 虎年 。 我们 伟大 祖国 在 新 的 一 年 , 将 是 充满 生机 、 充满 希望 的 一 年 。 -李 鹏 在 北京 考察 企业 -李 鹏 说 : “ 作为 首都 的 电力 工作者 , 你们 为 首都 的 各项 重大 活动 的 顺利 进行 , 为 保障 人民 群众 的 工作 、 生活 和 学习 , 为 促进 首都 经济 的 发展 作出 了 自己 的 贡献 。 明天 就 是 元旦 , 你们 还有 许多 同志 要 坚守 岗位 , 我 向 你们 、 向 全体 电力 工作者 表示 感谢 。 现在 , 我们 的 首都 已经 结束 了 拉 闸 限 电 的 历史 , 希望 依靠 大家 , 使 拉 闸 限 电 的 历史 永远 不再 重演 。 同时 , 也 希望 你们 安全 生产 、 经济 调度 , 实现 经济 增长 方式 的 转变 。 ” 李 鹏 最后 向 电业 职工 , 向 全 北京市 的 人民 拜年 , 向 大家 致以 新春 的 问候 , 祝愿 电力 事业 取得 新 的 成绩 , 祝愿 北京市 在 改革 、 发展 和 稳定 的 各项 工作 中 取得 新 的 成就 。 -( 附 图片 1 张 ) -据 介绍 , 播音员 、 主持人 持证 上岗 工作 , 是 在 1996年 全国 广播 影视 系统 语言 工作 会议 上 提 出来 的 , 它 是 加强 宣传 队伍 建设 , 促进 语言 文字 走向 标准化 、 规范化 的 重要 举措 。 播音员 、 主持人 只有 通过 汉语 普通话 水平 测试 和 政治 、 业务 考核 后 才 能 获得 上岗 资格 证书 。 diff --git a/tests/data_for_tests/io/cws_pku/test.txt b/tests/data_for_tests/io/cws_pku/test.txt deleted file mode 100755 index c7ad3e85..00000000 --- a/tests/data_for_tests/io/cws_pku/test.txt +++ /dev/null @@ -1,6 +0,0 @@ -共同 创造 美好 的 新 世纪 —— 二○○一年 新年 贺词 -( 二○○○年 十二月 三十一日 ) ( 附 图片 1 张 ) -女士 们 , 先生 们 , 同志 们 , 朋友 们 : -2001年 新年 钟声 即将 敲响 。 人类 社会 前进 的 航船 就要 驶入 21 世纪 的 新 航程 。 中国 人民 进入 了 向 现代化 建设 第三 步 战略 目标 迈进 的 新 征程 。 -在 这个 激动人心 的 时刻 , 我 很 高兴 通过 中国 国际 广播 电台 、 中央 人民 广播 电台 和 中央 电视台 , 向 全国 各族 人民 , 向 香港 特别 行政区 同胞 、 澳门 特别 行政区 同胞 和 台湾 同胞 、 海外 侨胞 , 向 世界 各国 的 朋友 们 , 致以 新 世纪 第一 个 新年 的 祝贺 ! 
-过去 的 一 年 , 是 我国 社会主义 改革 开放 和 现代化 建设 进程 中 具有 标志 意义 的 一 年 。 在 中国 共产党 的 领导 下 , 全国 各族 人民 团结 奋斗 , 国民经济 继续 保持 较 快 的 发展 势头 , 经济 结构 的 战略性 调整 顺利 部署 实施 。 西部 大 开发 取得 良好 开端 。 精神文明 建设 和 民主 法制 建设 进一步 加强 。 我们 在 过去 几 年 取得 成绩 的 基础 上 , 胜利 完成 了 第九 个 五年计划 。 我国 已 进入 了 全面 建设 小康 社会 , 加快 社会主义 现代化 建设 的 新 的 发展 阶段 。 diff --git a/tests/data_for_tests/io/cws_pku/train.txt b/tests/data_for_tests/io/cws_pku/train.txt deleted file mode 100755 index 94ee7c93..00000000 --- a/tests/data_for_tests/io/cws_pku/train.txt +++ /dev/null @@ -1,9 +0,0 @@ -迈向 充满 希望 的 新 世纪 —— 一九九八年 新年 讲话 ( 附 图片 1 张 ) -中共中央 总书记 、 国家 主席 江 泽民 -( 一九九七年 十二月 三十一日 ) -12月 31日 , 中共中央 总书记 、 国家 主席 江 泽民 发表 1998年 新年 讲话 《 迈向 充满 希望 的 新 世纪 》 。 ( 新华社 记者 兰 红光 摄 ) -同胞 们 、 朋友 们 、 女士 们 、 先生 们 : -在 1998年 来临 之际 , 我 十分 高兴 地 通过 中央 人民 广播 电台 、 中国 国际 广播 电台 和 中央 电视台 , 向 全国 各族 人民 , 向 香港 特别 行政区 同胞 、 澳门 和 台湾 同胞 、 海外 侨胞 , 向 世界 各国 的 朋友 们 , 致以 诚挚 的 问候 和 良好 的 祝愿 ! -占 比 57.8% > 40% -占 比 57.8% < 40% -占 比 57.8% < < 40% > \ No newline at end of file diff --git a/tests/data_for_tests/io/dbpedia/test.csv b/tests/data_for_tests/io/dbpedia/test.csv deleted file mode 100644 index 4e50b3fb..00000000 --- a/tests/data_for_tests/io/dbpedia/test.csv +++ /dev/null @@ -1,5 +0,0 @@ -1,"TY KU"," TY KU /taɪkuː/ is an American alcoholic beverage company that specializes in sake and other spirits. The privately-held company was founded in 2004 and is headquartered in New York City New York. While based in New York TY KU's beverages are made in Japan through a joint venture with two sake breweries. Since 2011 TY KU's growth has extended its products into all 50 states." -1,"Odd Lot Entertainment"," OddLot Entertainment founded in 2001 by longtime producers Gigi Pritzker and Deborah Del Prete (The Wedding Planner) is a film production and financing company based in Culver City California.OddLot produced the film version of Orson Scott Card's sci-fi novel Ender's Game. A film version of this novel had been in the works in one form or another for more than a decade by the time of its release." -1,"Henkel"," Henkel AG & Company KGaA operates worldwide with leading brands and technologies in three business areas: Laundry & Home Care Beauty Care and Adhesive Technologies. Henkel is the name behind some of America’s favorite brands." -1,"GOAT Store"," The GOAT Store (Games Of All Type Store) LLC is one of the largest retro gaming online stores and an Independent Video Game Publishing Label. Additionally they are one of the primary sponsors for Midwest Gaming Classic." -1,"RagWing Aircraft Designs"," RagWing Aircraft Designs (also called the RagWing Aeroplane Company and RagWing Aviation) was an American aircraft design and manufacturing company based in Belton South Carolina." diff --git a/tests/data_for_tests/io/dbpedia/train.csv b/tests/data_for_tests/io/dbpedia/train.csv deleted file mode 100644 index d3698589..00000000 --- a/tests/data_for_tests/io/dbpedia/train.csv +++ /dev/null @@ -1,14 +0,0 @@ -1,"Boneau/Bryan-Brown"," Boneau/Bryan-Brown Inc. is a public relations company based in Manhattan New York USA largely supporting Broadway theatre productions as a theatrical press agency.The company was formed by the partnership of Chris Boneau and Adrian Bryan-Brown in 1991. Broadway productions supported include among hundreds the musical Guys and Dolls in 1992. The company initially represented the rock musical Spider-Man: Turn Off the Dark which finally opened on Broadway in 2011." 
-2,"Dubai Gem Private School & Nursery"," Dubai Gem Private School (DGPS) is a British school located in the Oud Metha area of Dubai United Arab Emirates. Dubai Gem Nursery is located in Jumeirah. Together the institutions enroll almost 1500 students aged 3 to 18." -3,"Shahar Marcus"," Shahar Marcus (born 1971 in Petach Tikva) is an Israeli performance artist." -4,"Martin McKinnon"," Martin Marty McKinnon (born 5 July 1975 in Adelaide) is a former Australian rules footballer who played with Adelaide Geelong and the Brisbane Lions in the Australian Football League (AFL).McKinnon was recruited by Adelaide in the 1992 AFL Draft with their first ever national draft pick. He was the youngest player on Adelaide's list at the time and played for Central District in the SANFL when not appearing with Adelaide." -5,"Steve Howitt"," Steven S. Howitt is the current member of the Massachusetts House of Representatives for the 4th Bristol district." -6,"Wedell-Williams XP-34"," The Wedell-Williams XP-34 was a fighter aircraft design submitted to the United States Army Air Corps (USAAC) before World War II by Marguerite Clark Williams widow of millionaire Harry P. Williams former owner and co-founder of the Wedell-Williams Air Service Corporation." -7,"Nationality Rooms"," The Nationality Rooms are a collection of 29 classrooms in the University of Pittsburgh's Cathedral of Learning depicting and donated by the ethnic groups that helped build the city of Pittsburgh." -8,"Duruitoarea River"," The Duruitoarea River is a tributary of the Camenca River in Romania." -9,"Shirvan Shahlu"," Shirvan Shahlu (Persian: شيروان شاهلو‎ also Romanized as Shīrvān Shāhlū; also known as Shīravān Shāmnū) is a village in Gavdul-e Sharqi Rural District in the Central District of Malekan County East Azerbaijan Province Iran. At the 2006 census its population was 137 in 35 families." -10,"Oenopota impressa"," Oenopota impressa is a species of sea snail a marine gastropod mollusk in the family Mangeliidae." -11,"Utricularia simulans"," Utricularia simulans the fringed bladderwort is a small to medium-sized probably perennial carnivorous plant that belongs to the genus Utricularia. U. simulans is native to tropical Africa and the Americas. It grows as a terrestrial plant in damp sandy soils in open savanna at altitudes from near sea level to 1575 m (5167 ft). U. simulans was originally described and published by Robert Knud Friedrich Pilger in 1914." -12,"Global Chillage"," Global Chillage is the second album by The Irresistible Force released in 1994 through Rising High Records." -13,"The Nuisance (1933 film)"," The Nuisance is a 1933 film starring Lee Tracy as a lawyer Madge Evans as his love interest (with a secret) and Frank Morgan as his accomplice." -14,"Razadarit Ayedawbon"," Razadarit Ayedawbon (Burmese: ရာဇာဓိရာဇ် အရေးတော်ပုံ) is a Burmese chronicle covering the history of Ramanya from 1287 to 1421. The chronicle consists of accounts of court intrigues rebellions diplomatic missions wars etc. About half of the chronicle is devoted to the reign of King Razadarit (r." diff --git a/tests/data_for_tests/io/imdb/dev.txt b/tests/data_for_tests/io/imdb/dev.txt deleted file mode 100644 index 423e158b..00000000 --- a/tests/data_for_tests/io/imdb/dev.txt +++ /dev/null @@ -1,6 +0,0 @@ -neg You can never have seen either film and still know that The Jerk Too is a disaster. The question is not, "How did it get made," because if you throw money at anyone and tell them to make a film, they will do so.

No. The question is "Why, oh why, did Steve Martin allow it to be made?" I think he needed the money to fight a nuisance lawsuit and was determined it not cost him anything. He knew the sequel was going to be so frightful, that out of pride, he wouldn't even count it's royalties as income.

The only way this sequel could not be an embarrassment is to have had Carl Gottlieb and Steve Martin revive the nation's favorite poor black family.

And "dcreasy2001" (aka Mark Blankfield?): It's just transparently obvious that you worked on this film in some sad capacity, and the only way you can feel better about your involvement is to be the sequel's lone cheerleader as an IMDb user comment. I was praying for you to veer over into satire, but alas, you were really making an effort at spin. Why not 10 stars? -neg The Hazing is confused mumbo-jumbo that wants so hard to be The Evil Dead that it even references Bruce Campbell several times. The problem is, it is simply not in the same league as that terrific movie. This movie is nowhere near as original. The plot has been used before, by Kevin Tenney in Night of the Demons, and that was a lot more fun. This flick wastes too much time with complicated exposition before getting the kids into the spooky mansion and starting the demonic happenings.

Brad Dourif is, as usual, not given much to do here, but when he is on screen he puts in another over-the-top performance that would make Christopher Walken jealous. As for the acting of the kids, it's passable but by no means good. The shaky camera work is more annoying than clever or atmospheric. There are a few good moments when the first guy gets possessed and throws around some deadly one liners while dispatching his victims, but it was never scary for a second. The gore level is mid-range to low, but the director tries to make up for it by showing the actresses topless a few times. All in all, just okay if you have 87 minutes to waste. -neg I have seen bad movies before, but this one takes the "Worst Movie of a Lifetime" award by far !! Anthony Hopkins has to be completely mentally ill to have his name attached to this one - anywhere ! I will never see another movie with him in it, directing it, etc., etc. ! I can't believe the other actors & actresses that I liked, (in this picture), that stooped so low to be a part of this disaster ! There must be some great drugs out there ! For anyone to not be embarrassed to be a part of such a film, is beyond me ! Save your money on this one ! HUGE FLOP from beginning to end ! Shame on you Mr. Hopkins ! Also, shame on Christian Slater ! I can't believe you put your reputations on the line for this one ! -neg You may want to know up front that I am not a Mormon, unlike a good number of those who have already reviewed this film. I mention this so you'll understand that the way I look at the film may differ greatly from those in the faith. For some, being critical of the film might be seen as being critical of the faith--and that is NOT my intention. So, my review is that of an outsider trying to look inside and learn more about who this man and his people were. Well, after seeing the film, I doubt if I have learned much at all. Since I have been a history teacher, I have a good basic understanding about Young as well as Joseph Smith as well as the teachings of the church. But anyone wanting to see this film to really learn anything will probably be disappointed because the film seems so gosh-darn nice--too nice and too unrealistic in its portrayal. Plus, you learn practically nothing about the church's beliefs other than they are nice people, work hard and some have many wives (and this latter part is only barely hinted at in the film). Instead, the people are almost cartoon-like in their simplistic portrayals. Joseph Smith and Brigham Young and their followers are angelic, the non-Mormons were all devils and Brian Donlevy (playing EXACTLY the same sort of role Edward G. Robinson later played in THE TEN COMMANDMENTS) is the trouble-maker who claims to be a Mormon but just comes along so the film can have a bad guy. It's all so very simple....too simple. Almost like an indoctrination film or infomercial.

Brigham Young especially was a very complex man--with many good points (an excellent organizer and visionary) as well as bad (don't even get me started on his views about Blacks within the church or intermarriage). To portray him in such vague terms is just plain silly. It's also a lot like how Gandhi was portrayed in the film with Ben Kingsley--only the facts that led to his being almost super-human were emphasized. Heck, now that I think about that, this is the trouble with most religious films--they often come off as one-dimensional, trite and bland. Let's have a full and more complete film of these men--one that will stick to facts and not emotional appeals.

Now if you can ignore the fact that you won't learn very much about the faith or its second leader, the film is enjoyable enough. It's obvious someone at 20th Century-Fox really cared about the film, as they had a wonderful cast of both premier actors (Tyrone Power), up and coming actors (Linda Darnell, Jane Darwell and Vincent Price) and wonderful character actors (Dean Jagger, John Carradine and Brian Donlevy). The film also had wonderful location shooting and lots of gloss. It just didn't have a lot to tell us other than they were all "swell". Plus, there were plenty of factual errors and a few just plain dumb scenes. A few of the mistakes include Young taking over the helm immediately after the death of Joseph Smith (it was three years later), no mention of the various Mormon denominations and splinter groups, talk of "gold in California"--even though it was 1847 and gold wouldn't be discovered until 1948, as well as no specific mention of polygamy or Smith's many wives. Just plain dumb scenes include Carradine pulling out a gun and waving it about in the courtroom scene--and no one seemed to care--even though it was a very hostile audience! Don't you think at least the judge would tell him to put it away and stop threatening people with it?!

One final comment. Do not, I repeat, do not watch this film when it's shown on American Movie Classics (a one great station that has sunk a lot in recent years). While I am critical of the film because of its simplistic message, I was horrified with the complete disrespect the station had for the church and its traditions. What I mean is this. The film was punctuated with ads for penis enlargement formulas as well as tons of pop-ups (some advertising a show that features the "sexiest cast"). Talk about disrespectful and gross and I would be just as offended if they did this for any other religious film. By doing this, they not only insult the faith but marginalize their market--after all, who is into hearing about these things AND the life of Brigham Young?! Is this a movie, in this form, that you can show to your kids or recommend to others?! -pos Fifteen years later and Paris Is Burning is still aflame. This is a classic in black gay films, right up there with the other honorary black gay films, The Color Purple and Mahoganoy. This seminal work captures underground and underclass (i.e."underserved) black and Latin gay culture and community like no other work before or since, including all the sentimental Harlem Rennaissance gay retrospectives and renderings. They're good, but this is the best (dare I say the only "real") film you'll find on the subject. It's Relentlessy Cunty (the classic house music invention)comes to Hollywood, non-stop, hilarious camp (like only we do it) and dead-on social critique. All this by a white female director (who obviously must have been a Sister Gurl or Mizz Thing in a former life.) I could go on, but I think you get the point by now: I love this movie! -pos I have been an admirer of Edward Burtynsky's work for years, and it was such a pleasure to be able to see the man at work, thanks to Jennifer Baichwal's documentary. The severe beauty of the ship-breaking yard in Bangladesh, the stone quarry in Vermont, the enormous assembly plant in China, the beleaguered old neighbourhoods in Shanghai that are just waiting to be torn down: these landscapes are captured so well by the photographer and the filmmaker.

At times I thought of old TV documentaries on abandoned coal mines and plastic-mold factories; the sort of stuff I grew up watching. Burtynsky's work has the great value of pointing out how the industrial activity has only shifted to Asia, it has not stopped. The strangest scene for me was the computer scrap-yard somewhere in China--the waste had a threatening air about it, while the workers were very jovial. diff --git a/tests/data_for_tests/io/imdb/test.txt b/tests/data_for_tests/io/imdb/test.txt deleted file mode 100644 index 68768ec6..00000000 --- a/tests/data_for_tests/io/imdb/test.txt +++ /dev/null @@ -1,6 +0,0 @@ -neg Alan Rickman & Emma Thompson give good performances with southern/New Orleans accents in this detective flick. It's worth seeing for their scenes- and Rickman's scene with Hal Holbrook. These three actors mannage to entertain us no matter what the movie, it seems. The plot for the movie shows potential, but one gets the impression in watching the film that it was not pulled off as well as it could have been. The fact that it is cluttered by a rather uninteresting subplot and mostly uninteresting kidnappers really muddles things. The movie is worth a view- if for nothing more than entertaining performances by Rickman, Thompson, and Holbrook. -neg I have seen this movie and I did not care for this movie anyhow. I would not think about going to Paris because I do not like this country and its national capital. I do not like to learn french anyhow because I do not understand their language. Why would I go to France when I rather go to Germany or the United Kingdom? Germany and the United Kingdom are the nations I tolerate. Apparently the Olsen Twins do not understand the French language just like me. Therefore I will not bother the France trip no matter what. I might as well stick to the United Kingdom and meet single women and play video games if there is a video arcade. That is all. -neg In Los Angeles, the alcoholic and lazy Hank Chinaski (Matt Dillon) performs a wide range of non-qualified functions just to get enough money to drink and gamble in horse races. His primary and only objective is writing and having sexy with dirty women.

"Factotum" is an uninteresting, pointless and extremely boring movie about an irresponsible drunken vagrant that works a couple of days or weeks just to get enough money to buy spirits and gamble, being immediately fired due to his reckless behavior. In accordance with IMDb, this character would be the fictional alter-ego of the author Charles Bukowski, and based on this story, I will certainly never read any of his novels. Honestly, if the viewer likes this theme of alcoholic couples, better off watching the touching and heartbreaking Hector Babenco's "Ironweed" or Marco Ferreri's "Storie di Ordinaria Follia" that is based on the life of the same writer. My vote is four.

Title (Brazil): "Factotum – Sem Destino" ("Factotum – Without Destiny") -neg This film is bundled along with "Gli fumavano le Colt... lo chiamavano Camposanto" and both films leave a lot to be desired in the way of their DVD prints. First, both films are very dark--occasionally making it hard to see exactly what's happening. Second, neither film has subtitles and you are forced to watch a dubbed film--though "Il Prezzo del Potere" does seem to have a better dub. Personally, I always prefer subtitles but for the non-purists out there this isn't a problem. These DVD problems, however, are not the fault of the original film makers--just the indifferent package being marketed four decades later.

As for the film, it's about the assassination of President Garfield. This is a MAJOR problem, as Van Johnson looks about as much like Garfield as Judy Garland. In no way whatsoever does he look like Garfield. He's missing the beard, has the wrong hair color and style and is just not even close in any way (trust me on this, I am an American History teacher and we are paid to know these sort of things!). The real life Garfield was a Civil War general and looked like the guys on the Smith Brothers cough drop boxes. Plus, using some other actor to provide the voice for Johnson in the dubbing is just surreal. Never before or since has Van Johnson sounded quite so macho!! He was a fine actor...but certainly not a convincing general or macho president.

In addition to the stupid casting, President Garfield's death was in no way like this film. It's obvious that the film makers are actually cashing in on the crazy speculation about conspiracies concerning the death of JFK, not Garfield. Garfield was shot in Washington, DC (not Dallas) by a lone gunman with severe mental problems--not a group of men with rifles. However, according to most experts, what actually killed Garfield (over two months later) were incompetent doctors--who probed and probed and probed to retrieve a bullet (to no avail) and never bothered cleaning their hands or implements in the process. In other words, like George Washington (who was basically killed by repeated bloodletting when suffering with pneumonia) he died due to malpractice. In the movie they got nothing right whatsoever...other than indeed President Garfield was shot.

Because the film bears almost no similarity to real history, it's like a history lesson as taught from someone from another planet or someone with a severe brain injury. Why not also include ninjas, fighting robots and the Greek gods while you're at it?!?! Aside from some decent acting and production values, because the script is utter cow crap, I don't recommend anyone watch it. It's just a complete and utter mess. -neg I only comment on really very good films and on utter rubbish. My aim is to help people who want to see great films to spend their time - and money - wisely.

I also want to stop people wasting their time on garbage, and want to publicize the fact that the director/producer of these garbage films can't get away with it for very long. We will find out who you are and will vote with out feet - and wallets.

This film clearly falls into the garbage category.

The director and writer is John Shiban. It's always a bad sign when the writer is also the director. Maybe he wants two pay cheques. He shouldn't get any. So remember the name - John SHIBAN. And if you see anything else by him, forget it.

I won't say anything about the plot - others have already. I am a little worried by how much the director likes to zoom in to the poor girl's face when she is crying and screaming. These long duration shots are a little worrying and may say something about the state of mind of Mr. Shiban. Maybe he should get psychiatric help.

Enough already. It's crap - don't waste your time on it. -neg When you look at the cover and read stuff about it an entirely different type of movie comes to mind than what you get here. Then again maybe I read the summary for the other movie called "Mausolem" instead as there were two movies of this title released about the same time with both featuring plots that had key elements in common. However, reading stuff about that movie here I know I saw this one and not that one and that movie is even less what one would imagine a movie with that title would be about. I will be honest, I expect more of a zombie type picture and you get that in this movie to some degree. However, there is more stuff involving the occult and strange powers as the opening scene of the people being taken away by the coroner at the beginning of the film will attest to. The movie also has the old theme of kids going somewhere they do not belong to have some crazy party, in this case it is in fact a mausoleum. The other movie I do not think really has that key feature playing that prominent role in the movie and I see the score for this one is higher too, still it was just not the movie I was expecting. diff --git a/tests/data_for_tests/io/imdb/train.txt b/tests/data_for_tests/io/imdb/train.txt deleted file mode 100644 index bbf4d799..00000000 --- a/tests/data_for_tests/io/imdb/train.txt +++ /dev/null @@ -1,6 +0,0 @@ -neg The monster from Enemy Mine somehow made his way into a small mountain community, where he has taken up residence. He's being hunted by a female doctor-turned-vigilante who is out to exterminate him. This female assassin, who looks like a refugee from a Motley Crue video, rides around on a motorcycle and tries to save a bunch of kids who have chosen to have a Big Chill weekend right smack dab in the middle of the monster's turf. Decapitations and lots of blood are primarily in place to draw attention away from the story which limps along like a bad version of the Island of Dr. Moreau (and yes, it's worse than the one with Val Kilmer). -neg I'll try to use words to describe this on....

I saw the original, which was good in its own way, but back then I should have feared a sequel.

And I was 'afraid' when I picked this one up, but now that I've seen it, I have to say, it's even worse then I thought. Why these movies still get money still makes my mind spin.

Let's start with the actors;they aren't all that good, but it has to be said, some make heads turn by being just plain awful. But what can an actor do with a script like this one. It's trying to be a copy of the original only this time the places have changed, any form of story is gone and any attempt of actually coming up with something that hasn't been done before, fails miserably. In a futile attempt to get it up-to-date, they try to make it exciting by making use of the whole 'big-brother' theme , but that has been worn out ages ago and offers nothing but a filler for between the beginning and the end. An attempt was made to try to save the movie by making a ton of references to the '83 original, but it just ended up being plain funny and sometimes a bit sad. In conclusion, if you have nothing , and I mean nothing , to do... go watch it, or play Frisbee... with the DVD.... by yourself. It'll offer you the same amount of fun.. I promise -pos Most yeti pictures are fatally undermined by a grave paucity of energy and enthusiasm. Not so this gloriously bent, batty and berserk over-the-top Italian-made shot-in-Canada kitsch gut-buster: It's a wildly ripe and vigorously moronic ghastly marvel which reaches a stunning apotheosis of righteously over-baked "what the hell's going on?" crackpot excess and inanity.

A freighter ship crew discovers the body of a 30-foot yeti that resembles a hirsute 70's disco stud (complete with jumbo wavy afro) perfectly preserved in a large chunk of ice. They dethaw the beast, jolt him back to life with electric charges, grossly mistreat him, and keep the poor hairy Goliath in an enormous glass booth. Before you can say "Hey, the filmmakers are obviously ripping off 'King Kong'," our titanic abominable snowdude breaks free of his cage, grabs the first luscious nubile blonde Euro vixen (the gorgeous Pheonix Grant) he lays lustful eyes on, and storms away with his new lady love. The yeti gets recaptured and flown to Toronto to be showed off to a gawking audience. Of course, he breaks free again, nabs the vixen, and goes on the expected stomping around the city rampage.

The sublimely stupid dialogue (sample line: "Philosophy has no place in science, professor"), cheesy (far from) special effects (the horrendous transparent blue screen work and cruddy Tonka toy miniatures are especially uproarious in their very jaw-dropping awfulness), clunky (mis)direction, and a heavy-handed script that even attempts a clumsily sincere "Is the yeti a man or a beast?" ethical debate all combine together to create one of the single most delightfully ridiculous giant monster flicks to ever roar its absurd way across the big screen. Better still, we also have a few funky offbeat touches to add extra shoddy spice to the already succulently schlocky cinematic brew: the vixen accidentally brushes against one of the yeti's nipples, which causes it to harden and elicits a big, leering grin of approval from the lecherous behemoth (!); the vixen nurses the yeti's wounded hand while he makes goo-goo eyes at her, the yeti smashes windows with his feet while climbing a towering office building, and the furry fellow even breaks a man's neck with his toes (!!). Overall, this singularly screwball and shamefully unheralded should-be camp classic stands tall as a remarkable monolith of infectiously asinine celluloid lunacy that's eminently worthy of a substantial hardcore underground cult following. -pos One of the best movies I ever saw was an Irish movie titled Philadelphia,Here I Come. I read the play before I saw the movie and loved them both. It's the story of a young man preparing to leave Ireland to go to America because he can't earn a living in Ireland. It is told both from the perspective of the young man(whom the other characters in the film can see) and another young man representing his uncensored thoughts and feelings., but who cannot be seen by the other characters in the film. It is a very sad movie, but deeply touching, and I would recommend this film to anyone who wants something to think about. I love any Irish movie, or almost any movie about Ireland, and any film that has the late Irish actor Donal McCann in it gets my vote.I would watch that man chew gum for 2 hours on screen, and unfortunately,I have.Terrible shame to have lost him so young. -pos There is such rubbish on the cable movie channels that I hit a gem with this one. From beginning to end it had me gripped and deserves top marks.

Father of two sons hears messages from "God" to kill people who he is told are 'demons'.

When the opening credits showed the director as one of the cast that can often be a warning of a bad film; exceptionally it is the reverse here as the drama is non-stop from beginning to end.

And there is not one moment in the movie when one is not fully enthralled as there are no unnecessary or needless sub-plots, and the script is first class.

All the actors give wholly convincing performances especially the lead child actor who is exceptional.

This film is at least as good as the likes of 'Silence of the Lambs'. -pos This is a nice piece of work. Very sexy and engaging enough plot to keep my interest throughout. Its main disadvantage is that it seems like it was made-for-TV: Full screen, and though there were several sex scenes, there was absolutely no nudity (but boy did it come close!). Strange, too, since Netflix shows that it was rated R.

Nonetheless, very titillating, and I wish Alicia Silverstone made more movies like this.

One Netflix reviewer stated that it was part of a series, but I have been unable to find out what series that is. I'd like to find out, though, because this movie was THAT good.

Walt D in LV. 8/23/2005 diff --git a/tests/data_for_tests/io/mr/dev.csv b/tests/data_for_tests/io/mr/dev.csv deleted file mode 100644 index a00e0b77..00000000 --- a/tests/data_for_tests/io/mr/dev.csv +++ /dev/null @@ -1,6 +0,0 @@ -1,"apesar de seus graves problemas , o filme consegue entreter" -0,"except as an acting exercise or an exceptionally dark joke , you wonder what anyone saw in this film that allowed it to get made" -0,"a real clunker a well made , thoughtful , well acted clunker , but a clunker nonetheless" -0,an ugly duckling tale so hideously and clumsily told it feels accidental -0,"unspeakable , of course , barely begins to describe the plot and its complications vulgar is too optimistic a title" -0,at least moore is a real charmer \ No newline at end of file diff --git a/tests/data_for_tests/io/mr/test.csv b/tests/data_for_tests/io/mr/test.csv deleted file mode 100644 index f3804141..00000000 --- a/tests/data_for_tests/io/mr/test.csv +++ /dev/null @@ -1,6 +0,0 @@ -1,the animated sequences are well done and perfectly constructed to convey a sense of childhood imagination and creating adventure out of angst -1,a great companion piece to other napoleon films -1,spellbinding fun and deliciously exploitative -0,an ugly duckling tale so hideously and clumsily told it feels accidental -0,"unspeakable , of course , barely begins to describe the plot and its complications vulgar is too optimistic a title" -0,at least moore is a real charmer \ No newline at end of file diff --git a/tests/data_for_tests/io/mr/train.csv b/tests/data_for_tests/io/mr/train.csv deleted file mode 100644 index 82c01beb..00000000 --- a/tests/data_for_tests/io/mr/train.csv +++ /dev/null @@ -1,6 +0,0 @@ -1,"'moore is like a progressive bull in a china shop , a provocateur crashing into ideas and special interest groups as he slaps together his own brand of liberalism '" -1,idiotic and ugly -1,"even if the naipaul original remains the real masterpiece , the movie possesses its own languorous charm" -1,"the movie is amateurish , but it 's a minor treat" -1,"some people march to the beat of a different drum , and if you ever wondered what kind of houses those people live in , this documentary takes a look at 5 alternative housing options" -1,the movie plays up the cartoon 's more obvious strength of snazziness while neglecting its less conspicuous writing strength diff --git a/tests/data_for_tests/io/ohsumed/dev.csv b/tests/data_for_tests/io/ohsumed/dev.csv deleted file mode 100644 index 7a26fb04..00000000 --- a/tests/data_for_tests/io/ohsumed/dev.csv +++ /dev/null @@ -1,6 +0,0 @@ -C23,"assessment biliary tract liver transplantation tube cholangiography iodida scanning biliary tract obstruction anastomotic leakage common problems following liver transplantation sequential study , 31 patients liver transplant investigated 99mtc iodida \( iodida \) scanning tube cholangiography \( ttc \) results compared clinical outcome seven patients extrahepatic biliary obstruction one patient biliary leak detection biliary complications ttc iodida scanning similar terms sensitivity \( 63 per cent \) ttc better specificity \( 79 per cent versus 60 per cent \) accuracy \( 74 per cent versus 60 per cent \) iodida scanning liver function taken account , diagnostic efficacy tests patients bilirubin levels less 200 mumol l similar levels greater 200 mumol l greater number false positive results iodida scanning \( 12 per cent versus 54 per cent \) significant biliary leak clearly detected ttc iodida scanning ttc remains effective way 
evaluating biliary tract transplantation iodida scanning limited value bilirubin levels elevated , may provide additional information blood supply , hepatocyte function intrahepatic cholestasis" -C23,"patterns dyspepsia patients clinical evidence organic diseases studied 2000 dyspeptic patients obvious signs organic disease first examination , order \( 1 \) verify many diagnoses idiopathic dyspepsia really made diagnostic procedures \( 2 \) evaluate diagnostic power symptoms distinguishing organic idiopathic dyspepsia latter considered structural abnormalities found cases , distinction made related associated organic dyspepsia according whether certain relationship abnormalities dyspeptic symptoms patients referred us follows \( 1 \) spontaneously , \( 2 \) sent physicians us , \( 3 \) referred open access endoscopic service results show frequency idiopathic dyspepsia 26 , whereas associated structural abnormalities present 45 4 obvious organic causes dyspepsia seen 28 6 \( 24 benign 4 6 malignant diseases \) considered separately , symptom alone allows correct diagnosis simultaneous evaluation symptoms linear discriminant analysis distinguishes idiopathic organic dyspeptic patients 70 cases higher discrimination percentage 70 cases higher discrimination percentage could probably obtained using wider range clinical parameters complex statistical analysis interrelationships exist clinical symptoms final diagnosis" -C23,"evaluation 13c urea breath test detection helicobacter pylori monitoring effect non ulcer dyspepsia sixty nine patients non ulcer dyspepsia studied endoscopy , biopsy , quick urease \( \) test , helicobacter pylori culture , 13c urea breath test treatment \( \) two tablets twice daily four weeks symptoms non ulcer dyspepsia recorded using standard questionnaire using h pylori culture gold standard , sensitivity 13c urea breath test 90 , specificity 98 6 , accuracy 94 8 positive predictive value 98 2 negative predictive value 92 5 conversion rate h pylori positive negative status treatment 17 9 symptoms non ulcer dyspepsia improved appreciably treatment irrespective h pylori status 13c urea breath test accurate research tool suitable serial testing population surveys" -C23,"demonstration area slow conduction human atrial flutter ten patients chronic atrial flutter studied prospectively using electrophysiologic mapping pacing techniques assess mechanism atrial flutter presence area slow conduction atria electrograms recorded greater equal 30 right atrial sites patient atrial flutter demonstrated right atrial free wall activation interatrial septum activation , consistent reentrant circuit involving right atrium six patients , slow conduction occurred atrial flutter inferior right atrium spatially associated fractionated recordings four patients , missing interval electrical activity occurred inferior right atrium average 40 atrial flutter cycle transient criteria demonstrated patient rapid high right atrial pacing mean activation time high right atrial pacing site coronary sinus \( inferior left atrial \) recording site long \( 228 ms \) consistent activation area slow conduction rapid pacing atrial flutter coronary sinus site , transient criteria could demonstrated mean activation time coronary sinus pacing site high right atrial recording site relatively short \( 134 ms \) consistent activation high right atrium area slow conduction high right atrial pacing sinus rhythm rates similar atrial flutter demonstrated short activation time coronary sinus low right atrial sites \( 
mean 169 88 ms , respectively \) , indicating activation area slow conduction coronary sinus pacing sinus rhythm demonstrated phenomena low right atrial electrograms recorded sinus rhythm rapid pacing sinus rhythm fractionated , although atrial flutter thus , atrial mapping pacing data complementary , indicating human atrial flutter patients studied generated reentrant circuit right atrium , area slow conduction low right atrium present atrial flutter" -C23,"analysis base station morphine orders assessment physician consistency paramedic contact base station consistent recommendations reflecting consensus base station physician care urban ems system , paramedics must contact single base station provide morphine sulfate \( ms \) patient chest pain performed retrospective cohort analysis prehospital ms requests chest pain determine consistency circumstances paramedic team refused ms ms requests represented 123 1 , \( 7 \) line physician consultations 6 month study 15 123 \( 12 \) ms requests refused neither mean patient age , sex distribution , presenting vital signs correlated ms refusal maximum estimate transport time hospital less equal 5 minutes noted 7 15 \( 47 \) medication compared 11 96 \( 11 \) documented estimated transport times \( p less equal 0 005 \) simultaneous request nitroglycerin \( \) noted 6 15 \( 40 \) medication 15 108 \( 14 \) \( p less 0 05 \) found refusal ms administration uncommon physicians tended ms transport time short requested concomitant administration also noted physician inconsistencies refusal findings guide physician consensus development avoid mixed paramedics" -C23,"predictors smoking nhanes followup experience published prospective studies predictors spontaneously cigarette smoking nationally representative u population paper describes study , using cohort taken first national health nutrition examination survey \( nhanes , 1971 1975 \) traced nhanes epidemiologic followup survey \( 1982 1984 \) successful \( least 1 year time followup \) ascertained among adults \( age 25 74 years \) smokers time nhanes disabled followup independent predictors \( proportional hazards multiple regression \) \( 1 \) older age \( 2 \) white race \( 3 \) fewer cigarettes smoked day \( 4 \) higher household income \( 5 \) hospitalization followup period predictors relapse \( ex smokers nhanes smoking time followup \) \( 1 \) younger age \( 2 \) urban residence \( 3 \) female gender findings implications intervention strategies , public health projections research" diff --git a/tests/data_for_tests/io/ohsumed/test.csv b/tests/data_for_tests/io/ohsumed/test.csv deleted file mode 100644 index 553af66a..00000000 --- a/tests/data_for_tests/io/ohsumed/test.csv +++ /dev/null @@ -1,6 +0,0 @@ -C23,"development small caliber biologic vascular graft evaluation antithrombogenicity early healing process authors previously showed small caliber xenograft using crosslinking technique applicable aortocoronary bypass grafting study graft , antithrombogenicity healing process evaluated early stage implantation fresh sheep carotid artery \( id \) obtained cross linked compounds , used small caliber vascular graft graft white soft six cm segments graft implanted carotid arteries bilaterally nine dogs sodium heparin given surgery , anticoagulant used postoperatively fifteen grafts eight dogs removed 1 hr 30 days implantation , 13 15 grafts found patent two grafts , one 3 days , 14 days , occluded anastomotic area occluded grafts felt hard outside one dog , grafts shown angiographically patent 
14 days implantation , dog kept long term observation macroscopically , thrombus observed patent grafts microscopically , inner surface near anastomotic lines covered endothelial cells , infiltration fibroblasts observed outside 7 days implantation foreign body reactions seen around graft 30 days implantation , thin layer plasma protein middle graft observed scanning electron microscopy \( sem \) observations , concluded grafts exhibited satisfactory early antithrombogenicity healing implantation" -C23,"proliferation substrate effects endothelial cell thrombogenicity effects cellular differentiation status adhesive substrate endothelial cell function cell culture measured enzyme based assay surface thrombogenicity solid plastic , microporous polymeric , fibronectin \( fn \) treated microporous polymeric used substrates growth endothelial cells microporous fn treated synthetic substrates shown aid induction cellular differentiation mechanisms cells studied proliferative growth conditions thrombogenicity surface created endothelial cell monolayers various experimental conditions determined using enzyme based assay fibrin deposition actively proliferating cells solid plastic substrate produced thrombogenic surface , confluent endothelial cell monolayers grown fn treated microporous substrate least thrombogenic surfaces data suggest endothelial cell surface thrombogenicity substrate control , also related cellular differentiation status findings used design novel approach small diameter synthetic vascular graft problem" -C23,"effect complement arachidonic acid pathway inhibition white blood cell count deposition vascular grafts determine role complement arachidonic acid metabolites decrease peripheral white blood cell count \( pwbc \) observed graft implantation , dacron aortic grafts implanted control rabbits \( group , n 13 \) , rabbits pretreated venom factor \( 80 u kg \) complement \( group ii , n 13 \) , indomethacin \( 2 5 mg kg \) inhibit cyclooxygenase \( group iii , n 7 \) , diethylcarbamazine \( dec , 90 mg kg \) inhibit leukotriene synthesis \( group iv , n 7 \) pwbc measured 15 min 1 hr graft implantation graft removal , wbc count grafts \( gwbc \) determined light microscopy \( \) scanning electron microscopy \( sem \) one hr graft implantation , pwbc decreased significantly groups iv 46 , 52 , 40 , 45 preoperative pwbc , respectively significant difference among groups revealed gwbc per field 8 0 , 12 3 , 5 8 , 6 8 groups iv , respectively similarly , sem showed gwbc per field 2 5 , 5 6 , 0 7 , 1 5 groups iv , respectively sem gwbc significantly greater group ii \( p less 0 01 \) , significantly less group iii \( p less 0 05 \) results suggested complement arachidonic acid pathways alone affect fall pwbc , may influence gwbc" -C23,"total perinatal wastage clarification priorities pregnancy outcome 16 , women carrying 17 , living fetuses 16 weeks gestation studied well recording perinatal deaths , losses 28 weeks one year delivery recorded give total perinatal wastage rate 21 6 per 1000 fetuses alive 16 weeks compared perinatal mortality rate \( plus early neonatal deaths \) 7 8 per 1000 births deaths classified according pathological sub groups concept perinatal care using perinatal mortality compared using total perinatal wastage" -C23,"magnetic resonance imaging idiopathic retroperitoneal fibrosis measurement t1 relaxation time magnetic resonance imaging 0 08 performed nine patients proven idiopathic retroperitoneal fibrosis total 11 scans performed three patients scanned 
diagnosis one also two follow scans six patients scanned variable time diagnosis treatment scan , soft tissue mass readily identified , distribution corresponding seen computed tomography difference mean t1 relaxation time mass patients scanned diagnosis scanned treatment however , patient followed serial scans showed progressive reduction t1 value mass time comparison results obtained patients lymphoma suggests t1 values retroperitoneal fibrosis lower lymphoma , particularly non hodgkin 's lymphoma" -C23,"development reversibility lymphocyte dysfunction experimental obstructive jaundice study evaluates effect experimental biliary obstruction bile duct ligation \( \) biliary drainage cell mediated immunity wistar rats immune status assessed mitogen stimulation test lymphocytes animals followed 35 days regression analysis showed significant negative correlation lymphocyte function period jaundice \( correlation coefficient 0 57 , p less 0 001 \) following 21 days , groups animals internal biliary drainage 7 , 14 28 days , external drainage 14 days compared obstructed animals , 14 days internal drainage required improve lymphocyte function \( p less 0 05 \) animals 14 days external drainage significantly lower lymphocyte stimulation internal drainage animals \( p less 0 05 \) results demonstrate obstructive jaundice produces progressive reduction lymphocyte function reversed biliary drainage , internal drainage effective external drainage" diff --git a/tests/data_for_tests/io/ohsumed/train.csv b/tests/data_for_tests/io/ohsumed/train.csv deleted file mode 100644 index 7a6cfba7..00000000 --- a/tests/data_for_tests/io/ohsumed/train.csv +++ /dev/null @@ -1,6 +0,0 @@ -C23,"role membrane proteins monosodium urate crystal membrane interactions ii effect erythrocyte membranes membrane permeable impermeable protein crosslinking agents intact , human erythrocytes pretreated membrane permeable , dimethyl \( \) dimethyl \( \) membrane impermeable 3 , 3' \( \) \( \) protein crosslinking agents incubated monosodium urate monohydrate \( \) crystals percent inhibition lysis values pretreated cells relative untreated cells determined 3 agents caused concentration dependent inhibition induced hemolysis due decrease binding pretreated membranes proposed inhibition lysis due crosslinking integral cytoskeletal membrane proteins , resulting reduced mobility proteins , inhibition integral proteins aggregates decreased pore formation membrane" -C23,"biliary gut function following shock aim study characterize alterations gallbladder intestinal function hemorrhagic shock blood reperfusion animals subjected shock 30 mm hg arterial blood pressure 60 minutes resuscitated blood reinfusion gallbladder epithelial ion transport , gallbladder motility vitro vivo , gastrointestinal motility , flora stomach small bowel studied 2 24 hours shock changes 2 hours included decreased gallbladder contractility vitro decreased emptying vivo , loss coordination intestinal motor activity , decrease frequency intestinal electrical slow waves , reduced duration intestinal migrating motor complex cycle 24 hours , gallbladder epithelial permeability increased vitro contractility remained reduced vivo functions showed partial recovery gastrointestinal flora affected changes data demonstrate hemorrhagic shock reperfusion affect digestive motility early timing alterations observed partial recovery 24 hours post shock suggest ischemia hypoxia mechanism injury" -C23,"short term long term changes renal function donor nephrectomy retrospectively 
examined effect nephrectomy renal function 55 living related donors renal function measured scans patients studied preoperatively , 1 week 1 year postoperatively 20 patients 10 year followup available compensatory hypertrophy complete 1 week postoperatively effective renal plasma flow remaining kidney 32 5 higher preoperatively increase remained stable least year degree compensatory hypertrophy significantly greater male patients \( 46 9 1 week \) female patients \( 26 7 \) compensatory hypertrophy occurred age groups studied pronounced patients less 30 years old patients followed 10 years effective renal plasma flow decreased 387 7 ml per minute 1 week nephrectomy 4 ml per minute 10 years result similar decrease seen normal population according results , renal donation living related persons lead long term decrease renal function" -C23,treatment idiopathic retroperitoneal fibrosis immunosuppression idiopathic retroperitoneal fibrosis exceedingly uncommon childhood etiology uncertain support immunological basis disease given report 14 year old girl severe retroperitoneal fibrosis causing progressive azathioprine prednisolone used successfully case supports efficacy immunotherapy treatment idiopathic retroperitoneal fibrosis -C23,en bloc transplantation kidneys donors weighing less 15 kg adult recipients en bloc transplantation kidneys donors weighed less 15 kg 20 adult patients described medial kidney allowed adequate renal positioning growth graft venous thrombosis occurred 1 patient irreversible graft rejection occurred 4 patients graft survival 65 excellent function mean followup 8 8 months en bloc transplantation pediatric cadaver kidney grafts adults acceptable procedure -C23,"afferent nipple valve malfunction caused anchoring collar unexpected late complication kock continent ileal reservoir construction kock continent ileal reservoir urinary diversion , significantly high rates late postoperative complications regarding nipple valves , efferent limb particular , reported reports afferent nipple valve malfunction total 42 patients underwent kock pouch operation observed 12 months \( mean 38 months \) evaluated terms afferent nipple valve malfunction late afferent nipple valve complications observed 10 42 patients \( 24 \) complications included erosion fiber fabric used collar \( 5 patients \) , stenosis afferent limb \( 2 \) obstruction afferent nipple mucous plug fungus ball \( 3 \) latter 2 complications due mechanical dynamic obstruction urine flow caused nonabsorbable collar none 10 patients problems efferent nipple valve function results suggest peristaltic direction intestine use nonabsorbable material collar primarily responsible late afferent nipple valve complications modifications needed produce stable nipple valve otherwise , simpler reliable alternative techniques anastomosis considered" diff --git a/tests/data_for_tests/io/peopledaily/dev.txt b/tests/data_for_tests/io/peopledaily/dev.txt deleted file mode 100755 index 4769eb79..00000000 --- a/tests/data_for_tests/io/peopledaily/dev.txt +++ /dev/null @@ -1,7 +0,0 @@ -中 B-ORG -共 I-ORG -中 I-ORG -央 I-ORG - -致 O -中 B-ORG diff --git a/tests/data_for_tests/io/peopledaily/test.txt b/tests/data_for_tests/io/peopledaily/test.txt deleted file mode 100755 index 1a983ebd..00000000 --- a/tests/data_for_tests/io/peopledaily/test.txt +++ /dev/null @@ -1,41 +0,0 @@ -美 B-LOC -国 I-LOC - -的 O -华 B-PER - -莱 B-PER -士 B-PER - -中 B-ORG -共 I-ORG - -中 I-ORG -央 I-ORG - -举 O -办 O - -《 O -“ O - -一 O -国 O - -两 O -制 O - -” O -与 O - -香 B-LOC -港 I-LOC 
- -基 O -本 O - -法 O -》 O - -讲 O -座 O diff --git a/tests/data_for_tests/io/peopledaily/train.txt b/tests/data_for_tests/io/peopledaily/train.txt deleted file mode 100755 index 4fb5f61b..00000000 --- a/tests/data_for_tests/io/peopledaily/train.txt +++ /dev/null @@ -1,46 +0,0 @@ -我 O -们 O - -收 O -藏 O - -北 B-LOC -京 I-LOC - -史 O -料 O - -历 B-LOC -博 I-LOC - -、 O -古 B-ORG -研 I-ORG -所 I-ORG - -、 O -北 B-LOC - -大 I-LOC -清 I-LOC - -华 I-LOC -图 I-LOC - -书 I-LOC -馆 I-LOC - -我 O -们 O - -是 O -受 O - -到 O -郑 B-PER - -振 I-PER -铎 I-PER - -先 O -生 O diff --git a/tests/data_for_tests/io/weibo_NER/dev.conll b/tests/data_for_tests/io/weibo_NER/dev.conll deleted file mode 100755 index 11db48f8..00000000 --- a/tests/data_for_tests/io/weibo_NER/dev.conll +++ /dev/null @@ -1,21 +0,0 @@ -老 B-PER.NOM -百 I-PER.NOM -姓 I-PER.NOM - -心 O - -新 B-GPE.NAM -乡 I-GPE.NAM - -年 O - -大 B-ORG.NOM -学 I-ORG.NOM - -同 O - -宿 B-LOC.NOM -舍 I-LOC.NOM - -三 O -年 O diff --git a/tests/data_for_tests/io/weibo_NER/test.conll b/tests/data_for_tests/io/weibo_NER/test.conll deleted file mode 100755 index b92e7efa..00000000 --- a/tests/data_for_tests/io/weibo_NER/test.conll +++ /dev/null @@ -1,17 +0,0 @@ -感 O -动 O - -了 O - -李 B-PER.NAM -开 I-PER.NAM -复 I-PER.NAM - -小 B-ORG.NOM -学 I-ORG.NOM - -美 O -术 O - -新 O -课 O \ No newline at end of file diff --git a/tests/data_for_tests/io/weibo_NER/train.conll b/tests/data_for_tests/io/weibo_NER/train.conll deleted file mode 100755 index 6d6182c0..00000000 --- a/tests/data_for_tests/io/weibo_NER/train.conll +++ /dev/null @@ -1,69 +0,0 @@ -坏 O -男 B-PER.NOM -人 I-PER.NOM - -男 B-PER.NOM -人 I-PER.NOM -帮 I-PER.NOM - - -不 O - -南 B-GPE.NAM -都 I-GPE.NAM - -南 B-GPE.NAM -方 I-GPE.NAM -都 I-GPE.NAM -市 I-GPE.NAM - -的 O - -那 B-LOC.NOM -座 I-LOC.NOM - -来 O - -学 B-ORG.NOM -校 I-ORG.NOM - -的 O - -卫 B-ORG.NAM -生 I-ORG.NAM -部 I-ORG.NAM - -台 B-GPE.NAM -灣 I-GPE.NAM - -火 B-LOC.NAM -焰 I-LOC.NAM -山 I-LOC.NAM - -的 O - -成 O -李 B-PER.NAM -力 I-PER.NAM -帆 I-PER.NAM - -我 O - -南 B-GPE.NAM -都 I-GPE.NAM - -深 B-GPE.NAM -圳 I-GPE.NAM - -一 O -个 O - -国 B-GPE.NOM -家 I-GPE.NOM - -以 O - -民 B-PER.NOM - -为 O -本 O diff --git a/tests/data_for_tests/io/yelp_review_full/dev.csv b/tests/data_for_tests/io/yelp_review_full/dev.csv deleted file mode 100755 index ecc93b0b..00000000 --- a/tests/data_for_tests/io/yelp_review_full/dev.csv +++ /dev/null @@ -1,6 +0,0 @@ -"2","Two meals, on the recommendation of a friend who lives near the place, and after the second trip, I was compelled to write. 'Rocky' would definitely describe the experiences.\n\nOn the first trip, I went to try their (at that time)raved about Reuben. And YET to find a true good Reuben in da burgh, I tried it.\n\nWell, they were out of the proper bread, and the guy had to run to the store to buy the closest thing he could find, which was not the proper bread, and instead of one of their 'raved about' Reubens, I received two mini-Reubens, which basically took the guts from one Reuben, and spread it out onto two sandwiches on regular sized bread. I ate it. It wasn't great, but they swore it was because they'd run out of the bread. Bread or not, it still wasn't great. The atmosphere was pleasant in that 'blue collar bar' kind of way, and the staff was very nice, but not a winning pitch on the Reuben.\n\nThe second trip was after a long day of moving furniture with the same friend. 
Sat in the back room, instead of the bar, which felt more like a restaurant, of course, with the big screen TV covering the sports of the moment.\n\nI was in the mood for dinner this time, and after a scan, decided on fried chicken and mashed potatoes with the salad bar. My friend ordered one of her faves, the breaded pork chops.\n\nWe hit the salad bar, which was uber-basic. Three soups (mostly vegetable loaded, which left me out), basic iceberg lettuce mix (very probably out of a bag), a few veggie toppings, and three or four dressings. It was a basic salad, no big deal. More or less an appetizer filler before the meal.\n\nThe mind-blower in this trip was the ordering of the fried chicken dinner. Our waiter looked like a 19 year old gas station attendant, skinny little blonde guy with a sweet but incredibly naive face, and an air of vapidity, which was confirmed when I placed my order. I asked what chicken pieces came in the dinner, and asked if it was possible to only get dark meat. I never imagined how confusing a question that could possibly be. It literally took him two trips back to the kitchen to 'ask', and the child honestly had no clue what 'white meat' and 'dark meat' meant. The first answer he came back with was that the chicken came in a pre-portioned prepared bag, kind of Kentucky Fried Chicken style...which didn't answer my question, thus prompting the second trip. \n\nAfter the second trip back I heard the cook holler 'Tell him I'll fix him up'. \n\nWell, the chicken was prepackaged dreck like you'd find in the freezer case of Walmart, tiny and not good, and the potatoes had that slight tinge of chem-spuds flavor, laden with some kind of chopped up green (parsley?), and a side of that basic brown gravy served up in 5 gallon buckets.\n\nThank goodness for the basic salad bar.\n\nEven my friend admitted that her pork chops were different and not what she'd expected. They also appeared to be from a freezer bag.\n\nThe irony was that the boy who didn't know white meat from dark meat, was chatting with some other customers...about baseball...and he was a genius about the mindless sport of baseball. Ahhhh da burgh.\n\nThird base? Nah...why bother when there are so many other options around. Go on in a grab a beer and chat black and gold if you happen to be in Carnegie...they can help you out all types of ways in that area. Just don't go hungry if you actually have tastebuds.\n\nFrom what I understand it 'used to be' really good homecooked food. But apparently, mama has left the kitchen." -"4","I belong to this gym... I live in the South section of Pittsburgh, and I find that this gym is not too far from me. The staff is friendly, the equipment is quite good. You get two free personal training sessions when you join. They have lots of weights (which my boyfriend uses) and a decent cardio room. The only thing I would say is to increase some of the cardio equipment. Water is only $1 a bottle!" -"3","I've been to Papa J's twice and had mixed experiences.\n\nBoth times I had the banana pepper appetizer, which is great and goes really well with the FRESH and delicious bread and cheese they give you at the start of your meal.\n\nFor entrees, me and my girlfriend have had mixed experience. I've had the fish sandwich (very good) and the eggplant parm sandwich (okay). My girlfriend got the salad with bread and basil on it, but the basil was over powering and the bread was soggy with the dressing. \n\nThe service is also a mixed bag. 
The first time our server went out of her way to take care of us and even MADE me cocktail sauce for my fish sandwich. The second time, the server was lackluster, didn't know anything about the menu and wasn't able to take proper care of us. \n\nI would return to Papa J's, but I my terrible experience last time isn't enough to say it would be my first pick of places to eat around Carnegie/Robinson." -"4","Yay, I'm a fan but sometimes service is a little slow, it was very good for us this visit. Go to Papa j's every once in a while but mostly for the White Pizza. It is the best white pizza I have ever had. Order the white pizza on our visit this weekend... it has garlic, spinach, feta cheese and we usually add some veggie on top. It was delicious! Order fried calamari and it was OK...note to self next time try the calamari roman style.\n\nLike the dinning room with the hardwood floors and bright lighting. \n\nThe bar was jumping thou never go to the bar." -"3","Had dinner at Papa J's with a group of 6. I loved how the restaurant is in a old brick building with large windows. It felt like a neighborhood restaurant. On a Saturday night, the restaurant was full but not crowded. We were seated in a room with poor acoustics. It was difficult to hear people at our table and the waitress. While she tried, I can see the asperation in her face when she had to repeat the specials to both sides of the table.\n\nPeople ordered bourbon on the rocks before dinner which seemed watered down, while my lemon drop was made nice. The bread was delicious! Can you describe it to be creamy? The fried zucchini was lightly breaded and not too oily. It was a large portion made up of 2 sliced zucchinis.\n\nWe ordered a variety of dishes. The pasta dish was dry with more pasta than sauce or meat. Those who ordered the fish special thought it was delicious. The shrimp dish was enjoyed as well. I had the chicken marsala which was pretty good. The marsala sauce wasn't too thick, and the chicken moist.\n\nHard to tell if the deserts were \""homemade.\"" The tiramisu and spumoni were small in portion and meant for one. \n\nOn the whole, I was on the fence with my overall impression of Papa J's. \""A-ok\"" probably is the best way to describe it." -"2","Rather typical SnS. Had a good lunch crowd. Milkshake was good but not as good as EnP down the street. It took to long to get the burger for some reason, 25 minutes, I realized cooked to order but this is a little long for SnS. Ordered the Guacamole Steakburger and it only had a small portion of Gauc...not your usual amount..kitchen was not up to speed on portion sizing for some reason. Definitely did not look like the picture on the website. Oh well!" diff --git a/tests/data_for_tests/io/yelp_review_full/test.csv b/tests/data_for_tests/io/yelp_review_full/test.csv deleted file mode 100755 index 63d84891..00000000 --- a/tests/data_for_tests/io/yelp_review_full/test.csv +++ /dev/null @@ -1,6 +0,0 @@ -"1","I got 'new' tires from them and within two weeks got a flat. I took my car to a local mechanic to see if i could get the hole patched, but they said the reason I had a flat was because the previous patch had blown - WAIT, WHAT? I just got the tire and never needed to have it patched? This was supposed to be a new tire. \nI took the tire over to Flynn's and they told me that someone punctured my tire, then tried to patch it. So there are resentful tire slashers? I find that very unlikely. 
After arguing with the guy and telling him that his logic was far fetched he said he'd give me a new tire \""this time\"". \nI will never go back to Flynn's b/c of the way this guy treated me and the simple fact that they gave me a used tire!" -"1","Don't waste your time. We had two different people come to our house to give us estimates for a deck (one of them the OWNER). Both times, we never heard from them. Not a call, not the estimate, nothing." -"1","All I can say is the worst! We were the only 2 people in the place for lunch, the place was freezing and loaded with kids toys! 2 bicycles, a scooter, and an electronic keyboard graced the dining room. A fish tank with filthy, slimy fingerprints smeared all over it is there for your enjoyment.\n\nOur food came... no water to drink, no tea, medium temperature food. Of course its cold, just like the room, I never took my jacket off! The plates are too small, you food spills over onto some semi-clean tables as you sit in your completely worn out booth seat. The fried noodles were out of a box and nasty, the shrimp was mushy, the fried rice was bright yellow.\n\nWe asked for water, they brought us 1 in a SOLO cup for 2 people. I asked for hot tea, they said 10 minutes. What Chinese restaurant does not have hot tea available upon request?\n\nOver all.... my first and last visit to this place. The only good point was that it was cheap, and deservingly so." -"1","I have been to this restaurant twice and was disappointed both times. I won't go back. The first time we were there almost 3 hours. It took forever to order and then forever for our food to come and the place was empty. When I complained the manager was very rude and tried to blame us for taking to long to order. It made no sense, how could we order when the waitress wasn't coming to the table? After arguing with me he ended up taking $6 off of our $200+ bill. Ridiculous. If it were up to me I would have never returned. Unfortunately my family decided to go here again tonight. Again it took a long time to get our food. My food was cold and bland, my kids food was cold. My husbands salmon was burnt to a crisp and my sister in law took one bite of her trout and refused to eat any more because she claims it was so disgusting. The wedding soup and bread were good, but that's it! My drink sat empty throughout my meal and never got refilled even when I asked. Bad food, slow service and rude managers. I'll pass on this place if my family decides to go again. Not worth it at all with all the other good Italian options around." -"1","Food was NOT GOOD at all! My husband & I ate here a couple weeks ago for the first time. I ordered a salad & basil pesto cream pasta & my husband ordered the spinach & feta pasta. The salad was just a huge plate of spring mix (nothing else in it) with WAY to much vinegar dressing. My lettuce was drowning in the vinegar. My pesto pasta had no flavor (did not taste like a cream sauce to me) & the pesto was so runny/watery & way too much sauce not enough noodles. My husband's pasta had even less flavor than mine. We ate about a quarter of the food & couldn't even finish it. We took it home & it was so bad I didn't even eat my leftovers. And I hate wasting food!! Plus the prices are expensive for the amount of food you get & of course the poor quality. Don't waste your time eating here. There are much better Italian restaurants in Pittsburgh." 
-"3","This is a tiny Starbucks and it locations like this (although cute) makes you wonder if your really meant to hang out or just grab your coffee and leave. Leaving is always a good idea at this location anyway since you have a nice fountain in the back with benches and it is a central part of the Waterfront Shopping. \n\nStarbuck isn't my favorite coffee chain by any means. Is it just me or do all Starbuck coffees taste a little burnt and bitter? No matter how trendy, cool and upscale their establishments are I can't get around the yicky tasting bitterness of Staryucks regular coffees. Talk about over roasting a bean...Maybe something has changed with their regular coffee but I have not drank it in about a year. I am not one for soy caramel latte foofy stuff. Still I'll give the establishment tres estrellas for the fact that their espresso is acceptable and doesn't taste half as bad as the regular coffee bean." diff --git a/tests/data_for_tests/io/yelp_review_full/train.csv b/tests/data_for_tests/io/yelp_review_full/train.csv deleted file mode 100755 index 032d423a..00000000 --- a/tests/data_for_tests/io/yelp_review_full/train.csv +++ /dev/null @@ -1,6 +0,0 @@ -"5","dr. goldberg offers everything i look for in a general practitioner. he's nice and easy to talk to without being patronizing; he's always on time in seeing his patients; he's affiliated with a top-notch hospital (nyu) which my parents have explained to me is very important in case something happens and you need surgery; and you can get referrals to see specialists without having to see him first. really, what more do you need? i'm sitting here trying to think of any complaints i have about him, but i'm really drawing a blank." -"2","Unfortunately, the frustration of being Dr. Goldberg's patient is a repeat of the experience I've had with so many other doctors in NYC -- good doctor, terrible staff. It seems that his staff simply never answers the phone. It usually takes 2 hours of repeated calling to get an answer. Who has time for that or wants to deal with it? I have run into this problem with many other doctors and I just don't get it. You have office workers, you have patients with medical needs, why isn't anyone answering the phone? It's incomprehensible and not work the aggravation. It's with regret that I feel that I have to give Dr. Goldberg 2 stars." -"4","Been going to Dr. Goldberg for over 10 years. I think I was one of his 1st patients when he started at MHMG. He's been great over the years and is really all about the big picture. It is because of him, not my now former gyn Dr. Markoff, that I found out I have fibroids. He explores all options with you and is very patient and understanding. He doesn't judge and asks all the right questions. Very thorough and wants to be kept in the loop on every aspect of your medical health and your life." -"3","Got a letter in the mail last week that said Dr. Goldberg is moving to Arizona to take a new position there in June. He will be missed very much. \n\nI think finding a new doctor in NYC that you actually like might almost be as awful as trying to find a date!" -"1","I don't know what Dr. Goldberg was like before moving to Arizona, but let me tell you, STAY AWAY from this doctor and this office. I was going to Dr. Johnson before he left and Goldberg took over when Johnson left. He is not a caring doctor. He is only interested in the co-pay and having you come in for medication refills every month. 
He will not give refills and could less about patients's financial situations. Trying to get your 90 days mail away pharmacy prescriptions through this guy is a joke. And to make matters even worse, his office staff is incompetent. 90% of the time when you call the office, they'll put you through to a voice mail, that NO ONE ever answers or returns your call. Both my adult children and husband have decided to leave this practice after experiencing such frustration. The entire office has an attitude like they are doing you a favor. Give me a break! Stay away from this doc and the practice. You deserve better and they will not be there when you really need them. I have never felt compelled to write a bad review about anyone until I met this pathetic excuse for a doctor who is all about the money." -"5","Top notch doctor in a top notch practice. Can't say I am surprised when I was referred to him by another doctor who I think is wonderful and because he went to one of the best medical schools in the country. \nIt is really easy to get an appointment. There is minimal wait to be seen and his bedside manner is great." diff --git a/tests/data_for_tests/io/yelp_review_polarity/dev.csv b/tests/data_for_tests/io/yelp_review_polarity/dev.csv deleted file mode 100755 index 09228213..00000000 --- a/tests/data_for_tests/io/yelp_review_polarity/dev.csv +++ /dev/null @@ -1,6 +0,0 @@ -"1","Hoofah." -"1","Two meals, on the recommendation of a friend who lives near the place, and after the second trip, I was compelled to write. 'Rocky' would definitely describe the experiences.\n\nOn the first trip, I went to try their (at that time)raved about Reuben. And YET to find a true good Reuben in da burgh, I tried it.\n\nWell, they were out of the proper bread, and the guy had to run to the store to buy the closest thing he could find, which was not the proper bread, and instead of one of their 'raved about' Reubens, I received two mini-Reubens, which basically took the guts from one Reuben, and spread it out onto two sandwiches on regular sized bread. I ate it. It wasn't great, but they swore it was because they'd run out of the bread. Bread or not, it still wasn't great. The atmosphere was pleasant in that 'blue collar bar' kind of way, and the staff was very nice, but not a winning pitch on the Reuben.\n\nThe second trip was after a long day of moving furniture with the same friend. Sat in the back room, instead of the bar, which felt more like a restaurant, of course, with the big screen TV covering the sports of the moment.\n\nI was in the mood for dinner this time, and after a scan, decided on fried chicken and mashed potatoes with the salad bar. My friend ordered one of her faves, the breaded pork chops.\n\nWe hit the salad bar, which was uber-basic. Three soups (mostly vegetable loaded, which left me out), basic iceberg lettuce mix (very probably out of a bag), a few veggie toppings, and three or four dressings. It was a basic salad, no big deal. More or less an appetizer filler before the meal.\n\nThe mind-blower in this trip was the ordering of the fried chicken dinner. Our waiter looked like a 19 year old gas station attendant, skinny little blonde guy with a sweet but incredibly naive face, and an air of vapidity, which was confirmed when I placed my order. I asked what chicken pieces came in the dinner, and asked if it was possible to only get dark meat. I never imagined how confusing a question that could possibly be. 
It literally took him two trips back to the kitchen to 'ask', and the child honestly had no clue what 'white meat' and 'dark meat' meant. The first answer he came back with was that the chicken came in a pre-portioned prepared bag, kind of Kentucky Fried Chicken style...which didn't answer my question, thus prompting the second trip. \n\nAfter the second trip back I heard the cook holler 'Tell him I'll fix him up'. \n\nWell, the chicken was prepackaged dreck like you'd find in the freezer case of Walmart, tiny and not good, and the potatoes had that slight tinge of chem-spuds flavor, laden with some kind of chopped up green (parsley?), and a side of that basic brown gravy served up in 5 gallon buckets.\n\nThank goodness for the basic salad bar.\n\nEven my friend admitted that her pork chops were different and not what she'd expected. They also appeared to be from a freezer bag.\n\nThe irony was that the boy who didn't know white meat from dark meat, was chatting with some other customers...about baseball...and he was a genius about the mindless sport of baseball. Ahhhh da burgh.\n\nThird base? Nah...why bother when there are so many other options around. Go on in a grab a beer and chat black and gold if you happen to be in Carnegie...they can help you out all types of ways in that area. Just don't go hungry if you actually have tastebuds.\n\nFrom what I understand it 'used to be' really good homecooked food. But apparently, mama has left the kitchen." -"2","I've lived in Pittsburgh for 6 years, and in Carnegie for over 2 years, and by far, this is the best greasy spoon joint I've found. If you can stomach the wait (no reservations, naturally), you'll enjoy overflowing plates of goodness, thanks to the well-seasoned griddle where all of the food is made. \n\nHere are the highlights:\n\n-Cheap: Breakfast for two can be well under $10, with lunch around the same.\n-Crowded: Get there early and expect to wait. They close pretty early on the weekends too (oddly, at 12:45pm)\n-Cash only\n-Huge portions: When ordering fries or homefries, always get the half order, unless you're a lumberjack\n-About those homefries: They're often undercooked. I've had better, believe me. My favorite things to eat in life are potato products.\n-My favorite item: hot sausage sandwich on thick Italian toast, with cheese, lettuce, tomato and mayo" -"2","Classic breakfast joint. Grimy looking hole in the wall located on one end of a seedy looking strip mall. Window is opaque due to the grease so you can't hardly see inside. On the outside, there are about a dozen people waiting to get in. When you finally do get inside, you see that there are 15 tables and a counter, all occupied by people from all walks of life.\n\nWhat's the attraction behind this flea hole? The FOOD! Lots of it and dirt cheap. I sat at a vacant stool behind the formica counter and ordered the mixed grill. Potatoes, eggs, sausage, bacon and Italian toast. A giant mound of food guaranteed to sooth any hangover. I swear the full mixed grill had two pounds of food. Neat thing is that the grill is right in front of you so you can see your potatoes and eggs frying in a pool of fresh grease. All that food, plus coffee and tip for around ten bucks. Cash only, so put that plastic away.\n\nOnly bad thing that could happen is some douche bag from the Food Network or Travel Channel will make this place famous, and then I'll never be able to get in." -"1","Some of the worst pizza I've ever had. We used a coupon from the paper for a 2 topping 8 cut Sicilian. 
First of all the pizza wasn't even cut through, and the sad attempt at cutting was so uneven that 4 of the slices were about an inch wide, while the others were about 4\"" each. The toppings were scarce, they used mini pepperoni and put maybe 8 on the whole pizza. The onions were huge chunks and the mushrooms were straight from a can. The worst part though was the thick doughy crust that tasted more like a fishy sourdough roll. I'm serious... It was so noticeable that it made me wonder if the dough was bad or if they for some weird reason put fish sauce in it. It was gross. \n\nWe also ordered steak and Italian hoagies. The veggies were old and wilted, and there was no dressing on either. The Italian had deli meat that was clearly bottom of the line and not very generous. The \""steak\"" (if you an call it that) was greyish instead of brown and looked like it was a processed meat chopped into pieces. No flavor or seasoning and the texture was reminiscent of spam. It was so bad that I only ate 1/4 of it and tossed the rest. \n\nI have ordered from here in the past and always been disappointed. I thought I would give them another try since I'd never ordered a Sicilian pizza from there. What a mistake. I will never order from them again!" -"1","Terrible service. Food unremarkable. Waiter disappeared for 45 minutes to serve larger group due to staffing mismanagement. Saved his tip by discounting meal after I complained. All and all, a very crude and unpleasant dining experience for me and my guests. Not to be repeated, never again!" diff --git a/tests/data_for_tests/io/yelp_review_polarity/test.csv b/tests/data_for_tests/io/yelp_review_polarity/test.csv deleted file mode 100755 index 95ac34f3..00000000 --- a/tests/data_for_tests/io/yelp_review_polarity/test.csv +++ /dev/null @@ -1,6 +0,0 @@ -"2","Contrary to other reviews, I have zero complaints about the service or the prices. I have been getting tire service here for the past 5 years now, and compared to my experience with places like Pep Boys, these guys are experienced and know what they're doing. \nAlso, this is one place that I do not feel like I am being taken advantage of, just because of my gender. Other auto mechanics have been notorious for capitalizing on my ignorance of cars, and have sucked my bank account dry. But here, my service and road coverage has all been well explained - and let up to me to decide. \nAnd they just renovated the waiting room. It looks a lot better than it did in previous years." -"1","Last summer I had an appointment to get new tires and had to wait a super long time. I also went in this week for them to fix a minor problem with a tire they put on. They \""fixed\"" it for free, and the very next morning I had the same issue. I called to complain, and the \""manager\"" didn't even apologize!!! So frustrated. Never going back. They seem overpriced, too." -"2","Friendly staff, same starbucks fair you get anywhere else. Sometimes the lines can get long." -"1","The food is good. Unfortunately the service is very hit or miss. The main issue seems to be with the kitchen, the waiters and waitresses are often very apologetic for the long waits and it's pretty obvious that some of them avoid the tables after taking the initial order to avoid hearing complaints." -"2","Even when we didn't have a car Filene's Basement was worth the bus trip to the Waterfront. I always find something (usually I find 3-4 things and spend about $60) and better still, I am always still wearing the clothes and shoes 3 months later. 
\n\nI kind of suspect this is the best shopping in Pittsburgh; it's much better than the usual department stores, better than Marshall's and TJ Maxx and better than the Saks downtown, even when it has a sale. Selection, bargains AND quality.\n\nI like this Filene's better than Gabriel Brothers, which are harder to get to. Gabriel Brothers are a real discount shopper's challenge and I'm afraid I didn't live in Pittsburgh long enough to develop the necessary skills . . . Filene's was still up and running in June 2007 when I left town." -"2","Picture Billy Joel's \""Piano Man\"" DOUBLED mixed with beer, a rowdy crowd, and comedy - Welcome to Sing Sing! A unique musical experience found in Homestead.\n\nIf you're looking to grab a bite to eat or a beer, come on in! Serving food and brews from Rock Bottom Brewery, Sing Sing keeps your tummy full while you listen to two (or more) amazingly talented pianists take your musical requests. They'll play anything you'd like, for tips of course. Wanting to hear Britney Spears? Toto? Duran Duran? Yep, they play that... new or old.\n\nThe crowd makes the show, so make sure you come ready for a good time. If the crowd is dead, it's harder for the Guys to get a reaction. If you're wanting to have some fun, it can be a GREAT time! It's the perfect place for Birthday parties - especially if you want to embarrass a friend. The guys will bring them up to the pianos and perform a little ditty. For being a good sport, you get the coveted Sing Sing bumper sticker. Now who wouldn't want that?\n\nDueling Pianos and brews... time to Shut Up & Sing Sing!" diff --git a/tests/data_for_tests/io/yelp_review_polarity/train.csv b/tests/data_for_tests/io/yelp_review_polarity/train.csv deleted file mode 100755 index 6b72a7d6..00000000 --- a/tests/data_for_tests/io/yelp_review_polarity/train.csv +++ /dev/null @@ -1,6 +0,0 @@ -"1","Unfortunately, the frustration of being Dr. Goldberg's patient is a repeat of the experience I've had with so many other doctors in NYC -- good doctor, terrible staff. It seems that his staff simply never answers the phone. It usually takes 2 hours of repeated calling to get an answer. Who has time for that or wants to deal with it? I have run into this problem with many other doctors and I just don't get it. You have office workers, you have patients with medical needs, why isn't anyone answering the phone? It's incomprehensible and not work the aggravation. It's with regret that I feel that I have to give Dr. Goldberg 2 stars." -"2","Been going to Dr. Goldberg for over 10 years. I think I was one of his 1st patients when he started at MHMG. He's been great over the years and is really all about the big picture. It is because of him, not my now former gyn Dr. Markoff, that I found out I have fibroids. He explores all options with you and is very patient and understanding. He doesn't judge and asks all the right questions. Very thorough and wants to be kept in the loop on every aspect of your medical health and your life." -"1","I don't know what Dr. Goldberg was like before moving to Arizona, but let me tell you, STAY AWAY from this doctor and this office. I was going to Dr. Johnson before he left and Goldberg took over when Johnson left. He is not a caring doctor. He is only interested in the co-pay and having you come in for medication refills every month. He will not give refills and could less about patients's financial situations. Trying to get your 90 days mail away pharmacy prescriptions through this guy is a joke. 
And to make matters even worse, his office staff is incompetent. 90% of the time when you call the office, they'll put you through to a voice mail, that NO ONE ever answers or returns your call. Both my adult children and husband have decided to leave this practice after experiencing such frustration. The entire office has an attitude like they are doing you a favor. Give me a break! Stay away from this doc and the practice. You deserve better and they will not be there when you really need them. I have never felt compelled to write a bad review about anyone until I met this pathetic excuse for a doctor who is all about the money." -"1","I'm writing this review to give you a heads up before you see this Doctor. The office staff and administration are very unprofessional. I left a message with multiple people regarding my bill, and no one ever called me back. I had to hound them to get an answer about my bill. \n\nSecond, and most important, make sure your insurance is going to cover Dr. Goldberg's visits and blood work. He recommended to me that I get a physical, and he knew I was a student because I told him. I got the physical done. Later, I found out my health insurance doesn't pay for preventative visits. I received an $800.00 bill for the blood work. I can't pay for my bill because I'm a student and don't have any cash flow at this current time. I can't believe the Doctor wouldn't give me a heads up to make sure my insurance would cover work that wasn't necessary and was strictly preventative. The office can't do anything to help me cover the bill. In addition, the office staff said the onus is on me to make sure my insurance covers visits. Frustrating situation!" -"2","All the food is great here. But the best thing they have is their wings. Their wings are simply fantastic!! The \""Wet Cajun\"" are by the best & most popular. I also like the seasoned salt wings. Wing Night is Monday & Wednesday night, $0.75 whole wings!\n\nThe dining area is nice. Very family friendly! The bar is very nice is well. This place is truly a Yinzer's dream!! \""Pittsburgh Dad\"" would love this place n'at!!" -"1","Wing sauce is like water. Pretty much a lot of butter and some hot sauce (franks red hot maybe). The whole wings are good size and crispy, but for $1 a wing the sauce could be better. The hot and extra hot are about the same flavor/heat. The fish sandwich is good and is a large portion, sides are decent." 
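Note on the fixtures deleted above: the mr, ohsumed, and yelp files all share one two-column CSV layout (a label field, then a single quoted text field in which "\n" is a literal two-character escape), while the peopledaily and weibo_NER files use a character-per-line CoNLL layout with a blank line between sentences. A minimal standard-library sketch of readers for both layouts follows; the function names are illustrative only, and the project's real loaders (in fastNLP.io) may differ.

import csv

def read_label_text_csv(path):
    # One sample per row: a label ("1"/"0" for mr and the yelp sets,
    # category codes such as "C23" for ohsumed) plus one quoted text
    # field where "\n" is a literal backslash-n escape.
    with open(path, encoding="utf-8", newline="") as f:
        return [(label, text.replace("\\n", "\n"))
                for label, text in csv.reader(f)]

def read_conll(path):
    # Character-per-line layout: "<char> <tag>" pairs, blank line
    # between sentences (peopledaily uses BIO tags; weibo_NER adds
    # .NAM/.NOM subtypes to the same scheme).
    sentences, chars, tags = [], [], []
    with open(path, encoding="utf-8") as f:
        for line in f:
            parts = line.split()
            if not parts:
                if chars:
                    sentences.append((chars, tags))
                    chars, tags = [], []
            else:
                char, tag = parts
                chars.append(char)
                tags.append(tag)
    if chars:  # some fixtures end without a trailing blank line
        sentences.append((chars, tags))
    return sentences

Under these assumptions, read_conll("tests/data_for_tests/io/peopledaily/train.txt") yields ([characters], [tags]) sentence pairs and read_label_text_csv("tests/data_for_tests/io/mr/train.csv") yields (label, text) samples, which is the shape the deleted loader tests consumed.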
diff --git a/tests/data_for_tests/modules/decoder/crf.json b/tests/data_for_tests/modules/decoder/crf.json deleted file mode 100644 index ff2d6689..00000000 --- a/tests/data_for_tests/modules/decoder/crf.json +++ /dev/null @@ -1 +0,0 @@ -{"bio_logits": [[[-1.8154915571212769, -1.3753865957260132, -10001.513671875, -1.619813084602356, -10001.79296875], [-1.742034673690796, -1.5048011541366577, -2.042131185531616, -1.2594754695892334, -1.6648437976837158], [-1.5522804260253906, -1.2926381826400757, -1.8607124090194702, -1.6692707538604736, -1.7734650373458862], [-1.6101375818252563, -1.3285458087921143, -1.7735439538955688, -1.5734118223190308, -1.8438279628753662], [-1.6522153615951538, -1.2640260457992554, -1.9092718362808228, -1.6192445755004883, -1.7168875932693481], [-1.4932769536972046, -1.4628725051879883, -1.9623159170150757, -1.497014045715332, -1.7177777290344238], [-1.8419824838638306, -2.1428799629211426, -1.4285861253738403, -1.2972710132598877, -1.5546820163726807], [-1.671349048614502, -1.4115079641342163, -1.624293565750122, -1.537371277809143, -1.8563929796218872], [-1.5080815553665161, -1.3281997442245483, -1.7912147045135498, -1.5656323432922363, -1.980512022972107], [-2.0562098026275635, -1.4711416959762573, -1.5297126770019531, -1.7554184198379517, -1.3744999170303345]], [[-1.3193378448486328, -1.997290849685669, -10002.0751953125, -1.3334847688674927, -10001.5712890625], [-1.229069471359253, -1.2702847719192505, -2.0717740058898926, -1.9828989505767822, -1.8136863708496094], [-1.8161871433258057, -1.4339262247085571, -1.4476666450500488, -1.8693819046020508, -1.562330722808838], [-1.897119402885437, -1.5767627954483032, -1.54145348072052, -1.6185026168823242, -1.4649395942687988], [-1.8498220443725586, -1.264282464981079, -1.7192784547805786, -1.8041315078735352, -1.530255913734436], [-1.1517643928527832, -1.6473538875579834, -1.5833101272583008, -1.9973593950271606, -1.894622802734375], [-1.7796387672424316, -1.8036197423934937, -1.2666513919830322, -1.4641741514205933, -1.8736846446990967], [-1.555580496788025, -1.5448863506317139, -1.609066128730774, -1.5487936735153198, -1.8138916492462158], [-1.8701002597808838, -2.0567376613616943, -1.6318782567977905, -1.2336504459381104, -1.4643338918685913], [-1.6615228652954102, -1.9764257669448853, -1.277781367301941, -1.3614437580108643, -1.990394949913025]], [[-1.74202299118042, -1.659791111946106, -10001.9951171875, -1.0417697429656982, -10001.9248046875], [-1.2423228025436401, -1.7404581308364868, -1.7569608688354492, -1.5077661275863647, -1.9528108835220337], [-1.7840592861175537, -1.50230872631073, -1.4460601806640625, -1.9473626613616943, -1.4641118049621582], [-1.6109998226165771, -2.0336639881134033, -1.3807575702667236, -1.221280574798584, -2.0938124656677246], [-1.8956525325775146, -1.6966334581375122, -1.8089725971221924, -1.9510140419006348, -1.020185947418213], [-1.7131900787353516, -1.7260419130325317, -2.161870241165161, -1.2767468690872192, -1.3956587314605713], [-1.7567639350891113, -1.1352611780166626, -1.7109652757644653, -1.8825695514678955, -1.7534843683242798], [-1.826012372970581, -1.9964908361434937, -1.7898284196853638, -1.2279980182647705, -1.413594365119934], [-1.522060513496399, -1.56121826171875, -1.5711766481399536, -1.4620665311813354, -2.0226776599884033], [-1.3122025728225708, -2.0931777954101562, -1.8858696222305298, -1.831908106803894, -1.2184979915618896]], [[-1.3956559896469116, -1.8315693140029907, -10001.48046875, -1.844576358795166, -10001.5771484375], [-1.562046766281128, 
-1.7216087579727173, -1.5044764280319214, -1.4362742900848389, -1.8867106437683105], [-1.5304349660873413, -1.5527287721633911, -1.5590341091156006, -1.6369349956512451, -1.7899152040481567], [-1.6007282733917236, -2.054649829864502, -1.9757367372512817, -1.4219664335250854, -1.2371348142623901], [-1.841418981552124, -1.8178046941757202, -1.5939710140228271, -1.2179311513900757, -1.7144266366958618], [-1.6715152263641357, -1.5060933828353882, -1.6629694700241089, -1.633326530456543, -1.5827515125274658], [-1.9413940906524658, -1.853175163269043, -1.6390701532363892, -1.2217824459075928, -1.5564061403274536], [-1.746218204498291, -1.7089520692825317, -1.6738371849060059, -1.627657175064087, -1.344780445098877], [-1.1776174306869507, -1.629957675933838, -1.79096519947052, -1.7566864490509033, -1.853833556175232], [-1.4880272150039673, -1.4722591638565063, -1.631064534187317, -1.9562634229660034, -1.5718109607696533]]], "bio_scores": [-1.3754, -4.5403, -8.7047, -12.8693], "bio_path": [[1], [3, 0, 1, 1], [3, 0, 1, 3, 4, 3, 1, 3], [0, 1, 1, 0, 3, 0, 3, 0, 1, 0]], "bio_trans_m": [[-0.095858134329319, 0.01011368352919817, -0.33539193868637085, -0.20200660824775696, 0.136741504073143], [0.5436117649078369, 0.37222158908843994, -0.15174923837184906, 0.10455792397260666, -0.35702475905418396], [0.3681447505950928, -0.6996435523033142, -0.002348324516788125, 0.5087339282035828, -0.08750446885824203], [0.6505969762802124, 0.0064192176796495914, -0.10901711881160736, -0.24849674105644226, -0.1375938355922699], [-0.019853945821523666, -0.9098508954048157, 0.06740495562553406, 0.2244909256696701, -0.29204151034355164]], "bio_seq_lens": [1, 4, 8, 10], "bmes_logits": [[[-10002.5830078125, -20002.54296875, -10001.9765625, -2.033155679702759, -10001.712890625, -20001.68359375, -10002.4130859375, -2.1159744262695312], [-1.870416283607483, -2.2075278759002686, -1.9922529458999634, -2.1696650981903076, -2.4956214427948, -2.1040704250335693, -2.065218925476074, -1.869700312614441], [-1.8947919607162476, -2.398089647293091, -2.1316606998443604, -1.6458176374435425, -2.001098871231079, -2.362668514251709, -2.513232707977295, -1.9884836673736572], [-1.5058399438858032, -2.3359181880950928, -2.382275342941284, -2.4573683738708496, -1.7870502471923828, -2.342841148376465, -2.1982951164245605, -2.0483522415161133], [-2.0845396518707275, -2.0447516441345215, -1.7635326385498047, -1.9375617504119873, -2.530120611190796, -1.8380637168884277, -2.099860906600952, -2.666682481765747], [-2.299673557281494, -2.3165550231933594, -1.9403637647628784, -1.8729832172393799, -1.8798956871032715, -1.8799573183059692, -2.2314014434814453, -2.39471173286438], [-1.9613308906555176, -2.136000633239746, -2.1178860664367676, -2.1553683280944824, -1.7840471267700195, -2.4148807525634766, -2.4621479511260986, -1.817263126373291], [-2.056917428970337, -2.5026133060455322, -1.9233015775680542, -2.0078444480895996, -2.064028024673462, -1.776533842086792, -2.3748488426208496, -2.114560127258301], [-2.3671767711639404, -1.7896978855133057, -2.416537284851074, -2.26574444770813, -2.2460145950317383, -1.7739624977111816, -1.9555294513702393, -2.045677661895752], [-2.3571174144744873, -1.820650577545166, -2.2781612873077393, -1.9325084686279297, -1.863953948020935, -2.2260994911193848, -2.5020244121551514, -1.8891260623931885]], [[-2.0461926460266113, -10002.0625, -10001.712890625, -2.251368761062622, -2.2985825538635254, -10002.146484375, -10002.0185546875, -2.225799560546875], [-1.9879356622695923, -2.4706358909606934, -2.3151662349700928, 
-1.5818747282028198, -2.329188346862793, -2.1170380115509033, -2.159011125564575, -1.9593485593795776], [-2.2397706508636475, -2.2388737201690674, -1.826286792755127, -2.444268226623535, -1.7793290615081787, -2.402519941329956, -1.8540253639221191, -2.09319806098938], [-1.7938345670700073, -2.525993585586548, -1.9962739944458008, -1.9414381980895996, -2.5183513164520264, -2.5057737827301025, -1.7933388948440552, -1.925837755203247], [-2.2330663204193115, -2.098536491394043, -1.9872602224349976, -1.7660422325134277, -2.5269722938537598, -1.9648237228393555, -1.80750572681427, -2.551790475845337], [-1.802718162536621, -2.4936702251434326, -1.846991777420044, -2.6299049854278564, -1.8180453777313232, -2.010246992111206, -1.9285591840744019, -2.5121750831604004], [-1.7665618658065796, -2.2445054054260254, -1.822519063949585, -2.5471863746643066, -2.719733715057373, -1.9708809852600098, -1.7871110439300537, -2.2026400566101074], [-2.2046854496002197, -2.375577926635742, -1.9162014722824097, -2.397550344467163, -1.9547137022018433, -1.759222149848938, -1.818831443786621, -2.4931435585021973], [-1.9187703132629395, -2.5046753883361816, -1.871201515197754, -2.3421711921691895, -2.372368335723877, -1.883248209953308, -1.8868682384490967, -2.0830271244049072], [-2.406679630279541, -1.7564219236373901, -2.340674877166748, -1.8392919301986694, -2.3711328506469727, -1.913435935974121, -2.221808433532715, -2.019878625869751]], [[-10001.7607421875, -20002.30078125, -10001.9677734375, -1.7931804656982422, -10002.2451171875, -20002.15234375, -10002.208984375, -2.4127495288848877], [-2.162931442260742, -2.121459484100342, -2.4020097255706787, -2.5620131492614746, -1.7713403701782227, -2.1945695877075195, -1.8392865657806396, -1.8513271808624268], [-2.2151875495910645, -1.9279260635375977, -2.24403977394104, -2.1955597400665283, -2.2283377647399902, -1.7366830110549927, -2.634793519973755, -1.757084608078003], [-1.813708782196045, -1.93169105052948, -2.2419192790985107, -2.307635545730591, -2.19914174079895, -2.070988178253174, -2.0030927658081055, -2.1678688526153564], [-2.118651866912842, -1.867727518081665, -2.312565326690674, -2.274792194366455, -1.9973562955856323, -2.000102996826172, -1.8425841331481934, -2.3635623455047607], [-2.435579538345337, -1.7167878150939941, -2.3040761947631836, -1.657408595085144, -2.462364912033081, -2.2767324447631836, -1.7957141399383545, -2.425132989883423], [-1.806656837463379, -1.7759110927581787, -2.5295629501342773, -1.9216285943984985, -2.2615668773651123, -1.8556532859802246, -2.4842538833618164, -2.3384106159210205], [-1.9859262704849243, -1.6575560569763184, -2.2854154109954834, -1.9267034530639648, -2.5214226245880127, -2.0166244506835938, -2.479127883911133, -2.0595011711120605], [-2.0371243953704834, -2.2420313358306885, -2.0946967601776123, -2.2463889122009277, -1.8954271078109741, -1.942257285118103, -2.0445871353149414, -2.1946396827697754], [-2.0210611820220947, -2.362877130508423, -1.9862446784973145, -1.8275481462478638, -2.140009880065918, -1.869648814201355, -2.6818318367004395, -2.0021097660064697]], [[-1.986312985420227, -10002.50390625, -10002.0361328125, -1.908732295036316, -2.21740984916687, -10002.1318359375, -10002.1044921875, -1.87873113155365], [-1.9292036294937134, -2.163956880569458, -2.3703503608703613, -1.939669132232666, -1.8776776790618896, -2.4469380378723145, -2.423905611038208, -1.7453217506408691], [-2.0289347171783447, -2.520860195159912, -2.5013701915740967, -2.078547477722168, -1.9699862003326416, -1.8206181526184082, 
-1.7796630859375, -2.1984922885894775], [-1.8523262739181519, -1.978093147277832, -2.558772087097168, -2.498471260070801, -1.9756053686141968, -1.8080697059631348, -1.9115748405456543, -2.357147216796875], [-2.314960479736328, -2.2433876991271973, -1.6113512516021729, -2.19716477394104, -1.78402578830719, -2.343987226486206, -2.3425848484039307, -2.084155797958374], [-2.002289056777954, -2.2630276679992676, -1.887984275817871, -2.044983386993408, -2.217646360397339, -1.9103771448135376, -2.154231548309326, -2.2321436405181885], [-2.199540853500366, -2.063075065612793, -1.813851237297058, -2.3199379444122314, -1.7984188795089722, -2.4952447414398193, -2.4516515731811523, -1.7922154664993286], [-2.509786367416382, -1.79443359375, -1.8561275005340576, -2.2977330684661865, -2.2080044746398926, -1.7294546365737915, -2.4617154598236084, -2.0944302082061768], [-2.491340160369873, -2.403804063796997, -1.8452543020248413, -1.6882175207138062, -2.5513625144958496, -2.294516086578369, -1.9522627592086792, -1.8124374151229858], [-2.1524035930633545, -2.2049806118011475, -2.3353655338287354, -2.317572832107544, -2.2914233207702637, -1.8211665153503418, -1.69517982006073, -2.0270023345947266]]], "bmes_scores": [-2.0332, -6.1623, -1.7932, -16.7561], "bmes_path": [[3], [7, 3, 4, 6], [3], [3, 4, 5, 6, 7, 3, 4, 5, 6, 7]], "bmes_trans_m": [[0.47934335470199585, -0.2151593416929245, -0.12467780709266663, -0.44244644045829773, 0.16480575501918793, -0.006573359947651625, -1.187401294708252, -0.17424514889717102], [-0.03494556248188019, -0.8173441290855408, -0.2682552933692932, 0.18933893740177155, 0.2203899323940277, 0.3905894160270691, -0.007638207171112299, 0.19527725875377655], [-0.2779119908809662, -0.37053248286247253, 0.34394705295562744, -0.26433902978897095, -0.0001995275670196861, -0.39156094193458557, -0.035449881106615067, 0.02454843744635582], [-0.01391045656055212, 0.3419516384601593, -0.48559853434562683, -0.5893992781639099, 0.9119477272033691, 0.1731061041355133, -0.15039317309856415, 0.1523006409406662], [0.4866299033164978, 0.28264448046684265, -0.25895795226097107, 0.0404033362865448, -0.060920555144548416, 0.12364576756954193, 0.1294233351945877, 0.2434755265712738], [-0.04159824922680855, 0.25353407859802246, 0.12913571298122406, -0.036356933414936066, -0.18522876501083374, -0.5329958200454712, 0.2505933344364166, 0.26512718200683594], [-0.2509276270866394, 0.3572998046875, 0.01873799040913582, -0.30620086193084717, -0.09893298894166946, -0.37399813532829285, -0.6530448198318481, -0.17514197528362274], [-0.29702028632164, 0.680363118648529, -0.6010262370109558, 0.17669369280338287, 0.45010149478912354, -0.1026386097073555, 0.34120017290115356, -0.04910941794514656]], "bmes_seq_lens": [1, 4, 1, 10]} \ No newline at end of file diff --git a/tests/data_for_tests/people.txt b/tests/data_for_tests/people.txt deleted file mode 100644 index 9ef0de6d..00000000 --- a/tests/data_for_tests/people.txt +++ /dev/null @@ -1,307 +0,0 @@ -迈 B-v -向 E-v -充 B-v -满 E-v -希 B-n -望 E-n -的 S-u -新 S-a -世 B-n -纪 E-n -— B-w -— E-w -一 B-t -九 M-t -九 M-t -八 M-t -年 E-t -新 B-t -年 E-t -讲 B-n -话 E-n -( S-w -附 S-v -图 B-n -片 E-n -1 S-m -张 S-q -) S-w - -中 B-nt -共 M-nt -中 M-nt -央 E-nt -总 B-n -书 M-n -记 E-n -、 S-w -国 B-n -家 E-n -主 B-n -席 E-n -江 B-nr -泽 M-nr -民 E-nr - -( S-w -一 B-t -九 M-t -九 M-t -七 M-t -年 E-t -十 B-t -二 M-t -月 E-t -三 B-t -十 M-t -一 M-t -日 E-t -) S-w - -1 B-t -2 M-t -月 E-t -3 B-t -1 M-t -日 E-t -, S-w -迈 B-v -向 E-v -充 B-v -满 E-v -希 B-n -望 E-n -的 S-u -新 S-a -世 B-n -纪 E-n -— B-w -— E-w -一 B-t -九 M-t -九 M-t -八 M-t -年 E-t 
-新 B-t -年 E-t -讲 B-n -话 E-n -( S-w -附 S-v -图 B-n -片 E-n -1 S-m -张 S-q -) S-w - -迈 B-v -向 E-v -充 B-v -满 E-v -希 B-n -望 E-n -的 S-u -新 S-a -世 B-n -纪 E-n -— B-w -— E-w -一 B-t -九 M-t -九 M-t -八 M-t -年 E-t -新 B-t -年 E-t -讲 B-n -话 E-n -( S-w -附 S-v -图 B-n -片 E-n -1 S-m -张 S-q -) S-w - -迈 B-v -向 E-v -充 B-v -满 E-v -希 B-n -望 E-n -的 S-u -新 S-a -世 B-n -纪 E-n -— B-w -— E-w -一 B-t -九 M-t -九 M-t -八 M-t -年 E-t -新 B-t -年 E-t -讲 B-n -话 E-n -( S-w -附 S-v -图 B-n -片 E-n -1 S-m -张 S-q -) S-w - -迈 B-v -向 E-v -充 B-v -满 E-v -希 B-n -望 E-n -的 S-u -新 S-a -世 B-n -纪 E-n -— B-w -— E-w -一 B-t -九 M-t -九 M-t -八 M-t -年 E-t -新 B-t -年 E-t -讲 B-n -话 E-n -( S-w -附 S-v -图 B-n -片 E-n -1 S-m -张 S-q -) S-w - -中 B-nt -共 M-nt -中 M-nt -央 E-nt -总 B-n -书 M-n -记 E-n -、 S-w -国 B-n -家 E-n -主 B-n -席 E-n -江 B-nr -泽 M-nr -民 E-nr - -( S-w -一 B-t -九 M-t -九 M-t -七 M-t -年 E-t -十 B-t -二 M-t -月 E-t -三 B-t -十 M-t -一 M-t -日 E-t -) S-w - -1 B-t -2 M-t -月 E-t -3 B-t -1 M-t -日 E-t -, S-w -迈 B-v -向 E-v -充 B-v -满 E-v -希 B-n -望 E-n -的 S-u -新 S-a -世 B-n -纪 E-n -— B-w -— E-w -一 B-t -九 M-t -九 M-t -八 M-t -年 E-t -新 B-t -年 E-t -讲 B-n -话 E-n -( S-w -附 S-v -图 B-n -片 E-n -1 S-m -张 S-q -) S-w - -迈 B-v -向 E-v -充 B-v -满 E-v -希 B-n -望 E-n -的 S-u -新 S-a -世 B-n -纪 E-n -— B-w -— E-w -一 B-t -九 M-t -九 M-t -八 M-t -年 E-t -新 B-t -年 E-t -讲 B-n -话 E-n -( S-w -附 S-v -图 B-n -片 E-n -1 S-m -张 S-q -) S-w - -迈 B-v -向 E-v -充 B-v -满 E-v -希 B-n -望 E-n -的 S-u -新 S-a -世 B-n -纪 E-n -— B-w -— E-w -一 B-t -九 M-t -九 M-t -八 M-t -年 E-t -新 B-t -年 E-t -讲 B-n -话 E-n -( S-w -附 S-v -图 B-n -片 E-n -1 S-m -张 S-q -) S-w \ No newline at end of file diff --git a/tests/data_for_tests/people_daily_raw.txt b/tests/data_for_tests/people_daily_raw.txt deleted file mode 100644 index 8255edb6..00000000 --- a/tests/data_for_tests/people_daily_raw.txt +++ /dev/null @@ -1,27 +0,0 @@ -19980101-01-001-001/m 迈向/v 充满/v 希望/n 的/u 新/a 世纪/n ——/w 一九九八年/t 新年/t 讲话/n (/w 附/v 图片/n 1/m 张/q )/w -19980101-01-001-002/m 中共中央/nt 总书记/n 、/w 国家/n 主席/n 江/nr 泽民/nr -19980101-01-001-003/m (/w 一九九七年/t 十二月/t 三十一日/t )/w -19980101-01-001-004/m 12月/t 31日/t ,/w 中共中央/nt 总书记/n 、/w 国家/n 主席/n 江/nr 泽民/nr 发表/v 1998年/t 新年/t 讲话/n 《/w 迈向/v 充满/v 希望/n 的/u 新/a 世纪/n 》/w 。/w (/w 新华社/nt 记者/n 兰/nr 红光/nr 摄/Vg )/w -19980101-01-001-005/m 同胞/n 们/k 、/w 朋友/n 们/k 、/w 女士/n 们/k 、/w 先生/n 们/k :/w -19980101-01-001-006/m 在/p 1998年/t 来临/v 之际/f ,/w 我/r 十分/m 高兴/a 地/u 通过/p [中央/n 人民/n 广播/vn 电台/n]nt 、/w [中国/ns 国际/n 广播/vn 电台/n]nt 和/c [中央/n 电视台/n]nt ,/w 向/p 全国/n 各族/r 人民/n ,/w 向/p [香港/ns 特别/a 行政区/n]ns 同胞/n 、/w 澳门/ns 和/c 台湾/ns 同胞/n 、/w 海外/s 侨胞/n ,/w 向/p 世界/n 各国/r 的/u 朋友/n 们/k ,/w 致以/v 诚挚/a 的/u 问候/vn 和/c 良好/a 的/u 祝愿/vn !/w -19980101-01-001-007/m 1997年/t ,/w 是/v 中国/ns 发展/vn 历史/n 上/f 非常/d 重要/a 的/u 很/d 不/d 平凡/a 的/u 一/m 年/q 。/w 中国/ns 人民/n 决心/d 继承/v 邓/nr 小平/nr 同志/n 的/u 遗志/n ,/w 继续/v 把/p 建设/v 有/v 中国/ns 特色/n 社会主义/n 事业/n 推向/v 前进/v 。/w [中国/ns 政府/n]nt 顺利/ad 恢复/v 对/p 香港/ns 行使/v 主权/n ,/w 并/c 按照/p “/w 一国两制/j ”/w 、/w “/w 港人治港/l ”/w 、/w 高度/d 自治/v 的/u 方针/n 保持/v 香港/ns 的/u 繁荣/an 稳定/an 。/w [中国/ns 共产党/n]nt 成功/a 地/u 召开/v 了/u 第十五/m 次/q 全国/n 代表大会/n ,/w 高举/v 邓小平理论/n 伟大/a 旗帜/n ,/w 总结/v 百年/m 历史/n ,/w 展望/v 新/a 的/u 世纪/n ,/w 制定/v 了/u 中国/ns 跨/v 世纪/n 发展/v 的/u 行动/vn 纲领/n 。/w -19980101-01-001-008/m 在/p 这/r 一/m 年/q 中/f ,/w 中国/ns 的/u 改革/vn 开放/vn 和/c 现代化/vn 建设/vn 继续/v 向前/v 迈进/v 。/w 国民经济/n 保持/v 了/u “/w 高/a 增长/vn 、/w 低/a 通胀/j ”/w 的/u 良好/a 发展/vn 态势/n 。/w 农业/n 生产/vn 再次/d 获得/v 好/a 的/u 收成/n ,/w 企业/n 改革/vn 继续/v 深化/v ,/w 人民/n 生活/vn 进一步/d 改善/v 。/w 对外/vn 经济/n 技术/n 合作/vn 与/c 交流/vn 不断/d 扩大/v 。/w 民主/a 法制/n 建设/vn 、/w 精神文明/n 建设/vn 和/c 其他/r 各项/r 事业/n 都/d 有/v 新/a 的/u 进展/vn 。/w 我们/r 十分/m 关注/v 最近/t 一个/m 时期/n 一些/m 国家/n 和/c 地区/n 发生/v 的/u 金融/n 
风波/n ,/w 我们/r 相信/v 通过/p 这些/r 国家/n 和/c 地区/n 的/u 努力/an 以及/c 有关/v 的/u 国际/n 合作/vn ,/w 情况/n 会/v 逐步/d 得到/v 缓解/vn 。/w 总的来说/c ,/w 中国/ns 改革/v 和/c 发展/v 的/u 全局/n 继续/v 保持/v 了/u 稳定/an 。/w -19980101-01-001-009/m 在/p 这/r 一/m 年/q 中/f ,/w 中国/ns 的/u 外交/n 工作/vn 取得/v 了/u 重要/a 成果/n 。/w 通过/p 高层/n 互访/v ,/w 中国/ns 与/p 美国/ns 、/w 俄罗斯/ns 、/w 法国/ns 、/w 日本/ns 等/u 大国/n 确定/v 了/u 双方/n 关系/n 未来/t 发展/v 的/u 目标/n 和/c 指导/vn 方针/n 。/w 中国/ns 与/p 周边/n 国家/n 和/c 广大/b 发展中国家/l 的/u 友好/a 合作/vn 进一步/d 加强/v 。/w 中国/ns 积极/ad 参与/v [亚/j 太/j 经合/j 组织/n]nt 的/u 活动/vn ,/w 参加/v 了/u 东盟/ns —/w 中/j 日/j 韩/j 和/c 中国/ns —/w 东盟/ns 首脑/n 非正式/b 会晤/vn 。/w 这些/r 外交/n 活动/vn ,/w 符合/v 和平/n 与/c 发展/v 的/u 时代/n 主题/n ,/w 顺应/v 世界/n 走向/v 多极化/v 的/u 趋势/n ,/w 对于/p 促进/v 国际/n 社会/n 的/u 友好/a 合作/vn 和/c 共同/b 发展/vn 作出/v 了/u 积极/a 的/u 贡献/n 。/w -19980101-01-001-010/m 1998年/t ,/w 中国/ns 人民/n 将/d 满怀信心/l 地/u 开创/v 新/a 的/u 业绩/n 。/w 尽管/c 我们/r 在/p 经济/n 社会/n 发展/v 中/f 还/d 面临/v 不少/m 困难/an ,/w 但/c 我们/r 有/v 邓小平理论/n 的/u 指引/vn ,/w 有/v 改革/v 开放/v 近/a 20/m 年/q 来/f 取得/v 的/u 伟大/a 成就/n 和/c 积累/v 的/u 丰富/a 经验/n ,/w 还/d 有/v 其他/r 的/u 各种/r 有利/a 条件/n ,/w 我们/r 一定/d 能够/v 克服/v 这些/r 困难/an ,/w 继续/v 稳步前进/l 。/w 只要/c 我们/r 进一步/d 解放思想/i ,/w 实事求是/i ,/w 抓住/v 机遇/n ,/w 开拓进取/l ,/w 建设/v 有/v 中国/ns 特色/n 社会主义/n 的/u 道路/n 就/c 会/v 越/d 走/v 越/d 宽广/a 。/w -19980101-01-001-011/m 实现/v 祖国/n 的/u 完全/a 统一/vn ,/w 是/v 海内外/s 全体/n 中国/ns 人/n 的/u 共同/b 心愿/n 。/w 通过/p 中/j 葡/j 双方/n 的/u 合作/vn 和/c 努力/an ,/w 按照/p “/w 一国两制/j ”/w 方针/n 和/c 澳门/ns 《/w 基本法/n 》/w ,/w 1999年/t 12月/t 澳门/ns 的/u 回归/vn 一定/d 能够/v 顺利/ad 实现/v 。/w -19980101-01-001-012/m 台湾/ns 是/v 中国/ns 领土/n 不可分割/l 的/u 一/m 部分/n 。/w 完成/v 祖国/n 统一/vn ,/w 是/v 大势所趋/i ,/w 民心所向/l 。/w 任何/r 企图/v 制造/v “/w 两/m 个/q 中国/ns ”/w 、/w “/w 一中一台/j ”/w 、/w “/w 台湾/ns 独立/v ”/w 的/u 图谋/n ,/w 都/d 注定/v 要/v 失败/v 。/w 希望/v 台湾/ns 当局/n 以/p 民族/n 大义/n 为重/v ,/w 拿/v 出/v 诚意/n ,/w 采取/v 实际/a 的/u 行动/vn ,/w 推动/v 两岸/n 经济/n 文化/n 交流/vn 和/c 人员/n 往来/vn ,/w 促进/v 两岸/n 直接/ad 通邮/v 、/w 通航/v 、/w 通商/v 的/u 早日/d 实现/v ,/w 并/c 尽早/d 回应/v 我们/r 发出/v 的/u 在/p 一个/m 中国/ns 的/u 原则/n 下/f 两岸/n 进行/v 谈判/vn 的/u 郑重/a 呼吁/vn 。/w -19980101-01-001-013/m 环顾/v 全球/n ,/w 日益/d 密切/a 的/u 世界/n 经济/n 联系/vn ,/w 日新月异/i 的/u 科技/n 进步/vn ,/w 正在/d 为/p 各国/r 经济/n 的/u 发展/vn 提供/v 历史/n 机遇/n 。/w 但是/c ,/w 世界/n 还/d 不/d 安宁/a 。/w 南北/f 之间/f 的/u 贫富/n 差距/n 继续/v 扩大/v ;/w 局部/n 冲突/vn 时有发生/l ;/w 不/d 公正/a 不/d 合理/a 的/u 旧/a 的/u 国际/n 政治/n 经济/n 秩序/n 还/d 没有/v 根本/a 改变/vn ;/w 发展中国家/l 在/p 激烈/a 的/u 国际/n 经济/n 竞争/vn 中/f 仍/d 处于/v 弱势/n 地位/n ;/w 人类/n 的/u 生存/vn 与/c 发展/vn 还/d 面临/v 种种/q 威胁/vn 和/c 挑战/vn 。/w 和平/n 与/c 发展/vn 的/u 前景/n 是/v 光明/a 的/u ,/w 21/m 世纪/n 将/d 是/v 充满/v 希望/n 的/u 世纪/n 。/w 但/c 前进/v 的/u 道路/n 不/d 会/v 也/d 不/d 可能/v 一帆风顺/i ,/w 关键/n 是/v 世界/n 各国/r 人民/n 要/v 进一步/d 团结/a 起来/v ,/w 共同/d 推动/v 早日/d 建立/v 公正/a 合理/a 的/u 国际/n 政治/n 经济/n 新/a 秩序/n 。/w -19980101-01-001-014/m [中国/ns 政府/n]nt 将/d 继续/v 坚持/v 奉行/v 独立自主/i 的/u 和平/n 外交/n 政策/n ,/w 在/p 和平共处/l 五/m 项/q 原则/n 的/u 基础/n 上/f 努力/ad 发展/v 同/p 世界/n 各国/r 的/u 友好/a 关系/n 。/w 中国/ns 愿意/v 加强/v 同/p 联合国/nt 和/c 其他/r 国际/n 组织/n 的/u 协调/vn ,/w 促进/v 在/p 扩大/v 经贸/j 科技/n 交流/vn 、/w 保护/v 环境/n 、/w 消除/v 贫困/an 、/w 打击/v 国际/n 犯罪/vn 等/u 方面/n 的/u 国际/n 合作/vn 。/w 中国/ns 永远/d 是/v 维护/v 世界/n 和平/n 与/c 稳定/an 的/u 重要/a 力量/n 。/w 中国/ns 人民/n 愿/v 与/p 世界/n 各国/r 人民/n 一道/d ,/w 为/p 开创/v 持久/a 和平/n 、/w 共同/d 发展/v 的/u 新/a 世纪/n 而/c 不懈努力/l !/w -19980101-01-001-015/m 在/p 这/r 辞旧迎新/l 的/u 美好/a 时刻/n ,/w 我/r 祝/v 大家/r 新年/t 快乐/a ,/w 家庭/n 幸福/a !/w -19980101-01-001-016/m 谢谢/v !/w (/w 新华社/nt 北京/ns 12月/t 31日/t 电/n )/w - -19980101-01-002-001/m 在/p 十五大/j 精神/n 指引/vn 下/f 胜利/vd 前进/v ——/w 元旦/t 献辞/n -19980101-01-002-002/m 我们/r 即将/d 以/p 丰收/vn 的/u 喜悦/an 送/v 走/v 牛年/t ,/w 以/p 昂扬/a 的/u 斗志/n 迎来/v 虎年/t 。/w 我们/r 伟大/a 祖国/n 在/p 新/a 的/u 一/m 年/q ,/w 将/d 是/v 充满/v 生机/n 、/w 充满/v 
希望/n 的/u 一/m 年/q 。/w -19980101-01-002-003/m 刚刚/d 过去/v 的/u 一/m 年/q ,/w 大气磅礴/i ,/w 波澜壮阔/i 。/w 在/p 这/r 一/m 年/q ,/w 以/p 江/nr 泽民/nr 同志/n 为/v 核心/n 的/u 党中央/nt ,/w 继承/v 邓/nr 小平/nr 同志/n 的/u 遗志/n ,/w 高举/v 邓小平理论/n 的/u 伟大/a 旗帜/n ,/w 领导/v 全党/n 和/c 全国/n 各族/r 人民/n 坚定不移/i 地/u 沿着/p 建设/v 有/v 中国/ns 特色/n 社会主义/n 道路/n 阔步/d 前进/v ,/w 写/v 下/v 了/u 改革/v 开放/v 和/c 社会主义/n 现代化/vn 建设/vn 的/u 辉煌/a 篇章/n 。/w 顺利/a 地/u 恢复/v 对/p 香港/ns 行使/v 主权/n ,/w 胜利/v 地/u 召开/v 党/n 的/u 第十五/m 次/q 全国/n 代表大会/n ———/w 两/m 件/q 大事/n 办/v 得/u 圆满/a 成功/a 。/w 国民经济/n 稳中求进/l ,/w 国家/n 经济/n 实力/n 进一步/d 增强/v ,/w 人民/n 生活/vn 继续/v 改善/v ,/w 对外/vn 经济/n 技术/n 交流/vn 日益/d 扩大/v 。/w 在/p 国际/n 金融/n 危机/n 的/u 风浪/n 波及/v 许多/m 国家/n 的/u 情况/n 下/f ,/w 我国/n 保持/v 了/u 金融/n 形势/n 和/c 整个/b 经济/n 形势/n 的/u 稳定/a 发展/vn 。/w 社会主义/n 精神文明/n 建设/vn 和/c 民主/a 法制/n 建设/vn 取得/v 新/a 的/u 成绩/n ,/w 各项/r 社会/n 事业/n 全面/ad 进步/v 。/w 外交/n 工作/vn 取得/v 可喜/a 的/u 突破/vn ,/w 我国/n 的/u 国际/n 地位/n 和/c 国际/n 威望/n 进一步/d 提高/v 。/w 实践/v 使/v 亿万/m 人民/n 对/p 邓小平理论/n 更加/d 信仰/v ,/w 对/p 以/p 江/nr 泽民/nr 同志/n 为/v 核心/n 的/u 党中央/nt 更加/d 信赖/v ,/w 对/p 伟大/a 祖国/n 的/u 光辉/n 前景/n 更加/d 充满/v 信心/n 。/w -19980101-01-002-004/m 1998年/t ,/w 是/v 全面/ad 贯彻/v 落实/v 党/n 的/u 十五大/j 提出/v 的/u 任务/n 的/u 第一/m 年/q ,/w 各/r 条/q 战线/n 改革/v 和/c 发展/v 的/u 任务/n 都/d 十分/m 繁重/a ,/w 有/v 许多/m 深/a 层次/n 的/u 矛盾/an 和/c 问题/n 有待/v 克服/v 和/c 解决/v ,/w 特别/d 是/v 国有/vn 企业/n 改革/vn 已经/d 进入/v 攻坚/vn 阶段/n 。/w 我们/r 必须/d 进一步/d 深入/ad 学习/v 和/c 掌握/v 党/n 的/u 十五大/j 精神/n ,/w 统揽全局/l ,/w 精心/ad 部署/v ,/w 狠抓/v 落实/v ,/w 团结/a 一致/a ,/w 艰苦奋斗/i ,/w 开拓/v 前进/v ,/w 为/p 夺取/v 今年/t 改革/v 开放/v 和/c 社会主义/n 现代化/vn 建设/vn 的/u 新/a 胜利/vn 而/c 奋斗/v 。/w -19980101-01-002-005/m 今年/t 是/v 党/n 的/u 十一/m 届/q 三中全会/j 召开/v 20/m 周年/q ,/w 是/v 我们/r 党/n 和/c 国家/n 实现/v 伟大/a 的/u 历史/n 转折/vn 、/w 进入/v 改革/vn 开放/vn 历史/n 新/a 时期/n 的/u 20/m 周年/q 。/w 在/p 新/a 的/u 一/m 年/q 里/f ,/w 大力/d 发扬/v 十一/m 届/q 三中全会/j 以来/f 我们/r 党/n 所/u 恢复/v 的/u 优良/z 传统/n 和/c 在/p 新/a 的/u 历史/n 条件/n 下/f 形成/v 的/u 优良/z 作风/n ,/w 对于/p 完成/v 好/a 今年/t 的/u 各项/r 任务/n 具有/v 十分/m 重要/a 的/u 意义/n 。/w -19980101-01-002-006/m 我们/r 要/v 更/d 好/a 地/u 坚持/v 解放思想/i 、/w 实事求是/i 的/u 思想/n 路线/n 。/w 解放思想/i 、/w 实事求是/i ,/w 是/v 邓小平理论/n 的/u 精髓/n 。/w 实践/v 证明/v ,/w 只有/c 解放思想/i 、/w 实事求是/i ,/w 才/c 能/v 冲破/v 各种/r 不/d 切合/v 实际/n 的/u 或者/c 过时/a 的/u 观念/n 的/u 束缚/vn ,/w 真正/d 做到/v 尊重/v 、/w 认识/v 和/c 掌握/v 客观/a 规律/n ,/w 勇于/v 突破/v ,/w 勇于/v 创新/v ,/w 不断/d 开创/v 社会主义/n 现代化/vn 建设/vn 的/u 新/a 局面/n 。/w 党/n 的/u 十五大/j 是/v 我们/r 党/n 解放思想/i 、/w 实事求是/i 的/u 新/a 的/u 里程碑/n 。/w 进一步/d 认真/ad 学习/v 和/c 掌握/v 十五大/j 精神/n ,/w 解放思想/i 、/w 实事求是/i ,/w 我们/r 的/u 各项/r 事业/n 就/d 能/v 结/v 出/v 更加/d 丰硕/a 的/u 成果/n 。/w -19980101-01-002-007/m 我们/r 要/v 更/d 好/a 地/u 坚持/v 以/p 经济/n 建设/vn 为/v 中心/n 。/w 各项/r 工作/vn 必须/d 以/p 经济/n 建设/vn 为/v 中心/n ,/w 是/v 邓小平理论/n 的/u 基本/a 观点/n ,/w 是/v 党/n 的/u 基本/a 路线/n 的/u 核心/n 内容/n ,/w 近/a 20/m 年/q 来/f 的/u 实践/vn 证明/v ,/w 坚持/v 这个/r 中心/n ,/w 是/v 完全/ad 正确/a 的/u 。/w 今后/t ,/w 我们/r 能否/v 把/p 建设/v 有/v 中国/ns 特色/n 社会主义/n 伟大/a 事业/n 全面/ad 推向/v 21/m 世纪/n ,/w 关键/n 仍然/d 要/v 看/v 能否/v 把/p 经济/n 工作/vn 搞/v 上去/v 。/w 各级/r 领导/n 干部/n 要/v 切实/ad 把/p 精力/n 集中/v 到/v 贯彻/v 落实/v 好/a 中央/n 关于/p 今年/t 经济/n 工作/vn 的/u 总体/n 要求/n 和/c 各项/r 重要/a 任务/n 上/f 来/v ,/w 不断/d 提高/v 领导/v 经济/n 建设/vn 的/u 能力/n 和/c 水平/n 。/w -19980101-01-002-008/m 我们/r 要/v 更/d 好/a 地/u 坚持/v “/w 两手抓/l 、/w 两手/m 都/d 要/v 硬/a ”/w 的/u 方针/n 。/w 在/p 坚持/v 以/p 经济/n 建设/vn 为/v 中心/n 的/u 同时/n ,/w 积极/ad 推进/v 社会主义/n 精神文明/n 建设/vn 和/c 民主/a 法制/n 建设/vn ,/w 是/v 建设/v 富强/a 、/w 民主/a 、/w 文明/a 的/u 社会主义/n 现代化/vn 国家/n 的/u 重要/a 内容/n 。/w 实践/v 证明/v ,/w 经济/n 建设/vn 的/u 顺利/a 进行/vn ,/w 离/v 不/d 开/v 精神文明/n 建设/vn 和/c 民主/a 法制/n 建设/vn 的/u 保证/vn 。/w 党/n 的/u 十五大/j 依据/p 邓小平理论/n 和/c 党/n 的/u 基本/a 路线/n 提出/v 的/u 党/n 在/p 社会主义/n 初级/b 阶段/n 经济/n 、/w 政治/n 、/w 文化/n 的/u 基本/a 纲领/n ,/w 为/p “/w 
两手抓/l 、/w 两手/m 都/d 要/v 硬/a ”/w 提供/v 了/u 新/a 的/u 理论/n 根据/n ,/w 提出/v 了/u 更/d 高/a 要求/n ,/w 现在/t 的/u 关键/n 是/v 认真/ad 抓好/v 落实/v 。/w -19980101-01-002-009/m 我们/r 要/v 更/d 好/a 地/u 发扬/v 求真务实/l 、/w 密切/ad 联系/v 群众/n 的/u 作风/n 。/w 这/r 是/v 把/p 党/n 的/u 方针/n 、/w 政策/n 落到实处/l ,/w 使/v 改革/v 和/c 建设/v 取得/v 胜利/vn 的/u 重要/a 保证/vn 。/w 在/p 当前/t 改革/v 进一步/d 深化/v ,/w 经济/n 不断/d 发展/v ,/w 同时/c 又/d 出现/v 一些/m 新/a 情况/n 、/w 新/a 问题/n 和/c 新/a 困难/an 的/u 形势/n 下/f ,/w 更/d 要/v 发扬/v 这样/r 的/u 好/a 作风/n 。/w 要/v 尊重/v 群众/n 的/u 意愿/n ,/w 重视/v 群众/n 的/u 首创/vn 精神/n ,/w 关心/v 群众/n 的/u 生活/vn 疾苦/n 。/w 江/nr 泽民/nr 同志/n 最近/t 强调/vd 指出/v ,/w 要/v 大力/d 倡导/v 说实话/l 、/w 办/v 实事/n 、/w 鼓/v 实劲/n 、/w 讲/v 实效/n 的/u 作风/n ,/w 坚决/ad 制止/v 追求/v 表面文章/i ,/w 搞/v 花架子/n 等/u 形式主义/n ,/w 坚决/ad 杜绝/v 脱离/v 群众/n 、/w 脱离/v 实际/n 、/w 浮躁/a 虚夸/v 等/u 官僚主义/n 。/w 这/r 是/v 非常/d 重要/a 的/u 。/w 因此/c ,/w 各级/r 领导/n 干部/n 务必/d 牢记/v 全心全意/i 为/p 人民/n 服务/v 的/u 宗旨/n ,/w 在/p 勤政廉政/l 、/w 艰苦奋斗/i 方面/n 以身作则/i ,/w 当/v 好/a 表率/n 。/w -19980101-01-002-010/m 1998/m ,/w 瞩目/v 中华/nz 。/w 新/a 的/u 机遇/n 和/c 挑战/vn ,/w 催/v 人/n 进取/v ;/w 新/a 的/u 目标/n 和/c 征途/n ,/w 催/v 人/n 奋发/v 。/w 英雄/n 的/u 中国/ns 人民/n 在/p 以/p 江/nr 泽民/nr 同志/n 为/v 核心/n 的/u 党中央/nt 坚强/a 领导/vn 和/c 党/n 的/u 十五大/j 精神/n 指引/v 下/f ,/w 更/d 高/a 地/u 举起/v 邓小平理论/n 的/u 伟大/a 旗帜/n ,/w 团结/a 一致/a ,/w 扎实/ad 工作/v ,/w 奋勇/d 前进/v ,/w 一定/d 能够/v 创造/v 出/v 更加/d 辉煌/a 的/u 业绩/n !/w diff --git a/tests/data_for_tests/sample_mnli.tsv b/tests/data_for_tests/sample_mnli.tsv deleted file mode 100644 index 9a30b95b..00000000 --- a/tests/data_for_tests/sample_mnli.tsv +++ /dev/null @@ -1,12 +0,0 @@ -index promptID pairID genre sentence1_binary_parse sentence2_binary_parse sentence1_parse sentence2_parse sentence1 sentence2 label1 label2 label3 label4 label5 gold_label -0 63735 63735n slate ( ( The ( new rights ) ) ( are ( nice enough ) ) ) ( Everyone ( really ( likes ( the ( newest benefits ) ) ) ) ) (ROOT (S (NP (DT The) (JJ new) (NNS rights)) (VP (VBP are) (ADJP (JJ nice) (RB enough))))) (ROOT (S (NP (NN Everyone)) (VP (ADVP (RB really)) (VBZ likes) (NP (DT the) (JJS newest) (NNS benefits))))) The new rights are nice enough Everyone really likes the newest benefits neutral entailment neutral neutral neutral neutral -1 91383 91383c government ( ( This site ) ( ( includes ( ( ( ( a list ) ( of ( all ( award winners ) ) ) ) and ) ( ( a ( searchable database ) ) ( of ( Government ( Executive articles ) ) ) ) ) ) . ) ) ( ( ( The ( Government ( Executive articles ) ) ) ( housed ( on ( the website ) ) ) ) ( ( ( are not ) ( able ( to ( be searched ) ) ) ) . ) ) (ROOT (S (NP (DT This) (NN site)) (VP (VBZ includes) (NP (NP (NP (DT a) (NN list)) (PP (IN of) (NP (DT all) (NN award) (NNS winners)))) (CC and) (NP (NP (DT a) (JJ searchable) (NN database)) (PP (IN of) (NP (NNP Government) (NNP Executive) (NNS articles)))))) (. .))) (ROOT (S (NP (NP (DT The) (NNP Government) (NNP Executive) (NNS articles)) (VP (VBN housed) (PP (IN on) (NP (DT the) (NN website))))) (VP (VBP are) (RB not) (ADJP (JJ able) (S (VP (TO to) (VP (VB be) (ADJP (JJ searched))))))) (. .))) This site includes a list of all award winners and a searchable database of Government Executive articles. The Government Executive articles housed on the website are not able to be searched. 
contradiction contradiction contradiction contradiction contradiction contradiction -2 755 755e telephone ( ( ( ( uh ( i ( ( do n't ) ( know ( ( i i ) ( have ( ( mixed emotions ) ( about ( him ( ( uh sometimes ) ( i ( like him ) ) ) ) ) ) ) ) ) ) ) ) but ) ( ( at ( the ( same times ) ) ) ( i ( love ( to ( see somebody ) ) ) ) ) ) ( beat him ) ) ( I ( ( ( ( ( ( like him ) ( for ( the ( most part ) ) ) ) , ) but ) ( ( would still ) ( enjoy ( seeing ( someone ( beat him ) ) ) ) ) ) . ) ) (ROOT (SINV (S (S (INTJ (UH uh)) (NP (FW i)) (VP (VBP do) (RB n't) (VP (VB know) (NP (NP (FW i) (FW i)) (SBAR (S (VP (VBP have) (VP (VBN mixed) (NP (NNS emotions)) (PP (IN about) (S (NP (PRP him)) (VP (VBG uh) (ADVP (RB sometimes)) (NP (NP (FW i)) (PP (IN like) (NP (PRP him))))))))))))))) (CC but) (S (PP (IN at) (NP (DT the) (JJ same) (NNS times))) (NP (FW i)) (VP (VBP love) (S (VP (TO to) (VP (VB see) (NP (NN somebody)))))))) (VP (VBD beat)) (NP (PRP him)))) (ROOT (S (NP (PRP I)) (VP (VP (VBP like) (NP (PRP him)) (PP (IN for) (NP (DT the) (JJS most) (NN part)))) (, ,) (CC but) (VP (MD would) (ADVP (RB still)) (VP (VB enjoy) (S (VP (VBG seeing) (S (NP (NN someone)) (VP (VB beat) (NP (PRP him))))))))) (. .))) uh i don't know i i have mixed emotions about him uh sometimes i like him but at the same times i love to see somebody beat him I like him for the most part, but would still enjoy seeing someone beat him. entailment entailment entailment entailment entailment entailment -3 78013 78013c telephone ( yeah ( ( i i ) ( think ( ( my ( favorite restaurant ) ) ( ( is always ) ( been ( ( the ( one closest ) ) ( you ( ( know ( the closest ) ) ( ( as long ) ( as ( it ( 's ( it ( meets ( ( the ( minimum criteria ) ) ( you ( know ( of ( good food ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ( ( My ( favorite restaurants ) ) ( ( ( ( are always ) ( ( ( ( ( at least ) a ) hundred ) miles ) away ) ) ( from ( my house ) ) ) . ) ) (ROOT (S (VP (VB yeah) (NP (NP (FW i) (FW i)) (SBAR (S (VP (VBP think) (SBAR (S (NP (PRP$ my) (JJ favorite) (NN restaurant)) (VP (VBZ is) (ADVP (RB always)) (VP (VBN been) (NP (NP (DT the) (CD one) (JJS closest)) (SBAR (S (NP (PRP you)) (VP (VBP know) (NP (DT the) (JJS closest)) (ADVP (ADVP (RB as) (RB long)) (SBAR (IN as) (S (NP (PRP it)) (VP (VBZ 's) (SBAR (S (NP (PRP it)) (VP (VBZ meets) (NP (NP (DT the) (JJ minimum) (NNS criteria)) (SBAR (S (NP (PRP you)) (VP (VBP know) (PP (IN of) (NP (JJ good) (NN food))))))))))))))))))))))))))))) (ROOT (S (NP (PRP$ My) (JJ favorite) (NNS restaurants)) (VP (VBP are) (ADVP (RB always)) (ADVP (NP (QP (IN at) (JJS least) (DT a) (CD hundred)) (NNS miles)) (RB away)) (PP (IN from) (NP (PRP$ my) (NN house)))) (. .))) yeah i i think my favorite restaurant is always been the one closest you know the closest as long as it's it meets the minimum criteria you know of good food My favorite restaurants are always at least a hundred miles away from my house. contradiction contradiction contradiction contradiction contradiction contradiction -4 96377 96377c telephone ( i ( ( do n't ) ( know ( um ( do ( you ( do ( ( a lot ) ( of camping ) ) ) ) ) ) ) ) ) ( I ( ( know exactly ) . ) ) (ROOT (S (NP (FW i)) (VP (VBP do) (RB n't) (VP (VB know) (SBAR (S (NP (NN um)) (VP (VBP do) (SBAR (S (NP (PRP you)) (VP (VBP do) (NP (NP (DT a) (NN lot)) (PP (IN of) (NP (NN camping)))))))))))))) (ROOT (S (NP (PRP I)) (VP (VBP know) (ADVP (RB exactly))) (. .))) i don't know um do you do a lot of camping I know exactly. 
contradiction contradiction contradiction contradiction contradiction contradiction -5 139749 139749c telephone ( well ( that ( would ( be ( ( a help ) ( i ( wish ( they ( would ( do ( that ( ( ( here ( we ( have ( got ( so ( ( little ( landfill space ) ) ( left ( that ( we ( 're ( going ( to ( ( run out ) ( before ( ( the end ) ( of ( this decade ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) and ) ( it ( ( 's really ) ( going ( to be ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ) ( We ( ( have ( plenty ( of ( space ( in ( the landfill ) ) ) ) ) ) . ) ) (ROOT (FRAG (ADVP (RB well)) (SBAR (WHNP (WDT that)) (S (VP (MD would) (VP (VB be) (NP (NP (DT a) (NN help)) (SBAR (S (NP (FW i)) (VP (VBP wish) (SBAR (S (NP (PRP they)) (VP (MD would) (VP (VB do) (SBAR (IN that) (S (S (ADVP (RB here)) (NP (PRP we)) (VP (VBP have) (VP (VBN got) (SBAR (IN so) (S (NP (JJ little) (NN landfill) (NN space)) (VP (VBD left) (SBAR (IN that) (S (NP (PRP we)) (VP (VBP 're) (VP (VBG going) (S (VP (TO to) (VP (VB run) (PRT (RP out)) (PP (IN before) (NP (NP (DT the) (NN end)) (PP (IN of) (NP (DT this) (NN decade)))))))))))))))))) (CC and) (S (NP (PRP it)) (VP (VBZ 's) (ADVP (RB really)) (VP (VBG going) (S (VP (TO to) (VP (VB be))))))))))))))))))))))) (ROOT (S (NP (PRP We)) (VP (VBP have) (NP (NP (RB plenty)) (PP (IN of) (NP (NP (NN space)) (PP (IN in) (NP (DT the) (NN landfill))))))) (. .))) well that would be a help i wish they would do that here we have got so little landfill space left that we're going to run out before the end of this decade and it's really going to be We have plenty of space in the landfill. contradiction contradiction contradiction contradiction contradiction contradiction -6 101415 101415c telephone ( yeah ( ( ( i know ) and ) ( i ( did ( that ( ( ( all ( through college ) ) and ) ( it ( worked too ) ) ) ) ) ) ) ) ( I ( ( ( did ( that all ) ) ( through college ) ) ( but ( it ( never worked ) ) ) ) ) (ROOT (S (VP (VB yeah) (S (S (NP (FW i)) (VP (VBP know))) (CC and) (S (NP (FW i)) (VP (VBD did) (SBAR (IN that) (S (S (NP (DT all)) (PP (IN through) (NP (NN college)))) (CC and) (S (NP (PRP it)) (VP (VBD worked) (ADVP (RB too)))))))))))) (ROOT (S (NP (PRP I)) (VP (VBD did) (ADVP (IN that) (DT all)) (PP (IN through) (NP (NN college))) (SBAR (CC but) (S (NP (PRP it)) (ADVP (RB never)) (VP (VBD worked))))))) yeah i know and i did that all through college and it worked too I did that all through college but it never worked contradiction contradiction contradiction contradiction contradiction contradiction -7 93958 93958n travel ( ( ( ( ( Calcutta ( seems ( to ( be ( ( the ( only ( other ( production center ) ) ) ) ( ( having ( any pretensions ) ) ( to ( ( artistic creativity ) ( at all ) ) ) ) ) ) ) ) ) , ) but ) ( ironically ( you ( ( 're actually ) ( ( more ( likely ( to ( see ( ( the works ) ( of ( ( ( Satyajit Ray ) or ) ( ( Mrinal Sen ) ( shown ( in ( Europe ( or ( North America ) ) ) ) ) ) ) ) ) ) ) ) ) ( than ( in ( India itself ) ) ) ) ) ) ) ) . ) ( ( Most ( of ( ( Mrinal ( Sen 's ) ) work ) ) ) ( ( can ( be ( found ( in ( European collections ) ) ) ) ) . 
) ) (ROOT (S (S (NP (NNP Calcutta)) (VP (VBZ seems) (S (VP (TO to) (VP (VB be) (NP (NP (DT the) (JJ only) (JJ other) (NN production) (NN center)) (VP (VBG having) (NP (DT any) (NNS pretensions)) (PP (TO to) (NP (NP (JJ artistic) (NN creativity)) (ADVP (IN at) (DT all))))))))))) (, ,) (CC but) (S (ADVP (RB ironically)) (NP (PRP you)) (VP (VBP 're) (ADVP (RB actually)) (ADJP (ADJP (RBR more) (JJ likely) (S (VP (TO to) (VP (VB see) (NP (NP (DT the) (NNS works)) (PP (IN of) (NP (NP (NNP Satyajit) (NNP Ray)) (CC or) (NP (NP (NNP Mrinal) (NNP Sen)) (VP (VBN shown) (PP (IN in) (NP (NNP Europe) (CC or) (NNP North) (NNP America)))))))))))) (ADVP (IN than) (PP (IN in) (S (VP (VBG India) (NP (PRP itself))))))))) (. .))) (ROOT (S (NP (NP (JJS Most)) (PP (IN of) (NP (NP (NNP Mrinal) (NNP Sen) (POS 's)) (NN work)))) (VP (MD can) (VP (VB be) (VP (VBN found) (PP (IN in) (NP (JJ European) (NNS collections)))))) (. .))) Calcutta seems to be the only other production center having any pretensions to artistic creativity at all, but ironically you're actually more likely to see the works of Satyajit Ray or Mrinal Sen shown in Europe or North America than in India itself. Most of Mrinal Sen's work can be found in European collections. neutral neutral entailment neutral neutral neutral -8 12567 12567c slate ( ( If ( ( that investor ) ( were ( willing ( to ( pay ( extra ( for ( ( the security ) ( of ( limited downside ) ) ) ) ) ) ) ) ) ) ) ( , ( she ( ( could ( ( buy ( put options ) ) ( with ( ( a ( strike price ) ) ( of ( ( ( $ 98 ) , ) ( which ( would ( ( ( lock ( in ( ( her profit ) ( on ( ( the shares ) ( at ( $ 18 ) ) ) ) ) ) ) , ) ( less ( whatever ( ( the options ) cost ) ) ) ) ) ) ) ) ) ) ) ) . ) ) ) ) ( ( THe ( strike price ) ) ( ( could ( be ( $ 8 ) ) ) . ) ) (ROOT (S (SBAR (IN If) (S (NP (DT that) (NN investor)) (VP (VBD were) (ADJP (JJ willing) (S (VP (TO to) (VP (VB pay) (NP (NP (JJ extra)) (PP (IN for) (NP (NP (DT the) (NN security)) (PP (IN of) (NP (JJ limited) (NN downside))))))))))))) (, ,) (NP (PRP she)) (VP (MD could) (VP (VB buy) (NP (NN put) (NNS options)) (PP (IN with) (NP (NP (DT a) (NN strike) (NN price)) (PP (IN of) (NP (NP ($ $) (CD 98)) (, ,) (SBAR (WHNP (WDT which)) (S (VP (MD would) (VP (VB lock) (PP (IN in) (NP (NP (PRP$ her) (NN profit)) (PP (IN on) (NP (NP (DT the) (NNS shares)) (PP (IN at) (NP ($ $) (CD 18))))))) (, ,) (ADVP (ADVP (RBR less)) (SBAR (WHNP (WDT whatever)) (S (NP (DT the) (NNS options)) (VP (VBD cost))))))))))))))) (. .))) (ROOT (S (NP (NNP THe) (NN strike) (NN price)) (VP (MD could) (VP (VB be) (NP ($ $) (CD 8)))) (. .))) If that investor were willing to pay extra for the security of limited downside, she could buy put options with a strike price of $98, which would lock in her profit on the shares at $18, less whatever the options cost. THe strike price could be $8. contradiction contradiction contradiction contradiction contradiction contradiction -9 117487 117487n slate ( ( 3 -RRB- ) ( ( Dare ( you ( ( ( rise ( to ( ( ( ( the occasion ) , ) ( like Raskolnikov ) ) , ) ) ) and ) ( reject ( ( the ( petty rules ) ) ( that ( govern ( lesser men ) ) ) ) ) ) ) ) ? ) ) ( ( ( Would you ) ( ( ( rise up ) and ) ( defeaat ( ( all ( evil lords ) ) ( in ( the town ) ) ) ) ) ) ? 
) (ROOT (S (LST (LS 3) (-RRB- -RRB-)) (VP (VB Dare) (S (NP (PRP you)) (VP (VP (VB rise) (PP (TO to) (NP (NP (DT the) (NN occasion)) (, ,) (PP (IN like) (NP (NNP Raskolnikov))) (, ,)))) (CC and) (VP (VB reject) (NP (NP (DT the) (JJ petty) (NNS rules)) (SBAR (WHNP (WDT that)) (S (VP (VBP govern) (NP (JJR lesser) (NNS men)))))))))) (. ?))) (ROOT (SQ (MD Would) (NP (PRP you)) (VP (VP (VB rise) (PRT (RP up))) (CC and) (VP (VB defeaat) (NP (NP (DT all) (JJ evil) (NNS lords)) (PP (IN in) (NP (DT the) (NN town)))))) (. ?))) 3) Dare you rise to the occasion, like Raskolnikov, and reject the petty rules that govern lesser men? Would you rise up and defeaat all evil lords in the town? neutral neutral neutral neutral neutral neutral -10 9616 9616c travel ( ( The ( ( most important ) directions ) ) ( ( ( are ( simply ( ( up and ) up ) ) ) ( ( ( ( ( ( ( ( leads eventually ) ( to ( the cathedral ) ) ) and ) ( fortress ( commanding ( the hilltop ) ) ) ) , ) and ) down ) ( inevitably ( ( leads ( to ( one ( of ( three gates ) ) ) ) ) ( through ( ( the wall ) ( to ( the ( new town ) ) ) ) ) ) ) ) ) . ) ) ( Go ( ( downwards ( to ( one ( of ( ( ( the gates ) , ) ( ( all ( of which ) ) ( will ( ( lead you ) ( into ( the cathedral ) ) ) ) ) ) ) ) ) ) . ) ) (ROOT (S (NP (DT The) (ADJP (RBS most) (JJ important)) (NNS directions)) (VP (VBP are) (PRN (ADVP (RB simply)) (ADVP (RB up) (CC and) (RB up))) (VP (VP (VBZ leads) (ADVP (RB eventually)) (PP (TO to) (NP (DT the) (NN cathedral)))) (CC and) (VP (VBZ fortress) (NP (JJ commanding) (DT the) (NN hilltop))) (, ,) (CC and) (ADVP (RB down)) (VP (ADVP (RB inevitably)) (VBZ leads) (PP (TO to) (NP (NP (CD one)) (PP (IN of) (NP (CD three) (NNS gates))))) (PP (IN through) (NP (NP (DT the) (NN wall)) (PP (TO to) (NP (DT the) (JJ new) (NN town)))))))) (. .))) (ROOT (S (NP (NNP Go)) (VP (VBZ downwards) (PP (TO to) (NP (NP (CD one)) (PP (IN of) (NP (NP (DT the) (NNS gates)) (, ,) (SBAR (WHNP (DT all) (WHPP (IN of) (WHNP (WDT which)))) (S (VP (MD will) (VP (VB lead) (NP (PRP you)) (PP (IN into) (NP (DT the) (NN cathedral)))))))))))) (. .))) The most important directions are simply up and up leads eventually to the cathedral and fortress commanding the hilltop, and down inevitably leads to one of three gates through the wall to the new town. Go downwards to one of the gates, all of which will lead you into the cathedral. contradiction contradiction entailment contradiction contradiction contradiction diff --git a/tests/data_for_tests/sample_snli.jsonl b/tests/data_for_tests/sample_snli.jsonl deleted file mode 100644 index e62856ac..00000000 --- a/tests/data_for_tests/sample_snli.jsonl +++ /dev/null @@ -1,3 +0,0 @@ -{"annotator_labels": ["neutral"], "captionID": "3416050480.jpg#4", "gold_label": "neutral", "pairID": "3416050480.jpg#4r1n", "sentence1": "A person on a horse jumps over a broken down airplane.", "sentence1_binary_parse": "( ( ( A person ) ( on ( a horse ) ) ) ( ( jumps ( over ( a ( broken ( down airplane ) ) ) ) ) . ) )", "sentence1_parse": "(ROOT (S (NP (NP (DT A) (NN person)) (PP (IN on) (NP (DT a) (NN horse)))) (VP (VBZ jumps) (PP (IN over) (NP (DT a) (JJ broken) (JJ down) (NN airplane)))) (. .)))", "sentence2": "A person is training his horse for a competition.", "sentence2_binary_parse": "( ( A person ) ( ( is ( ( training ( his horse ) ) ( for ( a competition ) ) ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (DT A) (NN person)) (VP (VBZ is) (VP (VBG training) (NP (PRP$ his) (NN horse)) (PP (IN for) (NP (DT a) (NN competition))))) (. 
.)))"} -{"annotator_labels": ["contradiction"], "captionID": "3416050480.jpg#4", "gold_label": "contradiction", "pairID": "3416050480.jpg#4r1c", "sentence1": "A person on a horse jumps over a broken down airplane.", "sentence1_binary_parse": "( ( ( A person ) ( on ( a horse ) ) ) ( ( jumps ( over ( a ( broken ( down airplane ) ) ) ) ) . ) )", "sentence1_parse": "(ROOT (S (NP (NP (DT A) (NN person)) (PP (IN on) (NP (DT a) (NN horse)))) (VP (VBZ jumps) (PP (IN over) (NP (DT a) (JJ broken) (JJ down) (NN airplane)))) (. .)))", "sentence2": "A person is at a diner, ordering an omelette.", "sentence2_binary_parse": "( ( A person ) ( ( ( ( is ( at ( a diner ) ) ) , ) ( ordering ( an omelette ) ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (DT A) (NN person)) (VP (VBZ is) (PP (IN at) (NP (DT a) (NN diner))) (, ,) (S (VP (VBG ordering) (NP (DT an) (NN omelette))))) (. .)))"} -{"annotator_labels": ["entailment"], "captionID": "3416050480.jpg#4", "gold_label": "entailment", "pairID": "3416050480.jpg#4r1e", "sentence1": "A person on a horse jumps over a broken down airplane.", "sentence1_binary_parse": "( ( ( A person ) ( on ( a horse ) ) ) ( ( jumps ( over ( a ( broken ( down airplane ) ) ) ) ) . ) )", "sentence1_parse": "(ROOT (S (NP (NP (DT A) (NN person)) (PP (IN on) (NP (DT a) (NN horse)))) (VP (VBZ jumps) (PP (IN over) (NP (DT a) (JJ broken) (JJ down) (NN airplane)))) (. .)))", "sentence2": "A person is outdoors, on a horse.", "sentence2_binary_parse": "( ( A person ) ( ( ( ( is outdoors ) , ) ( on ( a horse ) ) ) . ) )", "sentence2_parse": "(ROOT (S (NP (DT A) (NN person)) (VP (VBZ is) (ADVP (RB outdoors)) (, ,) (PP (IN on) (NP (DT a) (NN horse)))) (. .)))"} \ No newline at end of file diff --git a/tests/data_for_tests/text_classify.txt b/tests/data_for_tests/text_classify.txt deleted file mode 100644 index 24a51ce9..00000000 --- a/tests/data_for_tests/text_classify.txt +++ /dev/null @@ -1,100 +0,0 @@ -entertainment 台 媒 预 测 周 冬 雨 金 马 奖 封 后 , 大 气 的 倪 妮 却 佳 作 难 出 -food 农 村 就 是 好 , 能 吃 到 纯 天 然 无 添 加 的 野 生 蜂 蜜 , 营 养 又 健 康 -fashion 1 4 款 知 性 美 装 , 时 尚 惊 艳 搁 浅 的 阳 光 轻 熟 的 优 雅 -history 火 焰 喷 射 器 1 0 0 0 度 火 焰 烧 死 鬼 子 4 连 拍 -society 1 8 岁 青 年 砍 死 8 8 岁 老 兵 -fashion 醋 洗 脸 的 正 确 方 法 洗 对 了 不 仅 美 容 肌 肤 还 能 收 缩 毛 孔 -game 大 家 都 说 说 除 了 这 1 0 个 英 雄 , L O L 还 有 哪 些 英 雄 可 以 单 挑 男 爵 -sports 王 仕 鹏 退 役 担 任 N B A 总 决 赛 现 场 解 说 嘉 宾 -regimen 天 天 吃 “ 洋 快 餐 ” , 5 岁 女 童 患 上 肝 炎 -food 汤 里 的 蛋 花 怎 样 才 能 如 花 朵 般 漂 亮 , 注 意 这 一 点 即 可 ! -tech 英 退 休 人 士 把 谷 歌 当 活 人 以 礼 貌 搜 索 请 求 征 服 整 个 互 联 网 -discovery N A S A 探 测 器 拍 摄 地 球 、 火 星 和 冥 王 星 合 影 -society 当 骗 子 遇 上 撒 贝 宁 ! 几 句 话 过 后 骗 子 赔 礼 道 歉 . . . . . -history 红 军 长 征 在 中 国 革 命 史 上 的 地 位 -world 实 拍 神 秘 之 国 , 带 你 走 进 真 实 的 朝 鲜 -tech 逼 格 爆 表 ! 古 文 版 2 0 1 6 网 络 热 词 : 燃 尽 洪 荒 之 力 -story 因 为 一 样 东 西 这 个 后 娘 竟 然 给 孩 子 磕 头 -game L O L : 皮 肤 对 操 作 没 影 响 ? 细 数 那 些 有 加 成 效 果 的 皮 肤 -fashion 冬 天 想 穿 裙 子 又 怕 冷 ? 学 了 这 些 搭 配 就 能 好 看 又 温 暖 ! -entertainment 贾 建 军 少 林 三 光 剑 视 频 -food 再 也 不 用 出 去 吃 羊 肉 串 , 自 己 做 又 卫 生 又 健 康 -regimen 男 人 多 吃 这 几 道 菜 , 效 果 胜 “ 伟 哥 ” -baby 宝 贝 厨 房 丨 肉 类 辅 食 第 一 步 宝 宝 的 生 长 发 育 每 天 都 离 不 开 它 ! -travel 近 8 0 亿 的 顶 级 豪 华 邮 轮 上 到 底 有 什 么 ? -sports 厄 齐 尔 心 中 最 想 签 约 的 三 个 人 -food 东 北 的 粘 豆 包 啊 , 想 死 你 们 了 ! -military 强 军 足 音 -sports 奥 运 赛 场 上 , 被 喷 子 痛 批 的 十 大 知 名 运 动 员 -game 老 玩 家 分 享 对 2 0 1 6 L P L 夏 季 赛 R N G 的 分 析 -military 揭 秘 : 关 于 战 争 的 五 大 真 相 , 不 要 再 被 影 视 所 欺 骗 了 ! -food 小 丫 厨 房 : 夏 天 怎 么 吃 辣 不 长 痘 ? 告 诉 你 火 锅 鸡 、 香 辣 鱼 的 正 确 做 法 -travel 中 国 首 个 内 陆 城 市 群 上 的 9 座 城 市 , 看 看 有 你 的 家 乡 吗 -fashion 李 小 璐 做 榜 样 接 亲 吻 脚 大 流 行 新 娘 玉 足 怎 样 才 有 好 味 道 ? 
-game 黄 金 吊 打 钻 石 ? L O L 最 强 刷 钱 毒 瘤 打 法 诞 生 -history 奇 事 ! 上 万 只 青 蛙 拦 路 告 状 , 竟 然 牵 扯 出 一 桩 命 案 -baby 奶 奶 , 你 为 什 么 不 让 我 用 尿 不 湿 -game L O L 当 5 个 大 发 明 家 炮 台 围 住 泉 水 的 时 候 : 这 是 真 虐 泉 ! -essay 文 友 忠 告 暖 人 心 : 人 到 中 年 “ 不 交 五 友 ” -travel 这 一 年 , 我 们 去 日 本 -food 好 吃 早 饭 近 似 吃 补 药 -fashion 夏 天 太 热 , 唇 膏 化 了 如 何 办 ? -society 厂 里 面 的 9 0 后 打 工 妹 , 辛 苦 来 之 不 易 -history 罕 见 老 照 片 展 示 美 国 大 萧 条 时 期 景 象 -world 美 国 总 统 奥 巴 马 , 是 童 心 未 泯 的 温 情 奥 大 大 , 还 是 个 超 级 老 顽 童 -finance 脱 欧 公 投 前 一 天 抛 售 英 镑 这 一 次 索 罗 斯 也 被 “ 打 败 ” 了 . . . -history 翻 越 长 征 路 上 第 一 座 大 山 -world 朝 鲜 批 奥 巴 马 涉 朝 言 论 , 称 只 要 核 威 胁 存 在 将 继 续 强 化 核 武 力 量 -game 《 巫 师 3 : 狂 猎 》 不 良 因 素 解 析 攻 略 -travel 在 郑 州 有 个 地 方 , 时 光 仿 佛 在 那 儿 停 下 脚 步 -history 它 号 称 “ 天 下 第 一 团 ” , 走 出 过 1 4 位 共 和 国 将 军 以 及 一 位 著 名 作 家 -car 煤 老 板 去 黄 江 买 车 , 以 为 占 了 便 宜 没 想 被 坑 了 1 0 0 多 万 -society “ 试 管 婴 儿 之 母 ” 张 丽 珠 遗 体 告 别 仪 式 8 日 举 行 -sports 东 京 奥 运 会 , 中 国 女 排 卫 冕 的 几 率 有 多 大 ? -travel 成 都 我 们 永 远 依 恋 的 城 市 -tech 雷 布 斯 除 了 小 米 还 有 这 些 秘 密 , 你 知 道 吗 ? -world “ 仲 裁 庭 损 害 国 际 法 体 系 公 正 性 ” — — 访 武 汉 大 学 中 国 边 界 与 海 洋 研 究 院 首 席 专 家 易 显 河 -entertainment 上 海 观 众 和 欧 洲 三 大 影 展 之 间 的 距 离 : 零 时 差 -essay 关 系 好 , 一 切 便 好 -baby 刚 出 生 不 到 1 小 时 的 白 鲸 宝 宝 被 冲 上 岸 , 被 救 后 对 恩 人 露 出 微 笑 -tech 赚 足 眼 球 , 诺 基 亚 五 边 形 W i n 1 0 M o b i l e 概 念 手 机 : 棱 镜 -essay 2 4 句 经 典 语 录 : 穷 三 年 可 以 怨 命 , 穷 十 年 就 得 自 省 -food 这 道 菜 真 下 饭 ! 做 法 简 单 , 防 辐 射 、 抗 衰 老 , 关 键 还 便 宜 -entertainment 《 继 承 者 们 》 要 拍 中 国 版 , 众 角 色 你 期 待 谁 来 演 ? -game D N F 暴 走 改 版 后 怎 么 样 D N F 暴 走 改 版 红 眼 变 弱 了 吗 -entertainment 郑 佩 佩 自 曝 与 李 小 龙 的 过 去 他 是 个 “ 疯 子 ” -baby 女 性 只 有 8 4 次 最 佳 受 孕 机 会 -travel 月 初 一 个 人 去 了 日 本 . . -military 不 为 人 知 的 8 0 万 苏 联 女 兵 ! 最 后 一 张 很 美 ! -tech 网 络 商 家 提 供 小 米 5 运 存 升 级 服 务 : 3 G B 秒 变 6 G B -history 宋 太 祖 、 宋 太 宗 凌 辱 亡 国 皇 后 , 徽 钦 二 帝 后 宫 被 金 人 凌 辱 -history 人 有 三 面 最 “ 难 吃 ” ! 黑 帮 大 佬 杜 月 笙 论 江 湖 规 矩 ! 一 生 只 怕 这 一 人 -game 来 了 ! 索 尼 P S 4 独 占 大 作 《 战 神 4 》 正 式 公 布 -discovery 延 时 视 频 显 示 珊 瑚 如 何 “ 驱 逐 ” 共 生 藻 类 -car 传 祺 G A 8 和 东 风 A 9 谁 才 是 自 主 “ 豪 车 ” 大 佬 -fashion 娶 老 婆 就 要 娶 这 种 ! 蒋 欣 这 样 微 胖 的 女 人 好 看 又 实 用 -sports 黄 山 姑 娘 吕 秀 芝 勇 夺 奥 运 铜 牌 数 百 父 老 彻 夜 为 她 加 油 -military [ 每 日 军 图 ] 土 豪 补 仓 ! 沙 特 再 次 购 买 上 百 辆 美 国 M 1 A 2 主 战 坦 克 -military 美 军 这 款 武 器 号 称 能 让 半 个 中 国 陷 入 黑 暗 , 解 放 军 少 将 : 我 们 也 有 -world 邓 小 平 与 日 本 天 皇 的 历 史 性 会 谈 , 对 中 日 两 国 都 具 有 深 远 的 意 义 啊 ! -baby 为 什 么 有 人 上 个 厕 所 都 能 生 出 孩 子 ? -fashion 欣 宜 举 行 首 次 个 唱 十 万 颗 宝 仕 奥 莎 仿 水 晶 闪 耀 全 场 -food 小 两 口 上 周 的 晚 餐 -society 在 北 京 就 要 守 规 矩 -entertainment 知 情 人 曝 翰 爽 分 手 内 幕 : 郑 爽 想 结 婚 却 被 一 直 拖 着 -military 中 国 反 舰 导 弹 世 界 第 一 远 远 超 过 美 国 但 为 何 却 还 不 如 俄 罗 斯 ? -entertainment 他 除 了 是 《 我 歌 》 音 乐 总 监 , 还 曾 组 乐 队 玩 摇 滚 , 是 黄 家 驹 旧 日 知 己 -baby 长 鹅 口 疮 的 孩 子 怎 么 照 顾 ? 不 要 再 说 拿 他 没 办 法 了 ! -discovery 微 重 力 不 需 使 用 肌 肉 , 太 空 人 返 回 地 球 后 脊 椎 旁 肌 肉 萎 缩 约 1 9 % -regimen 这 6 种 人 将 来 会 得 老 年 痴 呆 ! 预 防 老 年 痴 呆 症 , 这 些 办 法 被 全 世 界 公 认 -society 2 0 1 6 年 上 海 即 将 发 生 哪 些 大 事 件 。 。 。 。 -car 北 汽 自 主 品 牌 亏 损 3 3 . 4 1 亿 额 外 促 销 成 主 因 -car 在 那 山 的 那 边 海 的 那 边 , 有 一 群 自 由 侠 -history 一 个 小 城 就 屠 杀 了 4 0 0 0 苏 军 战 俘 , 希 特 勒 死 神 战 队 的 崛 起 与 覆 灭 -baby 给 孩 子 洗 澡 时 , 这 些 部 位 再 脏 也 不 要 碰 ! -essay 好 久 不 见 , 你 还 好 么 -baby 被 娃 误 伤 的 9 种 痛 , 数 一 数 你 中 了 几 枪 ? -food 初 秋 的 小 炖 品 放 冰 糖 就 比 较 滋 润 , 放 红 糖 就 补 血 又 不 燥 热 -game 佩 服 佩 服 ! 羊 驼 D e f t 单 排 重 回 韩 服 最 强 王 者 第 一 名 ! -game 三 个 时 代 的 标 志 炉 石 传 说 三 大 远 古 毒 瘤 卡 组 -discovery 2 0 世 纪 最 伟 大 科 学 发 现 — — 魔 术 般 的 超 导 材 料 ! 
\ No newline at end of file diff --git a/tests/data_for_tests/tutorial_sample_dataset.csv b/tests/data_for_tests/tutorial_sample_dataset.csv deleted file mode 100644 index e5c0a74f..00000000 --- a/tests/data_for_tests/tutorial_sample_dataset.csv +++ /dev/null @@ -1,77 +0,0 @@ -A series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . 1 -This quiet , introspective and entertaining independent is worth seeking . 4 -Even fans of Ismail Merchant 's work , I suspect , would have a hard time sitting through this one . 1 -A positively thrilling combination of ethnography and all the intrigue , betrayal , deceit and murder of a Shakespearean tragedy or a juicy soap opera . 3 -Aggressive self-glorification and a manipulative whitewash . 1 -A comedy-drama of nearly epic proportions rooted in a sincere performance by the title character undergoing midlife crisis . 4 -Narratively , Trouble Every Day is a plodding mess . 1 -The Importance of Being Earnest , so thick with wit it plays like a reading from Bartlett 's Familiar Quotations 3 -But it does n't leave you with much . 1 -You could hate it for the same reason . 1 -There 's little to recommend Snow Dogs , unless one considers cliched dialogue and perverse escapism a source of high hilarity . 1 -Kung Pow is Oedekerk 's realization of his childhood dream to be in a martial-arts flick , and proves that sometimes the dreams of youth should remain just that . 1 -The performances are an absolute joy . 4 -Fresnadillo has something serious to say about the ways in which extravagant chance can distort our perspective and throw us off the path of good sense . 3 -I still like Moonlight Mile , better judgment be damned . 3 -A welcome relief from baseball movies that try too hard to be mythic , this one is a sweet and modest and ultimately winning story . 3 -a bilingual charmer , just like the woman who inspired it 3 -Like a less dizzily gorgeous companion to Mr. Wong 's In the Mood for Love -- very much a Hong Kong movie despite its mainland setting . 2 -As inept as big-screen remakes of The Avengers and The Wild Wild West . 1 -It 's everything you 'd expect -- but nothing more . 2 -Best indie of the year , so far . 4 -Hatfield and Hicks make the oddest of couples , and in this sense the movie becomes a study of the gambles of the publishing world , offering a case study that exists apart from all the movie 's political ramifications . 3 -It 's like going to a house party and watching the host defend himself against a frothing ex-girlfriend . 1 -That the Chuck Norris `` grenade gag '' occurs about 7 times during Windtalkers is a good indication of how serious-minded the film is . 2 -The plot is romantic comedy boilerplate from start to finish . 2 -It arrives with an impeccable pedigree , mongrel pep , and almost indecipherable plot complications . 2 -A film that clearly means to preach exclusively to the converted . 2 -While The Importance of Being Earnest offers opportunities for occasional smiles and chuckles , it does n't give us a reason to be in the theater beyond Wilde 's wit and the actors ' performances . 1 -The latest vapid actor 's exercise to appropriate the structure of Arthur Schnitzler 's Reigen . 1 -More vaudeville show than well-constructed narrative , but on those terms it 's inoffensive and actually rather sweet . 2 -Nothing more than a run-of-the-mill action flick . 
2 -Hampered -- no , paralyzed -- by a self-indulgent script ... that aims for poetry and ends up sounding like satire . 0 -Ice Age is the first computer-generated feature cartoon to feel like other movies , and that makes for some glacial pacing early on . 2 -There 's very little sense to what 's going on here , but the makers serve up the cliches with considerable dash . 2 -Cattaneo should have followed the runaway success of his first film , The Full Monty , with something different . 2 -They 're the unnamed , easily substitutable forces that serve as whatever terror the heroes of horror movies try to avoid . 1 -It almost feels as if the movie is more interested in entertaining itself than in amusing us . 1 -The movie 's progression into rambling incoherence gives new meaning to the phrase ` fatal script error . ' 0 -I still like Moonlight Mile , better judgment be damned . 3 -A welcome relief from baseball movies that try too hard to be mythic , this one is a sweet and modest and ultimately winning story . 3 -a bilingual charmer , just like the woman who inspired it 3 -Like a less dizzily gorgeous companion to Mr. Wong 's In the Mood for Love -- very much a Hong Kong movie despite its mainland setting . 2 -As inept as big-screen remakes of The Avengers and The Wild Wild West . 1 -It 's everything you 'd expect -- but nothing more . 2 -Best indie of the year , so far . 4 -Hatfield and Hicks make the oddest of couples , and in this sense the movie becomes a study of the gambles of the publishing world , offering a case study that exists apart from all the movie 's political ramifications . 3 -It 's like going to a house party and watching the host defend himself against a frothing ex-girlfriend . 1 -That the Chuck Norris `` grenade gag '' occurs about 7 times during Windtalkers is a good indication of how serious-minded the film is . 2 -The plot is romantic comedy boilerplate from start to finish . 2 -It arrives with an impeccable pedigree , mongrel pep , and almost indecipherable plot complications . 2 -A film that clearly means to preach exclusively to the converted . 2 -I still like Moonlight Mile , better judgment be damned . 3 -A welcome relief from baseball movies that try too hard to be mythic , this one is a sweet and modest and ultimately winning story . 3 -a bilingual charmer , just like the woman who inspired it 3 -Like a less dizzily gorgeous companion to Mr. Wong 's In the Mood for Love -- very much a Hong Kong movie despite its mainland setting . 2 -As inept as big-screen remakes of The Avengers and The Wild Wild West . 1 -It 's everything you 'd expect -- but nothing more . 2 -Best indie of the year , so far . 4 -Hatfield and Hicks make the oddest of couples , and in this sense the movie becomes a study of the gambles of the publishing world , offering a case study that exists apart from all the movie 's political ramifications . 3 -It 's like going to a house party and watching the host defend himself against a frothing ex-girlfriend . 1 -That the Chuck Norris `` grenade gag '' occurs about 7 times during Windtalkers is a good indication of how serious-minded the film is . 2 -The plot is romantic comedy boilerplate from start to finish . 2 -It arrives with an impeccable pedigree , mongrel pep , and almost indecipherable plot complications . 2 -A film that clearly means to preach exclusively to the converted . 2 -I still like Moonlight Mile , better judgment be damned . 
3 -A welcome relief from baseball movies that try too hard to be mythic , this one is a sweet and modest and ultimately winning story . 3 -a bilingual charmer , just like the woman who inspired it 3 -Like a less dizzily gorgeous companion to Mr. Wong 's In the Mood for Love -- very much a Hong Kong movie despite its mainland setting . 2 -As inept as big-screen remakes of The Avengers and The Wild Wild West . 1 -It 's everything you 'd expect -- but nothing more . 2 -Best indie of the year , so far . 4 -Hatfield and Hicks make the oddest of couples , and in this sense the movie becomes a study of the gambles of the publishing world , offering a case study that exists apart from all the movie 's political ramifications . 3 -It 's like going to a house party and watching the host defend himself against a frothing ex-girlfriend . 1 -That the Chuck Norris `` grenade gag '' occurs about 7 times during Windtalkers is a good indication of how serious-minded the film is . 2 -The plot is romantic comedy boilerplate from start to finish . 2 -It arrives with an impeccable pedigree , mongrel pep , and almost indecipherable plot complications . 2 -A film that clearly means to preach exclusively to the converted . 2 \ No newline at end of file diff --git a/tests/data_for_tests/zh_sample.conllx b/tests/data_for_tests/zh_sample.conllx deleted file mode 100644 index dee802ef..00000000 --- a/tests/data_for_tests/zh_sample.conllx +++ /dev/null @@ -1,100 +0,0 @@ -1 上海 _ NR NR _ 3 nsubj _ _ -2 积极 _ AD AD _ 3 advmod _ _ -3 准备 _ VV VV _ 0 root _ _ -4 迎接 _ VV VV _ 3 ccomp _ _ -5 欧元 _ NN NN _ 6 nn _ _ -6 诞生 _ NN NN _ 4 dobj _ _ - -1 新华社 _ NR NR _ 7 dep _ _ -2 上海 _ NR NR _ 7 dep _ _ -3 十二月 _ NT NT _ 7 dep _ _ -4 三十日 _ NT NT _ 7 dep _ _ -5 电 _ NN NN _ 7 dep _ _ -6 ( _ PU PU _ 7 punct _ _ -7 记者 _ NN NN _ 0 root _ _ -8 潘清 _ NR NR _ 7 dep _ _ -9 ) _ PU PU _ 7 punct _ _ - -1 即将 _ AD AD _ 2 advmod _ _ -2 诞生 _ VV VV _ 4 rcmod _ _ -3 的 _ DEC DEC _ 2 cpm _ _ -4 欧元 _ NN NN _ 6 nsubj _ _ -5 , _ PU PU _ 6 punct _ _ -6 引起 _ VV VV _ 0 root _ _ -7 了 _ AS AS _ 6 asp _ _ -8 上海 _ NR NR _ 14 nn _ _ -9 这 _ DT DT _ 14 det _ _ -10 个 _ M M _ 9 clf _ _ -11 中国 _ NR NR _ 13 nn _ _ -12 金融 _ NN NN _ 13 nn _ _ -13 中心 _ NN NN _ 14 nn _ _ -14 城市 _ NN NN _ 16 assmod _ _ -15 的 _ DEG DEG _ 14 assm _ _ -16 关注 _ NN NN _ 6 dobj _ _ -17 。 _ PU PU _ 6 punct _ _ - -1 上海 _ NR NR _ 2 nn _ _ -2 银行界 _ NN NN _ 4 nsubj _ _ -3 纷纷 _ AD AD _ 4 advmod _ _ -4 推出 _ VV VV _ 0 root _ _ -5 了 _ AS AS _ 4 asp _ _ -6 与 _ P P _ 8 prep _ _ -7 之 _ PN PN _ 6 pobj _ _ -8 相关 _ VA VA _ 15 rcmod _ _ -9 的 _ DEC DEC _ 8 cpm _ _ -10 外汇 _ NN NN _ 15 nn _ _ -11 业务 _ NN NN _ 15 nn _ _ -12 品种 _ NN NN _ 15 conj _ _ -13 和 _ CC CC _ 15 cc _ _ -14 服务 _ NN NN _ 15 nn _ _ -15 举措 _ NN NN _ 4 dobj _ _ -16 , _ PU PU _ 4 punct _ _ -17 积极 _ AD AD _ 18 advmod _ _ -18 准备 _ VV VV _ 4 dep _ _ -19 启动 _ VV VV _ 18 ccomp _ _ -20 欧元 _ NN NN _ 21 nn _ _ -21 业务 _ NN NN _ 19 dobj _ _ -22 。 _ PU PU _ 4 punct _ _ - -1 一些 _ CD CD _ 8 nummod _ _ -2 热衷于 _ VV VV _ 8 rcmod _ _ -3 个人 _ NN NN _ 5 nn _ _ -4 外汇 _ NN NN _ 5 nn _ _ -5 交易 _ NN NN _ 2 dobj _ _ -6 的 _ DEC DEC _ 2 cpm _ _ -7 上海 _ NR NR _ 8 nn _ _ -8 市民 _ NN NN _ 13 nsubj _ _ -9 , _ PU PU _ 13 punct _ _ -10 也 _ AD AD _ 13 advmod _ _ -11 对 _ P P _ 13 prep _ _ -12 欧元 _ NN NN _ 11 pobj _ _ -13 表示 _ VV VV _ 0 root _ _ -14 出 _ VV VV _ 13 rcomp _ _ -15 极 _ AD AD _ 16 advmod _ _ -16 大 _ VA VA _ 18 rcmod _ _ -17 的 _ DEC DEC _ 16 cpm _ _ -18 兴趣 _ NN NN _ 13 dobj _ _ -19 。 _ PU PU _ 13 punct _ _ - -1 继 _ P P _ 38 prep _ _ -2 上海 _ NR NR _ 6 nn _ _ -3 大众 _ NR NR _ 6 nn _ 
_ -4 汽车 _ NN NN _ 6 nn _ _ -5 有限 _ JJ JJ _ 6 amod _ _ -6 公司 _ NN NN _ 13 nsubj _ _ -7 十八日 _ NT NT _ 13 tmod _ _ -8 在 _ P P _ 13 prep _ _ -9 中国 _ NR NR _ 10 nn _ _ -10 银行 _ NN NN _ 12 nn _ _ -11 上海 _ NR NR _ 12 nn _ _ -12 分行 _ NN NN _ 8 pobj _ _ -13 开立 _ VV VV _ 19 lccomp _ _ -14 上海 _ NR NR _ 16 dep _ _ -15 第一 _ OD OD _ 16 ordmod _ _ -16 个 _ M M _ 18 clf _ _ -17 欧元 _ NN NN _ 18 nn _ _ -18 帐户 _ NN NN _ 13 dobj _ _ -19 后 _ LC LC _ 1 plmod _ _ -20 , _ PU PU _ 38 punct _ _ -21 工商 _ NN NN _ 28 nn _ _ -22 银行 _ NN NN _ 28 conj _ _ diff --git a/tests/embeddings/__init__.py b/tests/embeddings/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/embeddings/test_bert_embedding.py b/tests/embeddings/test_bert_embedding.py deleted file mode 100644 index ef231be0..00000000 --- a/tests/embeddings/test_bert_embedding.py +++ /dev/null @@ -1,129 +0,0 @@ -import unittest -from fastNLP import Vocabulary -from fastNLP.embeddings import BertEmbedding, BertWordPieceEncoder -import torch -import os -from fastNLP import DataSet - - -@unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") -class TestDownload(unittest.TestCase): - def test_download(self): - # import os - vocab = Vocabulary().add_word_lst("This is a test .".split()) - embed = BertEmbedding(vocab, model_dir_or_name='en') - words = torch.LongTensor([[2, 3, 4, 0]]) - print(embed(words).size()) - - for pool_method in ['first', 'last', 'max', 'avg']: - for include_cls_sep in [True, False]: - embed = BertEmbedding(vocab, model_dir_or_name='en', pool_method=pool_method, - include_cls_sep=include_cls_sep) - print(embed(words).size()) - - def test_word_drop(self): - vocab = Vocabulary().add_word_lst("This is a test .".split()) - embed = BertEmbedding(vocab, model_dir_or_name='en', dropout=0.1, word_dropout=0.2) - for i in range(10): - words = torch.LongTensor([[2, 3, 4, 0]]) - print(embed(words).size()) - - -class TestBertEmbedding(unittest.TestCase): - def test_bert_embedding_1(self): - for pool_method in ['first', 'last', 'max', 'avg']: - with self.subTest(pool_method=pool_method): - vocab = Vocabulary().add_word_lst("this is a test . [SEP] NotInBERT".split()) - embed = BertEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_bert', word_dropout=0.1, - pool_method=pool_method) - requires_grad = embed.requires_grad - embed.requires_grad = not requires_grad - embed.train() - words = torch.LongTensor([[2, 3, 4, 0]]) - result = embed(words) - self.assertEqual(result.size(), (1, 4, 16)) - - embed = BertEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_bert', word_dropout=0.1, - pool_method=pool_method) - embed.eval() - words = torch.LongTensor([[2, 3, 4, 0]]) - result = embed(words) - self.assertEqual(result.size(), (1, 4, 16)) - - # overly long inputs should be auto-truncated instead of raising an error - embed = BertEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_bert', word_dropout=0.1, - auto_truncate=True, pool_method=pool_method) - - words = torch.LongTensor([[2, 3, 4, 1]*10, - [2, 3]+[0]*38]) - result = embed(words) - self.assertEqual(result.size(), (2, 40, 16)) - - def test_save_load(self): - bert_save_test = 'bert_save_test' - try: - os.makedirs(bert_save_test, exist_ok=True) - vocab = Vocabulary().add_word_lst("this is a test . 
[SEP] NotInBERT".split()) - embed = BertEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_bert', word_dropout=0.1, - auto_truncate=True) - - embed.save(bert_save_test) - load_embed = BertEmbedding.load(bert_save_test) - words = torch.randint(len(vocab), size=(2, 20)) - embed.eval(), load_embed.eval() - self.assertEqual((embed(words) - load_embed(words)).sum(), 0) - - finally: - import shutil - shutil.rmtree(bert_save_test) - - -class TestBertWordPieceEncoder(unittest.TestCase): - def test_bert_word_piece_encoder(self): - embed = BertWordPieceEncoder(model_dir_or_name='tests/data_for_tests/embedding/small_bert', word_dropout=0.1) - ds = DataSet({'words': ["this is a test . [SEP]".split()]}) - embed.index_datasets(ds, field_name='words') - self.assertTrue(ds.has_field('word_pieces')) - result = embed(torch.LongTensor([[1,2,3,4]])) - - def test_bert_embed_eq_bert_piece_encoder(self): - ds = DataSet({'words': ["this is a texta model vocab".split(), 'this is'.split()]}) - encoder = BertWordPieceEncoder(model_dir_or_name='tests/data_for_tests/embedding/small_bert') - encoder.eval() - encoder.index_datasets(ds, field_name='words') - word_pieces = torch.LongTensor(ds['word_pieces'].get([0, 1])) - word_pieces_res = encoder(word_pieces) - - vocab = Vocabulary() - vocab.from_dataset(ds, field_name='words') - vocab.index_dataset(ds, field_name='words', new_field_name='words') - ds.set_input('words') - words = torch.LongTensor(ds['words'].get([0, 1])) - embed = BertEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_bert', - pool_method='first', include_cls_sep=True, pooled_cls=False, min_freq=1) - embed.eval() - words_res = embed(words) - - # 检查word piece什么的是正常work的 - self.assertEqual((word_pieces_res[0, :5]-words_res[0, :5]).sum(), 0) - self.assertEqual((word_pieces_res[0, 6:]-words_res[0, 5:]).sum(), 0) - self.assertEqual((word_pieces_res[1, :3]-words_res[1, :3]).sum(), 0) - - def test_save_load(self): - bert_save_test = 'bert_save_test' - try: - os.makedirs(bert_save_test, exist_ok=True) - embed = BertWordPieceEncoder(model_dir_or_name='tests/data_for_tests/embedding/small_bert', word_dropout=0.0, - layers='-2') - ds = DataSet({'words': ["this is a test . 
[SEP]".split()]}) - embed.index_datasets(ds, field_name='words') - self.assertTrue(ds.has_field('word_pieces')) - words = torch.LongTensor([[1, 2, 3, 4]]) - embed.save(bert_save_test) - load_embed = BertWordPieceEncoder.load(bert_save_test) - embed.eval(), load_embed.eval() - self.assertEqual((embed(words) - load_embed(words)).sum(), 0) - finally: - import shutil - shutil.rmtree(bert_save_test) - diff --git a/tests/embeddings/test_char_embedding.py b/tests/embeddings/test_char_embedding.py deleted file mode 100644 index ceafe4f5..00000000 --- a/tests/embeddings/test_char_embedding.py +++ /dev/null @@ -1,26 +0,0 @@ -import unittest - -import torch - -from fastNLP import Vocabulary, DataSet, Instance -from fastNLP.embeddings.char_embedding import LSTMCharEmbedding, CNNCharEmbedding - - -class TestCharEmbed(unittest.TestCase): - def test_case_1(self): - ds = DataSet([Instance(words=['hello', 'world']), Instance(words=['Jack'])]) - vocab = Vocabulary().from_dataset(ds, field_name='words') - self.assertEqual(len(vocab), 5) - embed = LSTMCharEmbedding(vocab, embed_size=60) - x = torch.LongTensor([[2, 1, 0], [4, 3, 4]]) - y = embed(x) - self.assertEqual(tuple(y.size()), (2, 3, 60)) - - def test_case_2(self): - ds = DataSet([Instance(words=['hello', 'world']), Instance(words=['Jack'])]) - vocab = Vocabulary().from_dataset(ds, field_name='words') - self.assertEqual(len(vocab), 5) - embed = CNNCharEmbedding(vocab, embed_size=60) - x = torch.LongTensor([[2, 1, 0], [4, 3, 4]]) - y = embed(x) - self.assertEqual(tuple(y.size()), (2, 3, 60)) diff --git a/tests/embeddings/test_elmo_embedding.py b/tests/embeddings/test_elmo_embedding.py deleted file mode 100644 index 7f6f5b35..00000000 --- a/tests/embeddings/test_elmo_embedding.py +++ /dev/null @@ -1,37 +0,0 @@ - -import unittest -from fastNLP import Vocabulary -from fastNLP.embeddings import ElmoEmbedding -import torch -import os - -@unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") -class TestDownload(unittest.TestCase): - def test_download_small(self): - # import os - vocab = Vocabulary().add_word_lst("This is a test .".split()) - elmo_embed = ElmoEmbedding(vocab, model_dir_or_name='en-small') - words = torch.LongTensor([[0, 1, 2]]) - print(elmo_embed(words).size()) - - -# 首先保证所有权重可以加载;上传权重;验证可以下载 - - -class TestRunElmo(unittest.TestCase): - def test_elmo_embedding(self): - vocab = Vocabulary().add_word_lst("This is a test .".split()) - elmo_embed = ElmoEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_elmo', layers='0,1') - words = torch.LongTensor([[0, 1, 2]]) - hidden = elmo_embed(words) - print(hidden.size()) - self.assertEqual(hidden.size(), (1, 3, elmo_embed.embedding_dim)) - - def test_elmo_embedding_layer_assertion(self): - vocab = Vocabulary().add_word_lst("This is a test .".split()) - try: - elmo_embed = ElmoEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_elmo', - layers='0,1,2') - except AssertionError as e: - print(e) - diff --git a/tests/embeddings/test_gpt2_embedding.py b/tests/embeddings/test_gpt2_embedding.py deleted file mode 100644 index 070ae528..00000000 --- a/tests/embeddings/test_gpt2_embedding.py +++ /dev/null @@ -1,272 +0,0 @@ - -import unittest -import torch -import os - -from fastNLP.modules.tokenizer.gpt2_tokenizer import GPT2Tokenizer -from fastNLP.embeddings import GPT2WordPieceEncoder, GPT2Embedding -from fastNLP import DataSet, Vocabulary - - -class TestGPT2Embedding(unittest.TestCase): - @unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") - def 
test_download(self): - vocab = Vocabulary().add_word_lst("This is a test .".split()) - embed = GPT2Embedding(vocab, model_dir_or_name='en') - words = torch.LongTensor([[2, 3, 4, 0]]) - print(embed(words).size()) - - for pool_method in ['first', 'last', 'max', 'avg']: - embed = GPT2Embedding(vocab, model_dir_or_name='en', pool_method=pool_method) - print(embed(words).size()) - - def test_gpt2_embedding(self): - weight_path = 'tests/data_for_tests/embedding/small_gpt2' - vocab = Vocabulary().add_word_lst("this is a texta sentence".split()) - embed = GPT2Embedding(vocab, model_dir_or_name=weight_path, word_dropout=0.1) - requires_grad = embed.requires_grad - embed.requires_grad = not requires_grad - embed.train() - words = torch.LongTensor([[2, 3, 4, 0]]) - result = embed(words) - self.assertEqual(result.size(), (1, 4, 16)) - - embed = GPT2Embedding(vocab, model_dir_or_name=weight_path, word_dropout=0.1, - only_use_pretrain_bpe=False, language_model=True) - embed.eval() - words = torch.LongTensor([[2, 3, 4, 0]]) - result = embed(words) - self.assertEqual(result.size(), (1, 4, 16)) - embed.get_lm_loss() - - vocab.add_word("NotInGpt2") - embed = GPT2Embedding(vocab, model_dir_or_name=weight_path, word_dropout=0.1, - only_use_pretrain_bpe=False, auto_truncate=True, min_freq=1) - words = torch.LongTensor([[2, 3, 4, 0]*20]) - result = embed(words) - self.assertEqual(result.size(), (1, 80, 16)) - - def test_gpt2_embedding_2(self): - # test that only_use_pretrain_bpe and truncate_embed work as expected - Embedding = GPT2Embedding - weight_path = 'tests/data_for_tests/embedding/small_gpt2' - vocab = Vocabulary().add_word_lst("this is a texta and".split()) - embed1 = Embedding(vocab, model_dir_or_name=weight_path, layers=list(range(3)), - only_use_pretrain_bpe=True, truncate_embed=True, min_freq=1) - # embed_bpe_vocab_size = len(vocab)-1 + 2 # excludes NotInBERT; adds ##a and [CLS] - # self.assertEqual(embed_bpe_vocab_size, len(embed1.model.tokenzier.vocab)) - - embed2 = Embedding(vocab, model_dir_or_name=weight_path, layers=list(range(3)), - only_use_pretrain_bpe=True, truncate_embed=False, min_freq=1) - # embed_bpe_vocab_size = num_word # excludes NotInBERT - # self.assertEqual(embed_bpe_vocab_size, len(embed2.model.tokenzier.vocab)) - - embed3 = Embedding(vocab, model_dir_or_name=weight_path, layers=list(range(3)), - only_use_pretrain_bpe=False, truncate_embed=True, min_freq=1) - # embed_bpe_vocab_size = len(vocab)+2 # adds ##a and [CLS] - # self.assertEqual(embed_bpe_vocab_size, len(embed3.model.tokenzier.vocab)) - - embed4 = Embedding(vocab, model_dir_or_name=weight_path, layers=list(range(3)), - only_use_pretrain_bpe=False, truncate_embed=False, min_freq=1) - # embed_bpe_vocab_size = num_word+1 # adds ##a - # self.assertEqual(embed_bpe_vocab_size, len(embed4.model.tokenzier.vocab)) - - # in all of the above settings the following tensors should be equal - embed1.eval() - embed2.eval() - embed3.eval() - embed4.eval() - tensor = torch.LongTensor([[vocab.to_index(w) for w in 'this is a texta and'.split()]]) - t1 = embed1(tensor) - t2 = embed2(tensor) - t3 = embed3(tensor) - t4 = embed4(tensor) - - self.assertEqual((t1-t2).sum(), 0) - self.assertEqual((t1-t3).sum(), 0) - self.assertEqual((t1-t4).sum(), 0) - - def test_gpt2_tokenizer(self): - from fastNLP.modules.tokenizer import GPT2Tokenizer - - tokenizer = GPT2Tokenizer.from_pretrained('tests/data_for_tests/embedding/small_gpt2') - print(tokenizer.encode("this is a texta a sentence")) - print(tokenizer.encode('this is')) - - def test_gpt2_embed_eq_gpt2_piece_encoder(self): - # mainly checks that the embedding outputs match the word-piece encoder outputs - weight_path = 
'tests/data_for_tests/embedding/small_gpt2' - ds = DataSet({'words': ["this is a texta a sentence".split(), 'this is'.split()]}) - encoder = GPT2WordPieceEncoder(model_dir_or_name=weight_path) - encoder.eval() - encoder.index_datasets(ds, field_name='words') - word_pieces = torch.LongTensor(ds['word_pieces'].get([0, 1])) - word_pieces_res = encoder(word_pieces) - - vocab = Vocabulary() - vocab.from_dataset(ds, field_name='words') - vocab.index_dataset(ds, field_name='words', new_field_name='words') - ds.set_input('words') - words = torch.LongTensor(ds['words'].get([0, 1])) - embed = GPT2Embedding(vocab, model_dir_or_name=weight_path, pool_method='first') - embed.eval() - words_res = embed(words) - - # check that the word-piece outputs line up with the word-level embedding outputs - self.assertEqual((word_pieces_res[0, :4]-words_res[0, :4]).sum(), 0) - self.assertEqual((word_pieces_res[0, 5:]-words_res[0, 4:]).sum(), 0) - self.assertEqual((word_pieces_res[1, :2]-words_res[1, :2]).sum(), 0) - - -class TestGPT2WordPieceEncoder(unittest.TestCase): - @unittest.skipIf(True, "Only for local debugging") - def test_eq_transformers(self): - # test whether the results match those produced by transformers - weight_path = '' - - # tokenizer = transformers.GPT2Tokenizer.from_pretrained(weight_path) - - ds = DataSet({'words': ["this this this a is texta model vocab".split(), 'this is'.split()]}) - - import transformers - input1 = ' '.join(ds[0]['words']) - input2 = ' '.join(ds[1]['words']) - tokenizer = transformers.GPT2Tokenizer.from_pretrained(weight_path) - idx_list1 = tokenizer.encode(input1) - idx_list2 = tokenizer.encode(input2) - - pad_value = tokenizer.encode('<|endoftext|>')[0] - tensor = torch.nn.utils.rnn.pad_sequence([torch.LongTensor(idx_list1), - torch.LongTensor(idx_list2)], - batch_first=True, - padding_value=pad_value) - gpt2 = transformers.GPT2Model.from_pretrained(weight_path, output_hidden_states=True) - gpt2.eval() - tensor = tensor - output, _, trans_hidden_states = gpt2(tensor, attention_mask=tensor.ne(pad_value)) - - encoder = GPT2WordPieceEncoder(model_dir_or_name=weight_path, layers=list(range(13))) - encoder.eval() - encoder.index_datasets(ds, field_name='words', add_endoftext=False) - word_pieces = torch.LongTensor(ds['word_pieces'].get([0, 1])) - - self.assertEqual(idx_list1, ds[0]['word_pieces']) - self.assertEqual(idx_list2, ds[1]['word_pieces']) - - word_pieces_res = encoder(word_pieces) - - self.assertEqual((torch.cat(trans_hidden_states, dim=-1)-word_pieces_res).sum(), 0) - - @unittest.skipIf(True, "Only for local usage") - def test_generate_small_gpt2(self): - # since GPT2 uses the GPT2 tokenizer, the weights cannot be generated directly; use the approach below instead - weight_path = '' - tokenizer = GPT2Tokenizer.from_pretrained(weight_path) - - used_pairs = {} - used_vocab = {} - # edit these to generate data for more sentences - sent1 = "This is a demo sentence" - sent2 = "another demo" - sent3 = 'this is a texta model vocab' - all_tokens = [] - - for sent in [sent1, sent2, sent3]: - tokens = [] - for word in sent.split(): - word = ' '+ word - token = "".join( - tokenizer.byte_encoder[b] for b in word.encode("utf-8") - ) - _token, _used_pairs = tokenizer.get_used_merge_pair_vocab(token) - tokens.extend(_token.split()) - used_pairs.update(_used_pairs) - all_tokens.extend(tokens) - token_ids = tokenizer.convert_tokens_to_ids(tokens) - used_vocab.update({t:i for t,i in zip(tokens, token_ids)}) - - print(used_pairs) - import json - with open('tests/data_for_tests/embedding/small_gpt2/vocab.json', 'w') as f: - new_used_vocab = {} - for idx, key in enumerate(used_vocab.keys()): - new_used_vocab[key] = len(new_used_vocab) - 
new_used_vocab['<|endoftext|>'] = len(new_used_vocab) - for i in range(65, 91): - if chr(i) not in new_used_vocab: - new_used_vocab[chr(i)] = len(new_used_vocab) - for i in range(97, 123): - if chr(i) not in new_used_vocab: - new_used_vocab[chr(i)] = len(new_used_vocab) - - json.dump(new_used_vocab, f) - - with open('tests/data_for_tests/embedding/small_gpt2/merges.txt', 'w') as f: - f.write('#version: small\n') - for k,v in sorted(sorted(used_pairs.items(), key=lambda kv:kv[1])): - f.write('{} {}\n'.format(k[0], k[1])) - - new_tokenizer = GPT2Tokenizer.from_pretrained('tests/data_for_tests/embedding/small_gpt2') - new_all_tokens = [] - for sent in [sent1, sent2, sent3]: - tokens = new_tokenizer.tokenize(sent, add_prefix_space=True) - new_all_tokens.extend(tokens) - print(all_tokens, new_all_tokens) - - self.assertSequenceEqual(all_tokens, new_all_tokens) - config = { - "architectures": [ - "GPT2LMHeadModel" - ], - "initializer_range": 0.02, - "layer_norm_epsilon": 1e-05, - "n_ctx": 20, - "n_embd": 16, - "n_head": 4, - "n_layer": 2, - "n_positions": 20, - "vocab_size": len(new_used_vocab) - } - with open('tests/data_for_tests/embedding/small_gpt2/config.json', 'w') as f: - json.dump(config, f) - - # the smaller merges.txt and vocab.json are generated by recording the values used in the tokenizer - from fastNLP.modules.encoder.gpt2 import GPT2LMHeadModel, GPT2Config - - config = GPT2Config.from_pretrained('tests/data_for_tests/embedding/small_gpt2') - - model = GPT2LMHeadModel(config) - torch.save(model.state_dict(), 'tests/data_for_tests/embedding/small_gpt2/small_pytorch_model.bin') - print(model(torch.LongTensor([[0,1,2,3]]))) - - def test_gpt2_word_piece_encoder(self): - # mainly checks that it runs - weight_path = 'tests/data_for_tests/embedding/small_gpt2' - ds = DataSet({'words': ["this is a test sentence".split()]}) - embed = GPT2WordPieceEncoder(model_dir_or_name=weight_path, word_dropout=0.1) - embed.index_datasets(ds, field_name='words') - self.assertTrue(ds.has_field('word_pieces')) - result = embed(torch.LongTensor([[1, 2, 3, 4]])) - - embed = GPT2WordPieceEncoder(model_dir_or_name=weight_path, word_dropout=0.1, - language_model=True) - embed.index_datasets(ds, field_name='words') - self.assertTrue(ds.has_field('word_pieces')) - result = embed(torch.LongTensor([[1, 2, 3, 4]])) - - @unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") - def test_generate(self): - # weight_path = 'tests/data_for_tests/embedding/small_gpt2' - weight_path = 'en' - - encoder = GPT2WordPieceEncoder(model_dir_or_name=weight_path, language_model=True) - - # check that the various generation settings work - print(encoder.generate_from_str('This', max_len=20, do_sample=False, num_beams=1, temperature=1, top_k=50, top_p=1.0, - repetition_penalty=1.0, length_penalty=1.0)) - print(encoder.generate_from_str('This day', max_len=20, do_sample=False, num_beams=1, temperature=1, top_k=50, top_p=1.0, - repetition_penalty=1.0, length_penalty=1.0)) - print(encoder.generate_from_str('This', max_len=20, do_sample=True, num_beams=3, temperature=1, top_k=50, top_p=1.0, - repetition_penalty=1.0, length_penalty=1.0)) - print(encoder.generate_from_str('This', max_len=20, do_sample=True, num_beams=3, temperature=2, top_k=20, top_p=2.0, - repetition_penalty=2.0, length_penalty=1.5)) diff --git a/tests/embeddings/test_roberta_embedding.py b/tests/embeddings/test_roberta_embedding.py deleted file mode 100644 index d4874a0b..00000000 --- a/tests/embeddings/test_roberta_embedding.py +++ /dev/null @@ -1,279 +0,0 @@ - -import unittest - -import torch -import os - -from fastNLP import DataSet, Vocabulary -from 
fastNLP.embeddings.roberta_embedding import RobertaWordPieceEncoder, RobertaEmbedding - - -class TestRobertaWordPieceEncoder(unittest.TestCase): - @unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") - def test_download(self): - vocab = Vocabulary().add_word_lst("This is a test .".split()) - embed = RobertaEmbedding(vocab, model_dir_or_name='en') - words = torch.LongTensor([[2, 3, 4, 0]]) - print(embed(words).size()) - - for pool_method in ['first', 'last', 'max', 'avg']: - for include_cls_sep in [True, False]: - embed = RobertaEmbedding(vocab, model_dir_or_name='en', pool_method=pool_method, - include_cls_sep=include_cls_sep) - print(embed(words).size()) - - def test_roberta_word_piece_encoder(self): - # just needs to run without errors - weight_path = 'tests/data_for_tests/embedding/small_roberta' - encoder = RobertaWordPieceEncoder(model_dir_or_name=weight_path, word_dropout=0.1) - ds = DataSet({'words': ["this is a test . [SEP]".split()]}) - encoder.index_datasets(ds, field_name='words') - self.assertTrue(ds.has_field('word_pieces')) - result = encoder(torch.LongTensor([[1,2,3,4]])) - - def test_roberta_embed_eq_roberta_piece_encoder(self): - # mainly checks that the embedding outputs match the word-piece encoder outputs - weight_path = 'tests/data_for_tests/embedding/small_roberta' - ds = DataSet({'words': ["this is a texta a sentence".split(), 'this is'.split()]}) - encoder = RobertaWordPieceEncoder(model_dir_or_name=weight_path) - encoder.eval() - encoder.index_datasets(ds, field_name='words') - word_pieces = torch.LongTensor(ds['word_pieces'].get([0, 1])) - word_pieces_res = encoder(word_pieces) - - vocab = Vocabulary() - vocab.from_dataset(ds, field_name='words') - vocab.index_dataset(ds, field_name='words', new_field_name='words') - ds.set_input('words') - words = torch.LongTensor(ds['words'].get([0, 1])) - embed = RobertaEmbedding(vocab, model_dir_or_name=weight_path, - pool_method='first', include_cls_sep=True, pooled_cls=False, min_freq=1) - embed.eval() - words_res = embed(words) - - # check that the word-piece outputs line up with the word-level embedding outputs - self.assertEqual((word_pieces_res[0, :5]-words_res[0, :5]).sum(), 0) - self.assertEqual((word_pieces_res[0, 6:]-words_res[0, 5:]).sum(), 0) - self.assertEqual((word_pieces_res[1, :3]-words_res[1, :3]).sum(), 0) - - @unittest.skipIf(True, "Only for local debugging") - def test_eq_transformers(self): - weight_path = '' - ds = DataSet({'words': ["this is a texta model vocab".split(), 'this is'.split()]}) - encoder = RobertaWordPieceEncoder(model_dir_or_name=weight_path) - encoder.eval() - encoder.index_datasets(ds, field_name='words') - word_pieces = torch.LongTensor(ds['word_pieces'].get([0, 1])) - word_pieces_res = encoder(word_pieces) - - import transformers - input1 = ' '.join(ds[0]['words']) - input2 = ' '.join(ds[1]['words']) - tokenizer = transformers.RobertaTokenizer.from_pretrained(weight_path) - idx_list1 = tokenizer.encode(input1) - idx_list2 = tokenizer.encode(input2) - self.assertEqual(idx_list1, ds[0]['word_pieces']) - self.assertEqual(idx_list2, ds[1]['word_pieces']) - - pad_value = tokenizer.encode('<pad>')[0] - tensor = torch.nn.utils.rnn.pad_sequence([torch.LongTensor(idx_list1), - torch.LongTensor(idx_list2)], - batch_first=True, - padding_value=pad_value) - roberta = transformers.RobertaModel.from_pretrained(weight_path, output_hidden_states=True) - roberta.eval() - output, pooled_output, hidden_states = roberta(tensor, attention_mask=tensor.ne(pad_value)) - - self.assertEqual((output-word_pieces_res).sum(), 0) - - @unittest.skipIf(True, "Only for local usage") - def test_generate_small_roberta(self): - """ - 
Since RoBERTa uses the GPT2 tokenizer, the weights cannot be generated directly; use the approach below instead. - - :return: - """ - weight_path = '' - from fastNLP.modules.tokenizer import RobertaTokenizer - tokenizer = RobertaTokenizer.from_pretrained(weight_path) - - used_pairs = {} - used_vocab = {} - # edit these to generate data for more sentences - sent1 = "This is a demo sentence" - sent2 = "another demo" - sent3 = 'this is a texta model vocab' - all_tokens = [] - - for sent in [sent1, sent2, sent3]: - tokens = [] - for word in sent.split(): - word = ' '+ word - token = "".join( - tokenizer.byte_encoder[b] for b in word.encode("utf-8") - ) - _token, _used_pairs = tokenizer.get_used_merge_pair_vocab(token) - tokens.extend(_token.split()) - used_pairs.update(_used_pairs) - all_tokens.extend(tokens) - token_ids = tokenizer.convert_tokens_to_ids(tokens) - used_vocab.update({t:i for t,i in zip(tokens, token_ids)}) - - import json - with open('tests/data_for_tests/embedding/small_roberta/vocab.json', 'w') as f: - new_used_vocab = {} - for token in ['<s>', '<pad>', '</s>', '<unk>', '<mask>']: # <pad> must map to index 1 - new_used_vocab[token] = len(new_used_vocab) - for i in range(65, 91): - if chr(i) not in new_used_vocab: - new_used_vocab[chr(i)] = len(new_used_vocab) - for i in range(97, 123): - if chr(i) not in new_used_vocab: - new_used_vocab[chr(i)] = len(new_used_vocab) - for idx, key in enumerate(used_vocab.keys()): - if key not in new_used_vocab: - new_used_vocab[key] = len(new_used_vocab) - json.dump(new_used_vocab, f) - - with open('tests/data_for_tests/embedding/small_roberta/merges.txt', 'w') as f: - f.write('#version: tiny\n') - for k,v in sorted(sorted(used_pairs.items(), key=lambda kv:kv[1])): - f.write('{} {}\n'.format(k[0], k[1])) - - config = { - "architectures": [ - "RobertaForMaskedLM" - ], - "attention_probs_dropout_prob": 0.1, - "finetuning_task": None, - "hidden_act": "gelu", - "hidden_dropout_prob": 0.1, - "hidden_size": 16, - "initializer_range": 0.02, - "intermediate_size": 20, - "layer_norm_eps": 1e-05, - "max_position_embeddings": 20, - "num_attention_heads": 4, - "num_hidden_layers": 2, - "num_labels": 2, - "output_attentions": False, - "output_hidden_states": False, - "torchscript": False, - "type_vocab_size": 1, - "vocab_size": len(new_used_vocab) - } - with open('tests/data_for_tests/embedding/small_roberta/config.json', 'w') as f: - json.dump(config, f) - - new_tokenizer = RobertaTokenizer.from_pretrained('tests/data_for_tests/embedding/small_roberta') - new_all_tokens = [] - for sent in [sent1, sent2, sent3]: - tokens = new_tokenizer.tokenize(sent, add_prefix_space=True) - new_all_tokens.extend(tokens) - print(all_tokens, new_all_tokens) - - self.assertSequenceEqual(all_tokens, new_all_tokens) - - # the smaller merges.txt and vocab.json are generated by recording the values used in the tokenizer - from fastNLP.modules.encoder.roberta import RobertaModel, BertConfig - - config = BertConfig.from_json_file('tests/data_for_tests/embedding/small_roberta/config.json') - - model = RobertaModel(config) - torch.save(model.state_dict(), 'tests/data_for_tests/embedding/small_roberta/small_pytorch_model.bin') - print(model(torch.LongTensor([[0,1,2,3]]))) - - def test_save_load(self): - bert_save_test = 'roberta_save_test' - try: - os.makedirs(bert_save_test, exist_ok=True) - embed = RobertaWordPieceEncoder(model_dir_or_name='tests/data_for_tests/embedding/small_roberta', word_dropout=0.0, - layers='-2') - ds = DataSet({'words': ["this is a test . 
[SEP]".split()]}) - embed.index_datasets(ds, field_name='words') - self.assertTrue(ds.has_field('word_pieces')) - words = torch.LongTensor([[1, 2, 3, 4]]) - embed.save(bert_save_test) - load_embed = RobertaWordPieceEncoder.load(bert_save_test) - embed.eval(), load_embed.eval() - self.assertEqual((embed(words) - load_embed(words)).sum(), 0) - finally: - import shutil - shutil.rmtree(bert_save_test) - - -class TestRobertaEmbedding(unittest.TestCase): - def test_roberta_embedding_1(self): - weight_path = 'tests/data_for_tests/embedding/small_roberta' - vocab = Vocabulary().add_word_lst("this is a test . [SEP] NotInRoberta".split()) - embed = RobertaEmbedding(vocab, model_dir_or_name=weight_path, word_dropout=0.1) - requires_grad = embed.requires_grad - embed.requires_grad = not requires_grad - embed.train() - words = torch.LongTensor([[2, 3, 4, 1]]) - result = embed(words) - self.assertEqual(result.size(), (1, 4, 16)) - - # 自动截断而不报错 - embed = RobertaEmbedding(vocab, model_dir_or_name=weight_path, word_dropout=0.1, auto_truncate=True) - words = torch.LongTensor([[2, 3, 4, 1]*10, - [2, 3]+[0]*38]) - result = embed(words) - self.assertEqual(result.size(), (2, 40, 16)) - - def test_roberta_ebembedding_2(self): - # 测试only_use_pretrain_vocab与truncate_embed是否正常工作 - Embedding = RobertaEmbedding - weight_path = 'tests/data_for_tests/embedding/small_roberta' - vocab = Vocabulary().add_word_lst("this is a texta and".split()) - embed1 = Embedding(vocab, model_dir_or_name=weight_path, layers=list(range(3)), - only_use_pretrain_bpe=True, truncate_embed=True, min_freq=1) - # embed_bpe_vocab_size = len(vocab)-1 + 2 # 排除NotInBERT, 额外加##a, [CLS] - # self.assertEqual(embed_bpe_vocab_size, len(embed1.model.tokenzier.vocab)) - - embed2 = Embedding(vocab, model_dir_or_name=weight_path, layers=list(range(3)), - only_use_pretrain_bpe=True, truncate_embed=False, min_freq=1) - # embed_bpe_vocab_size = num_word # 排除NotInBERT - # self.assertEqual(embed_bpe_vocab_size, len(embed2.model.tokenzier.vocab)) - - embed3 = Embedding(vocab, model_dir_or_name=weight_path, layers=list(range(3)), - only_use_pretrain_bpe=False, truncate_embed=True, min_freq=1) - # embed_bpe_vocab_size = len(vocab)+2 # 新增##a, [CLS] - # self.assertEqual(embed_bpe_vocab_size, len(embed3.model.tokenzier.vocab)) - - embed4 = Embedding(vocab, model_dir_or_name=weight_path, layers=list(range(3)), - only_use_pretrain_bpe=False, truncate_embed=False, min_freq=1) - # embed_bpe_vocab_size = num_word+1 # 新增##a - # self.assertEqual(embed_bpe_vocab_size, len(embed4.model.tokenzier.vocab)) - - # 测试各种情况下以下tensor的值是相等的 - embed1.eval() - embed2.eval() - embed3.eval() - embed4.eval() - tensor = torch.LongTensor([[vocab.to_index(w) for w in 'this is a texta and'.split()]]) - t1 = embed1(tensor) - t2 = embed2(tensor) - t3 = embed3(tensor) - t4 = embed4(tensor) - - self.assertEqual((t1-t2).sum(), 0) - self.assertEqual((t1-t3).sum(), 0) - self.assertEqual((t1-t4).sum(), 0) - - def test_save_load(self): - bert_save_test = 'roberta_save_test' - try: - os.makedirs(bert_save_test, exist_ok=True) - vocab = Vocabulary().add_word_lst("this is a test . 
[SEP] NotInBERT".split()) - embed = RobertaEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_roberta', - word_dropout=0.1, - auto_truncate=True) - embed.save(bert_save_test) - load_embed = RobertaEmbedding.load(bert_save_test) - words = torch.randint(len(vocab), size=(2, 20)) - embed.eval(), load_embed.eval() - self.assertEqual((embed(words) - load_embed(words)).sum(), 0) - finally: - import shutil - shutil.rmtree(bert_save_test) diff --git a/tests/embeddings/test_stack_embeddings.py b/tests/embeddings/test_stack_embeddings.py deleted file mode 100644 index 8177fa90..00000000 --- a/tests/embeddings/test_stack_embeddings.py +++ /dev/null @@ -1,33 +0,0 @@ -import unittest - -import torch - -from fastNLP import Vocabulary, DataSet, Instance -from fastNLP.embeddings import LSTMCharEmbedding, CNNCharEmbedding, StackEmbedding - - -class TestCharEmbed(unittest.TestCase): - def test_case_1(self): - ds = DataSet([Instance(words=['hello', 'world']), Instance(words=['hello', 'Jack'])]) - vocab = Vocabulary().from_dataset(ds, field_name='words') - self.assertEqual(len(vocab), 5) - cnn_embed = CNNCharEmbedding(vocab, embed_size=60) - lstm_embed = LSTMCharEmbedding(vocab, embed_size=70) - embed = StackEmbedding([cnn_embed, lstm_embed]) - x = torch.LongTensor([[2, 1, 0], [4, 3, 4]]) - y = embed(x) - self.assertEqual(tuple(y.size()), (2, 3, 130)) - - def test_case_2(self): - # 测试只需要拥有一样的index就可以concat - ds = DataSet([Instance(words=['hello', 'world']), Instance(words=['hello', 'Jack'])]) - vocab1 = Vocabulary().from_dataset(ds, field_name='words') - vocab2 = Vocabulary().from_dataset(ds, field_name='words') - self.assertEqual(len(vocab1), 5) - cnn_embed = CNNCharEmbedding(vocab1, embed_size=60) - lstm_embed = LSTMCharEmbedding(vocab2, embed_size=70) - embed = StackEmbedding([cnn_embed, lstm_embed]) - x = torch.LongTensor([[2, 1, 0], [4, 3, 4]]) - y = embed(x) - self.assertEqual(tuple(y.size()), (2, 3, 130)) - diff --git a/tests/embeddings/test_static_embedding.py b/tests/embeddings/test_static_embedding.py deleted file mode 100644 index 90519338..00000000 --- a/tests/embeddings/test_static_embedding.py +++ /dev/null @@ -1,283 +0,0 @@ -import unittest - -from fastNLP.embeddings import StaticEmbedding -from fastNLP import Vocabulary -import torch -import os - - -class TestLoad(unittest.TestCase): - def test_norm1(self): - # 测试只对可以找到的norm - vocab = Vocabulary().add_word_lst(['the', 'a', 'notinfile']) - embed = StaticEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_static_embedding/' - 'glove.6B.50d_test.txt', - only_norm_found_vector=True) - self.assertEqual(round(torch.norm(embed(torch.LongTensor([[2]]))).item(), 4), 1) - self.assertNotEqual(torch.norm(embed(torch.LongTensor([[4]]))).item(), 1) - - def test_norm2(self): - # 测试对所有都norm - vocab = Vocabulary().add_word_lst(['the', 'a', 'notinfile']) - embed = StaticEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_static_embedding/' - 'glove.6B.50d_test.txt', - normalize=True) - self.assertEqual(round(torch.norm(embed(torch.LongTensor([[2]]))).item(), 4), 1) - self.assertEqual(round(torch.norm(embed(torch.LongTensor([[4]]))).item(), 4), 1) - - def test_dropword(self): - # 测试是否可以通过drop word - vocab = Vocabulary().add_word_lst([chr(i) for i in range(1, 200)]) - embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=10, dropout=0.1, word_dropout=0.4) - for i in range(10): - length = torch.randint(1, 50, (1,)).item() - batch = torch.randint(1, 4, (1,)).item() - words = 
torch.randint(1, 200, (batch, length)).long() - embed(words) - - def test_only_use_pretrain_word(self): - def check_word_unk(words, vocab, embed): - for word in words: - self.assertListEqual(embed(torch.LongTensor([vocab.to_index(word)])).tolist()[0], - embed(torch.LongTensor([1])).tolist()[0]) - - def check_vector_equal(words, vocab, embed, embed_dict, lower=False): - for word in words: - index = vocab.to_index(word) - v1 = embed(torch.LongTensor([index])).tolist()[0] - if lower: - word = word.lower() - v2 = embed_dict[word] - for v1i, v2i in zip(v1, v2): - self.assertAlmostEqual(v1i, v2i, places=4) - embed_dict = read_static_embed('tests/data_for_tests/embedding/small_static_embedding/' - 'glove.6B.50d_test.txt') - - # test using only pretrained words - vocab = Vocabulary().add_word_lst(['the', 'a', 'notinfile']) - vocab.add_word('of', no_create_entry=True) - embed = StaticEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_static_embedding/' - 'glove.6B.50d_test.txt', - only_use_pretrain_word=True) - # notinfile should be mapped to unk - check_vector_equal(['the', 'a', 'of'], vocab, embed, embed_dict) - check_word_unk(['notinfile'], vocab, embed) - - # test behavior with respect to casing - vocab = Vocabulary().add_word_lst(['The', 'a', 'notinfile']) - vocab.add_word('Of', no_create_entry=True) - embed = StaticEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_static_embedding/' - 'glove.6B.50d_test.txt', - only_use_pretrain_word=True) - check_word_unk(['The', 'Of', 'notinfile'], vocab, embed) # these words should not be found - check_vector_equal(['a'], vocab, embed, embed_dict) - - embed = StaticEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_static_embedding/' - 'glove.6B.50d_test.txt', - only_use_pretrain_word=True, lower=True) - check_vector_equal(['The', 'Of', 'a'], vocab, embed, embed_dict, lower=True) - check_word_unk(['notinfile'], vocab, embed) - - # test min_freq - vocab = Vocabulary().add_word_lst(['The', 'a', 'notinfile1', 'A', 'notinfile2', 'notinfile2']) - vocab.add_word('Of', no_create_entry=True) - - embed = StaticEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_static_embedding/' - 'glove.6B.50d_test.txt', - only_use_pretrain_word=True, lower=True, min_freq=2, only_train_min_freq=True) - - check_vector_equal(['Of', 'a'], vocab, embed, embed_dict, lower=True) - check_word_unk(['notinfile1', 'The', 'notinfile2'], vocab, embed) - - def test_sequential_index(self): - # when no word has no_create_entry, words_to_words should be sequential - vocab = Vocabulary().add_word_lst(['The', 'a', 'notinfile1', 'A', 'notinfile2', 'notinfile2']) - embed = StaticEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_static_embedding/' - 'glove.6B.50d_test.txt') - for index,i in enumerate(embed.words_to_words): - assert index==i - - embed_dict = read_static_embed('tests/data_for_tests/embedding/small_static_embedding/' - 'glove.6B.50d_test.txt') - - for word, index in vocab: - if word in embed_dict: - index = vocab.to_index(word) - v1 = embed(torch.LongTensor([index])).tolist()[0] - v2 = embed_dict[word] - for v1i, v2i in zip(v1, v2): - self.assertAlmostEqual(v1i, v2i, places=4) - - def test_save_load_static_embed(self): - static_test_folder = 'static_save_test' - try: - # test with no_create_entry - os.makedirs(static_test_folder, exist_ok=True) - - vocab = Vocabulary().add_word_lst(['The', 'a', 'notinfile1', 'A']) - vocab.add_word_lst(['notinfile2', 'notinfile2'], no_create_entry=True) - embed = StaticEmbedding(vocab, 
model_dir_or_name='tests/data_for_tests/embedding/small_static_embedding/' - 'glove.6B.50d_test.txt') - embed.save(static_test_folder) - load_embed = StaticEmbedding.load(static_test_folder) - words = torch.randint(len(vocab), size=(2, 20)) - self.assertEqual((embed(words) - load_embed(words)).sum(), 0) - - # test without no_create_entry - vocab = Vocabulary().add_word_lst(['The', 'a', 'notinfile1', 'A']) - embed = StaticEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_static_embedding/' - 'glove.6B.50d_test.txt') - embed.save(static_test_folder) - load_embed = StaticEmbedding.load(static_test_folder) - words = torch.randint(len(vocab), size=(2, 20)) - self.assertEqual((embed(words) - load_embed(words)).sum(), 0) - - # test lower and min_freq - vocab = Vocabulary().add_word_lst(['The', 'the', 'the', 'A', 'a', 'B']) - embed = StaticEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_static_embedding/' - 'glove.6B.50d_test.txt', min_freq=2, lower=True) - embed.save(static_test_folder) - load_embed = StaticEmbedding.load(static_test_folder) - words = torch.randint(len(vocab), size=(2, 20)) - self.assertEqual((embed(words) - load_embed(words)).sum(), 0) - - # test randomly initialized embeddings - vocab = Vocabulary().add_word_lst(['The', 'the', 'the', 'A', 'a', 'B']) - vocab = vocab.add_word_lst(['b'], no_create_entry=True) - embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=4, min_freq=2, lower=True, - normalize=True) - embed.weight.data += 0.2 # so that it is no longer normalized - embed.save(static_test_folder) - load_embed = StaticEmbedding.load(static_test_folder) - words = torch.randint(len(vocab), size=(2, 20)) - self.assertEqual((embed(words) - load_embed(words)).sum(), 0) - - finally: - if os.path.isdir(static_test_folder): - import shutil - shutil.rmtree(static_test_folder) - - -def read_static_embed(fp): - """ - - :param str fp: path to the embedding file - :return: dict mapping each word to its vector - """ - embed = {} - with open(fp, 'r') as f: - for line in f: - line = line.strip() - if line: - parts = line.split() - vector = list(map(float, parts[1:])) - word = parts[0] - embed[word] = vector - return embed - - -class TestRandomSameEntry(unittest.TestCase): - def test_same_vector(self): - vocab = Vocabulary().add_word_lst(["The", "the", "THE", 'a', "A"]) - embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=5, lower=True) - words = torch.LongTensor([[vocab.to_index(word) for word in ["The", "the", "THE", 'a', 'A']]]) - words = embed(words) - embed_0 = words[0, 0] - for i in range(1, 3): - assert torch.sum(embed_0==words[0, i]).eq(len(embed_0)) - embed_0 = words[0, 3] - for i in range(3, 5): - assert torch.sum(embed_0 == words[0, i]).eq(len(embed_0)) - - @unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") - def test_same_vector2(self): - vocab = Vocabulary().add_word_lst(["The", 'a', 'b', "the", "THE", "B", 'a', "A"]) - embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-6B-100d', - lower=True) - words = torch.LongTensor([[vocab.to_index(word) for word in ["The", "the", "THE", 'b', "B", 'a', 'A']]]) - words = embed(words) - embed_0 = words[0, 0] - for i in range(1, 3): - assert torch.sum(embed_0==words[0, i]).eq(len(embed_0)) - embed_0 = words[0, 3] - for i in range(3, 5): - assert torch.sum(embed_0 == words[0, i]).eq(len(embed_0)) - - @unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") - def test_same_vector3(self): - # verify lower - word_lst = ["The", "the"] - no_create_word_lst = ['of', 'Of', 'With', 'with'] - vocab = Vocabulary().add_word_lst(word_lst) 
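# --- Aside: an illustrative sketch (not from the test suite) of the no_create_entry
# convention these tests exercise; the word lists below are made up. Words added with
# no_create_entry=True are treated as "seen only in dev/test": StaticEmbedding does not
# allocate a trainable vector for them, so if they are also absent from the pretrained
# file they fall back to the unk vector, which is what check_word_unk asserts above.
from fastNLP import Vocabulary
from fastNLP.embeddings import StaticEmbedding

sketch_vocab = Vocabulary().add_word_lst(['the', 'a'])    # words from the training data
sketch_vocab.add_word_lst(['of'], no_create_entry=True)   # words seen only in dev/test
sketch_embed = StaticEmbedding(sketch_vocab, model_dir_or_name=None, embedding_dim=5)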
- vocab.add_word_lst(no_create_word_lst, no_create_entry=True) - embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-6B-100d', - lower=True) - words = torch.LongTensor([[vocab.to_index(word) for word in word_lst+no_create_word_lst]]) - words = embed(words) - - lowered_word_lst = [word.lower() for word in word_lst] - lowered_no_create_word_lst = [word.lower() for word in no_create_word_lst] - lowered_vocab = Vocabulary().add_word_lst(lowered_word_lst) - lowered_vocab.add_word_lst(lowered_no_create_word_lst, no_create_entry=True) - lowered_embed = StaticEmbedding(lowered_vocab, model_dir_or_name='en-glove-6B-100d', - lower=False) - lowered_words = torch.LongTensor([[lowered_vocab.to_index(word) for word in lowered_word_lst+lowered_no_create_word_lst]]) - lowered_words = lowered_embed(lowered_words) - - all_words = word_lst + no_create_word_lst - - for idx, (word_i, word_j) in enumerate(zip(words[0], lowered_words[0])): - with self.subTest(idx=idx, word=all_words[idx]): - assert torch.sum(word_i == word_j).eq(lowered_embed.embed_size) - - @unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") - def test_same_vector4(self): - # verify lower together with min_freq - word_lst = ["The", "the", "the", "The", "a", "A"] - no_create_word_lst = ['of', 'Of', "Of", "of", 'With', 'with'] - all_words = word_lst[:-2] + no_create_word_lst[:-2] - vocab = Vocabulary(min_freq=2).add_word_lst(word_lst) - vocab.add_word_lst(no_create_word_lst, no_create_entry=True) - embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-6B-100d', - lower=True) - words = torch.LongTensor([[vocab.to_index(word) for word in all_words]]) - words = embed(words) - - lowered_word_lst = [word.lower() for word in word_lst] - lowered_no_create_word_lst = [word.lower() for word in no_create_word_lst] - lowered_vocab = Vocabulary().add_word_lst(lowered_word_lst) - lowered_vocab.add_word_lst(lowered_no_create_word_lst, no_create_entry=True) - lowered_embed = StaticEmbedding(lowered_vocab, model_dir_or_name='en-glove-6B-100d', - lower=False) - lowered_words = torch.LongTensor([[lowered_vocab.to_index(word.lower()) for word in all_words]]) - lowered_words = lowered_embed(lowered_words) - - for idx in range(len(all_words)): - word_i, word_j = words[0, idx], lowered_words[0, idx] - with self.subTest(idx=idx, word=all_words[idx]): - assert torch.sum(word_i == word_j).eq(lowered_embed.embed_size) - - @unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") - def test_same_vector5(self): - # check that vectors remain identical after applying min_freq - word_lst = ["they", "the", "they", "the", 'he', 'he', "a", "A"] - no_create_word_lst = ['of', "of", "she", "she", 'With', 'with'] - all_words = word_lst[:-2] + no_create_word_lst[:-2] - vocab = Vocabulary().add_word_lst(word_lst) - vocab.add_word_lst(no_create_word_lst, no_create_entry=True) - embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-6B-100d', - lower=False, min_freq=2) - words = torch.LongTensor([[vocab.to_index(word) for word in all_words]]) - words = embed(words) - - min_freq_vocab = Vocabulary(min_freq=2).add_word_lst(word_lst) - min_freq_vocab.add_word_lst(no_create_word_lst, no_create_entry=True) - min_freq_embed = StaticEmbedding(min_freq_vocab, model_dir_or_name='en-glove-6B-100d', - lower=False) - min_freq_words = torch.LongTensor([[min_freq_vocab.to_index(word.lower()) for word in all_words]]) - min_freq_words = min_freq_embed(min_freq_words) - - for idx in range(len(all_words)): - word_i, word_j = words[0, idx], min_freq_words[0, idx] - with self.subTest(idx=idx, word=all_words[idx]): - assert 
torch.sum(word_i == word_j).eq(min_freq_embed.embed_size) \ No newline at end of file diff --git a/tests/embeddings/test_transformer_embedding.py b/tests/embeddings/test_transformer_embedding.py deleted file mode 100644 index 2a3617d9..00000000 --- a/tests/embeddings/test_transformer_embedding.py +++ /dev/null @@ -1,38 +0,0 @@ -import unittest - -import torch -import os - -from fastNLP import DataSet, Vocabulary -from fastNLP.embeddings.transformers_embedding import TransformersEmbedding, TransformersWordPieceEncoder - - -class TransformersEmbeddingTest(unittest.TestCase): - @unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") - def test_transformers_embedding_1(self): - from transformers import ElectraModel, ElectraTokenizer - weight_path = "google/electra-small-generator" - vocab = Vocabulary().add_word_lst("this is a test . [SEP] NotInRoberta".split()) - model = ElectraModel.from_pretrained(weight_path) - tokenizer = ElectraTokenizer.from_pretrained(weight_path) - - embed = TransformersEmbedding(vocab, model, tokenizer, word_dropout=0.1) - - words = torch.LongTensor([[2, 3, 4, 1]]) - result = embed(words) - self.assertEqual(result.size(), (1, 4, model.config.hidden_size)) - - -class TransformersWordPieceEncoderTest(unittest.TestCase): - @unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") - def test_transformers_embedding_1(self): - from transformers import ElectraModel, ElectraTokenizer - weight_path = "google/electra-small-generator" - model = ElectraModel.from_pretrained(weight_path) - tokenizer = ElectraTokenizer.from_pretrained(weight_path) - encoder = TransformersWordPieceEncoder(model, tokenizer) - ds = DataSet({'words': ["this is a test . [SEP]".split()]}) - encoder.index_datasets(ds, field_name='words') - self.assertTrue(ds.has_field('word_pieces')) - result = encoder(torch.LongTensor([[1,2,3,4]])) - self.assertEqual(result.size(), (1, 4, model.config.hidden_size)) diff --git a/tests/io/__init__.py b/tests/io/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/io/loader/test_classification_loader.py b/tests/io/loader/test_classification_loader.py deleted file mode 100644 index 02c58e80..00000000 --- a/tests/io/loader/test_classification_loader.py +++ /dev/null @@ -1,56 +0,0 @@ - -import unittest - -import os - -from fastNLP.io import DataBundle -from fastNLP.io.loader.classification import YelpFullLoader, YelpPolarityLoader, IMDBLoader, \ - SSTLoader, SST2Loader, ChnSentiCorpLoader, THUCNewsLoader, WeiboSenti100kLoader, \ - MRLoader, R8Loader, R52Loader, OhsumedLoader, NG20Loader - - -@unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") -class TestDownload(unittest.TestCase): - def test_download(self): - for loader in [YelpFullLoader, YelpPolarityLoader, IMDBLoader, SST2Loader, SSTLoader, ChnSentiCorpLoader]: - loader().download() - - def test_load(self): - for loader in [YelpFullLoader, YelpPolarityLoader, IMDBLoader, SST2Loader, SSTLoader, ChnSentiCorpLoader]: - data_bundle = loader().load() - print(data_bundle) - - -class TestLoad(unittest.TestCase): - def test_process_from_file(self): - data_set_dict = { - 'yelp.p': ('tests/data_for_tests/io/yelp_review_polarity', YelpPolarityLoader, (6, 6, 6), False), - 'yelp.f': ('tests/data_for_tests/io/yelp_review_full', YelpFullLoader, (6, 6, 6), False), - 'sst-2': ('tests/data_for_tests/io/SST-2', SST2Loader, (5, 5, 5), True), - 'sst': ('tests/data_for_tests/io/SST', SSTLoader, (6, 6, 6), False), - 'imdb': ('tests/data_for_tests/io/imdb', IMDBLoader, (6, 6, 6), False), - 'ChnSentiCorp': 
('tests/data_for_tests/io/ChnSentiCorp', ChnSentiCorpLoader, (6, 6, 6), False), - 'THUCNews': ('tests/data_for_tests/io/THUCNews', THUCNewsLoader, (9, 9, 9), False), - 'WeiboSenti100k': ('tests/data_for_tests/io/WeiboSenti100k', WeiboSenti100kLoader, (6, 7, 6), False), - 'mr': ('tests/data_for_tests/io/mr', MRLoader, (6, 6, 6), False), - 'R8': ('tests/data_for_tests/io/R8', R8Loader, (6, 6, 6), False), - 'R52': ('tests/data_for_tests/io/R52', R52Loader, (6, 6, 6), False), - 'ohsumed': ('tests/data_for_tests/io/R52', OhsumedLoader, (6, 6, 6), False), - '20ng': ('tests/data_for_tests/io/R52', NG20Loader, (6, 6, 6), False), - } - for k, v in data_set_dict.items(): - path, loader, data_set, warns = v - with self.subTest(path=path): - if warns: - with self.assertWarns(Warning): - data_bundle = loader().load(path) - else: - data_bundle = loader().load(path) - - self.assertTrue(isinstance(data_bundle, DataBundle)) - self.assertEqual(len(data_set), data_bundle.num_dataset) - for x, y in zip(data_set, data_bundle.iter_datasets()): - name, dataset = y - with self.subTest(split=name): - self.assertEqual(x, len(dataset)) - diff --git a/tests/io/loader/test_conll_loader.py b/tests/io/loader/test_conll_loader.py deleted file mode 100644 index 87ea57c3..00000000 --- a/tests/io/loader/test_conll_loader.py +++ /dev/null @@ -1,44 +0,0 @@ - -import unittest -import os -from fastNLP.io.loader.conll import MsraNERLoader, PeopleDailyNERLoader, WeiboNERLoader, \ - Conll2003Loader, ConllLoader - - -class TestMSRANER(unittest.TestCase): - @unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") - def test_download(self): - MsraNERLoader().download(re_download=False) - data_bundle = MsraNERLoader().load() - print(data_bundle) - - -class TestPeopleDaily(unittest.TestCase): - @unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") - def test_download(self): - PeopleDailyNERLoader().download() - - -class TestWeiboNER(unittest.TestCase): - @unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") - def test_download(self): - WeiboNERLoader().download() - - -class TestConll2003Loader(unittest.TestCase): - def test_load(self): - Conll2003Loader()._load('tests/data_for_tests/conll_2003_example.txt') - - -class TestConllLoader(unittest.TestCase): - def test_conll(self): - db = Conll2003Loader().load('tests/data_for_tests/io/conll2003') - print(db) - -class TestConllLoaderSep(unittest.TestCase): - def test_sep(self): - headers = [ - 'raw_words', 'ner', - ] - db = ConllLoader(headers=headers, sep="\n").load('tests/data_for_tests/io/MSRA_NER') - print(db) diff --git a/tests/io/loader/test_coreference_loader.py b/tests/io/loader/test_coreference_loader.py deleted file mode 100644 index 50f27e39..00000000 --- a/tests/io/loader/test_coreference_loader.py +++ /dev/null @@ -1,26 +0,0 @@ -from fastNLP.io.loader.coreference import CoReferenceLoader -import unittest - - -class TestCR(unittest.TestCase): - def test_load(self): - - test_root = "tests/data_for_tests/io/coreference/" - train_path = test_root+"coreference_train.json" - dev_path = test_root+"coreference_dev.json" - test_path = test_root+"coreference_test.json" - paths = {"train": train_path, "dev": dev_path, "test": test_path} - - bundle1 = CoReferenceLoader().load(paths) - bundle2 = CoReferenceLoader().load(test_root) - print(bundle1) - print(bundle2) - - self.assertEqual(bundle1.num_dataset, 3) - self.assertEqual(bundle2.num_dataset, 3) - self.assertEqual(bundle1.num_vocab, 0) - self.assertEqual(bundle2.num_vocab, 0) - - 
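# --- Aside: an illustrative sketch (not from the test suite) of the DataBundle
# inspection API that the assertions in these loader tests rely on; the loader and
# fixture path mirror the ones used in this file.
from fastNLP.io import DataBundle
from fastNLP.io.loader.coreference import CoReferenceLoader

sketch_bundle = CoReferenceLoader().load("tests/data_for_tests/io/coreference/")
assert isinstance(sketch_bundle, DataBundle)
print(sketch_bundle.num_dataset, sketch_bundle.num_vocab)  # counts of held datasets/vocabs
for name, dataset in sketch_bundle.iter_datasets():        # iterate over (name, DataSet) pairs
    print(name, len(dataset))
sketch_train = sketch_bundle.get_dataset('train')          # fetch a single split by name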
self.assertEqual(len(bundle1.get_dataset('train')), 1) - self.assertEqual(len(bundle1.get_dataset('dev')), 1) - self.assertEqual(len(bundle1.get_dataset('test')), 1) diff --git a/tests/io/loader/test_cws_loader.py b/tests/io/loader/test_cws_loader.py deleted file mode 100644 index e17d0e0d..00000000 --- a/tests/io/loader/test_cws_loader.py +++ /dev/null @@ -1,24 +0,0 @@ -import unittest -import os -from fastNLP.io.loader import CWSLoader - - -class TestCWSLoader(unittest.TestCase): - @unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") - def test_download(self): - dataset_names = ['pku', 'cityu', 'as', 'msra'] - for dataset_name in dataset_names: - with self.subTest(dataset_name=dataset_name): - data_bundle = CWSLoader(dataset_name=dataset_name).load() - print(data_bundle) - - -class TestRunCWSLoader(unittest.TestCase): - def test_cws_loader(self): - dataset_names = ['msra', 'cityu', 'as', 'pku'] - for dataset_name in dataset_names: - with self.subTest(dataset_name=dataset_name): - data_bundle = CWSLoader(dataset_name=dataset_name).load( - f'tests/data_for_tests/io/cws_{dataset_name}' - ) - print(data_bundle) diff --git a/tests/io/loader/test_matching_loader.py b/tests/io/loader/test_matching_loader.py deleted file mode 100644 index 6c7059da..00000000 --- a/tests/io/loader/test_matching_loader.py +++ /dev/null @@ -1,51 +0,0 @@ - -import unittest - -import os - -from fastNLP.io import DataBundle -from fastNLP.io.loader.matching import RTELoader, QNLILoader, SNLILoader, QuoraLoader, MNLILoader, \ - BQCorpusLoader, CNXNLILoader, LCQMCLoader - - -@unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") -class TestMatchingDownload(unittest.TestCase): - def test_download(self): - for loader in [RTELoader, QNLILoader, SNLILoader, MNLILoader]: - loader().download() - with self.assertRaises(Exception): - QuoraLoader().load() - - def test_load(self): - for loader in [RTELoader, QNLILoader, SNLILoader, MNLILoader]: - data_bundle = loader().load() - print(data_bundle) - - -class TestMatchingLoad(unittest.TestCase): - def test_load(self): - data_set_dict = { - 'RTE': ('tests/data_for_tests/io/RTE', RTELoader, (5, 5, 5), True), - 'SNLI': ('tests/data_for_tests/io/SNLI', SNLILoader, (5, 5, 5), False), - 'QNLI': ('tests/data_for_tests/io/QNLI', QNLILoader, (5, 5, 5), True), - 'MNLI': ('tests/data_for_tests/io/MNLI', MNLILoader, (5, 5, 5, 5, 6), True), - 'Quora': ('tests/data_for_tests/io/Quora', QuoraLoader, (2, 2, 2), False), - 'BQCorpus': ('tests/data_for_tests/io/BQCorpus', BQCorpusLoader, (5, 5, 5), False), - 'XNLI': ('tests/data_for_tests/io/XNLI', CNXNLILoader, (6, 6, 8), False), - 'LCQMC': ('tests/data_for_tests/io/LCQMC', LCQMCLoader, (6, 5, 6), False), - } - for k, v in data_set_dict.items(): - path, loader, instance, warns = v - if warns: - with self.assertWarns(Warning): - data_bundle = loader().load(path) - else: - data_bundle = loader().load(path) - - self.assertTrue(isinstance(data_bundle, DataBundle)) - self.assertEqual(len(instance), data_bundle.num_dataset) - for x, y in zip(instance, data_bundle.iter_datasets()): - name, dataset = y - with self.subTest(path=path, split=name): - self.assertEqual(x, len(dataset)) - diff --git a/tests/io/loader/test_qa_loader.py b/tests/io/loader/test_qa_loader.py deleted file mode 100644 index 99a504c5..00000000 --- a/tests/io/loader/test_qa_loader.py +++ /dev/null @@ -1,14 +0,0 @@ -import unittest - -from fastNLP.io.loader.qa import CMRC2018Loader - -class TestCMRC2018Loader(unittest.TestCase): - def test__load(self): - loader = CMRC2018Loader() 
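# --- Aside: an illustrative sketch (not from the test suite) of the two input styles
# the Loader.load interface accepts in these tests: a directory holding all splits, or
# an explicit {split: path} dict; only train.json is referenced in this repo's fixtures,
# so the dict form below names just that one split.
from fastNLP.io.loader.qa import CMRC2018Loader

sketch_root = 'tests/data_for_tests/io/cmrc/'
bundle_from_dir = CMRC2018Loader().load(sketch_root)
bundle_from_dict = CMRC2018Loader().load({'train': sketch_root + 'train.json'})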
- dataset = loader._load('tests/data_for_tests/io/cmrc/train.json') - print(dataset) - - def test_load(self): - loader = CMRC2018Loader() - data_bundle = loader.load('tests/data_for_tests/io/cmrc/') - print(data_bundle) diff --git a/tests/io/pipe/test_classification.py b/tests/io/pipe/test_classification.py deleted file mode 100644 index e3200a1a..00000000 --- a/tests/io/pipe/test_classification.py +++ /dev/null @@ -1,93 +0,0 @@ -import unittest -import os - -from fastNLP.io import DataBundle -from fastNLP.io.pipe.classification import SSTPipe, SST2Pipe, IMDBPipe, YelpFullPipe, YelpPolarityPipe, \ - AGsNewsPipe, DBPediaPipe -from fastNLP.io.pipe.classification import ChnSentiCorpPipe, THUCNewsPipe, WeiboSenti100kPipe - - -@unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") -class TestClassificationPipe(unittest.TestCase): - def test_process_from_file(self): - for pipe in [YelpPolarityPipe, SST2Pipe, IMDBPipe, YelpFullPipe, SSTPipe]: - with self.subTest(pipe=pipe): - print(pipe) - data_bundle = pipe(tokenizer='raw').process_from_file() - print(data_bundle) - - -class TestRunPipe(unittest.TestCase): - def test_load(self): - for pipe in [IMDBPipe]: - data_bundle = pipe(tokenizer='raw').process_from_file('tests/data_for_tests/io/imdb') - print(data_bundle) - - -@unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") -class TestCNClassificationPipe(unittest.TestCase): - def test_process_from_file(self): - for pipe in [ChnSentiCorpPipe]: - with self.subTest(pipe=pipe): - data_bundle = pipe(bigrams=True, trigrams=True).process_from_file() - print(data_bundle) - - -@unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") -class TestRunClassificationPipe(unittest.TestCase): - def test_process_from_file(self): - data_set_dict = { - 'yelp.p': ('tests/data_for_tests/io/yelp_review_polarity', YelpPolarityPipe, - {'train': 6, 'dev': 6, 'test': 6}, {'words': 1176, 'target': 2}, - False), - 'yelp.f': ('tests/data_for_tests/io/yelp_review_full', YelpFullPipe, - {'train': 6, 'dev': 6, 'test': 6}, {'words': 1166, 'target': 5}, - False), - 'sst-2': ('tests/data_for_tests/io/SST-2', SST2Pipe, - {'train': 5, 'dev': 5, 'test': 5}, {'words': 139, 'target': 2}, - True), - 'sst': ('tests/data_for_tests/io/SST', SSTPipe, - {'train': 354, 'dev': 6, 'test': 6}, {'words': 232, 'target': 5}, - False), - 'imdb': ('tests/data_for_tests/io/imdb', IMDBPipe, - {'train': 6, 'dev': 6, 'test': 6}, {'words': 1670, 'target': 2}, - False), - 'ag': ('tests/data_for_tests/io/ag', AGsNewsPipe, - {'train': 4, 'test': 5}, {'words': 257, 'target': 4}, - False), - 'dbpedia': ('tests/data_for_tests/io/dbpedia', DBPediaPipe, - {'train': 14, 'test': 5}, {'words': 496, 'target': 14}, - False), - 'ChnSentiCorp': ('tests/data_for_tests/io/ChnSentiCorp', ChnSentiCorpPipe, - {'train': 6, 'dev': 6, 'test': 6}, - {'chars': 529, 'bigrams': 1296, 'trigrams': 1483, 'target': 2}, - False), - 'Chn-THUCNews': ('tests/data_for_tests/io/THUCNews', THUCNewsPipe, - {'train': 9, 'dev': 9, 'test': 9}, {'chars': 1864, 'target': 9}, - False), - 'Chn-WeiboSenti100k': ('tests/data_for_tests/io/WeiboSenti100k', WeiboSenti100kPipe, - {'train': 6, 'dev': 6, 'test': 7}, {'chars': 452, 'target': 2}, - False), - } - for k, v in data_set_dict.items(): - path, pipe, data_set, vocab, warns = v - with self.subTest(path=path): - if 'Chn' not in k: - if warns: - with self.assertWarns(Warning): - data_bundle = pipe(tokenizer='raw').process_from_file(path) - else: - data_bundle = pipe(tokenizer='raw').process_from_file(path) - else: - data_bundle = 
pipe(bigrams=True, trigrams=True).process_from_file(path) - - self.assertTrue(isinstance(data_bundle, DataBundle)) - self.assertEqual(len(data_set), data_bundle.num_dataset) - for name, dataset in data_bundle.iter_datasets(): - self.assertTrue(name in data_set.keys()) - self.assertEqual(data_set[name], len(dataset)) - - self.assertEqual(len(vocab), data_bundle.num_vocab) - for name, vocabs in data_bundle.iter_vocabs(): - self.assertTrue(name in vocab.keys()) - self.assertEqual(vocab[name], len(vocabs)) diff --git a/tests/io/pipe/test_conll.py b/tests/io/pipe/test_conll.py deleted file mode 100644 index 30d5b48f..00000000 --- a/tests/io/pipe/test_conll.py +++ /dev/null @@ -1,52 +0,0 @@ -import unittest -import os -from fastNLP.io import MsraNERPipe, PeopleDailyPipe, WeiboNERPipe, Conll2003Pipe, Conll2003NERPipe, \ - OntoNotesNERPipe - - -@unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") -class TestConllPipe(unittest.TestCase): - def test_process_from_file(self): - for pipe in [MsraNERPipe, PeopleDailyPipe, WeiboNERPipe]: - with self.subTest(pipe=pipe): - print(pipe) - data_bundle = pipe(bigrams=True, trigrams=True).process_from_file() - print(data_bundle) - data_bundle = pipe(encoding_type='bioes').process_from_file() - print(data_bundle) - - -class TestRunPipe(unittest.TestCase): - def test_conll2003(self): - for pipe in [Conll2003Pipe, Conll2003NERPipe]: - with self.subTest(pipe=pipe): - print(pipe) - data_bundle = pipe().process_from_file('tests/data_for_tests/conll_2003_example.txt') - print(data_bundle) - - -class TestNERPipe(unittest.TestCase): - def test_process_from_file(self): - data_dict = { - 'weibo_NER': WeiboNERPipe, - 'peopledaily': PeopleDailyPipe, - 'MSRA_NER': MsraNERPipe, - } - for k, v in data_dict.items(): - pipe = v - with self.subTest(pipe=pipe): - data_bundle = pipe(bigrams=True, trigrams=True).process_from_file(f'tests/data_for_tests/io/{k}') - print(data_bundle) - data_bundle = pipe(encoding_type='bioes').process_from_file(f'tests/data_for_tests/io/{k}') - print(data_bundle) - - -class TestConll2003Pipe(unittest.TestCase): - def test_conll(self): - with self.assertWarns(Warning): - data_bundle = Conll2003Pipe().process_from_file('tests/data_for_tests/io/conll2003') - print(data_bundle) - - def test_OntoNotes(self): - data_bundle = OntoNotesNERPipe().process_from_file('tests/data_for_tests/io/OntoNotes') - print(data_bundle) diff --git a/tests/io/pipe/test_coreference.py b/tests/io/pipe/test_coreference.py deleted file mode 100644 index 784f6954..00000000 --- a/tests/io/pipe/test_coreference.py +++ /dev/null @@ -1,33 +0,0 @@ -import unittest -from fastNLP.io.pipe.coreference import CoReferencePipe - - -class TestCR(unittest.TestCase): - - def test_load(self): - class Config(): - max_sentences = 50 - filter = [3, 4, 5] - char_path = None - config = Config() - - file_root_path = "tests/data_for_tests/io/coreference/" - train_path = file_root_path + "coreference_train.json" - dev_path = file_root_path + "coreference_dev.json" - test_path = file_root_path + "coreference_test.json" - - paths = {"train": train_path, "dev": dev_path, "test": test_path} - - bundle1 = CoReferencePipe(config).process_from_file(paths) - bundle2 = CoReferencePipe(config).process_from_file(file_root_path) - print(bundle1) - print(bundle2) - self.assertEqual(bundle1.num_dataset, 3) - self.assertEqual(bundle2.num_dataset, 3) - self.assertEqual(bundle1.num_vocab, 1) - self.assertEqual(bundle2.num_vocab, 1) - - self.assertEqual(len(bundle1.get_dataset('train')), 1) - 
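# --- Aside: an illustrative sketch (not from the test suite) of the Loader/Pipe split
# these io tests exercise: a Loader yields a raw DataBundle, and a Pipe post-processes
# it in place (process); most pipes in this file also offer process_from_file, which
# performs both steps in a single call.
from fastNLP.io.loader.qa import CMRC2018Loader
from fastNLP.io.pipe.qa import CMRC2018BertPipe

sketch_bundle = CMRC2018Loader().load('tests/data_for_tests/io/cmrc/')  # raw fields only
sketch_bundle = CMRC2018BertPipe().process(sketch_bundle)               # adds indices and vocabs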
self.assertEqual(len(bundle1.get_dataset('dev')), 1) - self.assertEqual(len(bundle1.get_dataset('test')), 1) - self.assertEqual(len(bundle1.get_vocab('words1')), 84) diff --git a/tests/io/pipe/test_cws.py b/tests/io/pipe/test_cws.py deleted file mode 100644 index ef50907f..00000000 --- a/tests/io/pipe/test_cws.py +++ /dev/null @@ -1,41 +0,0 @@ - -import unittest -import os -from fastNLP.io.pipe.cws import CWSPipe - - -class TestCWSPipe(unittest.TestCase): - @unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") - def test_process_from_file(self): - dataset_names = ['pku', 'cityu', 'as', 'msra'] - for dataset_name in dataset_names: - with self.subTest(dataset_name=dataset_name): - data_bundle = CWSPipe(dataset_name=dataset_name).process_from_file() - print(data_bundle) - - def test_demo(self): - # related to issue https://github.com/fastnlp/fastNLP/issues/324#issue-705081091 - from fastNLP import DataSet, Instance - from fastNLP.io import DataBundle - data_bundle = DataBundle() - ds = DataSet() - ds.append(Instance(raw_words="截流 进入 最后 冲刺 ( 附 图片 1 张 )")) - data_bundle.set_dataset(ds, name='train') - data_bundle = CWSPipe().process(data_bundle) - self.assertFalse('<' in data_bundle.get_vocab('chars')) - - -class TestRunCWSPipe(unittest.TestCase): - def test_process_from_file(self): - dataset_names = ['msra', 'cityu', 'as', 'pku'] - for dataset_name in dataset_names: - with self.subTest(dataset_name=dataset_name): - data_bundle = CWSPipe(bigrams=True, trigrams=True).\ - process_from_file(f'tests/data_for_tests/io/cws_{dataset_name}') - print(data_bundle) - - def test_replace_number(self): - data_bundle = CWSPipe(bigrams=True, replace_num_alpha=True).\ - process_from_file(f'tests/data_for_tests/io/cws_pku') - for word in ['<', '>', '']: - self.assertNotEqual(data_bundle.get_vocab('chars').to_index(word), 1) diff --git a/tests/io/pipe/test_matching.py b/tests/io/pipe/test_matching.py deleted file mode 100644 index 0cace97c..00000000 --- a/tests/io/pipe/test_matching.py +++ /dev/null @@ -1,109 +0,0 @@ - -import unittest -import os - -from fastNLP.io import DataBundle -from fastNLP.io.pipe.matching import SNLIPipe, RTEPipe, QNLIPipe, QuoraPipe, MNLIPipe, \ - CNXNLIPipe, BQCorpusPipe, LCQMCPipe -from fastNLP.io.pipe.matching import SNLIBertPipe, RTEBertPipe, QNLIBertPipe, QuoraBertPipe, MNLIBertPipe, \ - CNXNLIBertPipe, BQCorpusBertPipe, LCQMCBertPipe - - -@unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") -class TestMatchingPipe(unittest.TestCase): - def test_process_from_file(self): - for pipe in [SNLIPipe, RTEPipe, QNLIPipe, MNLIPipe]: - with self.subTest(pipe=pipe): - print(pipe) - data_bundle = pipe(tokenizer='raw').process_from_file() - print(data_bundle) - - -@unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") -class TestMatchingBertPipe(unittest.TestCase): - def test_process_from_file(self): - for pipe in [SNLIBertPipe, RTEBertPipe, QNLIBertPipe, MNLIBertPipe]: - with self.subTest(pipe=pipe): - print(pipe) - data_bundle = pipe(tokenizer='raw').process_from_file() - print(data_bundle) - - -class TestRunMatchingPipe(unittest.TestCase): - - def test_load(self): - data_set_dict = { - 'RTE': ('tests/data_for_tests/io/RTE', RTEPipe, RTEBertPipe, (5, 5, 5), (449, 2), True), - 'SNLI': ('tests/data_for_tests/io/SNLI', SNLIPipe, SNLIBertPipe, (5, 5, 5), (110, 3), False), - 'QNLI': ('tests/data_for_tests/io/QNLI', QNLIPipe, QNLIBertPipe, (5, 5, 5), (372, 2), True), - 'MNLI': ('tests/data_for_tests/io/MNLI', MNLIPipe, MNLIBertPipe, (5, 5, 5, 5, 6), (459, 3), True), - 'BQCorpus': 
('tests/data_for_tests/io/BQCorpus', BQCorpusPipe, BQCorpusBertPipe, (5, 5, 5), (32, 2), False), - 'XNLI': ('tests/data_for_tests/io/XNLI', CNXNLIPipe, CNXNLIBertPipe, (6, 6, 8), (39, 3), False), - 'LCQMC': ('tests/data_for_tests/io/LCQMC', LCQMCPipe, LCQMCBertPipe, (6, 5, 6), (36, 2), False), - } - for k, v in data_set_dict.items(): - path, pipe1, pipe2, data_set, vocab, warns = v - if warns: - with self.assertWarns(Warning): - data_bundle1 = pipe1(tokenizer='raw').process_from_file(path) - data_bundle2 = pipe2(tokenizer='raw').process_from_file(path) - else: - data_bundle1 = pipe1(tokenizer='raw').process_from_file(path) - data_bundle2 = pipe2(tokenizer='raw').process_from_file(path) - - self.assertTrue(isinstance(data_bundle1, DataBundle)) - self.assertEqual(len(data_set), data_bundle1.num_dataset) - print(k) - print(data_bundle1) - print(data_bundle2) - for x, y in zip(data_set, data_bundle1.iter_datasets()): - name, dataset = y - with self.subTest(path=path, split=name): - self.assertEqual(x, len(dataset)) - self.assertEqual(len(data_set), data_bundle2.num_dataset) - for x, y in zip(data_set, data_bundle2.iter_datasets()): - name, dataset = y - self.assertEqual(x, len(dataset)) - - self.assertEqual(len(vocab), data_bundle1.num_vocab) - for x, y in zip(vocab, data_bundle1.iter_vocabs()): - name, vocabs = y - self.assertEqual(x, len(vocabs)) - self.assertEqual(len(vocab), data_bundle2.num_vocab) - for x, y in zip(vocab, data_bundle1.iter_vocabs()): - name, vocabs = y - self.assertEqual(x + 1 if name == 'words' else x, len(vocabs)) - - @unittest.skipIf('TRAVIS' in os.environ, "Skip in travis") - def test_spacy(self): - data_set_dict = { - 'Quora': ('tests/data_for_tests/io/Quora', QuoraPipe, QuoraBertPipe, (2, 2, 2), (93, 2)), - } - for k, v in data_set_dict.items(): - path, pipe1, pipe2, data_set, vocab = v - - data_bundle1 = pipe1(tokenizer='spacy').process_from_file(path) - data_bundle2 = pipe2(tokenizer='spacy').process_from_file(path) - - self.assertTrue(isinstance(data_bundle1, DataBundle)) - self.assertEqual(len(data_set), data_bundle1.num_dataset) - print(k) - print(data_bundle1) - print(data_bundle2) - for x, y in zip(data_set, data_bundle1.iter_datasets()): - name, dataset = y - self.assertEqual(x, len(dataset)) - self.assertEqual(len(data_set), data_bundle2.num_dataset) - for x, y in zip(data_set, data_bundle2.iter_datasets()): - name, dataset = y - self.assertEqual(x, len(dataset)) - - self.assertEqual(len(vocab), data_bundle1.num_vocab) - for x, y in zip(vocab, data_bundle1.iter_vocabs()): - name, vocabs = y - self.assertEqual(x, len(vocabs)) - self.assertEqual(len(vocab), data_bundle2.num_vocab) - for x, y in zip(vocab, data_bundle1.iter_vocabs()): - name, vocabs = y - self.assertEqual(x + 1 if name == 'words' else x, len(vocabs)) - diff --git a/tests/io/pipe/test_qa.py b/tests/io/pipe/test_qa.py deleted file mode 100644 index db2245fc..00000000 --- a/tests/io/pipe/test_qa.py +++ /dev/null @@ -1,24 +0,0 @@ - -import unittest -from fastNLP.io.pipe.qa import CMRC2018BertPipe -from fastNLP.io.loader.qa import CMRC2018Loader - - -class CMRC2018PipeTest(unittest.TestCase): - def test_process(self): - data_bundle = CMRC2018Loader().load('tests/data_for_tests/io/cmrc/') - pipe = CMRC2018BertPipe() - data_bundle = pipe.process(data_bundle) - - for name, dataset in data_bundle.iter_datasets(): - for ins in dataset: - if 'target_start' in ins: - # the extracted answer lines up with the gold answer - start_index = ins['target_start'] - end_index = ins['target_end']+1 - extract_answer = 
''.join(ins['raw_chars'][start_index:end_index]) - self.assertEqual(extract_answer, ins['answers'][0]) - # check that context_len is correct - raw_chars = ins['raw_chars'] - expect_len = raw_chars.index('[SEP]') - self.assertEqual(expect_len, ins['context_len']) diff --git a/tests/io/pipe/test_summary.py b/tests/io/pipe/test_summary.py deleted file mode 100644 index 03d92214..00000000 --- a/tests/io/pipe/test_summary.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- - -# __author__="Danqing Wang" - -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -import unittest -import os - -from fastNLP.io import DataBundle -from fastNLP.io.pipe.summarization import ExtCNNDMPipe - - -class TestRunExtCNNDMPipe(unittest.TestCase): - - def test_load(self): - data_dir = 'tests/data_for_tests/io/cnndm' - vocab_size = 100000 - VOCAB_FILE = 'tests/data_for_tests/io/cnndm/vocab' - sent_max_len = 100 - doc_max_timesteps = 50 - dbPipe = ExtCNNDMPipe(vocab_size=vocab_size, - vocab_path=VOCAB_FILE, - sent_max_len=sent_max_len, - doc_max_timesteps=doc_max_timesteps) - dbPipe2 = ExtCNNDMPipe(vocab_size=vocab_size, - vocab_path=VOCAB_FILE, - sent_max_len=sent_max_len, - doc_max_timesteps=doc_max_timesteps, - domain=True) - db = dbPipe.process_from_file(data_dir) - db2 = dbPipe2.process_from_file(data_dir) - - self.assertTrue(isinstance(db, DataBundle)) - self.assertTrue(isinstance(db2, DataBundle)) - - dbPipe3 = ExtCNNDMPipe(vocab_size=vocab_size, - sent_max_len=sent_max_len, - doc_max_timesteps=doc_max_timesteps, - domain=True) - db3 = dbPipe3.process_from_file(data_dir) - self.assertTrue(isinstance(db3, DataBundle)) - - with self.assertRaises(RuntimeError): - dbPipe4 = ExtCNNDMPipe(vocab_size=vocab_size, - sent_max_len=sent_max_len, - doc_max_timesteps=doc_max_timesteps) - db4 = dbPipe4.process_from_file(os.path.join(data_dir, 'train.cnndm.jsonl')) - - dbPipe5 = ExtCNNDMPipe(vocab_size=vocab_size, - vocab_path=VOCAB_FILE, - sent_max_len=sent_max_len, - doc_max_timesteps=doc_max_timesteps,) - db5 = dbPipe5.process_from_file(os.path.join(data_dir, 'train.cnndm.jsonl')) - self.assertIsInstance(db5, DataBundle) - diff --git a/tests/io/test_embed_loader.py b/tests/io/test_embed_loader.py deleted file mode 100644 index 7c8abc77..00000000 --- a/tests/io/test_embed_loader.py +++ /dev/null @@ -1,50 +0,0 @@ -import unittest -import numpy as np - -from fastNLP import Vocabulary -from fastNLP.io import EmbedLoader - - -class TestEmbedLoader(unittest.TestCase): - def test_load_with_vocab(self): - vocab = Vocabulary() - glove = "tests/data_for_tests/embedding/small_static_embedding/glove.6B.50d_test.txt" - word2vec = "tests/data_for_tests/embedding/small_static_embedding/word2vec_test.txt" - vocab.add_word('the') - vocab.add_word('none') - g_m = EmbedLoader.load_with_vocab(glove, vocab) - self.assertEqual(g_m.shape, (4, 50)) - w_m = EmbedLoader.load_with_vocab(word2vec, vocab, normalize=True) - self.assertEqual(w_m.shape, (4, 50)) - 
self.assertAlmostEqual(np.linalg.norm(w_m, axis=1).sum(), 4, delta=1e-4) - - def test_load_without_vocab(self): - words = ['the', 'of', 'in', 'a', 'to', 'and'] - glove = "tests/data_for_tests/embedding/small_static_embedding/glove.6B.50d_test.txt" - word2vec = "tests/data_for_tests/embedding/small_static_embedding/word2vec_test.txt" - g_m, vocab = EmbedLoader.load_without_vocab(glove) - self.assertEqual(g_m.shape, (8, 50)) - for word in words: - self.assertIn(word, vocab) - w_m, vocab = EmbedLoader.load_without_vocab(word2vec, normalize=True) - self.assertEqual(w_m.shape, (8, 50)) - self.assertAlmostEqual(np.linalg.norm(w_m, axis=1).sum(), 8, delta=1e-4) - for word in words: - self.assertIn(word, vocab) - # no unk - w_m, vocab = EmbedLoader.load_without_vocab(word2vec, normalize=True, unknown=None) - self.assertEqual(w_m.shape, (7, 50)) - self.assertAlmostEqual(np.linalg.norm(w_m, axis=1).sum(), 7, delta=1e-4) - for word in words: - self.assertIn(word, vocab) - - def test_read_all_glove(self): - pass - # TODO - # This runs, but the total count comes out below the line count, presumably because glove contains duplicate words - # path = '/where/to/read/full/glove' - # init_embed, vocab = EmbedLoader.load_without_vocab(path, error='strict') - # print(init_embed.shape) - # print(init_embed.mean()) - # print(np.isnan(init_embed).sum()) - # print(len(vocab)) diff --git a/tests/io/test_model_io.py b/tests/io/test_model_io.py deleted file mode 100644 index b8960492..00000000 --- a/tests/io/test_model_io.py +++ /dev/null @@ -1,25 +0,0 @@ -import os -import unittest - -from fastNLP.io import ModelSaver, ModelLoader -from fastNLP.models import CNNText - - -class TestModelIO(unittest.TestCase): - def test_save_and_load(self): - model = CNNText((10, 10), 2) - saver = ModelSaver('tmp') - loader = ModelLoader() - saver.save_pytorch(model) - - new_cnn = CNNText((10, 10), 2) - loader.load_pytorch(new_cnn, 'tmp') - - new_model = loader.load_pytorch_model('tmp') - - for i in range(10): - for j in range(10): - self.assertEqual(model.embed.embed.weight[i, j], new_cnn.embed.embed.weight[i, j]) - self.assertEqual(model.embed.embed.weight[i, j], new_model["embed.embed.weight"][i, j]) - - os.remove('tmp') diff --git a/tests/models/__init__.py b/tests/models/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/models/model_runner.py b/tests/models/model_runner.py deleted file mode 100644 index ae589470..00000000 --- a/tests/models/model_runner.py +++ /dev/null @@ -1,153 +0,0 @@ -""" -This module makes it easy to test a model. -If your model does text classification, sequence labeling, or natural language inference (NLI), you can test it with this module directly. -If it does not fit those categories, you can still prepare fake data yourself and test it with your own loss and metric. - -The tests here only guarantee that a model can be trained and evaluated with fastNLP; they do not test actual model performance. - -Example:: - - # import the ALL-CAPS constants... 
- from model_runner import * - - # test a text classification model - init_emb = (VOCAB_SIZE, 50) - model = SomeModel(init_emb, num_cls=NUM_CLS) - RUNNER.run_model_with_task(TEXT_CLS, model) - - # a sequence labeling model - RUNNER.run_model_with_task(POS_TAGGING, model) - - # an NLI model - RUNNER.run_model_with_task(NLI, model) - - # a custom model - RUNNER.run_model(model, data=get_mydata(), - loss=Myloss(), metrics=Mymetric()) -""" -import torch - -from fastNLP import Trainer, Tester, DataSet, Callback -from fastNLP import AccuracyMetric -from fastNLP import CrossEntropyLoss -from fastNLP.core.const import Const as C -from random import randrange - -VOCAB_SIZE = 100 -NUM_CLS = 100 -MAX_LEN = 10 -N_SAMPLES = 100 -N_EPOCHS = 1 -BATCH_SIZE = 5 - -TEXT_CLS = 'text_cls' -POS_TAGGING = 'pos_tagging' -NLI = 'nli' - -class ModelRunner: - class Checker(Callback): - def on_backward_begin(self, loss): - assert torch.isfinite(loss).all() - - def gen_seq(self, length, vocab_size): - """generate fake sequence indexes with given length""" - # reserve 0 for padding - return [randrange(1, vocab_size) for _ in range(length)] - - def gen_var_seq(self, max_len, vocab_size): - """generate fake sequence indexes in variant length""" - length = randrange(3, max_len) # at least 3 words in a seq - return self.gen_seq(length, vocab_size) - - def prepare_text_classification_data(self): - index = 'index' - ds = DataSet({index: list(range(N_SAMPLES))}) - ds.apply_field(lambda x: self.gen_var_seq(MAX_LEN, VOCAB_SIZE), - field_name=index, new_field_name=C.INPUT, - is_input=True) - ds.apply_field(lambda x: randrange(NUM_CLS), - field_name=index, new_field_name=C.TARGET, - is_target=True) - ds.apply_field(len, C.INPUT, C.INPUT_LEN, - is_input=True) - return ds - - def prepare_pos_tagging_data(self): - index = 'index' - ds = DataSet({index: list(range(N_SAMPLES))}) - ds.apply_field(lambda x: self.gen_var_seq(MAX_LEN, VOCAB_SIZE), - field_name=index, new_field_name=C.INPUT, - is_input=True) - ds.apply_field(lambda x: self.gen_seq(len(x), NUM_CLS), - field_name=C.INPUT, new_field_name=C.TARGET, - is_target=True) - ds.apply_field(len, C.INPUT, C.INPUT_LEN, - is_input=True, is_target=True) - return ds - - def prepare_nli_data(self): - index = 'index' - ds = DataSet({index: list(range(N_SAMPLES))}) - ds.apply_field(lambda x: self.gen_var_seq(MAX_LEN, VOCAB_SIZE), - field_name=index, new_field_name=C.INPUTS(0), - is_input=True) - ds.apply_field(lambda x: self.gen_var_seq(MAX_LEN, VOCAB_SIZE), - field_name=index, new_field_name=C.INPUTS(1), - is_input=True) - ds.apply_field(lambda x: randrange(NUM_CLS), - field_name=index, new_field_name=C.TARGET, - is_target=True) - ds.apply_field(len, C.INPUTS(0), C.INPUT_LENS(0), - is_input=True, is_target=True) - ds.apply_field(len, C.INPUTS(1), C.INPUT_LENS(1), - is_input=True, is_target=True) - ds.set_input(C.INPUTS(0), C.INPUTS(1)) - ds.set_target(C.TARGET) - return ds - - def run_text_classification(self, model, data=None): - if data is None: - data = self.prepare_text_classification_data() - loss = CrossEntropyLoss(pred=C.OUTPUT, target=C.TARGET) - metric = AccuracyMetric(pred=C.OUTPUT, target=C.TARGET) - self.run_model(model, data, loss, metric) - - def run_pos_tagging(self, model, data=None): - if data is None: - data = self.prepare_pos_tagging_data() - loss = CrossEntropyLoss(pred=C.OUTPUT, target=C.TARGET, padding_idx=0) - metric = AccuracyMetric(pred=C.OUTPUT, target=C.TARGET, seq_len=C.INPUT_LEN) - self.run_model(model, data, loss, metric) - - def run_nli(self, model, data=None): - if data is None: - data = self.prepare_nli_data() - loss 
= CrossEntropyLoss(pred=C.OUTPUT, target=C.TARGET) - metric = AccuracyMetric(pred=C.OUTPUT, target=C.TARGET) - self.run_model(model, data, loss, metric) - - def run_model(self, model, data, loss, metrics): - """run a model, test if it can run with fastNLP""" - print('testing model:', model.__class__.__name__) - tester = Tester(data=data, model=model, metrics=metrics, - batch_size=BATCH_SIZE, verbose=0) - before_train = tester.test() - trainer = Trainer(train_data=data, model=model, loss=loss, batch_size=BATCH_SIZE, n_epochs=N_EPOCHS, - dev_data=None, save_path=None, use_tqdm=False) - trainer.train(load_best_model=False) - after_train = tester.test() - for metric_name, v1 in before_train.items(): - assert metric_name in after_train - # # at least we can sure model params changed, even if we don't know performance - # v2 = after_train[metric_name] - # assert v1 != v2 - - def run_model_with_task(self, task, model): - """run a model with certain task""" - TASKS = { - TEXT_CLS: self.run_text_classification, - POS_TAGGING: self.run_pos_tagging, - NLI: self.run_nli, - } - assert task in TASKS - TASKS[task](model) - -RUNNER = ModelRunner() diff --git a/tests/models/test_bert.py b/tests/models/test_bert.py deleted file mode 100644 index 58178bff..00000000 --- a/tests/models/test_bert.py +++ /dev/null @@ -1,171 +0,0 @@ -import unittest - -import torch - -from fastNLP.core import Vocabulary, Const -from fastNLP.models.bert import BertForSequenceClassification, BertForQuestionAnswering, \ - BertForTokenClassification, BertForMultipleChoice, BertForSentenceMatching -from fastNLP.embeddings.bert_embedding import BertEmbedding - - -class TestBert(unittest.TestCase): - def test_bert_1(self): - vocab = Vocabulary().add_word_lst("this is a test .".split()) - embed = BertEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_bert', - include_cls_sep=True) - - model = BertForSequenceClassification(embed, 2) - - input_ids = torch.LongTensor([[1, 2, 3], [5, 6, 0]]) - - pred = model(input_ids) - self.assertTrue(isinstance(pred, dict)) - self.assertTrue(Const.OUTPUT in pred) - self.assertEqual(tuple(pred[Const.OUTPUT].shape), (2, 2)) - - pred = model(input_ids) - self.assertTrue(isinstance(pred, dict)) - self.assertTrue(Const.OUTPUT in pred) - self.assertEqual(tuple(pred[Const.OUTPUT].shape), (2, 2)) - - def test_bert_1_w(self): - vocab = Vocabulary().add_word_lst("this is a test .".split()) - embed = BertEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_bert', - include_cls_sep=False) - - with self.assertWarns(Warning): - model = BertForSequenceClassification(embed, 2) - - input_ids = torch.LongTensor([[1, 2, 3], [5, 6, 0]]) - - pred = model.predict(input_ids) - self.assertTrue(isinstance(pred, dict)) - self.assertTrue(Const.OUTPUT in pred) - self.assertEqual(tuple(pred[Const.OUTPUT].shape), (2,)) - - def test_bert_2(self): - - vocab = Vocabulary().add_word_lst("this is a test [SEP] .".split()) - embed = BertEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_bert', - include_cls_sep=True) - - model = BertForMultipleChoice(embed, 2) - - input_ids = torch.LongTensor([[[2, 6, 7], [1, 6, 5]]]) - print(input_ids.size()) - - pred = model(input_ids) - self.assertTrue(isinstance(pred, dict)) - self.assertTrue(Const.OUTPUT in pred) - self.assertEqual(tuple(pred[Const.OUTPUT].shape), (1, 2)) - - def test_bert_2_w(self): - - vocab = Vocabulary().add_word_lst("this is a test [SEP] .".split()) - embed = BertEmbedding(vocab, 
model_dir_or_name='tests/data_for_tests/embedding/small_bert', - include_cls_sep=False) - - with self.assertWarns(Warning): - model = BertForMultipleChoice(embed, 2) - - input_ids = torch.LongTensor([[[2, 6, 7], [1, 6, 5]]]) - print(input_ids.size()) - - pred = model.predict(input_ids) - self.assertTrue(isinstance(pred, dict)) - self.assertTrue(Const.OUTPUT in pred) - self.assertEqual(tuple(pred[Const.OUTPUT].shape), (1,)) - - def test_bert_3(self): - - vocab = Vocabulary().add_word_lst("this is a test [SEP] .".split()) - embed = BertEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_bert', - include_cls_sep=False) - model = BertForTokenClassification(embed, 7) - - input_ids = torch.LongTensor([[1, 2, 3], [6, 5, 0]]) - - pred = model(input_ids) - self.assertTrue(isinstance(pred, dict)) - self.assertTrue(Const.OUTPUT in pred) - self.assertEqual(tuple(pred[Const.OUTPUT].shape), (2, 3, 7)) - - def test_bert_3_w(self): - - vocab = Vocabulary().add_word_lst("this is a test [SEP] .".split()) - embed = BertEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_bert', - include_cls_sep=True) - - with self.assertWarns(Warning): - model = BertForTokenClassification(embed, 7) - - input_ids = torch.LongTensor([[1, 2, 3], [6, 5, 0]]) - - pred = model.predict(input_ids) - self.assertTrue(isinstance(pred, dict)) - self.assertTrue(Const.OUTPUT in pred) - self.assertEqual(tuple(pred[Const.OUTPUT].shape), (2, 3)) - - def test_bert_4(self): - vocab = Vocabulary().add_word_lst("this is a test [SEP] .".split()) - embed = BertEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_bert', - include_cls_sep=False) - model = BertForQuestionAnswering(embed) - - input_ids = torch.LongTensor([[1, 2, 3], [6, 5, 0]]) - - pred = model(input_ids) - self.assertTrue(isinstance(pred, dict)) - self.assertTrue('pred_start' in pred) - self.assertTrue('pred_end' in pred) - self.assertEqual(tuple(pred['pred_start'].shape), (2, 3)) - self.assertEqual(tuple(pred['pred_end'].shape), (2, 3)) - - def test_bert_for_question_answering_train(self): - from fastNLP import CMRC2018Loss - from fastNLP.io import CMRC2018BertPipe - from fastNLP import Trainer - - data_bundle = CMRC2018BertPipe().process_from_file('tests/data_for_tests/io/cmrc') - data_bundle.rename_field('chars', 'words') - train_data = data_bundle.get_dataset('train') - vocab = data_bundle.get_vocab('words') - - embed = BertEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_bert', - include_cls_sep=False, auto_truncate=True) - model = BertForQuestionAnswering(embed) - loss = CMRC2018Loss() - - trainer = Trainer(train_data, model, loss=loss, use_tqdm=False) - trainer.train(load_best_model=False) - - def test_bert_5(self): - - vocab = Vocabulary().add_word_lst("this is a test [SEP] .".split()) - embed = BertEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_bert', - include_cls_sep=True) - model = BertForSentenceMatching(embed) - - input_ids = torch.LongTensor([[1, 2, 3], [6, 5, 0]]) - - pred = model(input_ids) - self.assertTrue(isinstance(pred, dict)) - self.assertTrue(Const.OUTPUT in pred) - self.assertEqual(tuple(pred[Const.OUTPUT].shape), (2, 2)) - - def test_bert_5_w(self): - - vocab = Vocabulary().add_word_lst("this is a test [SEP] .".split()) - embed = BertEmbedding(vocab, model_dir_or_name='tests/data_for_tests/embedding/small_bert', - include_cls_sep=False) - - with self.assertWarns(Warning): - model = BertForSentenceMatching(embed) - - input_ids = 
torch.LongTensor([[1, 2, 3], [6, 5, 0]]) - - pred = model.predict(input_ids) - self.assertTrue(isinstance(pred, dict)) - self.assertTrue(Const.OUTPUT in pred) - self.assertEqual(tuple(pred[Const.OUTPUT].shape), (2,)) - diff --git a/tests/models/test_biaffine_parser.py b/tests/models/test_biaffine_parser.py deleted file mode 100644 index 4b38d816..00000000 --- a/tests/models/test_biaffine_parser.py +++ /dev/null @@ -1,47 +0,0 @@ -import unittest - -from fastNLP.models.biaffine_parser import BiaffineParser, ParserLoss, ParserMetric -from .model_runner import * - - -def prepare_parser_data(): - index = 'index' - ds = DataSet({index: list(range(N_SAMPLES))}) - ds.apply_field(lambda x: RUNNER.gen_var_seq(MAX_LEN, VOCAB_SIZE), - field_name=index, new_field_name=C.INPUTS(0), - is_input=True) - ds.apply_field(lambda x: RUNNER.gen_seq(len(x), NUM_CLS), - field_name=C.INPUTS(0), new_field_name=C.INPUTS(1), - is_input=True) - # target1 is heads, should in range(0, len(words)) - ds.apply_field(lambda x: RUNNER.gen_seq(len(x), len(x)), - field_name=C.INPUTS(0), new_field_name=C.TARGETS(0), - is_target=True) - ds.apply_field(lambda x: RUNNER.gen_seq(len(x), NUM_CLS), - field_name=C.INPUTS(0), new_field_name=C.TARGETS(1), - is_target=True) - ds.apply_field(len, field_name=C.INPUTS(0), new_field_name=C.INPUT_LEN, - is_input=True, is_target=True) - return ds - - -class TestBiaffineParser(unittest.TestCase): - def test_train(self): - model = BiaffineParser(embed=(VOCAB_SIZE, 10), - pos_vocab_size=VOCAB_SIZE, pos_emb_dim=10, - rnn_hidden_size=10, - arc_mlp_size=10, - label_mlp_size=10, - num_label=NUM_CLS, encoder='var-lstm') - ds = prepare_parser_data() - RUNNER.run_model(model, ds, loss=ParserLoss(), metrics=ParserMetric()) - - def test_train2(self): - model = BiaffineParser(embed=(VOCAB_SIZE, 10), - pos_vocab_size=VOCAB_SIZE, pos_emb_dim=10, - rnn_hidden_size=16, - arc_mlp_size=10, - label_mlp_size=10, - num_label=NUM_CLS, encoder='transformer') - ds = prepare_parser_data() - RUNNER.run_model(model, ds, loss=ParserLoss(), metrics=ParserMetric()) diff --git a/tests/models/test_cnn_text_classification.py b/tests/models/test_cnn_text_classification.py deleted file mode 100644 index 29154bd6..00000000 --- a/tests/models/test_cnn_text_classification.py +++ /dev/null @@ -1,29 +0,0 @@ - -import unittest - -from .model_runner import * -from fastNLP.models.cnn_text_classification import CNNText - - -class TestCNNText(unittest.TestCase): - def init_model(self, kernel_sizes, kernel_nums=(1,3,5)): - model = CNNText((VOCAB_SIZE, 30), - NUM_CLS, - kernel_nums=kernel_nums, - kernel_sizes=kernel_sizes) - return model - - def test_case1(self): - # 测试能否正常运行CNN - model = self.init_model((1,3,5)) - RUNNER.run_model_with_task(TEXT_CLS, model) - - def test_init_model(self): - self.assertRaises(Exception, self.init_model, (2,4)) - self.assertRaises(Exception, self.init_model, (2,)) - - def test_output(self): - model = self.init_model((3,), (1,)) - global MAX_LEN - MAX_LEN = 2 - RUNNER.run_model_with_task(TEXT_CLS, model) diff --git a/tests/models/test_seq2seq_generator.py b/tests/models/test_seq2seq_generator.py deleted file mode 100644 index ac21281f..00000000 --- a/tests/models/test_seq2seq_generator.py +++ /dev/null @@ -1,76 +0,0 @@ - -import unittest -from fastNLP.models import SequenceGeneratorModel -from fastNLP.models import LSTMSeq2SeqModel, TransformerSeq2SeqModel -from fastNLP import Vocabulary, DataSet -import torch -from fastNLP.embeddings import StaticEmbedding -from fastNLP import Trainer, CrossEntropyLoss, 
AccuracyMetric -from fastNLP import Callback - - -def prepare_env(): - vocab = Vocabulary().add_word_lst("This is a test .".split()) - vocab.add_word_lst("Another test !".split()) - embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=5) - - src_words_idx = [[3, 1, 2], [1, 2]] - # tgt_words_idx = [[1, 2, 3, 4], [2, 3]] - src_seq_len = [3, 2] - # tgt_seq_len = [4, 2] - - ds = DataSet({'src_tokens': src_words_idx, 'src_seq_len': src_seq_len, 'tgt_tokens': src_words_idx, - 'tgt_seq_len':src_seq_len}) - - ds.set_input('src_tokens', 'tgt_tokens', 'src_seq_len') - ds.set_target('tgt_seq_len', 'tgt_tokens') - - return embed, ds - - -class ExitCallback(Callback): - def __init__(self): - super().__init__() - - def on_valid_end(self, eval_result, metric_key, optimizer, is_better_eval): - if eval_result['AccuracyMetric']['acc']==1: - raise KeyboardInterrupt() - - -class TestSeq2SeqGeneratorModel(unittest.TestCase): - def test_run(self): - # 检测是否能够使用SequenceGeneratorModel训练, 透传预测 - embed, ds = prepare_env() - model1 = TransformerSeq2SeqModel.build_model(src_embed=embed, tgt_embed=None, - pos_embed='sin', max_position=20, num_layers=2, d_model=30, n_head=6, - dim_ff=20, dropout=0.1, - bind_encoder_decoder_embed=True, - bind_decoder_input_output_embed=True) - trainer = Trainer(ds, model1, optimizer=None, loss=CrossEntropyLoss(target='tgt_tokens', seq_len='tgt_seq_len'), - batch_size=32, sampler=None, drop_last=False, update_every=1, - num_workers=0, n_epochs=100, print_every=5, - dev_data=ds, metrics=AccuracyMetric(target='tgt_tokens', seq_len='tgt_seq_len'), metric_key=None, - validate_every=-1, save_path=None, use_tqdm=False, device=None, - callbacks=ExitCallback(), check_code_level=0) - res = trainer.train() - self.assertEqual(res['best_eval']['AccuracyMetric']['acc'], 1) - - embed, ds = prepare_env() - model2 = LSTMSeq2SeqModel.build_model(src_embed=embed, tgt_embed=None, - num_layers=1, hidden_size=20, dropout=0.1, - bind_encoder_decoder_embed=True, - bind_decoder_input_output_embed=True, attention=True) - optimizer = torch.optim.Adam(model2.parameters(), lr=0.01) - trainer = Trainer(ds, model2, optimizer=optimizer, loss=CrossEntropyLoss(target='tgt_tokens', seq_len='tgt_seq_len'), - batch_size=32, sampler=None, drop_last=False, update_every=1, - num_workers=0, n_epochs=200, print_every=1, - dev_data=ds, metrics=AccuracyMetric(target='tgt_tokens', seq_len='tgt_seq_len'), - metric_key=None, - validate_every=-1, save_path=None, use_tqdm=False, device=None, - callbacks=ExitCallback(), check_code_level=0) - res = trainer.train() - self.assertEqual(res['best_eval']['AccuracyMetric']['acc'], 1) - - - - diff --git a/tests/models/test_seq2seq_model.py b/tests/models/test_seq2seq_model.py deleted file mode 100644 index fc35b02e..00000000 --- a/tests/models/test_seq2seq_model.py +++ /dev/null @@ -1,114 +0,0 @@ - -import unittest - -from fastNLP.models.seq2seq_model import TransformerSeq2SeqModel, LSTMSeq2SeqModel -from fastNLP import Vocabulary -from fastNLP.embeddings import StaticEmbedding -import torch -from torch import optim -import torch.nn.functional as F -from fastNLP import seq_len_to_mask - - -def prepare_env(): - vocab = Vocabulary().add_word_lst("This is a test .".split()) - vocab.add_word_lst("Another test !".split()) - embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=5) - - src_words_idx = torch.LongTensor([[3, 1, 2], [1, 2, 0]]) - tgt_words_idx = torch.LongTensor([[1, 2, 3, 4], [2, 3, 0, 0]]) - src_seq_len = torch.LongTensor([3, 2]) - tgt_seq_len = 
torch.LongTensor([4, 2]) - - return embed, src_words_idx, tgt_words_idx, src_seq_len, tgt_seq_len - - -def train_model(model, src_words_idx, tgt_words_idx, tgt_seq_len, src_seq_len): - optimizer = optim.Adam(model.parameters(), lr=1e-2) - mask = seq_len_to_mask(tgt_seq_len).eq(0) - target = tgt_words_idx.masked_fill(mask, -100) - - for i in range(100): - optimizer.zero_grad() - pred = model(src_words_idx, tgt_words_idx, src_seq_len)['pred'] # bsz x max_len x vocab_size - loss = F.cross_entropy(pred.transpose(1, 2), target) - loss.backward() - optimizer.step() - - right_count = pred.argmax(dim=-1).eq(target).masked_fill(mask, 1).sum() - return right_count - - -class TestTransformerSeq2SeqModel(unittest.TestCase): - def test_run(self): - # 测试能否跑通 - embed, src_words_idx, tgt_words_idx, src_seq_len, tgt_seq_len = prepare_env() - for pos_embed in ['learned', 'sin']: - with self.subTest(pos_embed=pos_embed): - model = TransformerSeq2SeqModel.build_model(src_embed=embed, tgt_embed=None, - pos_embed=pos_embed, max_position=20, num_layers=2, d_model=30, n_head=6, dim_ff=20, dropout=0.1, - bind_encoder_decoder_embed=True, - bind_decoder_input_output_embed=True) - - output = model(src_words_idx, tgt_words_idx, src_seq_len) - self.assertEqual(output['pred'].size(), (2, 4, len(embed))) - - for bind_encoder_decoder_embed in [True, False]: - tgt_embed = None - for bind_decoder_input_output_embed in [True, False]: - if bind_encoder_decoder_embed == False: - tgt_embed = embed - with self.subTest(bind_encoder_decoder_embed=bind_encoder_decoder_embed, - bind_decoder_input_output_embed=bind_decoder_input_output_embed): - model = TransformerSeq2SeqModel.build_model(src_embed=embed, tgt_embed=tgt_embed, - pos_embed='sin', max_position=20, num_layers=2, - d_model=30, n_head=6, dim_ff=20, dropout=0.1, - bind_encoder_decoder_embed=bind_encoder_decoder_embed, - bind_decoder_input_output_embed=bind_decoder_input_output_embed) - - output = model(src_words_idx, tgt_words_idx, src_seq_len) - self.assertEqual(output['pred'].size(), (2, 4, len(embed))) - - def test_train(self): - # 测试能否train到overfit - embed, src_words_idx, tgt_words_idx, src_seq_len, tgt_seq_len = prepare_env() - - model = TransformerSeq2SeqModel.build_model(src_embed=embed, tgt_embed=None, - pos_embed='sin', max_position=20, num_layers=2, d_model=30, n_head=6, dim_ff=20, dropout=0.1, - bind_encoder_decoder_embed=True, - bind_decoder_input_output_embed=True) - - right_count = train_model(model, src_words_idx, tgt_words_idx, tgt_seq_len, src_seq_len) - self.assertEqual(right_count, tgt_words_idx.nelement()) - - -class TestLSTMSeq2SeqModel(unittest.TestCase): - def test_run(self): - # 测试能否跑通 - embed, src_words_idx, tgt_words_idx, src_seq_len, tgt_seq_len = prepare_env() - - for bind_encoder_decoder_embed in [True, False]: - tgt_embed = None - for bind_decoder_input_output_embed in [True, False]: - if bind_encoder_decoder_embed == False: - tgt_embed = embed - with self.subTest(bind_encoder_decoder_embed=bind_encoder_decoder_embed, - bind_decoder_input_output_embed=bind_decoder_input_output_embed): - model = LSTMSeq2SeqModel.build_model(src_embed=embed, tgt_embed=tgt_embed, - num_layers=2, hidden_size=20, dropout=0.1, - bind_encoder_decoder_embed=bind_encoder_decoder_embed, - bind_decoder_input_output_embed=bind_decoder_input_output_embed) - output = model(src_words_idx, tgt_words_idx, src_seq_len) - self.assertEqual(output['pred'].size(), (2, 4, len(embed))) - - def test_train(self): - embed, src_words_idx, tgt_words_idx, src_seq_len, tgt_seq_len = 
prepare_env() - - model = LSTMSeq2SeqModel.build_model(src_embed=embed, tgt_embed=None, - num_layers=1, hidden_size=20, dropout=0.1, - bind_encoder_decoder_embed=True, - bind_decoder_input_output_embed=True) - - right_count = train_model(model, src_words_idx, tgt_words_idx, tgt_seq_len, src_seq_len) - self.assertEqual(right_count, tgt_words_idx.nelement()) - diff --git a/tests/models/test_sequence_labeling.py b/tests/models/test_sequence_labeling.py deleted file mode 100644 index 815d7047..00000000 --- a/tests/models/test_sequence_labeling.py +++ /dev/null @@ -1,51 +0,0 @@ - - -import unittest - -from .model_runner import * -from fastNLP.models.sequence_labeling import SeqLabeling, AdvSeqLabel, BiLSTMCRF -from fastNLP.core.losses import LossInForward - -class TestBiLSTM(unittest.TestCase): - def test_case1(self): - # check that BiLSTMCRF trains and evaluates - init_emb = (VOCAB_SIZE, 30) - model = BiLSTMCRF(init_emb, - hidden_size=30, - num_classes=NUM_CLS) - - data = RUNNER.prepare_pos_tagging_data() - data.set_input('target') - loss = LossInForward() - metric = AccuracyMetric(pred=C.OUTPUT, target=C.TARGET, seq_len=C.INPUT_LEN) - RUNNER.run_model(model, data, loss, metric) - - -class TestSeqLabel(unittest.TestCase): - def test_case1(self): - # check that SeqLabeling trains and evaluates - init_emb = (VOCAB_SIZE, 30) - model = SeqLabeling(init_emb, - hidden_size=30, - num_classes=NUM_CLS) - - data = RUNNER.prepare_pos_tagging_data() - data.set_input('target') - loss = LossInForward() - metric = AccuracyMetric(pred=C.OUTPUT, target=C.TARGET, seq_len=C.INPUT_LEN) - RUNNER.run_model(model, data, loss, metric) - - -class TestAdvSeqLabel(unittest.TestCase): - def test_case1(self): - # check that AdvSeqLabel trains and evaluates - init_emb = (VOCAB_SIZE, 30) - model = AdvSeqLabel(init_emb, - hidden_size=30, - num_classes=NUM_CLS) - - data = RUNNER.prepare_pos_tagging_data() - data.set_input('target') - loss = LossInForward() - metric = AccuracyMetric(pred=C.OUTPUT, target=C.TARGET, seq_len=C.INPUT_LEN) - RUNNER.run_model(model, data, loss, metric) \ No newline at end of file diff --git a/tests/models/test_snli.py b/tests/models/test_snli.py deleted file mode 100644 index 7a588a4c..00000000 --- a/tests/models/test_snli.py +++ /dev/null @@ -1,9 +0,0 @@ -import unittest -from .model_runner import * -from fastNLP.models.snli import ESIM - - -class TestSNLIModel(unittest.TestCase): - def test_snli(self): - model = ESIM((VOCAB_SIZE, 10), num_labels=NUM_CLS, dropout_rate=0) - RUNNER.run_model_with_task(NLI, model) diff --git a/tests/models/test_star_trans.py b/tests/models/test_star_trans.py deleted file mode 100644 index eae19b24..00000000 --- a/tests/models/test_star_trans.py +++ /dev/null @@ -1,16 +0,0 @@ -from .model_runner import * -from fastNLP.models.star_transformer import STNLICls, STSeqCls, STSeqLabel - - -# add star-transformer tests for 3 kinds of tasks; see the usage sketch below. 
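For reference, all three star-transformer tests below follow the same pattern: build the model from a (vocab_size, embed_dim) tuple, then hand it to the shared harness. A minimal standalone sketch, assuming the tests package is importable from the repository root (absolute imports stand in for the relative `from .model_runner import *` used in the file):

from tests.models.model_runner import RUNNER, TEXT_CLS, VOCAB_SIZE, NUM_CLS
from fastNLP.models.star_transformer import STSeqCls

# build a star-transformer text classifier on the harness's fake-data constants
model = STSeqCls((VOCAB_SIZE, 10), NUM_CLS, dropout=0)
# runs a Tester pass, one epoch of Trainer on fake data, then a second Tester pass
RUNNER.run_model_with_task(TEXT_CLS, model)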
-def test_cls(): - model = STSeqCls((VOCAB_SIZE, 10), NUM_CLS, dropout=0) - RUNNER.run_model_with_task(TEXT_CLS, model) - -def test_nli(): - model = STNLICls((VOCAB_SIZE, 10), NUM_CLS, dropout=0) - RUNNER.run_model_with_task(NLI, model) - -def test_seq_label(): - model = STSeqLabel((VOCAB_SIZE, 10), NUM_CLS, dropout=0) - RUNNER.run_model_with_task(POS_TAGGING, model) diff --git a/tests/modules/__init__.py b/tests/modules/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/modules/decoder/__init__.py b/tests/modules/decoder/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/modules/decoder/test_CRF.py b/tests/modules/decoder/test_CRF.py deleted file mode 100644 index adac3c40..00000000 --- a/tests/modules/decoder/test_CRF.py +++ /dev/null @@ -1,323 +0,0 @@ - -import unittest -from fastNLP import Vocabulary - -class TestCRF(unittest.TestCase): - def test_case1(self): - # 检查allowed_transitions()能否正确使用 - from fastNLP.modules.decoder.crf import allowed_transitions - - id2label = {0: 'B', 1: 'I', 2:'O'} - expected_res = {(0, 0), (0, 1), (0, 2), (0, 4), (1, 0), (1, 1), (1, 2), (1, 4), (2, 0), (2, 2), - (2, 4), (3, 0), (3, 2)} - self.assertSetEqual(expected_res, set(allowed_transitions(id2label, include_start_end=True))) - - id2label = {0: 'B', 1:'M', 2:'E', 3:'S'} - expected_res = {(0, 1), (0, 2), (1, 1), (1, 2), (2, 0), (2, 3), (2, 5), (3, 0), (3, 3), (3, 5), (4, 0), (4, 3)} - self.assertSetEqual(expected_res, set( - allowed_transitions(id2label, encoding_type='BMES', include_start_end=True))) - - id2label = {0: 'B', 1: 'I', 2:'O', 3: '', 4:""} - allowed_transitions(id2label, include_start_end=True) - - labels = ['O'] - for label in ['X', 'Y']: - for tag in 'BI': - labels.append('{}-{}'.format(tag, label)) - id2label = {idx:label for idx, label in enumerate(labels)} - expected_res = {(0, 0), (0, 1), (0, 3), (0, 6), (1, 0), (1, 1), (1, 2), (1, 3), (1, 6), (2, 0), (2, 1), - (2, 2), (2, 3), (2, 6), (3, 0), (3, 1), (3, 3), (3, 4), (3, 6), (4, 0), (4, 1), (4, 3), - (4, 4), (4, 6), (5, 0), (5, 1), (5, 3)} - self.assertSetEqual(expected_res, set(allowed_transitions(id2label, include_start_end=True))) - - labels = [] - for label in ['X', 'Y']: - for tag in 'BMES': - labels.append('{}-{}'.format(tag, label)) - id2label = {idx: label for idx, label in enumerate(labels)} - expected_res = {(0, 1), (0, 2), (1, 1), (1, 2), (2, 0), (2, 3), (2, 4), (2, 7), (2, 9), (3, 0), (3, 3), (3, 4), - (3, 7), (3, 9), (4, 5), (4, 6), (5, 5), (5, 6), (6, 0), (6, 3), (6, 4), (6, 7), (6, 9), (7, 0), - (7, 3), (7, 4), (7, 7), (7, 9), (8, 0), (8, 3), (8, 4), (8, 7)} - self.assertSetEqual(expected_res, set( - allowed_transitions(id2label, include_start_end=True))) - - def test_case11(self): - # 测试自动推断encoding类型 - from fastNLP.modules.decoder.crf import allowed_transitions - - id2label = {0: 'B', 1: 'I', 2: 'O'} - expected_res = {(0, 0), (0, 1), (0, 2), (0, 4), (1, 0), (1, 1), (1, 2), (1, 4), (2, 0), (2, 2), - (2, 4), (3, 0), (3, 2)} - self.assertSetEqual(expected_res, set(allowed_transitions(id2label, include_start_end=True))) - - id2label = {0: 'B', 1: 'M', 2: 'E', 3: 'S'} - expected_res = {(0, 1), (0, 2), (1, 1), (1, 2), (2, 0), (2, 3), (2, 5), (3, 0), (3, 3), (3, 5), (4, 0), (4, 3)} - self.assertSetEqual(expected_res, set( - allowed_transitions(id2label, include_start_end=True))) - - id2label = {0: 'B', 1: 'I', 2: 'O', 3: '', 4: ""} - allowed_transitions(id2label, include_start_end=True) - - labels = ['O'] - for label in ['X', 'Y']: - for tag in 'BI': - 
labels.append('{}-{}'.format(tag, label)) - id2label = {idx: label for idx, label in enumerate(labels)} - expected_res = {(0, 0), (0, 1), (0, 3), (0, 6), (1, 0), (1, 1), (1, 2), (1, 3), (1, 6), (2, 0), (2, 1), - (2, 2), (2, 3), (2, 6), (3, 0), (3, 1), (3, 3), (3, 4), (3, 6), (4, 0), (4, 1), (4, 3), - (4, 4), (4, 6), (5, 0), (5, 1), (5, 3)} - self.assertSetEqual(expected_res, set(allowed_transitions(id2label, include_start_end=True))) - - labels = [] - for label in ['X', 'Y']: - for tag in 'BMES': - labels.append('{}-{}'.format(tag, label)) - id2label = {idx: label for idx, label in enumerate(labels)} - expected_res = {(0, 1), (0, 2), (1, 1), (1, 2), (2, 0), (2, 3), (2, 4), (2, 7), (2, 9), (3, 0), (3, 3), (3, 4), - (3, 7), (3, 9), (4, 5), (4, 6), (5, 5), (5, 6), (6, 0), (6, 3), (6, 4), (6, 7), (6, 9), (7, 0), - (7, 3), (7, 4), (7, 7), (7, 9), (8, 0), (8, 3), (8, 4), (8, 7)} - self.assertSetEqual(expected_res, set( - allowed_transitions(id2label, include_start_end=True))) - - def test_case12(self): - # 测试能否通过vocab生成转移矩阵 - from fastNLP.modules.decoder.crf import allowed_transitions - - id2label = {0: 'B', 1: 'I', 2: 'O'} - vocab = Vocabulary(unknown=None, padding=None) - for idx, tag in id2label.items(): - vocab.add_word(tag) - expected_res = {(0, 0), (0, 1), (0, 2), (0, 4), (1, 0), (1, 1), (1, 2), (1, 4), (2, 0), (2, 2), - (2, 4), (3, 0), (3, 2)} - self.assertSetEqual(expected_res, set(allowed_transitions(vocab, include_start_end=True))) - - id2label = {0: 'B', 1: 'M', 2: 'E', 3: 'S'} - vocab = Vocabulary(unknown=None, padding=None) - for idx, tag in id2label.items(): - vocab.add_word(tag) - expected_res = {(0, 1), (0, 2), (1, 1), (1, 2), (2, 0), (2, 3), (2, 5), (3, 0), (3, 3), (3, 5), (4, 0), (4, 3)} - self.assertSetEqual(expected_res, set( - allowed_transitions(vocab, include_start_end=True))) - - id2label = {0: 'B', 1: 'I', 2: 'O', 3: '', 4: ""} - vocab = Vocabulary() - for idx, tag in id2label.items(): - vocab.add_word(tag) - allowed_transitions(vocab, include_start_end=True) - - labels = ['O'] - for label in ['X', 'Y']: - for tag in 'BI': - labels.append('{}-{}'.format(tag, label)) - id2label = {idx: label for idx, label in enumerate(labels)} - expected_res = {(0, 0), (0, 1), (0, 3), (0, 6), (1, 0), (1, 1), (1, 2), (1, 3), (1, 6), (2, 0), (2, 1), - (2, 2), (2, 3), (2, 6), (3, 0), (3, 1), (3, 3), (3, 4), (3, 6), (4, 0), (4, 1), (4, 3), - (4, 4), (4, 6), (5, 0), (5, 1), (5, 3)} - vocab = Vocabulary(unknown=None, padding=None) - for idx, tag in id2label.items(): - vocab.add_word(tag) - self.assertSetEqual(expected_res, set(allowed_transitions(vocab, include_start_end=True))) - - labels = [] - for label in ['X', 'Y']: - for tag in 'BMES': - labels.append('{}-{}'.format(tag, label)) - id2label = {idx: label for idx, label in enumerate(labels)} - vocab = Vocabulary(unknown=None, padding=None) - for idx, tag in id2label.items(): - vocab.add_word(tag) - expected_res = {(0, 1), (0, 2), (1, 1), (1, 2), (2, 0), (2, 3), (2, 4), (2, 7), (2, 9), (3, 0), (3, 3), (3, 4), - (3, 7), (3, 9), (4, 5), (4, 6), (5, 5), (5, 6), (6, 0), (6, 3), (6, 4), (6, 7), (6, 9), (7, 0), - (7, 3), (7, 4), (7, 7), (7, 9), (8, 0), (8, 3), (8, 4), (8, 7)} - self.assertSetEqual(expected_res, set( - allowed_transitions(vocab, include_start_end=True))) - - # def test_case2(self): - # # 测试CRF能否避免解码出非法跃迁, 使用allennlp做了验证。 - # pass - # import torch - # from fastNLP import seq_len_to_mask - # - # labels = ['O'] - # for label in ['X', 'Y']: - # for tag in 'BI': - # labels.append('{}-{}'.format(tag, label)) - # id2label = {idx: label 
for idx, label in enumerate(labels)} - # num_tags = len(id2label) - # max_len = 10 - # batch_size = 4 - # bio_logits = torch.nn.functional.softmax(torch.rand(size=(batch_size, max_len, num_tags)), dim=-1).log() - # from allennlp.modules.conditional_random_field import ConditionalRandomField, allowed_transitions - # allen_CRF = ConditionalRandomField(num_tags=num_tags, constraints=allowed_transitions('BIO', id2label), - # include_start_end_transitions=False) - # bio_trans_m = allen_CRF.transitions - # bio_seq_lens = torch.randint(1, max_len, size=(batch_size,)) - # bio_seq_lens[0] = 1 - # bio_seq_lens[-1] = max_len - # mask = seq_len_to_mask(bio_seq_lens) - # allen_res = allen_CRF.viterbi_tags(bio_logits, mask) - # - # from fastNLP.modules.decoder.crf import ConditionalRandomField, allowed_transitions - # fast_CRF = ConditionalRandomField(num_tags=num_tags, allowed_transitions=allowed_transitions(id2label, - # include_start_end=True)) - # fast_CRF.trans_m = bio_trans_m - # fast_res = fast_CRF.viterbi_decode(bio_logits, mask, unpad=True) - # bio_scores = [round(score, 4) for _, score in allen_res] - # # score equal - # self.assertListEqual(bio_scores, [round(s, 4) for s in fast_res[1].tolist()]) - # # seq equal - # bio_path = [_ for _, score in allen_res] - # self.assertListEqual(bio_path, fast_res[0]) - # - # labels = [] - # for label in ['X', 'Y']: - # for tag in 'BMES': - # labels.append('{}-{}'.format(tag, label)) - # id2label = {idx: label for idx, label in enumerate(labels)} - # num_tags = len(id2label) - # - # from allennlp.modules.conditional_random_field import ConditionalRandomField, allowed_transitions - # allen_CRF = ConditionalRandomField(num_tags=num_tags, constraints=allowed_transitions('BMES', id2label), - # include_start_end_transitions=False) - # bmes_logits = torch.nn.functional.softmax(torch.rand(size=(batch_size, max_len, num_tags)), dim=-1).log() - # bmes_trans_m = allen_CRF.transitions - # bmes_seq_lens = torch.randint(1, max_len, size=(batch_size,)) - # bmes_seq_lens[0] = 1 - # bmes_seq_lens[-1] = max_len - # mask = seq_len_to_mask(bmes_seq_lens) - # allen_res = allen_CRF.viterbi_tags(bmes_logits, mask) - # - # from fastNLP.modules.decoder.crf import ConditionalRandomField, allowed_transitions - # fast_CRF = ConditionalRandomField(num_tags=num_tags, allowed_transitions=allowed_transitions(id2label, - # encoding_type='BMES', - # include_start_end=True)) - # fast_CRF.trans_m = bmes_trans_m - # fast_res = fast_CRF.viterbi_decode(bmes_logits, mask, unpad=True) - # # score equal - # bmes_scores = [round(score, 4) for _, score in allen_res] - # self.assertListEqual(bmes_scores, [round(s, 4) for s in fast_res[1].tolist()]) - # # seq equal - # bmes_path = [_ for _, score in allen_res] - # self.assertListEqual(bmes_path, fast_res[0]) - # - # data = { - # 'bio_logits': bio_logits.tolist(), - # 'bio_scores': bio_scores, - # 'bio_path': bio_path, - # 'bio_trans_m': bio_trans_m.tolist(), - # 'bio_seq_lens': bio_seq_lens.tolist(), - # 'bmes_logits': bmes_logits.tolist(), - # 'bmes_scores': bmes_scores, - # 'bmes_path': bmes_path, - # 'bmes_trans_m': bmes_trans_m.tolist(), - # 'bmes_seq_lens': bmes_seq_lens.tolist(), - # } - # - # with open('weights.json', 'w') as f: - # import json - # json.dump(data, f) - - def test_case2(self): - # 测试CRF是否正常work。 - import json - import torch - from fastNLP import seq_len_to_mask - - with open('tests/data_for_tests/modules/decoder/crf.json', 'r') as f: - data = json.load(f) - - bio_logits = torch.FloatTensor(data['bio_logits']) - bio_scores = 
data['bio_scores'] - bio_path = data['bio_path'] - bio_trans_m = torch.FloatTensor(data['bio_trans_m']) - bio_seq_lens = torch.LongTensor(data['bio_seq_lens']) - - bmes_logits = torch.FloatTensor(data['bmes_logits']) - bmes_scores = data['bmes_scores'] - bmes_path = data['bmes_path'] - bmes_trans_m = torch.FloatTensor(data['bmes_trans_m']) - bmes_seq_lens = torch.LongTensor(data['bmes_seq_lens']) - - labels = ['O'] - for label in ['X', 'Y']: - for tag in 'BI': - labels.append('{}-{}'.format(tag, label)) - id2label = {idx: label for idx, label in enumerate(labels)} - num_tags = len(id2label) - - mask = seq_len_to_mask(bio_seq_lens) - - from fastNLP.modules.decoder.crf import ConditionalRandomField, allowed_transitions - fast_CRF = ConditionalRandomField(num_tags=num_tags, allowed_transitions=allowed_transitions(id2label, - include_start_end=True)) - fast_CRF.trans_m.data = bio_trans_m - fast_res = fast_CRF.viterbi_decode(bio_logits, mask, unpad=True) - # score equal - self.assertListEqual(bio_scores, [round(s, 4) for s in fast_res[1].tolist()]) - # seq equal - self.assertListEqual(bio_path, fast_res[0]) - - labels = [] - for label in ['X', 'Y']: - for tag in 'BMES': - labels.append('{}-{}'.format(tag, label)) - id2label = {idx: label for idx, label in enumerate(labels)} - num_tags = len(id2label) - - mask = seq_len_to_mask(bmes_seq_lens) - - from fastNLP.modules.decoder.crf import ConditionalRandomField, allowed_transitions - fast_CRF = ConditionalRandomField(num_tags=num_tags, allowed_transitions=allowed_transitions(id2label, - encoding_type='BMES', - include_start_end=True)) - fast_CRF.trans_m.data = bmes_trans_m - fast_res = fast_CRF.viterbi_decode(bmes_logits, mask, unpad=True) - # score equal - self.assertListEqual(bmes_scores, [round(s, 4) for s in fast_res[1].tolist()]) - # seq equal - self.assertListEqual(bmes_path, fast_res[0]) - - def test_case3(self): - # 测试crf的loss不会出现负数 - import torch - from fastNLP.modules.decoder.crf import ConditionalRandomField - from fastNLP.core.utils import seq_len_to_mask - from torch import optim - from torch import nn - - num_tags, include_start_end_trans = 4, True - num_samples = 4 - lengths = torch.randint(3, 50, size=(num_samples, )).long() - max_len = lengths.max() - tags = torch.randint(num_tags, size=(num_samples, max_len)) - masks = seq_len_to_mask(lengths) - feats = nn.Parameter(torch.randn(num_samples, max_len, num_tags)) - crf = ConditionalRandomField(num_tags, include_start_end_trans) - optimizer = optim.SGD([param for param in crf.parameters() if param.requires_grad] + [feats], lr=0.1) - for _ in range(10): - loss = crf(feats, tags, masks).mean() - optimizer.zero_grad() - loss.backward() - optimizer.step() - if _%1000==0: - print(loss) - self.assertGreater(loss.item(), 0, "CRF loss cannot be less than 0.") - - def test_masking(self): - # 测试crf的pad masking正常运行 - import torch - from fastNLP.modules.decoder.crf import ConditionalRandomField - max_len = 5 - n_tags = 5 - pad_len = 5 - - torch.manual_seed(4) - logit = torch.rand(1, max_len+pad_len, n_tags) - # logit[0, -1, :] = 0.0 - mask = torch.ones(1, max_len+pad_len) - mask[0,-pad_len] = 0 - model = ConditionalRandomField(n_tags) - pred, score = model.viterbi_decode(logit[:,:-pad_len], mask[:,:-pad_len]) - mask_pred, mask_score = model.viterbi_decode(logit, mask) - self.assertEqual(pred[0].tolist(), mask_pred[0,:-pad_len].tolist()) - diff --git a/tests/modules/decoder/test_bert.py b/tests/modules/decoder/test_bert.py deleted file mode 100644 index 56946f5d..00000000 --- 
a/tests/modules/decoder/test_bert.py +++ /dev/null @@ -1,22 +0,0 @@ - -import unittest - -import torch - -from fastNLP.modules.encoder.bert import BertModel - - -class TestBert(unittest.TestCase): - def test_bert_1(self): - from fastNLP.modules.encoder.bert import BertConfig - config = BertConfig(32000) - model = BertModel(config) - - input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]]) - input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]]) - token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]]) - - all_encoder_layers, pooled_output = model(input_ids, token_type_ids, input_mask) - for layer in all_encoder_layers: - self.assertEqual(tuple(layer.shape), (2, 3, 768)) - self.assertEqual(tuple(pooled_output.shape), (2, 768)) diff --git a/tests/modules/decoder/test_seq2seq_decoder.py b/tests/modules/decoder/test_seq2seq_decoder.py deleted file mode 100644 index 00437edb..00000000 --- a/tests/modules/decoder/test_seq2seq_decoder.py +++ /dev/null @@ -1,50 +0,0 @@ -import unittest - -import torch - -from fastNLP import Vocabulary -from fastNLP.embeddings import StaticEmbedding -from fastNLP.modules import TransformerSeq2SeqDecoder -from fastNLP.modules import LSTMSeq2SeqDecoder -from fastNLP import seq_len_to_mask - - -class TestTransformerSeq2SeqDecoder(unittest.TestCase): - def test_case(self): - vocab = Vocabulary().add_word_lst("This is a test .".split()) - vocab.add_word_lst("Another test !".split()) - embed = StaticEmbedding(vocab, embedding_dim=10) - - encoder_output = torch.randn(2, 3, 10) - src_seq_len = torch.LongTensor([3, 2]) - encoder_mask = seq_len_to_mask(src_seq_len) - - for flag in [True, False]: - with self.subTest(bind_decoder_input_output_embed=flag): - decoder = TransformerSeq2SeqDecoder(embed=embed, pos_embed = None, - d_model = 10, num_layers=2, n_head = 5, dim_ff = 20, dropout = 0.1, - bind_decoder_input_output_embed = True) - state = decoder.init_state(encoder_output, encoder_mask) - output = decoder(tokens=torch.randint(0, len(vocab), size=(2, 4)), state=state) - self.assertEqual(output.size(), (2, 4, len(vocab))) - - -class TestLSTMDecoder(unittest.TestCase): - def test_case(self): - vocab = Vocabulary().add_word_lst("This is a test .".split()) - vocab.add_word_lst("Another test !".split()) - embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=10) - - encoder_output = torch.randn(2, 3, 10) - tgt_words_idx = torch.LongTensor([[1, 2, 3, 4], [2, 3, 0, 0]]) - src_seq_len = torch.LongTensor([3, 2]) - encoder_mask = seq_len_to_mask(src_seq_len) - - for flag in [True, False]: - for attention in [True, False]: - with self.subTest(bind_decoder_input_output_embed=flag, attention=attention): - decoder = LSTMSeq2SeqDecoder(embed=embed, num_layers = 2, hidden_size = 10, - dropout = 0.3, bind_decoder_input_output_embed=flag, attention=attention) - state = decoder.init_state(encoder_output, encoder_mask) - output = decoder(tgt_words_idx, state) - self.assertEqual(tuple(output.size()), (2, 4, len(vocab))) diff --git a/tests/modules/encoder/__init__.py b/tests/modules/encoder/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/modules/encoder/test_pooling.py b/tests/modules/encoder/test_pooling.py deleted file mode 100644 index 5adca4ff..00000000 --- a/tests/modules/encoder/test_pooling.py +++ /dev/null @@ -1,41 +0,0 @@ -import unittest - -import torch - -from fastNLP.modules.encoder.pooling import MaxPool, MaxPoolWithMask, KMaxPool, AvgPool, AvgPoolWithMask - - -class TestPooling(unittest.TestCase): - def test_MaxPool(self): - 
max_pool_1d = MaxPool(dimension=1) - x = torch.randn(5, 6, 7) - self.assertEqual(max_pool_1d(x).size(), (5, 7)) - - max_pool_2d = MaxPool(dimension=2) - self.assertEqual(max_pool_2d(x).size(), (5, 1)) - - max_pool_3d = MaxPool(dimension=3) - x = torch.randn(4, 5, 6, 7) - self.assertEqual(max_pool_3d(x).size(), (4, 1, 1)) - - def test_MaxPoolWithMask(self): - pool = MaxPoolWithMask() - x = torch.randn(5, 6, 7) - mask = (torch.randn(5, 6) > 0).long() - self.assertEqual(pool(x, mask).size(), (5, 7)) - - def test_KMaxPool(self): - k_pool = KMaxPool(k=3) - x = torch.randn(4, 5, 6) - self.assertEqual(k_pool(x).size(), (4, 15)) - - def test_AvgPool(self): - pool = AvgPool() - x = torch.randn(4, 5, 6) - self.assertEqual(pool(x).size(), (4, 5)) - - def test_AvgPoolWithMask(self): - pool = AvgPoolWithMask() - x = torch.randn(5, 6, 7) - mask = (torch.randn(5, 6) > 0).long() - self.assertEqual(pool(x, mask).size(), (5, 7)) diff --git a/tests/modules/encoder/test_seq2seq_encoder.py b/tests/modules/encoder/test_seq2seq_encoder.py deleted file mode 100644 index 08c03145..00000000 --- a/tests/modules/encoder/test_seq2seq_encoder.py +++ /dev/null @@ -1,30 +0,0 @@ -import unittest - -import torch - -from fastNLP.modules.encoder.seq2seq_encoder import TransformerSeq2SeqEncoder, LSTMSeq2SeqEncoder -from fastNLP import Vocabulary -from fastNLP.embeddings import StaticEmbedding - - -class TestTransformerSeq2SeqEncoder(unittest.TestCase): - def test_case(self): - vocab = Vocabulary().add_word_lst("This is a test .".split()) - embed = StaticEmbedding(vocab, embedding_dim=5) - encoder = TransformerSeq2SeqEncoder(embed, num_layers=2, d_model=10, n_head=2) - words_idx = torch.LongTensor([0, 1, 2]).unsqueeze(0) - seq_len = torch.LongTensor([3]) - encoder_output, encoder_mask = encoder(words_idx, seq_len) - self.assertEqual(encoder_output.size(), (1, 3, 10)) - - -class TestBiLSTMEncoder(unittest.TestCase): - def test_case(self): - vocab = Vocabulary().add_word_lst("This is a test .".split()) - embed = StaticEmbedding(vocab, embedding_dim=5) - encoder = LSTMSeq2SeqEncoder(embed, hidden_size=5, num_layers=1) - words_idx = torch.LongTensor([0, 1, 2]).unsqueeze(0) - seq_len = torch.LongTensor([3]) - - encoder_output, encoder_mask = encoder(words_idx, seq_len) - self.assertEqual(encoder_mask.size(), (1, 3)) diff --git a/tests/modules/generator/__init__.py b/tests/modules/generator/__init__.py deleted file mode 100644 index 8b137891..00000000 --- a/tests/modules/generator/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/tests/modules/generator/test_seq2seq_generator.py b/tests/modules/generator/test_seq2seq_generator.py deleted file mode 100644 index 2a2f9d78..00000000 --- a/tests/modules/generator/test_seq2seq_generator.py +++ /dev/null @@ -1,144 +0,0 @@ -import unittest - -import torch -from fastNLP.modules.generator import SequenceGenerator -from fastNLP.modules import TransformerSeq2SeqDecoder, LSTMSeq2SeqDecoder, Seq2SeqDecoder, State -from fastNLP import Vocabulary -from fastNLP.embeddings import StaticEmbedding -from torch import nn -from fastNLP import seq_len_to_mask - - -def prepare_env(): - vocab = Vocabulary().add_word_lst("This is a test .".split()) - vocab.add_word_lst("Another test !".split()) - embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=5) - - encoder_output = torch.randn(2, 3, 10) - src_seq_len = torch.LongTensor([3, 2]) - encoder_mask = seq_len_to_mask(src_seq_len) - - return embed, encoder_output, encoder_mask - - -class GreedyDummyDecoder(Seq2SeqDecoder): - def 
__init__(self, decoder_output): - super().__init__() - self.cur_length = 0 - self.decoder_output = decoder_output - - def decode(self, tokens, state): - self.cur_length += 1 - scores = self.decoder_output[:, self.cur_length] - return scores - - -class DummyState(State): - def __init__(self, decoder): - super().__init__() - self.decoder = decoder - - def reorder_state(self, indices: torch.LongTensor): - self.decoder.decoder_output = self._reorder_state(self.decoder.decoder_output, indices, dim=0) - - -class TestSequenceGenerator(unittest.TestCase): - def test_run(self): - # 测试能否运行 (1) 初始化decoder,(2) decode一发 - embed, encoder_output, encoder_mask = prepare_env() - - for do_sample in [True, False]: - for num_beams in [1, 3, 5]: - with self.subTest(do_sample=do_sample, num_beams=num_beams): - decoder = LSTMSeq2SeqDecoder(embed=embed, num_layers=1, hidden_size=10, - dropout=0.3, bind_decoder_input_output_embed=True, attention=True) - state = decoder.init_state(encoder_output, encoder_mask) - generator = SequenceGenerator(decoder=decoder, max_length=20, num_beams=num_beams, - do_sample=do_sample, temperature=1.0, top_k=50, top_p=1.0, bos_token_id=1, eos_token_id=None, - repetition_penalty=1, length_penalty=1.0, pad_token_id=0) - generator.generate(state=state, tokens=None) - - decoder = TransformerSeq2SeqDecoder(embed=embed, pos_embed=nn.Embedding(10, embed.embedding_dim), - d_model=encoder_output.size(-1), num_layers=2, n_head=2, dim_ff=10, dropout=0.1, - bind_decoder_input_output_embed=True) - state = decoder.init_state(encoder_output, encoder_mask) - generator = SequenceGenerator(decoder=decoder, max_length=5, num_beams=num_beams, - do_sample=do_sample, temperature=1.0, top_k=50, top_p=1.0, bos_token_id=1, eos_token_id=None, - repetition_penalty=1, length_penalty=1.0, pad_token_id=0) - generator.generate(state=state, tokens=None) - - # 测试一下其它值 - decoder = TransformerSeq2SeqDecoder(embed=embed, pos_embed=nn.Embedding(10, embed.embedding_dim), - d_model=encoder_output.size(-1), num_layers=2, n_head=2, dim_ff=10, - dropout=0.1, - bind_decoder_input_output_embed=True) - state = decoder.init_state(encoder_output, encoder_mask) - generator = SequenceGenerator(decoder=decoder, max_length=5, num_beams=num_beams, - do_sample=do_sample, temperature=0.9, top_k=50, top_p=0.5, bos_token_id=1, - eos_token_id=3, repetition_penalty=2, length_penalty=1.5, pad_token_id=0) - generator.generate(state=state, tokens=None) - - def test_greedy_decode(self): - # 测试能否正确的generate - # greedy - for beam_search in [1, 3]: - decoder_output = torch.randn(2, 10, 5) - path = decoder_output.argmax(dim=-1) # 2 x 10 - decoder = GreedyDummyDecoder(decoder_output) - with self.subTest(msg=beam_search, beam_search=beam_search): - generator = SequenceGenerator(decoder=decoder, max_length=decoder_output.size(1), num_beams=beam_search, - do_sample=False, temperature=1, top_k=50, top_p=1, bos_token_id=1, - eos_token_id=None, repetition_penalty=1, length_penalty=1, pad_token_id=0) - decode_path = generator.generate(DummyState(decoder), tokens=decoder_output[:, 0].argmax(dim=-1, keepdim=True)) - - self.assertEqual(decode_path.eq(path).sum(), path.numel()) - - # greedy check eos_token_id - for beam_search in [1, 3]: - decoder_output = torch.randn(2, 10, 5) - decoder_output[:, :7, 4].fill_(-100) - decoder_output[0, 7, 4] = 1000 # 在第8个结束 - decoder_output[1, 5, 4] = 1000 - path = decoder_output.argmax(dim=-1) # 2 x 4 - decoder = GreedyDummyDecoder(decoder_output) - with self.subTest(beam_search=beam_search): - generator = 
SequenceGenerator(decoder=decoder, max_length=decoder_output.size(1), num_beams=beam_search, - do_sample=False, temperature=1, top_k=50, top_p=0.5, bos_token_id=1, - eos_token_id=4, repetition_penalty=1, length_penalty=1, pad_token_id=0) - decode_path = generator.generate(DummyState(decoder), - tokens=decoder_output[:, 0].argmax(dim=-1, keepdim=True)) - self.assertEqual(decode_path.size(1), 8) # 长度为8 - self.assertEqual(decode_path[0].eq(path[0, :8]).sum(), 8) - self.assertEqual(decode_path[1, :6].eq(path[1, :6]).sum(), 6) - - def test_sample_decoder(self): - # greedy check eos_token_id - for beam_search in [1, 3]: - with self.subTest(beam_search=beam_search): - decode_paths = [] - # 因为是随机,所以需要测试100次,如果至少有一次是对的,应该就问题不大 - num_tests = 10 - for i in range(num_tests): - decoder_output = torch.randn(2, 10, 5) * 10 - decoder_output[:, :7, 4].fill_(-100) - decoder_output[0, 7, 4] = 10000 # 在第8个结束 - decoder_output[1, 5, 4] = 10000 - path = decoder_output.argmax(dim=-1) # 2 x 4 - decoder = GreedyDummyDecoder(decoder_output) - generator = SequenceGenerator(decoder=decoder, max_length=decoder_output.size(1), num_beams=beam_search, - do_sample=True, temperature=1, top_k=50, top_p=0.5, bos_token_id=1, - eos_token_id=4, repetition_penalty=1, length_penalty=1, pad_token_id=0) - decode_path = generator.generate(DummyState(decoder), - tokens=decoder_output[:, 0].argmax(dim=-1, keepdim=True)) - decode_paths.append([decode_path, path]) - sizes = [] - eqs = [] - eq2s = [] - for i in range(num_tests): - decode_path, path = decode_paths[i] - sizes.append(decode_path.size(1)==8) - eqs.append(decode_path[0].eq(path[0, :8]).sum()==8) - eq2s.append(decode_path[1, :6].eq(path[1, :6]).sum()==6) - self.assertTrue(any(sizes)) - self.assertTrue(any(eqs)) - self.assertTrue(any(eq2s)) \ No newline at end of file diff --git a/tests/modules/test_char_encoder.py b/tests/modules/test_char_encoder.py deleted file mode 100644 index cf3ec15e..00000000 --- a/tests/modules/test_char_encoder.py +++ /dev/null @@ -1,28 +0,0 @@ -import unittest - -import torch - -from fastNLP.modules.encoder.char_encoder import ConvolutionCharEncoder, LSTMCharEncoder - - -class TestCharEmbed(unittest.TestCase): - def test_case_1(self): - batch_size = 128 - char_emb = 100 - word_length = 1 - x = torch.Tensor(batch_size, char_emb, word_length) - x = x.transpose(1, 2) - - cce = ConvolutionCharEncoder(char_emb) - y = cce(x) - self.assertEqual(tuple(x.shape), (batch_size, word_length, char_emb)) - print("CNN Char Emb input: ", x.shape) - self.assertEqual(tuple(y.shape), (batch_size, char_emb, 1)) - print("CNN Char Emb output: ", y.shape) # [128, 100] - - lce = LSTMCharEncoder(char_emb) - o = lce(x) - self.assertEqual(tuple(x.shape), (batch_size, word_length, char_emb)) - print("LSTM Char Emb input: ", x.shape) - self.assertEqual(tuple(o.shape), (batch_size, char_emb, 1)) - print("LSTM Char Emb size: ", o.shape) diff --git a/tests/modules/test_other_modules.py b/tests/modules/test_other_modules.py deleted file mode 100644 index c5462623..00000000 --- a/tests/modules/test_other_modules.py +++ /dev/null @@ -1,15 +0,0 @@ -import unittest - -import torch - -from fastNLP.modules.encoder.star_transformer import StarTransformer - - -class TestStarTransformer(unittest.TestCase): - def test_1(self): - model = StarTransformer(num_layers=6, hidden_size=100, num_head=8, head_dim=20, max_len=100) - x = torch.rand(16, 45, 100) - mask = torch.ones(16, 45).byte() - y, yn = model(x, mask) - self.assertEqual(tuple(y.size()), (16, 45, 100)) - self.assertEqual(tuple(yn.size()), 
diff --git a/tests/modules/test_other_modules.py b/tests/modules/test_other_modules.py
deleted file mode 100644
index c5462623..00000000
--- a/tests/modules/test_other_modules.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import unittest
-
-import torch
-
-from fastNLP.modules.encoder.star_transformer import StarTransformer
-
-
-class TestStarTransformer(unittest.TestCase):
-    def test_1(self):
-        model = StarTransformer(num_layers=6, hidden_size=100, num_head=8, head_dim=20, max_len=100)
-        x = torch.rand(16, 45, 100)
-        mask = torch.ones(16, 45).byte()
-        y, yn = model(x, mask)
-        self.assertEqual(tuple(y.size()), (16, 45, 100))
-        self.assertEqual(tuple(yn.size()), (16, 100))
diff --git a/tests/modules/test_utils.py b/tests/modules/test_utils.py
deleted file mode 100644
index 340fedd9..00000000
--- a/tests/modules/test_utils.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import unittest
-
-import torch
-
-from fastNLP.models import CNNText
-from fastNLP.modules.utils import get_dropout_mask, summary
-
-
-class TestUtil(unittest.TestCase):
-    def test_get_dropout_mask(self):
-        tensor = torch.randn(3, 4)
-        mask = get_dropout_mask(0.3, tensor)
-        self.assertSequenceEqual(mask.size(), torch.Size([3, 4]))
-
-    def test_summary(self):
-        model = CNNText(embed=(4, 4), num_classes=2, kernel_nums=(9, 5), kernel_sizes=(1, 3))
-        # embedding 4 * 4 + conv kernels 4 * (9 * 1 + 5 * 3) + fc 2 * (9 + 5 + 1) = 16 + 96 + 30 = 142 parameters
-        self.assertSequenceEqual((142, 142, 0), summary(model))
-        model.embed.requires_grad = False
-        self.assertSequenceEqual((142, 126, 16), summary(model))
diff --git a/tests/modules/test_variational_rnn.py b/tests/modules/test_variational_rnn.py
deleted file mode 100644
index c3806f60..00000000
--- a/tests/modules/test_variational_rnn.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import unittest
-
-import numpy as np
-import torch
-
-from fastNLP.modules.encoder.variational_rnn import VarLSTM
-
-
-class TestMaskedRnn(unittest.TestCase):
-    def test_case_1(self):
-        masked_rnn = VarLSTM(input_size=1, hidden_size=1, bidirectional=True, batch_first=True)
-        x = torch.tensor([[[1.0], [2.0]]])
-        print(x.size())
-        y = masked_rnn(x)
-
-
-    def test_case_2(self):
-        input_size = 12
-        batch = 16
-        hidden = 10
-        masked_rnn = VarLSTM(input_size=input_size, hidden_size=hidden, bidirectional=False, batch_first=True)
-
-        xx = torch.randn((batch, 32, input_size))
-        y, _ = masked_rnn(xx)
-        self.assertEqual(tuple(y.shape), (batch, 32, hidden))
diff --git a/tests/modules/tokenizer/test_bert_tokenizer.py b/tests/modules/tokenizer/test_bert_tokenizer.py
deleted file mode 100644
index 441e7658..00000000
--- a/tests/modules/tokenizer/test_bert_tokenizer.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import unittest
-from fastNLP.modules.tokenizer import BertTokenizer
-
-
-class TestBertTokenizer(unittest.TestCase):
-    def test_run(self):
-        # test the two supported encode modes: a raw string and a pre-split list of words
-        tokenizer = BertTokenizer.from_pretrained('tests/data_for_tests/embedding/small_bert')
-
-        tokens1 = tokenizer.encode("This is a demo")
-        tokens2 = tokenizer.encode("This is a demo", add_special_tokens=False)
-        tokens3 = tokenizer.encode("This is a demo".split())
-        tokens4 = tokenizer.encode("This is a demo".split(), add_special_tokens=False)
-
-        self.assertEqual(len(tokens1) - 2, len(tokens2))
-        self.assertEqual(len(tokens3) - 2, len(tokens4))
-
-        self.assertEqual(tokens1[0], tokenizer.cls_index)
-        self.assertEqual(tokens1[-1], tokenizer.sep_index)
\ No newline at end of file
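The encode contract exercised by this test can be spelled out a little more explicitly. The following is a hedged usage sketch that relies only on behaviour the test asserts (the checkpoint path is the same test fixture; the middle-slice equality is implied by the length and boundary checks rather than asserted verbatim):

```python
from fastNLP.modules.tokenizer import BertTokenizer

# Same small test fixture as above; any directory holding a BERT vocab works.
tokenizer = BertTokenizer.from_pretrained('tests/data_for_tests/embedding/small_bert')

with_special = tokenizer.encode("This is a demo")                              # [CLS] ... [SEP]
without_special = tokenizer.encode("This is a demo", add_special_tokens=False)

# The two encodings should differ only by the special tokens at the boundaries.
assert with_special[0] == tokenizer.cls_index
assert with_special[-1] == tokenizer.sep_index
assert with_special[1:-1] == without_special
```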
diff --git a/tests/test_tutorials.py b/tests/test_tutorials.py
deleted file mode 100644
index 2a224f05..00000000
--- a/tests/test_tutorials.py
+++ /dev/null
@@ -1,231 +0,0 @@
-import unittest
-
-from fastNLP import DataSet
-from fastNLP import Instance
-from fastNLP import Vocabulary
-from fastNLP.core.losses import CrossEntropyLoss
-from fastNLP.core.metrics import AccuracyMetric
-from fastNLP.io.loader import CSVLoader
-
-
-class TestTutorial(unittest.TestCase):
-    def test_tutorial_1_data_preprocess(self):
-        from fastNLP import DataSet
-        data = {'raw_words': ["This is the first instance .", "Second instance .", "Third instance ."],
-                'words': [['this', 'is', 'the', 'first', 'instance', '.'], ['Second', 'instance', '.'],
-                          ['Third', 'instance', '.']],
-                'seq_len': [6, 3, 3]}
-        dataset = DataSet(data)
-        # every key in the dict must map to a list, and all lists must have the same length
-
-        from fastNLP import DataSet
-        from fastNLP import Instance
-        dataset = DataSet()
-        instance = Instance(raw_words="This is the first instance",
-                            words=['this', 'is', 'the', 'first', 'instance', '.'],
-                            seq_len=6)
-        dataset.append(instance)
-
-        from fastNLP import DataSet
-        from fastNLP import Instance
-        dataset = DataSet([
-            Instance(raw_words="This is the first instance",
-                     words=['this', 'is', 'the', 'first', 'instance', '.'],
-                     seq_len=6),
-            Instance(raw_words="Second instance .",
-                     words=['Second', 'instance', '.'],
-                     seq_len=3)
-        ])
-
-        from fastNLP import DataSet
-        dataset = DataSet({'a': range(-5, 5), 'c': [0] * 10})
-
-        # leave dataset unchanged: build a new DataSet without the instances that match the condition
-        dropped_dataset = dataset.drop(lambda ins: ins['a'] < 0, inplace=False)
-        # drop the matching instances from dataset in place
-        dataset.drop(lambda ins: ins['a'] < 0)
-        # delete the third instance
-        dataset.delete_instance(2)
-        # delete the field named 'a'
-        dataset.delete_field('a')
-
-        # check whether a field named 'a' exists
-        print(dataset.has_field('a'))  # or: ('a' in dataset)
-        # rename the field 'c' to 'b'
-        dataset.rename_field('c', 'b')
-        # length of the DataSet
-        len(dataset)
-
-        from fastNLP import DataSet
-        data = {'raw_words': ["This is the first instance .", "Second instance .", "Third instance ."]}
-        dataset = DataSet(data)
-
-        # split each sentence into words; see DataSet.apply() for details
-        dataset.apply(lambda ins: ins['raw_words'].split(), new_field_name='words')
-
-        # or use DataSet.apply_field()
-        dataset.apply_field(lambda sent: sent.split(), field_name='raw_words', new_field_name='words')
-
-        # besides lambdas, a named function can be passed in as well
-        def get_words(instance):
-            sentence = instance['raw_words']
-            words = sentence.split()
-            return words
-
-        dataset.apply(get_words, new_field_name='words')
-
-    def setUp(self):
-        import os
-        self._init_wd = os.path.abspath(os.curdir)
-
-    def tearDown(self):
-        import os
-        os.chdir(self._init_wd)
-
-class TestOldTutorial(unittest.TestCase):
-    def test_fastnlp_10min_tutorial(self):
-        # read data from csv into a DataSet
-        sample_path = "tests/data_for_tests/tutorial_sample_dataset.csv"
-        dataset = CSVLoader(headers=['raw_sentence', 'label'], sep=' ')._load(sample_path)
-        print(len(dataset))
-        print(dataset[0])
-        print(dataset[-3])
-
-        dataset.append(Instance(raw_sentence='fake data', label='0'))
-        # lowercase every sentence
-        dataset.apply(lambda x: x['raw_sentence'].lower(), new_field_name='raw_sentence')
-        # convert labels to int
-        dataset.apply(lambda x: int(x['label']), new_field_name='label')
-
-        # split sentences on whitespace
-        def split_sent(ins):
-            return ins['raw_sentence'].split()
-
-        dataset.apply(split_sent, new_field_name='words')
-
-        # add sequence-length information
-        dataset.apply(lambda x: len(x['words']), new_field_name='seq_len')
-        print(len(dataset))
-        print(dataset[0])
-
-        # filter instances out with DataSet.drop(func)
-        dataset.drop(lambda x: x['seq_len'] <= 3, inplace=True)
-        print(len(dataset))
-
-        # declare which fields of the DataSet should be turned into tensors
-        # set target: the gold labels used when computing the loss and evaluating the model
-        dataset.set_target("label")
-        # set input: the fields fed to the model's forward
-        dataset.set_input("words", "seq_len")
-
-        # split off a test set and a training set
-        test_data, train_data = dataset.split(0.5)
-        print(len(test_data))
-        print(len(train_data))
-
-        # build the vocabulary with Vocabulary.add(word)
-        vocab = Vocabulary(min_freq=2)
-        train_data.apply(lambda x: [vocab.add(word) for word in x['words']])
-        vocab.build_vocab()
-
-        # index the sentences with Vocabulary.to_index(word)
-        train_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='words')
-        test_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='words')
-        print(test_data[0])
-
-        # these preprocessing tools are equally usable for projects such as reinforcement learning or GANs
-        from fastNLP.core.batch import DataSetIter
-        from fastNLP.core.sampler import RandomSampler
-
-        batch_iterator = DataSetIter(dataset=train_data, batch_size=2, sampler=RandomSampler())
-        for batch_x, batch_y in batch_iterator:
-            print("batch_x has: ", batch_x)
-            print("batch_y has: ", batch_y)
-            break
-
-        from fastNLP.models import CNNText
-        model = CNNText((len(vocab), 50), num_classes=5, dropout=0.1)
-
-        from fastNLP import Trainer
-        from copy import deepcopy
-
-        # rename the relevant DataSet fields so they match the parameter names of the model's forward
-        train_data.rename_field('label', 'label_seq')
-        test_data.rename_field('label', 'label_seq')
-
-        loss = CrossEntropyLoss(target="label_seq")
-        metric = AccuracyMetric(target="label_seq")
-
-        # instantiate a Trainer with the model and data, then train
-        # first overfit test_data (a sanity check that the model implementation is correct)
-        copy_model = deepcopy(model)
-        overfit_trainer = Trainer(train_data=test_data, model=copy_model, loss=loss, batch_size=32, n_epochs=5,
-                                  dev_data=test_data, metrics=metric, save_path=None)
-        overfit_trainer.train()
-
-        # train on train_data, validate on test_data
-        trainer = Trainer(model=model, train_data=train_data, dev_data=test_data,
-                          loss=CrossEntropyLoss(target="label_seq"),
-                          metrics=AccuracyMetric(target="label_seq"),
-                          save_path=None,
-                          batch_size=32,
-                          n_epochs=5)
-        trainer.train()
-        print('Train finished!')
-
-        # use a Tester to evaluate on test_data
-        from fastNLP import Tester
-
-        tester = Tester(data=test_data, model=model, metrics=AccuracyMetric(target="label_seq"),
-                        batch_size=4)
-        acc = tester.test()
-        print(acc)
-
-    def test_fastnlp_1min_tutorial(self):
-        # tutorials/fastnlp_1min_tutorial.ipynb
-        data_path = "tests/data_for_tests/tutorial_sample_dataset.csv"
-        ds = CSVLoader(headers=['raw_sentence', 'label'], sep=' ')._load(data_path)
-        print(ds[1])
-
-        # lowercase every sentence
-        ds.apply(lambda x: x['raw_sentence'].lower(), new_field_name='raw_sentence')
-        # convert labels to int
-        ds.apply(lambda x: int(x['label']), new_field_name='target', is_target=True)
-
-        def split_sent(ins):
-            return ins['raw_sentence'].split()
-
-        ds.apply(split_sent, new_field_name='words', is_input=True)
-
-        # split into training and validation sets
-        train_data, dev_data = ds.split(0.3)
-        print("Train size: ", len(train_data))
-        print("Test size: ", len(dev_data))
-
-        from fastNLP import Vocabulary
-        vocab = Vocabulary(min_freq=2)
-        train_data.apply(lambda x: [vocab.add(word) for word in x['words']])
-
-        # index the sentences with Vocabulary.to_index(word)
-        train_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='words',
-                         is_input=True)
-        dev_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='words',
-                       is_input=True)
-
-        from fastNLP.models import CNNText
-        model = CNNText((len(vocab), 50), num_classes=5, dropout=0.1)
-
-        from fastNLP import Trainer, CrossEntropyLoss, AccuracyMetric, Adam
-
-        trainer = Trainer(train_data=train_data, model=model, optimizer=Adam(), loss=CrossEntropyLoss(),
-                          dev_data=dev_data, metrics=AccuracyMetric(target='target'))
-        trainer.train()
-        print('Train finished!')
-
-    def setUp(self):
-        import os
-        self._init_wd = os.path.abspath(os.curdir)
-
-    def tearDown(self):
-        import os
-        os.chdir(self._init_wd)
diff --git a/tutorials/README.md b/tutorials/README.md
deleted file mode 100644
index d4f7b7de..00000000
--- a/tutorials/README.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# fastNLP tutorials
-
-These are the jupyter notebook files behind the **quick start** and **detailed tutorial** sections of the fastNLP documentation.
\ No newline at end of file
diff --git a/tutorials/cn_cls_example.png b/tutorials/cn_cls_example.png
deleted file mode 100644
index 5055bb02..00000000
Binary files a/tutorials/cn_cls_example.png and /dev/null differ
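The 10-minute tutorial above draws batches through a RandomSampler. When sequence lengths vary a lot, bucketing by length cuts down padding. The sketch below is illustrative only: it assumes BucketSampler accepts num_buckets, batch_size and seq_len_field_name arguments, of which only seq_len_field_name appears verbatim later in this diff (in the CMRC2018 training snippet), so treat the signature as an assumption rather than the library's documented API:

```python
from fastNLP import DataSet
from fastNLP.core.batch import DataSetIter
from fastNLP.core.sampler import BucketSampler

# Toy dataset: 'words' are already indexed and 'seq_len' records each length.
ds = DataSet({'words': [[1, 2, 3], [4, 5], [6, 7, 8, 9], [1], [2, 3], [4]],
              'seq_len': [3, 2, 4, 1, 2, 1],
              'target': [0, 1, 0, 1, 0, 1]})
ds.set_input('words', 'seq_len')
ds.set_target('target')

# Assumed signature: group instances of similar length into the same batch.
sampler = BucketSampler(num_buckets=2, batch_size=2, seq_len_field_name='seq_len')
for batch_x, batch_y in DataSetIter(dataset=ds, batch_size=2, sampler=sampler):
    # batch_x['words'] is padded per batch, so bucketing keeps each batch narrow
    print(batch_x['words'].shape, batch_y['target'])
```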
diff --git a/tutorials/extend_1_bert_embedding.ipynb b/tutorials/extend_1_bert_embedding.ipynb
deleted file mode 100644
index 2169c8b5..00000000
--- a/tutorials/extend_1_bert_embedding.ipynb
+++ /dev/null
@@ -1,260 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Uses of BertEmbedding\n",
-    "Since BERT was introduced in *BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding*, it has attracted enormous attention thanks to its outstanding performance. Here we show how to use BERT for various tasks in fastNLP. For Chinese BERT, the model weights we use come from the Chinese BERT pre-training release.\n",
-    "\n",
-    "For convenience, fastNLP supports automatic downloading of pre-trained embedding weights and datasets; see the dataset documentation for which embeddings and datasets can be downloaded automatically. You can learn more from the tutorials on turning text into vectors with the Embedding module and on loading and processing datasets with Loader and Pipe.\n",
-    "\n",
-    "\n",
-    "Below we walk through using BERT for text classification, Chinese named entity recognition, text matching, and Chinese question answering.\n",
-    "\n",
-    "## 1. Text classification with BERT\n",
-    "\n",
-    "Text classification assigns a piece of text to a category, as in the sentiment classification example below\n",
-    "\n",
-    "    *1, 商务大床房,房间很大,床有2M宽,整体感觉经济实惠不错!*\n",
-    "\n",
-    "Here we test on the Weibo sentiment classification dataset, which fastNLP can download automatically"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from fastNLP.io import WeiboSenti100kPipe\n",
-    "from fastNLP.embeddings import BertEmbedding\n",
-    "from fastNLP.models import BertForSequenceClassification\n",
-    "from fastNLP import Trainer, CrossEntropyLoss, AccuracyMetric, Adam\n",
-    "import torch\n",
-    "\n",
-    "data_bundle = WeiboSenti100kPipe().process_from_file()\n",
-    "data_bundle.rename_field('chars', 'words')\n",
-    "\n",
-    "# load the BertEmbedding\n",
-    "embed = BertEmbedding(data_bundle.get_vocab('words'), model_dir_or_name='cn-wwm', include_cls_sep=True)\n",
-    "\n",
-    "# build the model\n",
-    "model = BertForSequenceClassification(embed, len(data_bundle.get_vocab('target')))\n",
-    "\n",
-    "# train the model\n",
-    "device = 0 if torch.cuda.is_available() else 'cpu'\n",
-    "trainer = Trainer(data_bundle.get_dataset('train'), model,\n",
-    "                  optimizer=Adam(model_params=model.parameters(), lr=2e-5),\n",
-    "                  loss=CrossEntropyLoss(), device=device,\n",
-    "                  batch_size=8, dev_data=data_bundle.get_dataset('dev'),\n",
-    "                  metrics=AccuracyMetric(), n_epochs=2, print_every=1)\n",
-    "trainer.train()\n",
-    "\n",
-    "# evaluate on the test set\n",
-    "from fastNLP import Tester\n",
-    "\n",
-    "tester = Tester(data_bundle.get_dataset('test'), model, batch_size=128, metrics=AccuracyMetric())\n",
-    "tester.test()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## 2. Named entity recognition with BERT\n",
-    "\n",
-    "Named entity recognition marks the entities in a given sentence. Sequence labeling tasks generally use the conll format, in which the fields of a line are separated by tabs and sentences are separated by blank lines, as in the following example\n",
-    "\n",
-    "```\n",
-    " 中 B-ORG\n",
-    " 共 I-ORG\n",
-    " 中 I-ORG\n",
-    " 央 I-ORG\n",
-    " 致 O\n",
-    " 中 B-ORG\n",
-    " 国 I-ORG\n",
-    " 致 I-ORG\n",
-    " 公 I-ORG\n",
-    " 党 I-ORG\n",
-    " 十 I-ORG\n",
-    " 一 I-ORG\n",
-    " 大 I-ORG\n",
-    " 的 O\n",
-    " 贺 O\n",
-    " 词 O\n",
-    "```\n",
-    "\n",
-    "For this part, please refer to the tutorial on quickly building a sequence labeling model\n",
-    "\n",
-    "## 3. 
使用Bert进行文本匹配\n", - "\n", - "文本匹配任务是指给定两句话判断他们的关系。比如,给定两句话判断前一句是否和后一句具有因果关系或是否是矛盾关系;或者给定两句话判断两句话是否 具有相同的意思。这里我们使用" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.io import CNXNLIBertPipe\n", - "from fastNLP.embeddings import BertEmbedding\n", - "from fastNLP.models import BertForSentenceMatching\n", - "from fastNLP import Trainer, CrossEntropyLoss, AccuracyMetric, Adam\n", - "from fastNLP.core.optimizer import AdamW\n", - "from fastNLP.core.callback import WarmupCallback\n", - "from fastNLP import Tester\n", - "import torch\n", - "\n", - "data_bundle = CNXNLIBertPipe().process_from_file()\n", - "data_bundle.rename_field('chars', 'words')\n", - "print(data_bundle)\n", - "\n", - "# 载入BertEmbedding\n", - "embed = BertEmbedding(data_bundle.get_vocab('words'), model_dir_or_name='cn-wwm', include_cls_sep=True)\n", - "\n", - "# 载入模型\n", - "model = BertForSentenceMatching(embed, len(data_bundle.get_vocab('target')))\n", - "\n", - "# 训练模型\n", - "callbacks = [WarmupCallback(warmup=0.1, schedule='linear'), ]\n", - "device = 0 if torch.cuda.is_available() else 'cpu' \n", - "trainer = Trainer(data_bundle.get_dataset('train'), model,\n", - " optimizer=AdamW(params=model.parameters(), lr=4e-5),\n", - " loss=CrossEntropyLoss(), device=device,\n", - " batch_size=8, dev_data=data_bundle.get_dataset('dev'),\n", - " metrics=AccuracyMetric(), n_epochs=5, print_every=1,\n", - " update_every=8, callbacks=callbacks)\n", - "trainer.train()\n", - "\n", - "tester = Tester(data_bundle.get_dataset('test'), model, batch_size=8, metrics=AccuracyMetric())\n", - "tester.test()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 4. 使用Bert进行中文问答\n", - "\n", - "问答任务是给定一段内容,以及一个问题,需要从这段内容中找到答案。 例如:\n", - "\n", - "```\n", - "\"context\": \"锣鼓经是大陆传统器乐及戏曲里面常用的打击乐记谱方法,以中文字的声音模拟敲击乐的声音,纪录打击乐的各种不同的演奏方法。常\n", - "用的节奏型称为「锣鼓点」。而锣鼓是戏曲节奏的支柱,除了加强演员身段动作的节奏感,也作为音乐的引子和尾声,提示音乐的板式和速度,以及\n", - "作为唱腔和念白的伴奏,令诗句的韵律更加抑扬顿锉,段落分明。锣鼓的运用有约定俗成的程式,依照角色行当的身份、性格、情绪以及环境,配合\n", - "相应的锣鼓点。锣鼓亦可以模仿大自然的音响效果,如雷电、波浪等等。戏曲锣鼓所运用的敲击乐器主要分为鼓、锣、钹和板四类型:鼓类包括有单\n", - "皮鼓(板鼓)、大鼓、大堂鼓(唐鼓)、小堂鼓、怀鼓、花盆鼓等;锣类有大锣、小锣(手锣)、钲锣、筛锣、马锣、镗锣、云锣;钹类有铙钹、大\n", - "钹、小钹、水钹、齐钹、镲钹、铰子、碰钟等;打拍子用的檀板、木鱼、梆子等。因为京剧的锣鼓通常由四位乐师负责,又称为四大件,领奏的师\n", - "傅称为:「鼓佬」,其职责有如西方乐队的指挥,负责控制速度以及利用各种手势提示乐师演奏不同的锣鼓点。粤剧吸收了部份京剧的锣鼓,但以木鱼\n", - "和沙的代替了京剧的板和鼓,作为打拍子的主要乐器。以下是京剧、昆剧和粤剧锣鼓中乐器对应的口诀用字:\",\n", - "\"question\": \"锣鼓经是什么?\",\n", - "\"answers\": [\n", - " {\n", - " \"text\": \"大陆传统器乐及戏曲里面常用的打击乐记谱方法\",\n", - " \"answer_start\": 4\n", - " },\n", - " {\n", - " \"text\": \"大陆传统器乐及戏曲里面常用的打击乐记谱方法\",\n", - " \"answer_start\": 4\n", - " },\n", - " {\n", - " \"text\": \"大陆传统器乐及戏曲里面常用的打击乐记谱方法\",\n", - " \"answer_start\": 4\n", - " }\n", - "]\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "您可以通过以下的代码训练 (原文代码:[CMRC2018](https://github.com/ymcui/cmrc2018) )" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.embeddings import BertEmbedding\n", - "from fastNLP.models import BertForQuestionAnswering\n", - "from fastNLP.core.losses import CMRC2018Loss\n", - "from fastNLP.core.metrics import CMRC2018Metric\n", - "from fastNLP.io.pipe.qa import CMRC2018BertPipe\n", - "from fastNLP import Trainer, BucketSampler\n", - "from fastNLP import WarmupCallback, GradientClipCallback\n", - "from fastNLP.core.optimizer import AdamW\n", - "import torch\n", - "\n", - "data_bundle = CMRC2018BertPipe().process_from_file()\n", - 
"data_bundle.rename_field('chars', 'words')\n", - "\n", - "print(data_bundle)\n", - "\n", - "embed = BertEmbedding(data_bundle.get_vocab('words'), model_dir_or_name='cn', requires_grad=True, include_cls_sep=False, auto_truncate=True,\n", - " dropout=0.5, word_dropout=0.01)\n", - "model = BertForQuestionAnswering(embed)\n", - "loss = CMRC2018Loss()\n", - "metric = CMRC2018Metric()\n", - "\n", - "wm_callback = WarmupCallback(schedule='linear')\n", - "gc_callback = GradientClipCallback(clip_value=1, clip_type='norm')\n", - "callbacks = [wm_callback, gc_callback]\n", - "\n", - "optimizer = AdamW(model.parameters(), lr=5e-5)\n", - "\n", - "device = 0 if torch.cuda.is_available() else 'cpu' \n", - "trainer = Trainer(data_bundle.get_dataset('train'), model, loss=loss, optimizer=optimizer,\n", - " sampler=BucketSampler(seq_len_field_name='context_len'),\n", - " dev_data=data_bundle.get_dataset('dev'), metrics=metric,\n", - " callbacks=callbacks, device=device, batch_size=6, num_workers=2, n_epochs=2, print_every=1,\n", - " test_use_tqdm=False, update_every=10)\n", - "trainer.train(load_best_model=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "训练结果(和论文中报道的基本一致):\n", - "\n", - "```\n", - " In Epoch:2/Step:1692, got best dev performance:\n", - " CMRC2018Metric: f1=85.61, em=66.08\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tutorials/tutorial_1_data_preprocess.ipynb b/tutorials/tutorial_1_data_preprocess.ipynb deleted file mode 100644 index a987e7f2..00000000 --- a/tutorials/tutorial_1_data_preprocess.ipynb +++ /dev/null @@ -1,292 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# fastNLP中的DataSet" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+------------------------------+---------------------------------------------+---------+\n", - "| raw_words | words | seq_len |\n", - "+------------------------------+---------------------------------------------+---------+\n", - "| This is the first instance . | ['this', 'is', 'the', 'first', 'instance... | 6 |\n", - "| Second instance . | ['Second', 'instance', '.'] | 3 |\n", - "| Third instance . 
| ['Third', 'instance', '.'] | 3 |\n", - "+------------------------------+---------------------------------------------+---------+\n" - ] - } - ], - "source": [ - "from fastNLP import DataSet\n", - "data = {'raw_words':[\"This is the first instance .\", \"Second instance .\", \"Third instance .\"],\n", - " 'words': [['this', 'is', 'the', 'first', 'instance', '.'], ['Second', 'instance', '.'], ['Third', 'instance', '.']],\n", - " 'seq_len': [6, 3, 3]}\n", - "dataset = DataSet(data)\n", - "# 传入的dict的每个key的value应该为具有相同长度的list\n", - "print(dataset)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## DataSet的构建" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "+----------------------------+---------------------------------------------+---------+\n", - "| raw_words | words | seq_len |\n", - "+----------------------------+---------------------------------------------+---------+\n", - "| This is the first instance | ['this', 'is', 'the', 'first', 'instance... | 6 |\n", - "+----------------------------+---------------------------------------------+---------+" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import DataSet\n", - "from fastNLP import Instance\n", - "dataset = DataSet()\n", - "instance = Instance(raw_words=\"This is the first instance\",\n", - " words=['this', 'is', 'the', 'first', 'instance', '.'],\n", - " seq_len=6)\n", - "dataset.append(instance)\n", - "dataset" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "+----------------------------+---------------------------------------------+---------+\n", - "| raw_words | words | seq_len |\n", - "+----------------------------+---------------------------------------------+---------+\n", - "| This is the first instance | ['this', 'is', 'the', 'first', 'instance... | 6 |\n", - "| Second instance . 
| ['Second', 'instance', '.'] | 3 |\n", - "+----------------------------+---------------------------------------------+---------+" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import DataSet\n", - "from fastNLP import Instance\n", - "dataset = DataSet([\n", - " Instance(raw_words=\"This is the first instance\",\n", - " words=['this', 'is', 'the', 'first', 'instance', '.'],\n", - " seq_len=6),\n", - " Instance(raw_words=\"Second instance .\",\n", - " words=['Second', 'instance', '.'],\n", - " seq_len=3)\n", - " ])\n", - "dataset" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## DataSet的删除" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "+----+---+\n", - "| a | c |\n", - "+----+---+\n", - "| -5 | 0 |\n", - "| -4 | 0 |\n", - "| -3 | 0 |\n", - "| -2 | 0 |\n", - "| -1 | 0 |\n", - "| 0 | 0 |\n", - "| 1 | 0 |\n", - "| 2 | 0 |\n", - "| 3 | 0 |\n", - "| 4 | 0 |\n", - "+----+---+" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import DataSet\n", - "dataset = DataSet({'a': range(-5, 5), 'c': [0]*10})\n", - "dataset" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "+---+\n", - "| c |\n", - "+---+\n", - "| 0 |\n", - "| 0 |\n", - "| 0 |\n", - "| 0 |\n", - "+---+" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 不改变dataset,生成一个删除了满足条件的instance的新 DataSet\n", - "dropped_dataset = dataset.drop(lambda ins:ins['a']<0, inplace=False)\n", - "# 在dataset中删除满足条件的instance\n", - "dataset.drop(lambda ins:ins['a']<0)\n", - "# 删除第3个instance\n", - "dataset.delete_instance(2)\n", - "# 删除名为'a'的field\n", - "dataset.delete_field('a')\n", - "dataset" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 简单的数据预处理" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "False\n" - ] - }, - { - "data": { - "text/plain": [ - "4" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 检查是否存在名为'a'的field\n", - "print(dataset.has_field('a')) # 或 ('a' in dataset)\n", - "# 将名为'a'的field改名为'b'\n", - "dataset.rename_field('c', 'b')\n", - "# DataSet的长度\n", - "len(dataset)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "+------------------------------+-------------------------------------------------+\n", - "| raw_words | words |\n", - "+------------------------------+-------------------------------------------------+\n", - "| This is the first instance . | ['This', 'is', 'the', 'first', 'instance', '.'] |\n", - "| Second instance . | ['Second', 'instance', '.'] |\n", - "| Third instance . 
| ['Third', 'instance', '.'] |\n", - "+------------------------------+-------------------------------------------------+" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import DataSet\n", - "data = {'raw_words':[\"This is the first instance .\", \"Second instance .\", \"Third instance .\"]}\n", - "dataset = DataSet(data)\n", - "\n", - "# 将句子分成单词形式, 详见DataSet.apply()方法\n", - "dataset.apply(lambda ins: ins['raw_words'].split(), new_field_name='words')\n", - "\n", - "# 或使用DataSet.apply_field()\n", - "dataset.apply_field(lambda sent:sent.split(), field_name='raw_words', new_field_name='words')\n", - "\n", - "# 除了匿名函数,也可以定义函数传递进去\n", - "def get_words(instance):\n", - " sentence = instance['raw_words']\n", - " words = sentence.split()\n", - " return words\n", - "dataset.apply(get_words, new_field_name='words')\n", - "dataset" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tutorials/tutorial_2_vocabulary.ipynb b/tutorials/tutorial_2_vocabulary.ipynb deleted file mode 100644 index 50862293..00000000 --- a/tutorials/tutorial_2_vocabulary.ipynb +++ /dev/null @@ -1,343 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# fastNLP中的 Vocabulary\n", - "## 构建 Vocabulary" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(['复', '旦', '大', '学']) # 加入新的字\n", - "vocab.add_word('上海') # `上海`会作为一个整体\n", - "vocab.to_index('复') # 应该会为3\n", - "vocab.to_index('我') # 会输出1,Vocabulary中默认pad的index为0, unk(没有找到的词)的index为1\n", - "\n", - "# 在构建target的Vocabulary时,词表中应该用不上pad和unk,可以通过以下的初始化\n", - "vocab = Vocabulary(unknown=None, padding=None)" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Vocabulary(['positive', 'negative']...)" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "vocab.add_word_lst(['positive', 'negative'])" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "data": { - "text/plain": [ - "0" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "vocab.to_index('positive')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 没有设置 unk 的情况" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "ename": "ValueError", - "evalue": "word `neutral` not in vocabulary", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m 
\u001b[0mvocab\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto_index\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'neutral'\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# 会报错,因为没有unk这种情况\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;32m~/anaconda3/envs/now/lib/python3.8/site-packages/FastNLP-0.5.0-py3.8.egg/fastNLP/core/vocabulary.py\u001b[0m in \u001b[0;36mto_index\u001b[0;34m(self, w)\u001b[0m\n\u001b[1;32m 414\u001b[0m \u001b[0;34m:\u001b[0m\u001b[0;32mreturn\u001b[0m \u001b[0mint\u001b[0m \u001b[0mindex\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mthe\u001b[0m \u001b[0mnumber\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 415\u001b[0m \"\"\"\n\u001b[0;32m--> 416\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__getitem__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mw\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 417\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 418\u001b[0m \u001b[0;34m@\u001b[0m\u001b[0mproperty\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m~/anaconda3/envs/now/lib/python3.8/site-packages/FastNLP-0.5.0-py3.8.egg/fastNLP/core/vocabulary.py\u001b[0m in \u001b[0;36m_wrapper\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 42\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_word2idx\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrebuild\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 43\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbuild_vocab\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 44\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mfunc\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 45\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 46\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0m_wrapper\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m~/anaconda3/envs/now/lib/python3.8/site-packages/FastNLP-0.5.0-py3.8.egg/fastNLP/core/vocabulary.py\u001b[0m in \u001b[0;36m__getitem__\u001b[0;34m(self, w)\u001b[0m\n\u001b[1;32m 272\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_word2idx\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0munknown\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 273\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 274\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"word `{}` not in vocabulary\"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mformat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mw\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 275\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 276\u001b[0m 
\u001b[0;34m@\u001b[0m\u001b[0m_check_build_vocab\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mValueError\u001b[0m: word `neutral` not in vocabulary" - ] - } - ], - "source": [ - "vocab.to_index('neutral') # 会报错,因为没有unk这种情况" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 设置 unk 的情况" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(0, '')" - ] - }, - "execution_count": 25, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary(unknown='', padding=None)\n", - "vocab.add_word_lst(['positive', 'negative'])\n", - "vocab.to_index('neutral'), vocab.to_word(vocab.to_index('neutral'))" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Vocabulary(['positive', 'negative']...)" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "vocab" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+---------------------------------------------------+--------+\n", - "| chars | target |\n", - "+---------------------------------------------------+--------+\n", - "| [4, 2, 2, 5, 6, 7, 3] | 0 |\n", - "| [8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 3] | 1 |\n", - "+---------------------------------------------------+--------+\n" - ] - } - ], - "source": [ - "from fastNLP import Vocabulary\n", - "from fastNLP import DataSet\n", - "\n", - "dataset = DataSet({'chars': [\n", - " ['今', '天', '天', '气', '很', '好', '。'],\n", - " ['被', '这', '部', '电', '影', '浪', '费', '了', '两', '个', '小', '时', '。']\n", - " ],\n", - " 'target': ['neutral', 'negative']\n", - "})\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.from_dataset(dataset, field_name='chars')\n", - "vocab.index_dataset(dataset, field_name='chars')\n", - "\n", - "target_vocab = Vocabulary(padding=None, unknown=None)\n", - "target_vocab.from_dataset(dataset, field_name='target')\n", - "target_vocab.index_dataset(dataset, field_name='target')\n", - "print(dataset)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Vocabulary(['今', '天', '心', '情', '很']...)" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import Vocabulary\n", - "from fastNLP import DataSet\n", - "\n", - "tr_data = DataSet({'chars': [\n", - " ['今', '天', '心', '情', '很', '好', '。'],\n", - " ['被', '这', '部', '电', '影', '浪', '费', '了', '两', '个', '小', '时', '。']\n", - " ],\n", - " 'target': ['positive', 'negative']\n", - "})\n", - "dev_data = DataSet({'chars': [\n", - " ['住', '宿', '条', '件', '还', '不', '错'],\n", - " ['糟', '糕', '的', '天', '气', ',', '无', '法', '出', '行', '。']\n", - " ],\n", - " 'target': ['positive', 'negative']\n", - "})\n", - "\n", - "vocab = Vocabulary()\n", - "# 将验证集或者测试集在建立词表是放入no_create_entry_dataset这个参数中。\n", - "vocab.from_dataset(tr_data, field_name='chars', no_create_entry_dataset=[dev_data])\n" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - " 4%|▎ | 2.31M/63.5M [00:00<00:02, 22.9MB/s]" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - 
"http://212.129.155.247/embedding/glove.6B.50d.zip not found in cache, downloading to /tmp/tmpvziobj_e\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "100%|██████████| 63.5M/63.5M [00:01<00:00, 41.3MB/s]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Finish download from http://212.129.155.247/embedding/glove.6B.50d.zip\n", - "Copy file to /remote-home/ynzheng/.fastNLP/embedding/glove.6B.50d\n", - "Found 2 out of 6 words in the pre-training embedding.\n", - "tensor([[ 0.9497, 0.3433, 0.8450, -0.8852, -0.7208, -0.2931, -0.7468, 0.6512,\n", - " 0.4730, -0.7401, 0.1877, -0.3828, -0.5590, 0.4295, -0.2698, -0.4238,\n", - " -0.3124, 1.3423, -0.7857, -0.6302, 0.9182, 0.2113, -0.5744, 1.4549,\n", - " 0.7546, -1.6165, -0.0085, 0.0029, 0.5130, -0.4745, 2.5306, 0.8594,\n", - " -0.3067, 0.0578, 0.6623, 0.2080, 0.6424, -0.5246, -0.0534, 1.1404,\n", - " -0.1370, -0.1836, 0.4546, -0.5096, -0.0255, -0.0286, 0.1805, -0.4483,\n", - " 0.4053, -0.3682]], grad_fn=)\n", - "tensor([[ 0.1320, -0.2392, 0.1732, -0.2390, -0.0463, 0.0494, 0.0488, -0.0886,\n", - " 0.0224, -0.1300, 0.0369, 0.1800, 0.0750, -0.0183, 0.2264, 0.1628,\n", - " 0.1261, -0.1259, 0.1663, -0.1230, -0.1904, -0.0532, 0.1397, -0.0259,\n", - " -0.1799, 0.0226, 0.1858, 0.1981, 0.1338, 0.2394, 0.0248, 0.0203,\n", - " -0.1722, -0.1683, -0.1892, 0.0874, 0.0562, -0.0394, 0.0306, -0.1761,\n", - " 0.1015, -0.0171, 0.1172, 0.1357, 0.1519, -0.0011, 0.1572, 0.1265,\n", - " -0.2391, -0.0258]], grad_fn=)\n", - "tensor([[ 0.1318, -0.2552, -0.0679, 0.2619, -0.2616, 0.2357, 0.1308, -0.0118,\n", - " 1.7659, 0.2078, 0.2620, -0.1643, -0.8464, 0.0201, 0.0702, 0.3978,\n", - " 0.1528, -0.2021, -1.6184, -0.5433, -0.1786, 0.5389, 0.4987, -0.1017,\n", - " 0.6626, -1.7051, 0.0572, -0.3241, -0.6683, 0.2665, 2.8420, 0.2684,\n", - " -0.5954, -0.5004, 1.5199, 0.0396, 1.6659, 0.9976, -0.5597, -0.7049,\n", - " -0.0309, -0.2830, -0.1356, 0.6429, 0.4149, 1.2362, 0.7659, 0.9780,\n", - " 0.5851, -0.3018]], grad_fn=)\n", - "tensor([[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0.]], grad_fn=)\n", - "tensor([[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", - " 0., 0.]], grad_fn=)\n" - ] - } - ], - "source": [ - "import torch\n", - "from fastNLP.embeddings import StaticEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word('train')\n", - "vocab.add_word('only_in_train') # 仅在train出现,但肯定在预训练词表中不存在\n", - "vocab.add_word('test', no_create_entry=True) # 该词只在dev或test中出现\n", - "vocab.add_word('only_in_test', no_create_entry=True) # 这个词在预训练的词表中找不到\n", - "\n", - "embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-6b-50d')\n", - "print(embed(torch.LongTensor([vocab.to_index('train')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('only_in_train')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('test')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('only_in_test')])))\n", - "print(embed(torch.LongTensor([vocab.unknown_idx])))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - 
"name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tutorials/tutorial_3_embedding.ipynb b/tutorials/tutorial_3_embedding.ipynb deleted file mode 100644 index 154a0756..00000000 --- a/tutorials/tutorial_3_embedding.ipynb +++ /dev/null @@ -1,524 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Found 5 out of 7 words in the pre-training embedding.\n", - "torch.Size([1, 5, 50])\n" - ] - } - ], - "source": [ - "import torch\n", - "from fastNLP.embeddings import StaticEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-6b-50d')\n", - "\n", - "words = torch.LongTensor([[vocab.to_index(word) for word in \"this is a demo .\".split()]]) # 将文本转为index\n", - "print(embed(words).size()) # StaticEmbedding的使用和pytorch的nn.Embedding是类似的" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "torch.Size([1, 5, 30])\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import StaticEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "embed = StaticEmbedding(vocab, model_dir_or_name=None, embedding_dim=30)\n", - "\n", - "words = torch.LongTensor([[vocab.to_index(word) for word in \"this is a demo .\".split()]])\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "22 out of 22 characters were found in pretrained elmo embedding.\n", - "torch.Size([1, 5, 256])\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import ElmoEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "embed = ElmoEmbedding(vocab, model_dir_or_name='en-small', requires_grad=False)\n", - "words = torch.LongTensor([[vocab.to_index(word) for word in \"this is a demo .\".split()]])\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "22 out of 22 characters were found in pretrained elmo embedding.\n", - "torch.Size([1, 5, 512])\n" - ] - } - ], - "source": [ - "embed = ElmoEmbedding(vocab, model_dir_or_name='en-small', requires_grad=False, layers='1,2')\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "22 out of 22 characters were found in pretrained elmo embedding.\n", - "torch.Size([1, 5, 256])\n" - ] - } - ], - "source": [ - "embed = ElmoEmbedding(vocab, model_dir_or_name='en-small', requires_grad=True, layers='mix')\n", - "print(embed(words).size()) # 三层输出按照权重element-wise的加起来" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - 
"outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 7 words out of 7.\n", - "torch.Size([1, 5, 768])\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import BertEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased')\n", - "words = torch.LongTensor([[vocab.to_index(word) for word in \"this is a demo .\".split()]])\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 7 words out of 7.\n", - "torch.Size([1, 5, 1536])\n" - ] - } - ], - "source": [ - "# 使用后面两层的输出\n", - "embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased', layers='10,11')\n", - "print(embed(words).size()) # 结果将是在最后一维做拼接" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 7 words out of 7.\n", - "torch.Size([1, 7, 768])\n" - ] - } - ], - "source": [ - "embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased', layers='-1', include_cls_sep=True)\n", - "print(embed(words).size()) # 结果将在序列维度上增加2\n", - "# 取出句子的cls表示\n", - "cls_reps = embed(words)[:, 0] # shape: [batch_size, 768]" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 7 words out of 7.\n", - "torch.Size([1, 5, 768])\n" - ] - } - ], - "source": [ - "embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased', layers='-1', pool_method='max')\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 10 words out of 10.\n", - "torch.Size([1, 9, 768])\n" - ] - } - ], - "source": [ - 
"vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo . [SEP] another sentence .\".split())\n", - "\n", - "embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased', layers='-1', pool_method='max')\n", - "words = torch.LongTensor([[vocab.to_index(word) for word in \"this is a demo . [SEP] another sentence .\".split()]])\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Start constructing character vocabulary.\n", - "In total, there are 8 distinct characters.\n", - "torch.Size([1, 5, 64])\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import CNNCharEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "# character的embedding维度大小为50,返回的embedding结果维度大小为64。\n", - "embed = CNNCharEmbedding(vocab, embed_size=64, char_emb_size=50)\n", - "words = torch.LongTensor([[vocab.to_index(word) for word in \"this is a demo .\".split()]])\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Start constructing character vocabulary.\n", - "In total, there are 8 distinct characters.\n", - "torch.Size([1, 5, 64])\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import LSTMCharEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "# character的embedding维度大小为50,返回的embedding结果维度大小为64。\n", - "embed = LSTMCharEmbedding(vocab, embed_size=64, char_emb_size=50)\n", - "words = torch.LongTensor([[vocab.to_index(word) for word in \"this is a demo .\".split()]])\n", - "print(embed(words).size())" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Found 5 out of 7 words in the pre-training embedding.\n", - "50\n", - "Start constructing character vocabulary.\n", - "In total, there are 8 distinct characters.\n", - "30\n", - "22 out of 22 characters were found in pretrained elmo embedding.\n", - "256\n", - "22 out of 22 characters were found in pretrained elmo embedding.\n", - "512\n", - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 7 words out of 7.\n", - "768\n", - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 7 words out of 7.\n", - "1536\n", - "80\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import *\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "static_embed = StaticEmbedding(vocab, model_dir_or_name='en-glove-6b-50d')\n", - "print(static_embed.embedding_dim) # 50\n", - "char_embed = CNNCharEmbedding(vocab, embed_size=30)\n", - "print(char_embed.embedding_dim) # 30\n", - "elmo_embed_1 = ElmoEmbedding(vocab, 
model_dir_or_name='en-small', layers='2')\n", - "print(elmo_embed_1.embedding_dim) # 256\n", - "elmo_embed_2 = ElmoEmbedding(vocab, model_dir_or_name='en-small', layers='1,2')\n", - "print(elmo_embed_2.embedding_dim) # 512\n", - "bert_embed_1 = BertEmbedding(vocab, layers='-1', model_dir_or_name='en-base-cased')\n", - "print(bert_embed_1.embedding_dim) # 768\n", - "bert_embed_2 = BertEmbedding(vocab, layers='2,-1', model_dir_or_name='en-base-cased')\n", - "print(bert_embed_2.embedding_dim) # 1536\n", - "stack_embed = StackEmbedding([static_embed, char_embed])\n", - "print(stack_embed.embedding_dim) # 80" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/vocab.txt\n", - "Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-base-cased/pytorch_model.bin.\n", - "Start to generate word pieces for word.\n", - "Found(Or segment into word pieces) 7 words out of 7.\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import *\n", - "\n", - "vocab = Vocabulary()\n", - "vocab.add_word_lst(\"this is a demo .\".split())\n", - "\n", - "embed = BertEmbedding(vocab, model_dir_or_name='en-base-cased', requires_grad=True) # 初始化时设定为需要更新\n", - "embed.requires_grad = False # 修改BertEmbedding的权重为不更新" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tensor([[ 0.3633, -0.2091, -0.0353, -0.3771, -0.5193]],\n", - " grad_fn=)\n", - "tensor([[ 0.0926, -0.4812, -0.7744, 0.4836, -0.5475]],\n", - " grad_fn=)\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import StaticEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary().add_word_lst(\"The the a A\".split())\n", - "# 下面用随机的StaticEmbedding演示,但与使用预训练词向量时效果是一致的\n", - "embed = StaticEmbedding(vocab, model_name_or_dir=None, embedding_dim=5)\n", - "print(embed(torch.LongTensor([vocab.to_index('The')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('the')])))" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "All word in the vocab have been lowered. 
There are 6 words, 4 unique lowered words.\n", - "tensor([[ 0.4530, -0.1558, -0.1941, 0.3203, 0.0355]],\n", - " grad_fn=)\n", - "tensor([[ 0.4530, -0.1558, -0.1941, 0.3203, 0.0355]],\n", - " grad_fn=)\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import StaticEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary().add_word_lst(\"The the a A\".split())\n", - "# 下面用随机的StaticEmbedding演示,但与使用预训练时效果是一致的\n", - "embed = StaticEmbedding(vocab, model_name_or_dir=None, embedding_dim=5, lower=True)\n", - "print(embed(torch.LongTensor([vocab.to_index('The')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('the')])))" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1 out of 4 words have frequency less than 2.\n", - "tensor([[ 0.4724, -0.7277, -0.6350, -0.5258, -0.6063]],\n", - " grad_fn=)\n", - "tensor([[ 0.7638, -0.0552, 0.1625, -0.2210, 0.4993]],\n", - " grad_fn=)\n", - "tensor([[ 0.7638, -0.0552, 0.1625, -0.2210, 0.4993]],\n", - " grad_fn=)\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import StaticEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary().add_word_lst(\"the the the a\".split())\n", - "# 下面用随机的StaticEmbedding演示,但与使用预训练时效果是一致的\n", - "embed = StaticEmbedding(vocab, model_name_or_dir=None, embedding_dim=5, min_freq=2)\n", - "print(embed(torch.LongTensor([vocab.to_index('the')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('a')])))\n", - "print(embed(torch.LongTensor([vocab.unknown_idx])))" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0 out of 5 words have frequency less than 2.\n", - "All word in the vocab have been lowered. 
There are 5 words, 4 unique lowered words.\n", - "tensor([[ 0.1943, 0.3739, 0.2769, -0.4746, -0.3181]],\n", - " grad_fn=)\n", - "tensor([[ 0.5892, -0.6916, 0.7319, -0.3803, 0.4979]],\n", - " grad_fn=)\n", - "tensor([[ 0.5892, -0.6916, 0.7319, -0.3803, 0.4979]],\n", - " grad_fn=)\n", - "tensor([[-0.1348, -0.2172, -0.0071, 0.5704, -0.2607]],\n", - " grad_fn=)\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import StaticEmbedding\n", - "from fastNLP import Vocabulary\n", - "\n", - "vocab = Vocabulary().add_word_lst(\"the the the a A\".split())\n", - "# 下面用随机的StaticEmbedding演示,但与使用预训练时效果是一致的\n", - "embed = StaticEmbedding(vocab, model_name_or_dir=None, embedding_dim=5, min_freq=2, lower=True)\n", - "print(embed(torch.LongTensor([vocab.to_index('the')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('a')])))\n", - "print(embed(torch.LongTensor([vocab.to_index('A')])))\n", - "print(embed(torch.LongTensor([vocab.unknown_idx])))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tutorials/tutorial_4_load_dataset.ipynb b/tutorials/tutorial_4_load_dataset.ipynb deleted file mode 100644 index f6de83bc..00000000 --- a/tutorials/tutorial_4_load_dataset.ipynb +++ /dev/null @@ -1,309 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用Loader和Pipe加载并处理数据集\n", - "\n", - "这一部分是关于如何加载数据集的教程\n", - "\n", - "## Part I: 数据集容器DataBundle\n", - "\n", - "而由于对于同一个任务,训练集,验证集和测试集会共用同一个词表以及具有相同的目标值,所以在fastNLP中我们使用了 DataBundle 来承载同一个任务的多个数据集 DataSet 以及它们的词表 Vocabulary 。下面会有例子介绍 DataBundle 的相关使用。\n", - "\n", - "DataBundle 在fastNLP中主要在各个 Loader 和 Pipe 中被使用。 下面我们先介绍一下 Loader 和 Pipe 。\n", - "\n", - "## Part II: 加载的各种数据集的Loader\n", - "\n", - "在fastNLP中,所有的 Loader 都可以通过其文档判断其支持读取的数据格式,以及读取之后返回的 DataSet 的格式, 例如 ChnSentiCorpLoader \n", - "\n", - "- download() 函数:自动将该数据集下载到缓存地址,默认缓存地址为~/.fastNLP/datasets/。由于版权等原因,不是所有的Loader都实现了该方法。该方法会返回下载后文件所处的缓存地址。\n", - "\n", - "- _load() 函数:从一个数据文件中读取数据,返回一个 DataSet 。返回的DataSet的格式可从Loader文档判断。\n", - "\n", - "- load() 函数:从文件或者文件夹中读取数据为 DataSet 并将它们组装成 DataBundle。支持接受的参数类型有以下的几种\n", - "\n", - " - None, 将尝试读取自动缓存的数据,仅支持提供了自动下载数据的Loader\n", - " - 文件夹路径, 默认将尝试在该文件夹下匹配文件名中含有 train , test , dev 的文件,如果有多个文件含有相同的关键字,将无法通过该方式读取\n", - " - dict, 例如{'train':\"/path/to/tr.conll\", 'dev':\"/to/validate.conll\", \"test\":\"/to/te.conll\"}。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "In total 3 datasets:\n", - "\ttest has 1944 instances.\n", - "\ttrain has 17196 instances.\n", - "\tdev has 1858 instances.\n", - "\n" - ] - } - ], - "source": [ - "from fastNLP.io import CWSLoader\n", - "\n", - "loader = CWSLoader(dataset_name='pku')\n", - "data_bundle = loader.load()\n", - "print(data_bundle)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "这里表示一共有3个数据集。其中:\n", - "\n", - " 3个数据集的名称分别为train、dev、test,分别有17223、1831、1944个instance\n", - "\n", - "也可以取出DataSet,并打印DataSet中的具体内容" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - 
"+----------------------------------------------------------------+\n", - "| raw_words |\n", - "+----------------------------------------------------------------+\n", - "| 迈向 充满 希望 的 新 世纪 —— 一九九八年 新年 讲话 ... |\n", - "| 中共中央 总书记 、 国家 主席 江 泽民 |\n", - "+----------------------------------------------------------------+\n" - ] - } - ], - "source": [ - "tr_data = data_bundle.get_dataset('train')\n", - "print(tr_data[:2])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Part III: 使用Pipe对数据集进行预处理\n", - "\n", - "通过 Loader 可以将文本数据读入,但并不能直接被神经网络使用,还需要进行一定的预处理。\n", - "\n", - "在fastNLP中,我们使用 Pipe 的子类作为数据预处理的类, Loader 和 Pipe 一般具备一一对应的关系,该关系可以从其名称判断, 例如 CWSLoader 与 CWSPipe 是一一对应的。一般情况下Pipe处理包含以下的几个过程,\n", - "1. 将raw_words或 raw_chars进行tokenize以切分成不同的词或字; \n", - "2. 再建立词或字的 Vocabulary , 并将词或字转换为index; \n", - "3. 将target 列建立词表并将target列转为index;\n", - "\n", - "所有的Pipe都可通过其文档查看该Pipe支持处理的 DataSet 以及返回的 DataBundle 中的Vocabulary的情况; 如 OntoNotesNERPipe\n", - "\n", - "各种数据集的Pipe当中,都包含了以下的两个函数:\n", - "\n", - "- process() 函数:对输入的 DataBundle 进行处理, 然后返回处理之后的 DataBundle 。process函数的文档中包含了该Pipe支持处理的DataSet的格式。\n", - "- process_from_file() 函数:输入数据集所在文件夹,使用对应的Loader读取数据(所以该函数支持的参数类型是由于其对应的Loader的load函数决定的),然后调用相对应的process函数对数据进行预处理。相当于是把Load和process放在一个函数中执行。\n", - "\n", - "接着上面 CWSLoader 的例子,我们展示一下 CWSPipe 的功能:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "In total 3 datasets:\n", - "\ttest has 1944 instances.\n", - "\ttrain has 17196 instances.\n", - "\tdev has 1858 instances.\n", - "In total 2 vocabs:\n", - "\tchars has 4777 entries.\n", - "\ttarget has 4 entries.\n", - "\n" - ] - } - ], - "source": [ - "from fastNLP.io import CWSPipe\n", - "\n", - "data_bundle = CWSPipe().process(data_bundle)\n", - "print(data_bundle)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "表示一共有3个数据集和2个词表。其中:\n", - "\n", - "- 3个数据集的名称分别为train、dev、test,分别有17223、1831、1944个instance\n", - "- 2个词表分别为chars词表与target词表。其中chars词表为句子文本所构建的词表,一共有4777个不同的字;target词表为目标标签所构建的词表,一共有4种标签。\n", - "\n", - "相较于之前CWSLoader读取的DataBundle,新增了两个Vocabulary。 我们可以打印一下处理之后的DataSet" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+---------------------+---------------------+---------------------+---------+\n", - "| raw_words | chars | target | seq_len |\n", - "+---------------------+---------------------+---------------------+---------+\n", - "| 迈向 充满 希望... | [1224, 178, 674,... | [0, 1, 0, 1, 0, ... | 29 |\n", - "| 中共中央 总书记... | [11, 212, 11, 33... | [0, 3, 3, 1, 0, ... 
{ - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Part IV: Loaders and Pipes bundled with fastNLP\n", - "\n", - "fastNLP ships Loaders and Pipes for many tasks and datasets, with automatic download support; see the [dataset documentation](https://docs.qq.com/sheet/DVnpkTnF6VW9UeXdh?c=A1A0A0) for the full list.\n", - "\n", - "## Part V: Generic Loaders for common file formats\n", - "\n", - "Besides the task-specific Loaders above, generic Loaders for CSV and JSON files are also provided.\n", - "\n", - "**CSVLoader** reads CSV-style dataset files. For example:\n", - "\n", - "```python\n", - "from fastNLP.io.loader import CSVLoader\n", - "data_set_loader = CSVLoader(\n", - "    headers=('raw_words', 'target'), sep='\\t'\n", - ")\n", - "```\n", - "\n", - "This puts the first item of every CSV line into the 'raw_words' field and the second into the 'target' field, the items being separated by '\\t'.\n", - "\n", - "```python\n", - "data_set = data_set_loader._load('path/to/your/file')\n", - "```\n", - "\n", - "A sample of the file contents:\n", - "\n", - "```csv\n", - "But it does not leave you with much . 1\n", - "You could hate it for the same reason . 1\n", - "The performances are an absolute joy . 4\n", - "```\n", - "\n", - "The resulting DataSet has the following fields:\n", - "\n", - "| raw_words | target |\n", - "| --------------------------------------- | ------ |\n", - "| But it does not leave you with much . | 1 |\n", - "| You could hate it for the same reason . | 1 |\n", - "| The performances are an absolute joy . | 4 |\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**JsonLoader** reads JSON-style dataset files; the data must be stored line by line, each line being one JSON object with the relevant attributes. For example:\n", - "\n", - "```python\n", - "from fastNLP.io.loader import JsonLoader\n", - "loader = JsonLoader(\n", - "    fields={'sentence1': 'raw_words1', 'sentence2': 'raw_words2', 'gold_label': 'target'}\n", - ")\n", - "```\n", - "\n", - "This assigns the values of 'sentence1', 'sentence2' and 'gold_label' in each JSON object to the fields 'raw_words1', 'raw_words2' and 'target' respectively.\n", - "\n", - "```python\n", - "data_set = loader._load('path/to/your/file')\n", - "```\n", - "\n", - "A sample of the dataset contents:\n", - "```\n", - "{\"annotator_labels\": [\"neutral\"], \"captionID\": \"3416050480.jpg#4\", \"gold_label\": \"neutral\", ... }\n", - "{\"annotator_labels\": [\"contradiction\"], \"captionID\": \"3416050480.jpg#4\", \"gold_label\": \"contradiction\", ... }\n", - "{\"annotator_labels\": [\"entailment\"], \"captionID\": \"3416050480.jpg#4\", \"gold_label\": \"entailment\", ... }\n", - "```\n", - "\n", - "The resulting DataSet has the following fields:\n", - "\n", - "| raw_words1 | raw_words2 | target |\n", - "| ------------------------------------------------------ | ------------------------------------------------- | ------------- |\n", - "| A person on a horse jumps over a broken down airplane. | A person is training his horse for a competition. | neutral |\n", - "| A person on a horse jumps over a broken down airplane. | A person is at a diner, ordering an omelette. | contradiction |\n", - "| A person on a horse jumps over a broken down airplane. | A person is outdoors, on a horse. 
| entailment |" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tutorials/tutorial_5_loss_optimizer.ipynb b/tutorials/tutorial_5_loss_optimizer.ipynb deleted file mode 100644 index cba78175..00000000 --- a/tutorials/tutorial_5_loss_optimizer.ipynb +++ /dev/null @@ -1,603 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用Trainer和Tester快速训练和测试" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据读入和处理" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/remote-home/ynzheng/anaconda3/envs/now/lib/python3.8/site-packages/FastNLP-0.5.0-py3.8.egg/fastNLP/io/loader/classification.py:340: UserWarning: SST2's test file has no target.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "In total 3 datasets:\n", - "\ttest has 1821 instances.\n", - "\ttrain has 67349 instances.\n", - "\tdev has 872 instances.\n", - "In total 2 vocabs:\n", - "\twords has 16292 entries.\n", - "\ttarget has 2 entries.\n", - "\n", - "+-----------------------------------+--------+-----------------------------------+---------+\n", - "| raw_words | target | words | seq_len |\n", - "+-----------------------------------+--------+-----------------------------------+---------+\n", - "| hide new secretions from the p... | 1 | [4110, 97, 12009, 39, 2, 6843,... 
| 7 |\n", - "+-----------------------------------+--------+-----------------------------------+---------+\n", - "Vocabulary(['hide', 'new', 'secretions', 'from', 'the']...)\n" - ] - } - ], - "source": [ - "from fastNLP.io import SST2Pipe\n", - "\n", - "pipe = SST2Pipe()\n", - "databundle = pipe.process_from_file()\n", - "vocab = databundle.get_vocab('words')\n", - "print(databundle)\n", - "print(databundle.get_dataset('train')[0])\n", - "print(databundle.get_vocab('words'))" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "4925 872 75\n" - ] - } - ], - "source": [ - "train_data = databundle.get_dataset('train')[:5000]\n", - "train_data, test_data = train_data.split(0.015)\n", - "dev_data = databundle.get_dataset('dev')\n", - "print(len(train_data),len(dev_data),len(test_data))" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+-------------+-----------+--------+-------+---------+\n", - "| field_names | raw_words | target | words | seq_len |\n", - "+-------------+-----------+--------+-------+---------+\n", - "| is_input | False | False | True | True |\n", - "| is_target | False | True | False | False |\n", - "| ignore_type | | False | False | False |\n", - "| pad_value | | 0 | 0 | 0 |\n", - "+-------------+-----------+--------+-------+---------+\n" - ] - }, - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "train_data.print_field_meta()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Training with a built-in model" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.models import CNNText\n", - "\n", - "# dimension of the word embeddings\n", - "EMBED_DIM = 100\n", - "\n", - "# CNNText takes a tuple as its first argument, used to define the model's embedding\n", - "# custom values for kernel_nums, kernel_sizes, padding and dropout can also be passed\n", - "model_cnn = CNNText((len(vocab),EMBED_DIM), num_classes=2, dropout=0.1)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP import AccuracyMetric\n", - "from fastNLP import Const\n", - "\n", - "# metrics=AccuracyMetric() is equivalent to the line below in this example\n", - "metrics=AccuracyMetric(pred=Const.OUTPUT, target=Const.TARGET)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP import CrossEntropyLoss\n", - "\n", - "# loss = CrossEntropyLoss() is equivalent to the line below in this example\n", - "loss = CrossEntropyLoss(pred=Const.OUTPUT, target=Const.TARGET)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "# This builds a loss-function object whose value is computed by func: among the model's return dict\n", - "# and the DataSet fields marked as target, the value registered as `pred` is passed to func as its\n", - "# `input` argument, and the value registered as `target` is passed to func as its `target` argument.\n", - "# The cross entropy built here behaves exactly like fastNLP's CrossEntropyLoss used above\n", - "import torch\n", - "from fastNLP import LossFunc\n", - "func = torch.nn.functional.cross_entropy\n", - "loss_func = LossFunc(func, input=Const.OUTPUT, target=Const.TARGET)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "import torch.optim as optim\n", - "\n", - "# define the optimizer with torch.optim\n", - "optimizer=optim.RMSprop(model_cnn.parameters(), lr=0.01, alpha=0.99, eps=1e-08, 
weight_decay=0, momentum=0, centered=False)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 4]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-27-11-31-25\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=3080.0), HTML(value='')), layout=Layout(d…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.75 seconds!\n", - "\r", - "Evaluation on dev at Epoch 1/10. Step:308/3080: \n", - "\r", - "AccuracyMetric: acc=0.751147\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.83 seconds!\n", - "\r", - "Evaluation on dev at Epoch 2/10. Step:616/3080: \n", - "\r", - "AccuracyMetric: acc=0.755734\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 1.32 seconds!\n", - "\r", - "Evaluation on dev at Epoch 3/10. Step:924/3080: \n", - "\r", - "AccuracyMetric: acc=0.758028\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.88 seconds!\n", - "\r", - "Evaluation on dev at Epoch 4/10. 
Step:1232/3080: \n", - "\r", - "AccuracyMetric: acc=0.741972\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.96 seconds!\n", - "\r", - "Evaluation on dev at Epoch 5/10. Step:1540/3080: \n", - "\r", - "AccuracyMetric: acc=0.728211\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.87 seconds!\n", - "\r", - "Evaluation on dev at Epoch 6/10. Step:1848/3080: \n", - "\r", - "AccuracyMetric: acc=0.755734\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 1.04 seconds!\n", - "\r", - "Evaluation on dev at Epoch 7/10. Step:2156/3080: \n", - "\r", - "AccuracyMetric: acc=0.732798\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.57 seconds!\n", - "\r", - "Evaluation on dev at Epoch 8/10. Step:2464/3080: \n", - "\r", - "AccuracyMetric: acc=0.747706\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.48 seconds!\n", - "\r", - "Evaluation on dev at Epoch 9/10. Step:2772/3080: \n", - "\r", - "AccuracyMetric: acc=0.732798\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.48 seconds!\n", - "\r", - "Evaluation on dev at Epoch 10/10. 
Step:3080/3080: \n", - "\r", - "AccuracyMetric: acc=0.740826\n", - "\n", - "\r\n", - "In Epoch:3/Step:924, got best dev performance:\n", - "AccuracyMetric: acc=0.758028\n", - "Reloaded the best model.\n" - ] - }, - { - "data": { - "text/plain": [ - "{'best_eval': {'AccuracyMetric': {'acc': 0.758028}},\n", - " 'best_epoch': 3,\n", - " 'best_step': 924,\n", - " 'seconds': 160.58}" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import Trainer\n", - "\n", - "#训练的轮数和batch size\n", - "N_EPOCHS = 10\n", - "BATCH_SIZE = 16\n", - "\n", - "#如果在定义trainer的时候没有传入optimizer参数,模型默认的优化器为torch.optim.Adam且learning rate为lr=4e-3\n", - "#这里只使用了loss作为损失函数输入,感兴趣可以尝试其他损失函数(如之前自定义的loss_func)作为输入\n", - "trainer = Trainer(model=model_cnn, train_data=train_data, dev_data=dev_data, loss=loss, metrics=metrics,\n", - "optimizer=optimizer,n_epochs=N_EPOCHS, batch_size=BATCH_SIZE)\n", - "trainer.train()" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=5.0), HTML(value='')), layout=Layout(disp…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.43 seconds!\n", - "[tester] \n", - "AccuracyMetric: acc=0.773333\n" - ] - }, - { - "data": { - "text/plain": [ - "{'AccuracyMetric': {'acc': 0.773333}}" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import Tester\n", - "\n", - "tester = Tester(test_data, model_cnn, metrics=AccuracyMetric())\n", - "tester.test()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tutorials/tutorial_6_datasetiter.ipynb b/tutorials/tutorial_6_datasetiter.ipynb deleted file mode 100644 index 2caa4cc2..00000000 --- a/tutorials/tutorial_6_datasetiter.ipynb +++ /dev/null @@ -1,681 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用Trainer和Tester快速训练和测试" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 数据读入和处理" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/remote-home/ynzheng/anaconda3/envs/now/lib/python3.8/site-packages/FastNLP-0.5.0-py3.8.egg/fastNLP/io/loader/classification.py:340: UserWarning: SST2's test file has no target.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "In total 3 datasets:\n", - "\ttest has 1821 instances.\n", - "\ttrain has 67349 instances.\n", - "\tdev has 872 instances.\n", - "In total 2 vocabs:\n", - "\twords has 16292 entries.\n", - "\ttarget has 2 entries.\n", - "\n", - 
"+-----------------------------------+--------+-----------------------------------+---------+\n", - "| raw_words | target | words | seq_len |\n", - "+-----------------------------------+--------+-----------------------------------+---------+\n", - "| hide new secretions from the p... | 1 | [4110, 97, 12009, 39, 2, 6843,... | 7 |\n", - "+-----------------------------------+--------+-----------------------------------+---------+\n", - "Vocabulary(['hide', 'new', 'secretions', 'from', 'the']...)\n" - ] - } - ], - "source": [ - "from fastNLP.io import SST2Pipe\n", - "\n", - "pipe = SST2Pipe()\n", - "databundle = pipe.process_from_file()\n", - "vocab = databundle.get_vocab('words')\n", - "print(databundle)\n", - "print(databundle.get_dataset('train')[0])\n", - "print(databundle.get_vocab('words'))" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "4925 872 75\n" - ] - } - ], - "source": [ - "train_data = databundle.get_dataset('train')[:5000]\n", - "train_data, test_data = train_data.split(0.015)\n", - "dev_data = databundle.get_dataset('dev')\n", - "print(len(train_data),len(dev_data),len(test_data))" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+-------------+-----------+--------+-------+---------+\n", - "| field_names | raw_words | target | words | seq_len |\n", - "+-------------+-----------+--------+-------+---------+\n", - "| is_input | False | False | True | True |\n", - "| is_target | False | True | False | False |\n", - "| ignore_type | | False | False | False |\n", - "| pad_value | | 0 | 0 | 0 |\n", - "+-------------+-----------+--------+-------+---------+\n" - ] - }, - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "train_data.print_field_meta()" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP import AccuracyMetric\n", - "from fastNLP import Const\n", - "\n", - "# metrics=AccuracyMetric() 在本例中与下面这行代码等价\n", - "metrics=AccuracyMetric(pred=Const.OUTPUT, target=Const.TARGET)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## DataSetIter初探" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "batch_x: {'words': tensor([[ 13, 830, 7746, 174, 3, 47, 6, 83, 5752, 15,\n", - " 2177, 15, 63, 57, 406, 84, 1009, 4973, 27, 17,\n", - " 13785, 3, 533, 3687, 15623, 39, 375, 8, 15624, 8,\n", - " 1323, 4398, 7],\n", - " [ 1045, 11113, 16, 104, 5, 4, 176, 1824, 1704, 3,\n", - " 2, 18, 11, 4, 1018, 432, 143, 33, 245, 308,\n", - " 7, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0]]), 'seq_len': tensor([33, 21])}\n", - "batch_y: {'target': tensor([1, 0])}\n", - "batch_x: {'words': tensor([[ 14, 10, 4, 311, 5, 154, 1418, 609, 7],\n", - " [ 14, 10, 437, 32, 78, 3, 78, 437, 7]]), 'seq_len': tensor([9, 9])}\n", - "batch_y: {'target': tensor([0, 1])}\n", - "batch_x: {'words': tensor([[ 4, 277, 685, 18, 7],\n", - " [15618, 3204, 5, 1675, 0]]), 'seq_len': tensor([5, 4])}\n", - "batch_y: {'target': tensor([1, 1])}\n", - "batch_x: {'words': tensor([[ 2, 155, 3, 4426, 3, 239, 3, 739, 5, 1136,\n", - " 41, 43, 2427, 736, 2, 648, 10, 15620, 2285, 7],\n", - " [ 24, 95, 28, 46, 
8, 336, 38, 239, 8, 2133,\n", - " 2, 18, 10, 15622, 1421, 6, 61, 5, 387, 7]]), 'seq_len': tensor([20, 20])}\n", - "batch_y: {'target': tensor([0, 0])}\n", - "batch_x: {'words': tensor([[ 879, 96, 8, 1026, 12, 8067, 11, 13623, 8, 15619,\n", - " 4, 673, 662, 15, 4, 1154, 240, 639, 417, 7],\n", - " [ 45, 752, 327, 180, 10, 15621, 16, 72, 8904, 9,\n", - " 1217, 7, 0, 0, 0, 0, 0, 0, 0, 0]]), 'seq_len': tensor([20, 12])}\n", - "batch_y: {'target': tensor([0, 1])}\n" - ] - } - ], - "source": [ - "from fastNLP import BucketSampler\n", - "from fastNLP import DataSetIter\n", - "\n", - "tmp_data = dev_data[:10]\n", - "# 定义一个Batch,传入DataSet,规定batch_size和去batch的规则。\n", - "# 顺序(Sequential),随机(Random),相似长度组成一个batch(Bucket)\n", - "sampler = BucketSampler(batch_size=2, seq_len_field_name='seq_len')\n", - "batch = DataSetIter(batch_size=2, dataset=tmp_data, sampler=sampler)\n", - "for batch_x, batch_y in batch:\n", - " print(\"batch_x: \",batch_x)\n", - " print(\"batch_y: \", batch_y)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "batch_x: {'words': tensor([[ 13, 830, 7746, 174, 3, 47, 6, 83, 5752, 15,\n", - " 2177, 15, 63, 57, 406, 84, 1009, 4973, 27, 17,\n", - " 13785, 3, 533, 3687, 15623, 39, 375, 8, 15624, 8,\n", - " 1323, 4398, 7],\n", - " [ 1045, 11113, 16, 104, 5, 4, 176, 1824, 1704, 3,\n", - " 2, 18, 11, 4, 1018, 432, 143, 33, 245, 308,\n", - " 7, -1, -1, -1, -1, -1, -1, -1, -1, -1,\n", - " -1, -1, -1]]), 'seq_len': tensor([33, 21])}\n", - "batch_y: {'target': tensor([1, 0])}\n", - "batch_x: {'words': tensor([[ 14, 10, 4, 311, 5, 154, 1418, 609, 7],\n", - " [ 14, 10, 437, 32, 78, 3, 78, 437, 7]]), 'seq_len': tensor([9, 9])}\n", - "batch_y: {'target': tensor([0, 1])}\n", - "batch_x: {'words': tensor([[ 2, 155, 3, 4426, 3, 239, 3, 739, 5, 1136,\n", - " 41, 43, 2427, 736, 2, 648, 10, 15620, 2285, 7],\n", - " [ 24, 95, 28, 46, 8, 336, 38, 239, 8, 2133,\n", - " 2, 18, 10, 15622, 1421, 6, 61, 5, 387, 7]]), 'seq_len': tensor([20, 20])}\n", - "batch_y: {'target': tensor([0, 0])}\n", - "batch_x: {'words': tensor([[ 4, 277, 685, 18, 7],\n", - " [15618, 3204, 5, 1675, -1]]), 'seq_len': tensor([5, 4])}\n", - "batch_y: {'target': tensor([1, 1])}\n", - "batch_x: {'words': tensor([[ 879, 96, 8, 1026, 12, 8067, 11, 13623, 8, 15619,\n", - " 4, 673, 662, 15, 4, 1154, 240, 639, 417, 7],\n", - " [ 45, 752, 327, 180, 10, 15621, 16, 72, 8904, 9,\n", - " 1217, 7, -1, -1, -1, -1, -1, -1, -1, -1]]), 'seq_len': tensor([20, 12])}\n", - "batch_y: {'target': tensor([0, 1])}\n" - ] - } - ], - "source": [ - "tmp_data.set_pad_val('words',-1)\n", - "batch = DataSetIter(batch_size=2, dataset=tmp_data, sampler=sampler)\n", - "for batch_x, batch_y in batch:\n", - " print(\"batch_x: \",batch_x)\n", - " print(\"batch_y: \", batch_y)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "batch_x: {'words': tensor([[ 45, 752, 327, 180, 10, 15621, 16, 72, 8904, 9,\n", - " 1217, 7, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n", - " [ 879, 96, 8, 1026, 12, 8067, 11, 13623, 8, 15619,\n", - " 4, 673, 662, 15, 4, 1154, 240, 639, 417, 7,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]), 'seq_len': tensor([12, 20])}\n", - "batch_y: {'target': tensor([1, 0])}\n", - "batch_x: {'words': tensor([[ 13, 830, 7746, 174, 3, 47, 6, 83, 5752, 15,\n", - " 
2177, 15, 63, 57, 406, 84, 1009, 4973, 27, 17,\n", - " 13785, 3, 533, 3687, 15623, 39, 375, 8, 15624, 8,\n", - " 1323, 4398, 7, 0, 0, 0, 0, 0, 0, 0],\n", - " [ 1045, 11113, 16, 104, 5, 4, 176, 1824, 1704, 3,\n", - " 2, 18, 11, 4, 1018, 432, 143, 33, 245, 308,\n", - " 7, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]), 'seq_len': tensor([33, 21])}\n", - "batch_y: {'target': tensor([1, 0])}\n", - "batch_x: {'words': tensor([[ 14, 10, 4, 311, 5, 154, 1418, 609, 7, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0],\n", - " [ 14, 10, 437, 32, 78, 3, 78, 437, 7, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0]]), 'seq_len': tensor([9, 9])}\n", - "batch_y: {'target': tensor([0, 1])}\n", - "batch_x: {'words': tensor([[ 2, 155, 3, 4426, 3, 239, 3, 739, 5, 1136,\n", - " 41, 43, 2427, 736, 2, 648, 10, 15620, 2285, 7,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n", - " [ 24, 95, 28, 46, 8, 336, 38, 239, 8, 2133,\n", - " 2, 18, 10, 15622, 1421, 6, 61, 5, 387, 7,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]), 'seq_len': tensor([20, 20])}\n", - "batch_y: {'target': tensor([0, 0])}\n", - "batch_x: {'words': tensor([[ 4, 277, 685, 18, 7, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n", - " [15618, 3204, 5, 1675, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]), 'seq_len': tensor([5, 4])}\n", - "batch_y: {'target': tensor([1, 1])}\n" - ] - } - ], - "source": [ - "from fastNLP.core.field import Padder\n", - "import numpy as np\n", - "class FixLengthPadder(Padder):\n", - " def __init__(self, pad_val=0, length=None):\n", - " super().__init__(pad_val=pad_val)\n", - " self.length = length\n", - " assert self.length is not None, \"Creating FixLengthPadder with no specific length!\"\n", - "\n", - " def __call__(self, contents, field_name, field_ele_dtype, dim):\n", - " #计算当前contents中的最大长度\n", - " max_len = max(map(len, contents))\n", - " #如果当前contents中的最大长度大于指定的padder length的话就报错\n", - " assert max_len <= self.length, \"Fixed padder length smaller than actual length! 
with length {}\".format(max_len)\n", - " array = np.full((len(contents), self.length), self.pad_val, dtype=field_ele_dtype)\n", - " for i, content_i in enumerate(contents):\n", - " array[i, :len(content_i)] = content_i\n", - " return array\n", - "\n", - "#设定FixLengthPadder的固定长度为40\n", - "tmp_padder = FixLengthPadder(pad_val=0,length=40)\n", - "#利用dataset的set_padder函数设定words field的padder\n", - "tmp_data.set_padder('words',tmp_padder)\n", - "batch = DataSetIter(batch_size=2, dataset=tmp_data, sampler=sampler)\n", - "for batch_x, batch_y in batch:\n", - " print(\"batch_x: \",batch_x)\n", - " print(\"batch_y: \", batch_y)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用DataSetIter自己编写训练过程\n" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "-----start training-----\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 2.68 seconds!\n", - "Epoch 0 Avg Loss: 0.66 AccuracyMetric: acc=0.708716 29307ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.38 seconds!\n", - "Epoch 1 Avg Loss: 0.41 AccuracyMetric: acc=0.770642 52200ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.51 seconds!\n", - "Epoch 2 Avg Loss: 0.16 AccuracyMetric: acc=0.747706 70268ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.96 seconds!\n", - "Epoch 3 Avg Loss: 0.06 AccuracyMetric: acc=0.741972 90349ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 1.04 seconds!\n", - "Epoch 4 Avg Loss: 0.03 AccuracyMetric: acc=0.740826 114250ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - 
"model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.8 seconds!\n", - "Epoch 5 Avg Loss: 0.02 AccuracyMetric: acc=0.738532 134742ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.65 seconds!\n", - "Epoch 6 Avg Loss: 0.01 AccuracyMetric: acc=0.731651 154503ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.8 seconds!\n", - "Epoch 7 Avg Loss: 0.01 AccuracyMetric: acc=0.738532 175397ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.36 seconds!\n", - "Epoch 8 Avg Loss: 0.01 AccuracyMetric: acc=0.733945 192384ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=55.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.84 seconds!\n", - "Epoch 9 Avg Loss: 0.01 AccuracyMetric: acc=0.744266 214417ms\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=5.0), HTML(value='')), layout=Layout(disp…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.04 seconds!\n", - "[tester] \n", - "AccuracyMetric: acc=0.786667\n" - ] - }, - { - "data": { - "text/plain": [ - "{'AccuracyMetric': {'acc': 0.786667}}" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import BucketSampler\n", - "from fastNLP import DataSetIter\n", - "from fastNLP.models import CNNText\n", - "from fastNLP import Tester\n", - "import torch\n", - "import time\n", - "\n", - "embed_dim = 100\n", - "model = CNNText((len(vocab),embed_dim), num_classes=2, dropout=0.1)\n", - "\n", - "def train(epoch, data, devdata):\n", - " optimizer = 
torch.optim.Adam(model.parameters(), lr=0.001)\n", - " lossfunc = torch.nn.CrossEntropyLoss()\n", - " batch_size = 32\n", - "\n", - " # 定义一个Batch,传入DataSet,规定batch_size和去batch的规则。\n", - " # 顺序(Sequential),随机(Random),相似长度组成一个batch(Bucket)\n", - " train_sampler = BucketSampler(batch_size=batch_size, seq_len_field_name='seq_len')\n", - " train_batch = DataSetIter(batch_size=batch_size, dataset=data, sampler=train_sampler)\n", - "\n", - " start_time = time.time()\n", - " print(\"-\"*5+\"start training\"+\"-\"*5)\n", - " for i in range(epoch):\n", - " loss_list = []\n", - " for batch_x, batch_y in train_batch:\n", - " optimizer.zero_grad()\n", - " output = model(batch_x['words'])\n", - " loss = lossfunc(output['pred'], batch_y['target'])\n", - " loss.backward()\n", - " optimizer.step()\n", - " loss_list.append(loss.item())\n", - "\n", - " #这里verbose如果为0,在调用Tester对象的test()函数时不输出任何信息,返回评估信息; 如果为1,打印出验证结果,返回评估信息\n", - " #在调用过Tester对象的test()函数后,调用其_format_eval_results(res)函数,结构化输出验证结果\n", - " tester_tmp = Tester(devdata, model, metrics=AccuracyMetric(), verbose=0)\n", - " res=tester_tmp.test()\n", - "\n", - " print('Epoch {:d} Avg Loss: {:.2f}'.format(i, sum(loss_list) / len(loss_list)),end=\" \")\n", - " print(tester_tmp._format_eval_results(res),end=\" \")\n", - " print('{:d}ms'.format(round((time.time()-start_time)*1000)))\n", - " loss_list.clear()\n", - "\n", - "train(10, train_data, dev_data)\n", - "#使用tester进行快速测试\n", - "tester = Tester(test_data, model, metrics=AccuracyMetric())\n", - "tester.test()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tutorials/tutorial_7_metrics.ipynb b/tutorials/tutorial_7_metrics.ipynb deleted file mode 100644 index ef791683..00000000 --- a/tutorials/tutorial_7_metrics.ipynb +++ /dev/null @@ -1,1206 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用Metric快速评测你的模型\n", - "\n", - "和上一篇教程一样的实验准备代码" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.io import SST2Pipe\n", - "from fastNLP import Trainer, CrossEntropyLoss, AccuracyMetric\n", - "from fastNLP.models import CNNText\n", - "import torch\n", - "\n", - "databundle = SST2Pipe().process_from_file()\n", - "vocab = databundle.get_vocab('words')\n", - "train_data = databundle.get_dataset('train')[:5000]\n", - "train_data, test_data = train_data.split(0.015)\n", - "dev_data = databundle.get_dataset('dev')\n", - "\n", - "model = CNNText((len(vocab),100), num_classes=2, dropout=0.1)\n", - "loss = CrossEntropyLoss()\n", - "metric = AccuracyMetric()\n", - "device = 0 if torch.cuda.is_available() else 'cpu'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "进行训练时,fastNLP提供了各种各样的 metrics 。 如前面的教程中所介绍,AccuracyMetric 类的对象被直接传到 Trainer 中用于训练" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\twords: 
(1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 4]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-28-00-37-08\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=1540.0), HTML(value='')), layout=Layout(d…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.28 seconds!\n", - "\r", - "Evaluation on dev at Epoch 1/10. Step:154/1540: \n", - "\r", - "AccuracyMetric: acc=0.747706\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.17 seconds!\n", - "\r", - "Evaluation on dev at Epoch 2/10. Step:308/1540: \n", - "\r", - "AccuracyMetric: acc=0.745413\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.19 seconds!\n", - "\r", - "Evaluation on dev at Epoch 3/10. Step:462/1540: \n", - "\r", - "AccuracyMetric: acc=0.74656\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.15 seconds!\n", - "\r", - "Evaluation on dev at Epoch 4/10. Step:616/1540: \n", - "\r", - "AccuracyMetric: acc=0.762615\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.42 seconds!\n", - "\r", - "Evaluation on dev at Epoch 5/10. 
Step:770/1540: \n", - "\r", - "AccuracyMetric: acc=0.736239\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.16 seconds!\n", - "\r", - "Evaluation on dev at Epoch 6/10. Step:924/1540: \n", - "\r", - "AccuracyMetric: acc=0.761468\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.42 seconds!\n", - "\r", - "Evaluation on dev at Epoch 7/10. Step:1078/1540: \n", - "\r", - "AccuracyMetric: acc=0.727064\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.21 seconds!\n", - "\r", - "Evaluation on dev at Epoch 8/10. Step:1232/1540: \n", - "\r", - "AccuracyMetric: acc=0.731651\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.52 seconds!\n", - "\r", - "Evaluation on dev at Epoch 9/10. Step:1386/1540: \n", - "\r", - "AccuracyMetric: acc=0.752294\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.44 seconds!\n", - "\r", - "Evaluation on dev at Epoch 10/10. 
Step:1540/1540: \n", - "\r", - "AccuracyMetric: acc=0.760321\n", - "\n", - "\r\n", - "In Epoch:4/Step:616, got best dev performance:\n", - "AccuracyMetric: acc=0.762615\n", - "Reloaded the best model.\n" - ] - }, - { - "data": { - "text/plain": [ - "{'best_eval': {'AccuracyMetric': {'acc': 0.762615}},\n", - " 'best_epoch': 4,\n", - " 'best_step': 616,\n", - " 'seconds': 32.63}" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "trainer = Trainer(train_data=train_data, dev_data=dev_data, model=model,\n", - "                  loss=loss, device=device, metrics=metric)\n", - "trainer.train()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Besides AccuracyMetric, SpanFPreRecMetric is another very common metric: in sequence labelling problems, for example, F-measure, precision and recall are usually computed over spans.\n", - "\n", - "fastNLP also implements ExtractiveQAMetric, a metric for extractive QA tasks such as SQuAD. The table below gives an overview.\n", - "\n", - "| Name | Description |\n", - "| -------------------- | ------------------------------------------------- |\n", - "| `MetricBase` | base class that custom metrics must inherit from |\n", - "| `AccuracyMetric` | simple accuracy metric |\n", - "| `SpanFPreRecMetric` | metric computing F-measure, precision and recall together |\n", - "| `ExtractiveQAMetric` | metric for extractive QA tasks |\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Defining your own metrics\n", - "\n", - "A custom metric class must inherit from fastNLP's MetricBase and override the evaluate and get_metric methods.\n", - "\n", - "- evaluate(xxx) receives one batch of data and accumulates the metric statistics over that batch's predictions\n", - "\n", - "- get_metric(xxx) is called once all data has been processed; it computes the final result from the statistics accumulated by evaluate\n", - "\n", - "Taking accuracy for a classification problem as an example, suppose the dict returned by the model's forward contains the key pred, which is to be used for accuracy:\n", - "\n", - "```python\n", - "class Model(nn.Module):\n", - "    def __init__(xxx):\n", - "        # do something\n", - "    def forward(self, xxx):\n", - "        # do something\n", - "        return {'pred': pred, 'other_keys':xxx} # pred's shape: batch_size x num_classes\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Version 1\n", - "\n", - "Assuming the field `target` in the dataset holds the value to predict and has been marked as a target field, the corresponding `AccMetric` can be defined as follows:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP import MetricBase\n", - "\n", - "class AccMetric(MetricBase):\n", - "\n", - "    def __init__(self):\n", - "        super().__init__()\n", - "        # define the statistics your metric needs\n", - "        self.total = 0\n", - "        self.acc_count = 0\n", - "\n", - "    # the parameter names of evaluate must match the field names in the DataSet and in the model\n", - "    # output, otherwise the corresponding values cannot be found; pred and target are fastNLP's defaults\n", - "    def evaluate(self, pred, target):\n", - "        # called once per batch during dev/test; implement how the metric is accumulated over a batch\n", - "        self.total += target.size(0)\n", - "        self.acc_count += target.eq(pred).sum().item()\n", - "\n", - "    def get_metric(self, reset=True): # define here how the final metric is computed\n", - "        acc = self.acc_count/self.total\n", - "        if reset: # reset the counters so the metric can be computed afresh\n", - "            self.acc_count = 0\n", - "            self.total = 0\n", - "        return {'acc': acc}\n", - "        # must return a dict whose keys name the metric; the names appear in the Trainer's progress bar" - ] - },
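Before handing the custom metric to a Trainer, it can be sanity-checked on its own with a Tester, the same pattern used in the previous tutorial; a minimal sketch reusing model and dev_data from the setup cells above:

```python
from fastNLP import Tester

# evaluate() runs once per batch, get_metric() once at the end
tester = Tester(dev_data, model, metrics=AccMetric(), verbose=0)
print(tester.test())  # e.g. {'AccMetric': {'acc': ...}}
```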
{ - "cell_type": "code", - "execution_count": 5, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 4]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-28-00-37-41\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=1540.0), HTML(value='')), layout=Layout(d…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.27 seconds!\n", - "\r", - "Evaluation on dev at Epoch 1/10. Step:154/1540: \n", - "\r", - "AccMetric: acc=0.7431192660550459\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.42 seconds!\n", - "\r", - "Evaluation on dev at Epoch 2/10. Step:308/1540: \n", - "\r", - "AccMetric: acc=0.7522935779816514\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.51 seconds!\n", - "\r", - "Evaluation on dev at Epoch 3/10. Step:462/1540: \n", - "\r", - "AccMetric: acc=0.7477064220183486\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.48 seconds!\n", - "\r", - "Evaluation on dev at Epoch 4/10. Step:616/1540: \n", - "\r", - "AccMetric: acc=0.7442660550458715\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.5 seconds!\n", - "\r", - "Evaluation on dev at Epoch 5/10. 
Step:770/1540: \n", - "\r", - "AccMetric: acc=0.7362385321100917\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.45 seconds!\n", - "\r", - "Evaluation on dev at Epoch 6/10. Step:924/1540: \n", - "\r", - "AccMetric: acc=0.7293577981651376\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.33 seconds!\n", - "\r", - "Evaluation on dev at Epoch 7/10. Step:1078/1540: \n", - "\r", - "AccMetric: acc=0.7190366972477065\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.29 seconds!\n", - "\r", - "Evaluation on dev at Epoch 8/10. Step:1232/1540: \n", - "\r", - "AccMetric: acc=0.7419724770642202\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.34 seconds!\n", - "\r", - "Evaluation on dev at Epoch 9/10. Step:1386/1540: \n", - "\r", - "AccMetric: acc=0.7350917431192661\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.18 seconds!\n", - "\r", - "Evaluation on dev at Epoch 10/10. 
Step:1540/1540: \n", - "\r", - "AccMetric: acc=0.6846330275229358\n", - "\n", - "\r\n", - "In Epoch:2/Step:308, got best dev performance:\n", - "AccMetric: acc=0.7522935779816514\n", - "Reloaded the best model.\n" - ] - }, - { - "data": { - "text/plain": [ - "{'best_eval': {'AccMetric': {'acc': 0.7522935779816514}},\n", - " 'best_epoch': 2,\n", - " 'best_step': 308,\n", - " 'seconds': 42.7}" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "trainer = Trainer(train_data=train_data, dev_data=dev_data, model=model,\n", - " loss=loss, device=device, metrics=AccMetric())\n", - "trainer.train()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Version 2\n", - "\n", - "如果需要复用 metric,比如下一次使用 `AccMetric` 时,dataset中目标field不叫 `target` 而叫 `y` ,或者model的输出不是 `pred`\n" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "class AccMetric(MetricBase):\n", - " def __init__(self, pred=None, target=None):\n", - " \"\"\"\n", - " 假设在另一场景使用时,目标field叫y,model给出的key为pred_y。则只需要在初始化AccMetric时,\n", - " acc_metric = AccMetric(pred='pred_y', target='y')即可。\n", - " 当初始化为acc_metric = AccMetric() 时,fastNLP会直接使用 'pred', 'target' 作为key去索取对应的的值\n", - " \"\"\"\n", - "\n", - " super().__init__()\n", - "\n", - " # 如果没有注册该则效果与 Version 1 就是一样的\n", - " self._init_param_map(pred=pred, target=target) # 该方法会注册label和pred. 仅需要注册evaluate()方法会用到的参数名即可\n", - "\n", - " # 根据你的情况自定义指标\n", - " self.total = 0\n", - " self.acc_count = 0\n", - "\n", - " # evaluate的参数需要和DataSet 中 field 名以及模型输出的结果 field 名一致,不然找不到对应的value\n", - " # pred, target 的参数是 fastNLP 的默认配置\n", - " def evaluate(self, pred, target):\n", - " # dev或test时,每个batch结束会调用一次该方法,需要实现如何根据每个batch累加metric\n", - " self.total += target.size(0)\n", - " self.acc_count += target.eq(pred).sum().item()\n", - "\n", - " def get_metric(self, reset=True): # 在这里定义如何计算metric\n", - " acc = self.acc_count/self.total\n", - " if reset: # 是否清零以便重新计算\n", - " self.acc_count = 0\n", - " self.total = 0\n", - " return {'acc': acc}\n", - " # 需要返回一个dict,key为该metric的名称,该名称会显示到Trainer的progress bar中" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 4]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-28-00-38-24\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=1540.0), HTML(value='')), layout=Layout(d…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.32 seconds!\n", - "\r", - 
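The practical payoff of `_init_param_map` in the Version 2 cell above is that the same metric class can be pointed at differently named fields without touching `evaluate()`. A minimal usage sketch, assuming the `AccMetric` class defined above; the field names `pred_y` and `y` are the hypothetical ones from its docstring, not fields of this dataset:

```python
# Scenario from the docstring: the model returns {'pred_y': ...} and the
# DataSet's target field is named 'y'. Remap once at construction time.
acc_metric = AccMetric(pred='pred_y', target='y')

# With the default construction, fastNLP falls back to the keys
# 'pred' and 'target', exactly as in Version 1.
default_metric = AccMetric()

# Either instance is handed to a Trainer or Tester the same way, e.g.
# Trainer(..., metrics=acc_metric)
```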
"Evaluation on dev at Epoch 1/10. Step:154/1540: \n", - "\r", - "AccMetric: acc=0.7511467889908257\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.29 seconds!\n", - "\r", - "Evaluation on dev at Epoch 2/10. Step:308/1540: \n", - "\r", - "AccMetric: acc=0.7454128440366973\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.42 seconds!\n", - "\r", - "Evaluation on dev at Epoch 3/10. Step:462/1540: \n", - "\r", - "AccMetric: acc=0.7224770642201835\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.4 seconds!\n", - "\r", - "Evaluation on dev at Epoch 4/10. Step:616/1540: \n", - "\r", - "AccMetric: acc=0.7534403669724771\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.41 seconds!\n", - "\r", - "Evaluation on dev at Epoch 5/10. Step:770/1540: \n", - "\r", - "AccMetric: acc=0.7396788990825688\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.22 seconds!\n", - "\r", - "Evaluation on dev at Epoch 6/10. Step:924/1540: \n", - "\r", - "AccMetric: acc=0.7442660550458715\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.45 seconds!\n", - "\r", - "Evaluation on dev at Epoch 7/10. 
Step:1078/1540: \n", - "\r", - "AccMetric: acc=0.6903669724770642\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.25 seconds!\n", - "\r", - "Evaluation on dev at Epoch 8/10. Step:1232/1540: \n", - "\r", - "AccMetric: acc=0.7293577981651376\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.4 seconds!\n", - "\r", - "Evaluation on dev at Epoch 9/10. Step:1386/1540: \n", - "\r", - "AccMetric: acc=0.7006880733944955\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.48 seconds!\n", - "\r", - "Evaluation on dev at Epoch 10/10. Step:1540/1540: \n", - "\r", - "AccMetric: acc=0.7339449541284404\n", - "\n", - "\r\n", - "In Epoch:4/Step:616, got best dev performance:\n", - "AccMetric: acc=0.7534403669724771\n", - "Reloaded the best model.\n" - ] - }, - { - "data": { - "text/plain": [ - "{'best_eval': {'AccMetric': {'acc': 0.7534403669724771}},\n", - " 'best_epoch': 4,\n", - " 'best_step': 616,\n", - " 'seconds': 34.74}" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "trainer = Trainer(train_data=train_data, dev_data=dev_data, model=model,\n", - " loss=loss, device=device, metrics=AccMetric())\n", - "trainer.train()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "``MetricBase`` 将会在输入的字典 ``pred_dict`` 和 ``target_dict`` 中进行检查.\n", - "``pred_dict`` 是模型当中 ``forward()`` 函数或者 ``predict()`` 函数的返回值.\n", - "``target_dict`` 是DataSet当中的ground truth, 判定ground truth的条件是field的 ``is_target`` 被设置为True.\n", - "\n", - "``MetricBase`` 会进行以下的类型检测:\n", - "\n", - "1. self.evaluate当中是否有 varargs, 这是不支持的.\n", - "2. self.evaluate当中所需要的参数是否既不在 ``pred_dict`` 也不在 ``target_dict`` .\n", - "3. 
self.evaluate当中所需要的参数是否既在 ``pred_dict`` 也在 ``target_dict`` .\n", - "\n", - "除此以外,在参数被传入self.evaluate以前,这个函数会检测 ``pred_dict`` 和 ``target_dict`` 当中没有被用到的参数\n", - "如果kwargs是self.evaluate的参数,则不会检测\n", - "\n", - "self.evaluate将计算一个批次(batch)的评价指标,并累计。 没有返回值\n", - "self.get_metric将统计当前的评价指标并返回评价结果, 返回值需要是一个dict, key是指标名称,value是指标的值\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tutorials/tutorial_8_modules_models.ipynb b/tutorials/tutorial_8_modules_models.ipynb deleted file mode 100644 index 2784cca1..00000000 --- a/tutorials/tutorial_8_modules_models.ipynb +++ /dev/null @@ -1,1014 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用Modules和Models快速搭建自定义模型\n", - "\n", - "modules 和 models 用于构建 fastNLP 所需的神经网络模型,它可以和 torch.nn 中的模型一起使用。 下面我们会分三节介绍编写构建模型的具体方法。\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "我们首先准备好和上篇教程一样的基础实验代码" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.io import SST2Pipe\n", - "from fastNLP import Trainer, CrossEntropyLoss, AccuracyMetric\n", - "import torch\n", - "\n", - "databundle = SST2Pipe().process_from_file()\n", - "vocab = databundle.get_vocab('words')\n", - "train_data = databundle.get_dataset('train')[:5000]\n", - "train_data, test_data = train_data.split(0.015)\n", - "dev_data = databundle.get_dataset('dev')\n", - "\n", - "loss = CrossEntropyLoss()\n", - "metric = AccuracyMetric()\n", - "device = 0 if torch.cuda.is_available() else 'cpu'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用 models 中的模型\n", - "\n", - "fastNLP 在 models 模块中内置了如 CNNText 、 SeqLabeling 等完整的模型,以供用户直接使用。 以文本分类的任务为例,我们从 models 中导入 CNNText 模型,用它进行训练。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 41]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-28-00-56-04\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=1540.0), HTML(value='')), layout=Layout(d…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": 
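To make the three checks described above concrete, here is a simplified, framework-free sketch of how a `MetricBase`-style wrapper can route `pred_dict` / `target_dict` entries into `evaluate()` by parameter name. This illustrates the mechanism only; it is not fastNLP's actual implementation:

```python
import inspect

def call_evaluate(evaluate_fn, pred_dict, target_dict):
    """Route dict entries into evaluate_fn by parameter name (sketch)."""
    kwargs = {}
    for name, param in inspect.signature(evaluate_fn).parameters.items():
        if param.kind is inspect.Parameter.VAR_POSITIONAL:
            raise RuntimeError("varargs in evaluate() are not supported")  # check 1
        if param.kind is inspect.Parameter.VAR_KEYWORD:
            continue  # with **kwargs present, the unused-key check is skipped
        in_pred, in_target = name in pred_dict, name in target_dict
        if not in_pred and not in_target:
            raise RuntimeError(f"'{name}' found in neither dict")          # check 2
        if in_pred and in_target:
            raise RuntimeError(f"'{name}' found in both dicts")            # check 3
        kwargs[name] = pred_dict[name] if in_pred else target_dict[name]
    return evaluate_fn(**kwargs)

# 'pred' is routed from the model output, 'target' from the DataSet fields.
call_evaluate(lambda pred, target: print(pred, target),
              pred_dict={'pred': [1, 0]}, target_dict={'target': [1, 1]})
```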
"stream", - "text": [ - "\r", - "Evaluate data in 0.22 seconds!\n", - "\r", - "Evaluation on dev at Epoch 1/10. Step:154/1540: \n", - "\r", - "AccuracyMetric: acc=0.760321\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.29 seconds!\n", - "\r", - "Evaluation on dev at Epoch 2/10. Step:308/1540: \n", - "\r", - "AccuracyMetric: acc=0.727064\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.48 seconds!\n", - "\r", - "Evaluation on dev at Epoch 3/10. Step:462/1540: \n", - "\r", - "AccuracyMetric: acc=0.758028\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.24 seconds!\n", - "\r", - "Evaluation on dev at Epoch 4/10. Step:616/1540: \n", - "\r", - "AccuracyMetric: acc=0.759174\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.47 seconds!\n", - "\r", - "Evaluation on dev at Epoch 5/10. Step:770/1540: \n", - "\r", - "AccuracyMetric: acc=0.743119\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.22 seconds!\n", - "\r", - "Evaluation on dev at Epoch 6/10. Step:924/1540: \n", - "\r", - "AccuracyMetric: acc=0.756881\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.21 seconds!\n", - "\r", - "Evaluation on dev at Epoch 7/10. 
Step:1078/1540: \n", - "\r", - "AccuracyMetric: acc=0.752294\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.21 seconds!\n", - "\r", - "Evaluation on dev at Epoch 8/10. Step:1232/1540: \n", - "\r", - "AccuracyMetric: acc=0.756881\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.15 seconds!\n", - "\r", - "Evaluation on dev at Epoch 9/10. Step:1386/1540: \n", - "\r", - "AccuracyMetric: acc=0.75344\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.12 seconds!\n", - "\r", - "Evaluation on dev at Epoch 10/10. Step:1540/1540: \n", - "\r", - "AccuracyMetric: acc=0.752294\n", - "\n", - "\r\n", - "In Epoch:1/Step:154, got best dev performance:\n", - "AccuracyMetric: acc=0.760321\n", - "Reloaded the best model.\n" - ] - }, - { - "data": { - "text/plain": [ - "{'best_eval': {'AccuracyMetric': {'acc': 0.760321}},\n", - " 'best_epoch': 1,\n", - " 'best_step': 154,\n", - " 'seconds': 29.3}" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP.models import CNNText\n", - "\n", - "model_cnn = CNNText((len(vocab),100), num_classes=2, dropout=0.1)\n", - "\n", - "trainer = Trainer(train_data=train_data, dev_data=dev_data, metrics=metric,\n", - " loss=loss, device=device, model=model_cnn)\n", - "trainer.train()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "在 iPython 环境输入 model_cnn ,我们可以看到 model_cnn 的网络结构" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "CNNText(\n", - " (embed): Embedding(\n", - " (embed): Embedding(16292, 100)\n", - " (dropout): Dropout(p=0.0, inplace=False)\n", - " )\n", - " (conv_pool): ConvMaxpool(\n", - " (convs): ModuleList(\n", - " (0): Conv1d(100, 30, kernel_size=(1,), stride=(1,), bias=False)\n", - " (1): Conv1d(100, 40, kernel_size=(3,), stride=(1,), padding=(1,), bias=False)\n", - " (2): Conv1d(100, 50, kernel_size=(5,), stride=(1,), padding=(2,), bias=False)\n", - " )\n", - " )\n", - " (dropout): Dropout(p=0.1, inplace=False)\n", - " (fc): Linear(in_features=120, out_features=2, bias=True)\n", - ")" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model_cnn" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用 nn.torch 编写模型\n", - "\n", - "FastNLP 完全支持使用 pyTorch 
编写的模型,但与 pyTorch 中编写模型的常见方法不同, 用于 fastNLP 的模型中 forward 函数需要返回一个字典,字典中至少需要包含 pred 这个字段。\n", - "\n", - "下面是使用 pyTorch 中的 torch.nn 模块编写的文本分类,注意观察代码中标注的向量维度。 由于 pyTorch 使用了约定俗成的维度设置,使得 forward 中需要多次处理维度顺序" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "import torch\n", - "import torch.nn as nn\n", - "\n", - "class LSTMText(nn.Module):\n", - " def __init__(self, vocab_size, embedding_dim, output_dim, hidden_dim=64, num_layers=2, dropout=0.5):\n", - " super().__init__()\n", - "\n", - " self.embedding = nn.Embedding(vocab_size, embedding_dim)\n", - " self.lstm = nn.LSTM(embedding_dim, hidden_dim, num_layers=num_layers, bidirectional=True, dropout=dropout)\n", - " self.fc = nn.Linear(hidden_dim * 2, output_dim)\n", - " self.dropout = nn.Dropout(dropout)\n", - "\n", - " def forward(self, words):\n", - " # (input) words : (batch_size, seq_len)\n", - " words = words.permute(1,0)\n", - " # words : (seq_len, batch_size)\n", - "\n", - " embedded = self.dropout(self.embedding(words))\n", - " # embedded : (seq_len, batch_size, embedding_dim)\n", - " output, (hidden, cell) = self.lstm(embedded)\n", - " # output: (seq_len, batch_size, hidden_dim * 2)\n", - " # hidden: (num_layers * 2, batch_size, hidden_dim)\n", - " # cell: (num_layers * 2, batch_size, hidden_dim)\n", - "\n", - " hidden = torch.cat((hidden[-2, :, :], hidden[-1, :, :]), dim=1)\n", - " hidden = self.dropout(hidden)\n", - " # hidden: (batch_size, hidden_dim * 2)\n", - "\n", - " pred = self.fc(hidden.squeeze(0))\n", - " # result: (batch_size, output_dim)\n", - " return {\"pred\":pred}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "我们同样可以在 iPython 环境中查看这个模型的网络结构" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "LSTMText(\n", - " (embedding): Embedding(16292, 100)\n", - " (lstm): LSTM(100, 64, num_layers=2, dropout=0.5, bidirectional=True)\n", - " (fc): Linear(in_features=128, out_features=2, bias=True)\n", - " (dropout): Dropout(p=0.5, inplace=False)\n", - ")" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model_lstm = LSTMText(len(vocab), 100, 2)\n", - "model_lstm " - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 41]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-28-00-56-34\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=1540.0), HTML(value='')), layout=Layout(d…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - 
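The `{"pred": pred}` return convention above is what lets fastNLP wire model outputs to losses and metrics by key: `CrossEntropyLoss()` looks up `pred` in the forward output and `target` in the DataSet. A quick way to sanity-check a custom model before handing it to `Trainer` is to push one fake batch through it and inspect the returned dict; a sketch, assuming the `LSTMText` class from the cell above (batch size 4, sequence length 7, and a toy vocabulary of 100 are arbitrary):

```python
import torch

model = LSTMText(vocab_size=100, embedding_dim=16, output_dim=2)

fake_words = torch.randint(0, 100, (4, 7))  # (batch_size, seq_len)
out = model(fake_words)

assert isinstance(out, dict) and 'pred' in out  # required by the Trainer
assert out['pred'].shape == (4, 2)              # (batch_size, output_dim)
print(out['pred'].shape)
```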
}, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.36 seconds!\n", - "\r", - "Evaluation on dev at Epoch 1/10. Step:154/1540: \n", - "\r", - "AccuracyMetric: acc=0.59289\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.35 seconds!\n", - "\r", - "Evaluation on dev at Epoch 2/10. Step:308/1540: \n", - "\r", - "AccuracyMetric: acc=0.674312\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.21 seconds!\n", - "\r", - "Evaluation on dev at Epoch 3/10. Step:462/1540: \n", - "\r", - "AccuracyMetric: acc=0.724771\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.4 seconds!\n", - "\r", - "Evaluation on dev at Epoch 4/10. Step:616/1540: \n", - "\r", - "AccuracyMetric: acc=0.748853\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.24 seconds!\n", - "\r", - "Evaluation on dev at Epoch 5/10. Step:770/1540: \n", - "\r", - "AccuracyMetric: acc=0.756881\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.29 seconds!\n", - "\r", - "Evaluation on dev at Epoch 6/10. Step:924/1540: \n", - "\r", - "AccuracyMetric: acc=0.741972\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.32 seconds!\n", - "\r", - "Evaluation on dev at Epoch 7/10. 
Step:1078/1540: \n", - "\r", - "AccuracyMetric: acc=0.754587\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.24 seconds!\n", - "\r", - "Evaluation on dev at Epoch 8/10. Step:1232/1540: \n", - "\r", - "AccuracyMetric: acc=0.756881\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.28 seconds!\n", - "\r", - "Evaluation on dev at Epoch 9/10. Step:1386/1540: \n", - "\r", - "AccuracyMetric: acc=0.740826\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.23 seconds!\n", - "\r", - "Evaluation on dev at Epoch 10/10. Step:1540/1540: \n", - "\r", - "AccuracyMetric: acc=0.751147\n", - "\n", - "\r\n", - "In Epoch:5/Step:770, got best dev performance:\n", - "AccuracyMetric: acc=0.756881\n", - "Reloaded the best model.\n" - ] - }, - { - "data": { - "text/plain": [ - "{'best_eval': {'AccuracyMetric': {'acc': 0.756881}},\n", - " 'best_epoch': 5,\n", - " 'best_step': 770,\n", - " 'seconds': 45.69}" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "trainer = Trainer(train_data=train_data, dev_data=dev_data, metrics=metric,\n", - " loss=loss, device=device, model=model_lstm)\n", - "trainer.train()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 使用 modules 编写模型\n", - "\n", - "下面我们使用 fastNLP.modules 中的组件来构建同样的网络。由于 fastNLP 统一把 batch_size 放在第一维, 在编写代码的过程中会有一定的便利。" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "MyText(\n", - " (embedding): Embedding(\n", - " (embed): Embedding(16292, 100)\n", - " (dropout): Dropout(p=0.0, inplace=False)\n", - " )\n", - " (lstm): LSTM(\n", - " (lstm): LSTM(100, 64, num_layers=2, batch_first=True, bidirectional=True)\n", - " )\n", - " (mlp): MLP(\n", - " (hiddens): ModuleList()\n", - " (output): Linear(in_features=128, out_features=2, bias=True)\n", - " (dropout): Dropout(p=0.5, inplace=False)\n", - " )\n", - ")" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP.modules import LSTM, MLP\n", - "from fastNLP.embeddings import Embedding\n", - "\n", - "\n", - "class MyText(nn.Module):\n", - " def __init__(self, vocab_size, embedding_dim, output_dim, hidden_dim=64, num_layers=2, dropout=0.5):\n", - " super().__init__()\n", - "\n", - " self.embedding = Embedding((vocab_size, 
embedding_dim))\n", - " self.lstm = LSTM(embedding_dim, hidden_dim, num_layers=num_layers, bidirectional=True)\n", - " self.mlp = MLP([hidden_dim*2,output_dim], dropout=dropout)\n", - "\n", - " def forward(self, words):\n", - " embedded = self.embedding(words)\n", - " _,(hidden,_) = self.lstm(embedded)\n", - " pred = self.mlp(torch.cat((hidden[-1],hidden[-2]),dim=1))\n", - " return {\"pred\":pred}\n", - " \n", - "model_text = MyText(len(vocab), 100, 2)\n", - "model_text" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 41]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-28-00-57-19\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "16a35f2b0ef0457dae15c5f240a19a3a", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=1540.0), HTML(value='')), layout=Layout(d…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.38 seconds!\n", - "\r", - "Evaluation on dev at Epoch 1/10. Step:154/1540: \n", - "\r", - "AccuracyMetric: acc=0.767202\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, layout=Layout(flex='2'), max=28.0), HTML(value='')), layout=Layout(dis…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\r", - "Evaluate data in 0.22 seconds!\n", - "\r", - "Evaluation on dev at Epoch 2/10. 
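The convenience mentioned above is visible in the printed structure: `fastNLP.modules.LSTM` wraps `torch.nn.LSTM` with `batch_first=True`, so `MyText.forward` consumes `(batch_size, seq_len)` input directly and needs none of the `permute` calls `LSTMText` had. The same sanity check as before, assuming the `MyText` class from the cell above:

```python
import torch

model = MyText(vocab_size=100, embedding_dim=16, output_dim=2)
fake_words = torch.randint(0, 100, (4, 7))  # batch first, no permute needed
print(model(fake_words)['pred'].shape)      # expected: torch.Size([4, 2])
```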
Step:308/1540: \n", - "\r", - "AccuracyMetric: acc=0.743119\n", - "\n" - ] - } - ], - "source": [ - "trainer = Trainer(train_data=train_data, dev_data=dev_data, metrics=metric,\n", - " loss=loss, device=device, model=model_lstm)\n", - "trainer.train()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python Now", - "language": "python", - "name": "now" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tutorials/tutorial_9_callback.ipynb b/tutorials/tutorial_9_callback.ipynb deleted file mode 100644 index ed71a9b0..00000000 --- a/tutorials/tutorial_9_callback.ipynb +++ /dev/null @@ -1,622 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 使用 Callback 自定义你的训练过程" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- 什么是 Callback\n", - "- 使用 Callback \n", - "- 一些常用的 Callback\n", - "- 自定义实现 Callback" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "什么是Callback\n", - "------\n", - "\n", - "Callback 是与 Trainer 紧密结合的模块,利用 Callback 可以在 Trainer 训练时,加入自定义的操作,比如梯度裁剪,学习率调节,测试模型的性能等。定义的 Callback 会在训练的特定阶段被调用。\n", - "\n", - "fastNLP 中提供了很多常用的 Callback ,开箱即用。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "使用 Callback\n", - " ------\n", - "\n", - "使用 Callback 很简单,将需要的 callback 按 list 存储,以对应参数 ``callbacks`` 传入对应的 Trainer。Trainer 在训练时就会自动执行这些 Callback 指定的操作了。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "ExecuteTime": { - "end_time": "2019-09-17T07:34:46.465871Z", - "start_time": "2019-09-17T07:34:30.648758Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "In total 3 datasets:\n", - "\ttest has 1200 instances.\n", - "\ttrain has 9600 instances.\n", - "\tdev has 1200 instances.\n", - "In total 2 vocabs:\n", - "\tchars has 4409 entries.\n", - "\ttarget has 2 entries.\n", - "\n", - "training epochs started 2019-09-17-03-34-34\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=900), HTML(value='')), layout=Layout(display=…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.1 seconds!\n", - "Evaluation on dev at Epoch 1/3. 
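One slip near the end of the modules tutorial above is worth flagging: the final training cell builds `model_text` but then passes `model=model_lstm` to the `Trainer` again, so the accuracies it reports come from retraining the torch.nn model rather than the modules-based one. The intended cell is presumably:

```python
# Train the modules-based model built above, not the torch.nn one.
trainer = Trainer(train_data=train_data, dev_data=dev_data, metrics=metric,
                  loss=loss, device=device, model=model_text)
trainer.train()
```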
Step:300/900: \n", - "AccuracyMetric: acc=0.863333\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.11 seconds!\n", - "Evaluation on dev at Epoch 2/3. Step:600/900: \n", - "AccuracyMetric: acc=0.886667\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.1 seconds!\n", - "Evaluation on dev at Epoch 3/3. Step:900/900: \n", - "AccuracyMetric: acc=0.890833\n", - "\n", - "\r\n", - "In Epoch:3/Step:900, got best dev performance:\n", - "AccuracyMetric: acc=0.890833\n", - "Reloaded the best model.\n" - ] - } - ], - "source": [ - "from fastNLP import (Callback, EarlyStopCallback,\n", - " Trainer, CrossEntropyLoss, AccuracyMetric)\n", - "from fastNLP.models import CNNText\n", - "import torch.cuda\n", - "\n", - "# prepare data\n", - "def get_data():\n", - " from fastNLP.io import ChnSentiCorpPipe as pipe\n", - " data = pipe().process_from_file()\n", - " print(data)\n", - " data.rename_field('chars', 'words')\n", - " train_data = data.datasets['train']\n", - " dev_data = data.datasets['dev']\n", - " test_data = data.datasets['test']\n", - " vocab = data.vocabs['words']\n", - " tgt_vocab = data.vocabs['target']\n", - " return train_data, dev_data, test_data, vocab, tgt_vocab\n", - "\n", - "# prepare model\n", - "train_data, dev_data, _, vocab, tgt_vocab = get_data()\n", - "device = 'cuda:0' if torch.cuda.is_available() else 'cpu'\n", - "model = CNNText((len(vocab),50), num_classes=len(tgt_vocab))\n", - "\n", - "# define callback\n", - "callbacks=[EarlyStopCallback(5)]\n", - "\n", - "# pass callbacks to Trainer\n", - "def train_with_callback(cb_list):\n", - " trainer = Trainer(\n", - " device=device,\n", - " n_epochs=3,\n", - " model=model, \n", - " train_data=train_data, \n", - " dev_data=dev_data, \n", - " loss=CrossEntropyLoss(), \n", - " metrics=AccuracyMetric(), \n", - " callbacks=cb_list, \n", - " check_code_level=-1\n", - " )\n", - " trainer.train()\n", - "\n", - "train_with_callback(callbacks)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "fastNLP 中的 Callback\n", - "-------\n", - "fastNLP 中提供了很多常用的 Callback,如梯度裁剪,训练时早停和测试验证集,fitlog 等等。具体 Callback 请参考 fastNLP.core.callbacks" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "ExecuteTime": { - "end_time": "2019-09-17T07:35:02.182727Z", - "start_time": "2019-09-17T07:34:49.443863Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "training epochs started 2019-09-17-03-34-49\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=900), HTML(value='')), layout=Layout(display=…" - ] - }, - "metadata": {}, - "output_type": "display_data" 
- }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.13 seconds!\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.12 seconds!\n", - "Evaluation on data-test:\n", - "AccuracyMetric: acc=0.890833\n", - "Evaluation on dev at Epoch 1/3. Step:300/900: \n", - "AccuracyMetric: acc=0.890833\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.09 seconds!\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.09 seconds!\n", - "Evaluation on data-test:\n", - "AccuracyMetric: acc=0.8875\n", - "Evaluation on dev at Epoch 2/3. Step:600/900: \n", - "AccuracyMetric: acc=0.8875\n", - "\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.11 seconds!\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.1 seconds!\n", - "Evaluation on data-test:\n", - "AccuracyMetric: acc=0.885\n", - "Evaluation on dev at Epoch 3/3. 
Step:900/900: \n", - "AccuracyMetric: acc=0.885\n", - "\n", - "\r\n", - "In Epoch:1/Step:300, got best dev performance:\n", - "AccuracyMetric: acc=0.890833\n", - "Reloaded the best model.\n" - ] - } - ], - "source": [ - "from fastNLP import EarlyStopCallback, GradientClipCallback, EvaluateCallback\n", - "callbacks = [\n", - " EarlyStopCallback(5),\n", - " GradientClipCallback(clip_value=5, clip_type='value'),\n", - " EvaluateCallback(dev_data)\n", - "]\n", - "\n", - "train_with_callback(callbacks)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "自定义 Callback\n", - "------\n", - "\n", - "这里我们以一个简单的 Callback作为例子,它的作用是打印每一个 Epoch 平均训练 loss。\n", - "\n", - "#### 创建 Callback\n", - " \n", - "要自定义 Callback,我们要实现一个类,继承 fastNLP.Callback。\n", - "\n", - "这里我们定义 MyCallBack ,继承 fastNLP.Callback 。\n", - "\n", - "#### 指定 Callback 调用的阶段\n", - " \n", - "Callback 中所有以 on_ 开头的类方法会在 Trainer 的训练中在特定阶段调用。 如 on_train_begin() 会在训练开始时被调用,on_epoch_end() 会在每个 epoch 结束时调用。 具体有哪些类方法,参见 Callback 文档。\n", - "\n", - "这里, MyCallBack 在求得loss时调用 on_backward_begin() 记录当前 loss ,在每一个 epoch 结束时调用 on_epoch_end() ,求当前 epoch 平均loss并输出。\n", - "\n", - "#### 使用 Callback 的属性访问 Trainer 的内部信息\n", - " \n", - "为了方便使用,可以使用 Callback 的属性,访问 Trainer 中的对应信息,如 optimizer, epoch, n_epochs,分别对应训练时的优化器,当前 epoch 数,和总 epoch 数。 具体可访问的属性,参见文档 Callback 。\n", - "\n", - "这里, MyCallBack 为了求平均 loss ,需要知道当前 epoch 的总步数,可以通过 self.step 属性得到当前训练了多少步。\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": { - "ExecuteTime": { - "end_time": "2019-09-17T07:43:10.907139Z", - "start_time": "2019-09-17T07:42:58.488177Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "training epochs started 2019-09-17-03-42-58\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=900), HTML(value='')), layout=Layout(display=…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.11 seconds!\n", - "Evaluation on dev at Epoch 1/3. Step:300/900: \n", - "AccuracyMetric: acc=0.883333\n", - "\n", - "Avg loss at epoch 1, 0.100254\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.1 seconds!\n", - "Evaluation on dev at Epoch 2/3. 
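The attribute list above (`self.optimizer`, `self.epoch`, `self.n_epochs`, `self.step`) is enough to build other common callbacks without touching the Trainer. Alongside the loss-averaging `MyCallBack` shown in this cell, here is one more hedged sketch, a hypothetical `HalveLRCallback` that halves the learning rate after every epoch using only those documented attributes:

```python
from fastNLP import Callback, logger

class HalveLRCallback(Callback):
    """Multiply every param group's learning rate by 0.5 after each epoch."""
    def on_epoch_end(self):
        for group in self.optimizer.param_groups:  # the Trainer's optimizer
            group['lr'] *= 0.5
        logger.info('Epoch %d/%d done, lr now %s', self.epoch, self.n_epochs,
                    [g['lr'] for g in self.optimizer.param_groups])

# Usage mirrors the tutorial:
# train_with_callback([HalveLRCallback(), MyCallBack()])
```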
Step:600/900: \n", - "AccuracyMetric: acc=0.8775\n", - "\n", - "Avg loss at epoch 2, 0.183511\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=38), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.13 seconds!\n", - "Evaluation on dev at Epoch 3/3. Step:900/900: \n", - "AccuracyMetric: acc=0.875833\n", - "\n", - "Avg loss at epoch 3, 0.257103\n", - "\r\n", - "In Epoch:1/Step:300, got best dev performance:\n", - "AccuracyMetric: acc=0.883333\n", - "Reloaded the best model.\n" - ] - } - ], - "source": [ - "from fastNLP import Callback\n", - "from fastNLP import logger\n", - "\n", - "class MyCallBack(Callback):\n", - " \"\"\"Print average loss in each epoch\"\"\"\n", - " def __init__(self):\n", - " super().__init__()\n", - " self.total_loss = 0\n", - " self.start_step = 0\n", - " \n", - " def on_backward_begin(self, loss):\n", - " self.total_loss += loss.item()\n", - " \n", - " def on_epoch_end(self):\n", - " n_steps = self.step - self.start_step\n", - " avg_loss = self.total_loss / n_steps\n", - " logger.info('Avg loss at epoch %d, %.6f', self.epoch, avg_loss)\n", - " self.start_step = self.step\n", - "\n", - "callbacks = [MyCallBack()]\n", - "train_with_callback(callbacks)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.3" - }, - "varInspector": { - "cols": { - "lenName": 16, - "lenType": 16, - "lenVar": 40 - }, - "kernels_config": { - "python": { - "delete_cmd_postfix": "", - "delete_cmd_prefix": "del ", - "library": "var_list.py", - "varRefreshCmd": "print(var_dic_list())" - }, - "r": { - "delete_cmd_postfix": ") ", - "delete_cmd_prefix": "rm(", - "library": "var_list.r", - "varRefreshCmd": "cat(var_dic_list()) " - } - }, - "types_to_exclude": [ - "module", - "function", - "builtin_function_or_method", - "instance", - "_Feature" - ], - "window_display": false - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/tutorials/序列标注.ipynb b/tutorials/序列标注.ipynb deleted file mode 100644 index 15118708..00000000 --- a/tutorials/序列标注.ipynb +++ /dev/null @@ -1,912 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 序列标注\n", - "\n", - "这一部分的内容主要展示如何使用fastNLP实现序列标注(Sequence labeling)任务。您可以使用fastNLP的各个组件快捷,方便地完成序列标注任务,达到出色的效果。 在阅读这篇教程前,希望您已经熟悉了fastNLP的基础使用,尤其是数据的载入以及模型的构建,通过这个小任务的能让您进一步熟悉fastNLP的使用。\n", - "\n", - "## 命名实体识别(name entity recognition, NER)\n", - "\n", - "命名实体识别任务是从文本中抽取出具有特殊意义或者指代性非常强的实体,通常包括人名、地名、机构名和时间等。 如下面的例子中\n", - "\n", - "*我来自复旦大学*\n", - "\n", - "其中“复旦大学”就是一个机构名,命名实体识别就是要从中识别出“复旦大学”这四个字是一个整体,且属于机构名这个类别。这个问题在实际做的时候会被 转换为序列标注问题\n", - "\n", - "针对\"我来自复旦大学\"这句话,我们的预测目标将是[O, O, O, B-ORG, I-ORG, I-ORG, I-ORG],其中O表示out,即不是一个实体,B-ORG是ORG( organization的缩写)这个类别的开头(Begin),I-ORG是ORG类别的中间(Inside)。\n", - "\n", - "在本tutorial中我们将通过fastNLP尝试写出一个能够执行以上任务的模型。\n", - "\n", - "## 载入数据\n", - "\n", - 
"fastNLP的数据载入主要是由Loader与Pipe两个基类衔接完成的,您可以通过《使用Loader和Pipe处理数据》了解如何使用fastNLP提供的数据加载函数。下面我们以微博命名实体任务来演示一下在fastNLP进行序列标注任务。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+-----------------------------------+-----------------------------------+-----------------------------------+---------+\n", - "| raw_chars | target | chars | seq_len |\n", - "+-----------------------------------+-----------------------------------+-----------------------------------+---------+\n", - "| ['科', '技', '全', '方', '位',... | [0, 0, 0, 0, 0, 0, 0, 0, 0, 0,... | [792, 1015, 156, 198, 291, 714... | 26 |\n", - "| ['对', ',', '输', '给', '一',... | [0, 0, 0, 0, 0, 0, 3, 1, 0, 0,... | [123, 2, 1205, 115, 8, 24, 101... | 15 |\n", - "+-----------------------------------+-----------------------------------+-----------------------------------+---------+\n" - ] - } - ], - "source": [ - "from fastNLP.io import WeiboNERPipe\n", - "data_bundle = WeiboNERPipe().process_from_file()\n", - "print(data_bundle.get_dataset('train')[:2])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 模型构建\n", - "\n", - "首先选择需要使用的Embedding类型。关于Embedding的相关说明可以参见《使用Embedding模块将文本转成向量》。 在这里我们使用通过word2vec预训练的中文汉字embedding。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Found 3321 out of 3471 words in the pre-training embedding.\n" - ] - } - ], - "source": [ - "from fastNLP.embeddings import StaticEmbedding\n", - "\n", - "embed = StaticEmbedding(vocab=data_bundle.get_vocab('chars'), model_dir_or_name='cn-char-fastnlp-100d')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "选择好Embedding之后,我们可以使用fastNLP中自带的 fastNLP.models.BiLSTMCRF 作为模型。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.models import BiLSTMCRF\n", - "\n", - "data_bundle.rename_field('chars', 'words') # 这是由于BiLSTMCRF模型的forward函数接受的words,而不是chars,所以需要把这一列重新命名\n", - "model = BiLSTMCRF(embed=embed, num_classes=len(data_bundle.get_vocab('target')), num_layers=1, hidden_size=200, dropout=0.5,\n", - " target_vocab=data_bundle.get_vocab('target'))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 进行训练\n", - "下面我们选择用来评估模型的metric,以及优化用到的优化函数。" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP import SpanFPreRecMetric\n", - "from torch.optim import Adam\n", - "from fastNLP import LossInForward\n", - "\n", - "metric = SpanFPreRecMetric(tag_vocab=data_bundle.get_vocab('target'))\n", - "optimizer = Adam(model.parameters(), lr=1e-2)\n", - "loss = LossInForward()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "使用Trainer进行训练, 您可以通过修改 device 的值来选择显卡。" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26]) \n", - "target fields after batch(if batch size is 2):\n", - "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, 
(3)shape:torch.Size([2, 26]) \n", - "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2020-02-27-13-53-24\n" - ] - },
- { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.89 seconds!\n", - "Evaluation on dev at Epoch 1/10. Step:43/430: \n", - "SpanFPreRecMetric: f=0.067797, pre=0.192771, rec=0.041131\n", - "\n" - ] - },
- { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.9 seconds!\n", - "Evaluation on dev at Epoch 2/10. Step:86/430: \n", - "SpanFPreRecMetric: f=0.344086, pre=0.568047, rec=0.246787\n", - "\n" - ] - },
- { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.88 seconds!\n", - "Evaluation on dev at Epoch 3/10. Step:129/430: \n", - "SpanFPreRecMetric: f=0.446701, pre=0.653465, rec=0.339332\n", - "\n" - ] - },
- { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.81 seconds!\n", - "Evaluation on dev at Epoch 4/10. Step:172/430: \n", - "SpanFPreRecMetric: f=0.479871, pre=0.642241, rec=0.383033\n", - "\n" - ] - },
- { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.91 seconds!\n", - "Evaluation on dev at Epoch 5/10. Step:215/430: \n", - "SpanFPreRecMetric: f=0.486312, pre=0.650862, rec=0.388175\n", - "\n" - ] - },
- { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.87 seconds!\n", - "Evaluation on dev at Epoch 6/10. Step:258/430: \n", - "SpanFPreRecMetric: f=0.541401, pre=0.711297, rec=0.437018\n", - "\n" - ] - },
- { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.86 seconds!\n", - "Evaluation on dev at Epoch 7/10. Step:301/430: \n", - "SpanFPreRecMetric: f=0.430335, pre=0.685393, rec=0.313625\n", - "\n" - ] - },
- { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.82 seconds!\n", - "Evaluation on dev at Epoch 8/10. Step:344/430: \n", - "SpanFPreRecMetric: f=0.477759, pre=0.665138, rec=0.372751\n", - "\n" - ] - },
- { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.81 seconds!\n", - "Evaluation on dev at Epoch 9/10. Step:387/430: \n", - "SpanFPreRecMetric: f=0.500759, pre=0.611111, rec=0.424165\n", - "\n" - ] - },
- { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluate data in 0.8 seconds!\n", - "Evaluation on dev at Epoch 10/10. 
In Epoch:6/Step:258, got best dev performance:
SpanFPreRecMetric: f=0.541401, pre=0.711297, rec=0.437018
Reloaded the best model.

```
{'best_eval': {'SpanFPreRecMetric': {'f': 0.541401, 'pre': 0.711297, 'rec': 0.437018}},
 'best_epoch': 6,
 'best_step': 258,
 'seconds': 121.39}
```

The cell that produced the training run above:

```python
from fastNLP import Trainer
import torch

device = 0 if torch.cuda.is_available() else 'cpu'
trainer = Trainer(data_bundle.get_dataset('train'), model, loss=loss, optimizer=optimizer,
                  dev_data=data_bundle.get_dataset('dev'), metrics=metric, device=device)
trainer.train()
```

## Testing
Once training has finished, the Tester can be used to evaluate the model's performance on the test set:

```python
from fastNLP import Tester
tester = Tester(data_bundle.get_dataset('test'), model, metrics=metric)
tester.test()
```

```
Evaluate data in 1.54 seconds!
[tester] 
SpanFPreRecMetric: f=0.439024, pre=0.685279, rec=0.322967

{'SpanFPreRecMetric': {'f': 0.439024, 'pre': 0.685279, 'rec': 0.322967}}
```

## Sequence labeling with the stronger BERT

To use BERT for a task in fastNLP, you only need to swap fastNLP.embeddings.StaticEmbedding for fastNLP.embeddings.BertEmbedding (the device argument can be changed to select a GPU).
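Concretely, the swap is a single line. The full runnable cell appears after the training log below; in isolation, and assuming the data_bundle and training setup from the cells above, the change looks like this:

```python
# before: static character embeddings (the embedding used earlier in this tutorial)
# embed = StaticEmbedding(vocab=data_bundle.get_vocab('words'), model_dir_or_name=...)

# after: contextual BERT embeddings -- model, loss, metric and Trainer stay unchanged
from fastNLP.embeddings import BertEmbedding
embed = BertEmbedding(vocab=data_bundle.get_vocab('words'), model_dir_or_name='cn')
```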
Output of the full BERT cell (source shown after the log):

```
loading vocabulary file /remote-home/ynzheng/.fastNLP/embedding/bert-chinese-wwm/vocab.txt
Load pre-trained BERT parameters from file /remote-home/ynzheng/.fastNLP/embedding/bert-chinese-wwm/chinese_wwm_pytorch.bin.
Start to generate word pieces for word.
Found(Or segment into word pieces) 3384 words out of 3471.
input fields after batch(if batch size is 2):
	target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26])
	seq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2])
	words: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26])
target fields after batch(if batch size is 2):
	target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26])
	seq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2])

training epochs started 2020-02-27-13-58-51
Evaluate data in 2.7 seconds!
Evaluation on dev at Epoch 1/10. Step:113/1130:
SpanFPreRecMetric: f=0.008114, pre=0.019231, rec=0.005141
Evaluate data in 2.49 seconds!
Evaluation on dev at Epoch 2/10. Step:226/1130:
SpanFPreRecMetric: f=0.467866, pre=0.467866, rec=0.467866
Evaluate data in 2.6 seconds!
Evaluation on dev at Epoch 3/10. Step:339/1130:
SpanFPreRecMetric: f=0.566879, pre=0.482821, rec=0.686375
Evaluate data in 2.56 seconds!
Evaluation on dev at Epoch 4/10. Step:452/1130:
SpanFPreRecMetric: f=0.651972, pre=0.59408, rec=0.722365
Evaluate data in 2.69 seconds!
Evaluation on dev at Epoch 5/10. Step:565/1130:
SpanFPreRecMetric: f=0.640909, pre=0.574338, rec=0.724936
Evaluate data in 2.52 seconds!
Evaluation on dev at Epoch 6/10. Step:678/1130:
SpanFPreRecMetric: f=0.661836, pre=0.624146, rec=0.70437
Evaluate data in 2.67 seconds!
Evaluation on dev at Epoch 7/10. Step:791/1130:
SpanFPreRecMetric: f=0.683429, pre=0.615226, rec=0.768638
Evaluate data in 2.37 seconds!
Evaluation on dev at Epoch 8/10. Step:904/1130:
SpanFPreRecMetric: f=0.674699, pre=0.634921, rec=0.719794
Evaluate data in 2.42 seconds!
Evaluation on dev at Epoch 9/10. Step:1017/1130:
SpanFPreRecMetric: f=0.693878, pre=0.650901, rec=0.742931
Evaluate data in 2.46 seconds!
Evaluation on dev at Epoch 10/10. Step:1130/1130:
SpanFPreRecMetric: f=0.686845, pre=0.62766, rec=0.758355
```
```
In Epoch:9/Step:1017, got best dev performance:
SpanFPreRecMetric: f=0.693878, pre=0.650901, rec=0.742931
Reloaded the best model.
Evaluate data in 1.96 seconds!
[tester] 
SpanFPreRecMetric: f=0.626561, pre=0.596112, rec=0.660287

{'SpanFPreRecMetric': {'f': 0.626561, 'pre': 0.596112, 'rec': 0.660287}}
```

The cell that produced the whole BERT run above:

```python
from fastNLP.io import WeiboNERPipe
data_bundle = WeiboNERPipe().process_from_file()
data_bundle.rename_field('chars', 'words')

from fastNLP.embeddings import BertEmbedding
embed = BertEmbedding(vocab=data_bundle.get_vocab('words'), model_dir_or_name='cn')
model = BiLSTMCRF(embed=embed, num_classes=len(data_bundle.get_vocab('target')), num_layers=1, hidden_size=200, dropout=0.5,
                  target_vocab=data_bundle.get_vocab('target'))

from fastNLP import SpanFPreRecMetric
from torch.optim import Adam
from fastNLP import LossInForward
metric = SpanFPreRecMetric(tag_vocab=data_bundle.get_vocab('target'))
optimizer = Adam(model.parameters(), lr=2e-5)
loss = LossInForward()

from fastNLP import Trainer
import torch
device = 5 if torch.cuda.is_available() else 'cpu'
trainer = Trainer(data_bundle.get_dataset('train'), model, loss=loss, optimizer=optimizer, batch_size=12,
                  dev_data=data_bundle.get_dataset('dev'), metrics=metric, device=device)
trainer.train()

from fastNLP import Tester
tester = Tester(data_bundle.get_dataset('test'), model, metrics=metric)
tester.test()
```
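A note on LossInForward, which the cell above passes instead of an explicit loss function: it tells the Trainer to read the loss from the dict returned by the model's forward(), which suits models such as BiLSTMCRF whose CRF layer computes its own negative log-likelihood. A minimal toy model honoring that contract might look like the sketch below (illustrative names only, not part of fastNLP):

```python
import torch
from torch import nn

class ToyLossInForwardModel(nn.Module):
    """Illustrative only: shows the contract that LossInForward relies on."""
    def __init__(self, vocab_size, num_classes, dim=16):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, dim)
        self.fc = nn.Linear(dim, num_classes)
        self.ce = nn.CrossEntropyLoss()

    def forward(self, words, target, seq_len):
        logits = self.fc(self.embed(words))             # [batch, max_len, num_classes]
        loss = self.ce(logits.transpose(1, 2), target)  # compute the loss inside forward()
        return {'loss': loss}                           # LossInForward reads this key
```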
"cell_type": "markdown", - "metadata": {}, - "source": [ - "## 步骤\n", - "一共有以下的几个步骤 \n", - "(1) 读取数据 \n", - "(2) 预处理数据 \n", - "(3) 选择预训练词向量 \n", - "(4) 创建模型 \n", - "(5) 训练模型 " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### (1) 读取数据\n", - "fastNLP提供多种数据的自动下载与自动加载功能,对于这里我们要用到的数据,我们可以用\\ref{Loader}自动下载并加载该数据。更多有关Loader的使用可以参考\\ref{Loader}" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.io import ChnSentiCorpLoader\n", - "\n", - "loader = ChnSentiCorpLoader() # 初始化一个中文情感分类的loader\n", - "data_dir = loader.download() # 这一行代码将自动下载数据到默认的缓存地址, 并将该地址返回\n", - "data_bundle = loader.load(data_dir) # 这一行代码将从{data_dir}处读取数据至DataBundle" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "DataBundle的相关介绍,可以参考\\ref{}。我们可以打印该data_bundle的基本信息。" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "In total 3 datasets:\n", - "\tdev has 1200 instances.\n", - "\ttrain has 9600 instances.\n", - "\ttest has 1200 instances.\n", - "In total 0 vocabs:\n", - "\n" - ] - } - ], - "source": [ - "print(data_bundle)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "可以看出,该data_bundle中一个含有三个\\ref{DataSet}。通过下面的代码,我们可以查看DataSet的基本情况" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "DataSet({'raw_chars': 选择珠江花园的原因就是方便,有电动扶梯直接到达海边,周围餐馆、食廊、商场、超市、摊位一应俱全。酒店装修一般,但还算整洁。 泳池在大堂的屋顶,因此很小,不过女儿倒是喜欢。 包的早餐是西式的,还算丰富。 服务吗,一般 type=str,\n", - "'target': 1 type=str},\n", - "{'raw_chars': 15.4寸笔记本的键盘确实爽,基本跟台式机差不多了,蛮喜欢数字小键盘,输数字特方便,样子也很美观,做工也相当不错 type=str,\n", - "'target': 1 type=str})\n" - ] - } - ], - "source": [ - "print(data_bundle.get_dataset('train')[:2]) # 查看Train集前两个sample" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### (2) 预处理数据\n", - "在NLP任务中,预处理一般包括: (a)将一整句话切分成汉字或者词; (b)将文本转换为index \n", - "\n", - "fastNLP中也提供了多种数据集的处理类,这里我们直接使用fastNLP的ChnSentiCorpPipe。更多关于Pipe的说明可以参考\\ref{Pipe}。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.io import ChnSentiCorpPipe\n", - "\n", - "pipe = ChnSentiCorpPipe()\n", - "data_bundle = pipe.process(data_bundle) # 所有的Pipe都实现了process()方法,且输入输出都为DataBundle类型" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "In total 3 datasets:\n", - "\tdev has 1200 instances.\n", - "\ttrain has 9600 instances.\n", - "\ttest has 1200 instances.\n", - "In total 2 vocabs:\n", - "\tchars has 4409 entries.\n", - "\ttarget has 2 entries.\n", - "\n" - ] - } - ], - "source": [ - "print(data_bundle) # 打印data_bundle,查看其变化" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "可以看到除了之前已经包含的3个\\ref{DataSet}, 还新增了两个\\ref{Vocabulary}。我们可以打印DataSet中的内容" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "DataSet({'raw_chars': 选择珠江花园的原因就是方便,有电动扶梯直接到达海边,周围餐馆、食廊、商场、超市、摊位一应俱全。酒店装修一般,但还算整洁。 泳池在大堂的屋顶,因此很小,不过女儿倒是喜欢。 包的早餐是西式的,还算丰富。 服务吗,一般 type=str,\n", - "'target': 1 type=int,\n", - "'chars': [338, 464, 1400, 784, 468, 739, 3, 289, 151, 21, 5, 88, 143, 2, 9, 81, 134, 2573, 766, 233, 196, 23, 536, 342, 297, 2, 405, 698, 132, 281, 74, 744, 
```python
print(data_bundle)  # print data_bundle to see how it changed
```

```
In total 3 datasets:
	dev has 1200 instances.
	train has 9600 instances.
	test has 1200 instances.
In total 2 vocabs:
	chars has 4409 entries.
	target has 2 entries.
```

Besides the 3 \ref{DataSet}s we already had, two \ref{Vocabulary}s have been added. We can print the contents of the DataSet:

```python
print(data_bundle.get_dataset('train')[:2])
```

```
DataSet({'raw_chars': 选择珠江花园的原因就是方便,有电动扶梯直接到达海边,周围餐馆、食廊、商场、超市、摊位一应俱全。酒店装修一般,但还算整洁。 泳池在大堂的屋顶,因此很小,不过女儿倒是喜欢。 包的早餐是西式的,还算丰富。 服务吗,一般 type=str,
'target': 1 type=int,
'chars': [338, 464, 1400, 784, 468, 739, 3, 289, 151, 21, 5, 88, 143, 2, 9, 81, 134, 2573, 766, 233, 196, 23, 536, 342, 297, 2, 405, 698, 132, 281, 74, 744, 1048, 74, 420, 387, 74, 412, 433, 74, 2021, 180, 8, 219, 1929, 213, 4, 34, 31, 96, 363, 8, 230, 2, 66, 18, 229, 331, 768, 4, 11, 1094, 479, 17, 35, 593, 3, 1126, 967, 2, 151, 245, 12, 44, 2, 6, 52, 260, 263, 635, 5, 152, 162, 4, 11, 336, 3, 154, 132, 5, 236, 443, 3, 2, 18, 229, 761, 700, 4, 11, 48, 59, 653, 2, 8, 230] type=list,
'seq_len': 106 type=int},
{'raw_chars': 15.4寸笔记本的键盘确实爽,基本跟台式机差不多了,蛮喜欢数字小键盘,输数字特方便,样子也很美观,做工也相当不错 type=str,
'target': 1 type=int,
'chars': [50, 133, 20, 135, 945, 520, 343, 24, 3, 301, 176, 350, 86, 785, 2, 456, 24, 461, 163, 443, 128, 109, 6, 47, 7, 2, 916, 152, 162, 524, 296, 44, 301, 176, 2, 1384, 524, 296, 259, 88, 143, 2, 92, 67, 26, 12, 277, 269, 2, 188, 223, 26, 228, 83, 6, 63] type=list,
'seq_len': 56 type=int})
```

A new chars column of index lists has been added, and the target column has been converted to integers. The names of these two new columns match the names of the two Vocabulary objects in the data_bundle exactly; let's print a Vocabulary to inspect its contents:

```python
char_vocab = data_bundle.get_vocab('chars')
print(char_vocab)
```

```
Vocabulary(['选', '择', '珠', '江', '花']...)
```

Vocabulary is a class that records the mapping between tokens and indices. For example:

```python
index = char_vocab.to_index('选')
print("The index of '选' is {}".format(index))  # matches the first index in the first instance's chars printed above
print("The character for index {} is {}".format(index, char_vocab.to_word(index)))
```

```
The index of '选' is 338
The character for index 338 is 选
```

### (3) Choose pretrained embeddings
Since pretrained models such as word2vec, GloVe, ELMo and BERT can improve model performance, choosing suitable pretrained embeddings before training a task matters. fastNLP provides several Embedding classes that make loading these pretrained models convenient; see \ref{Embedding}. Here we first give an example using word2vec character embeddings pretrained on Chinese, and later one using BERT for text classification. The pretrained vectors used here are 'cn-char-fastnlp-100d'; fastNLP downloads the embedding to a local cache automatically, and supports selecting an Embedding by name (see \ref{Embedding}).

```python
from fastNLP.embeddings import StaticEmbedding

word2vec_embed = StaticEmbedding(char_vocab, model_dir_or_name='cn-char-fastnlp-100d')
```

```
Found 4321 out of 4409 words in the pre-training embedding.
```
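As a quick sanity check (illustrative, not part of the original tutorial): a fastNLP embedding behaves like an nn.Module that maps index tensors to vectors, so a [batch, seq_len] LongTensor of character indices should come back with an extra 100-dimensional axis for this 100-dimensional embedding:

```python
import torch

dummy = torch.LongTensor([[338, 464, 1400]])   # indices of '选', '择', '珠' from the output above
print(word2vec_embed(dummy).shape)             # expected: torch.Size([1, 3, 100])
```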
### (4) Build the model
The structure of the model we use here is shown below (figure to be added).

```python
from torch import nn
from fastNLP.modules import LSTM
import torch

# define the model
class BiLSTMMaxPoolCls(nn.Module):
    def __init__(self, embed, num_classes, hidden_size=400, num_layers=1, dropout=0.3):
        super().__init__()
        self.embed = embed

        self.lstm = LSTM(self.embed.embedding_dim, hidden_size=hidden_size//2, num_layers=num_layers,
                         batch_first=True, bidirectional=True)
        self.dropout_layer = nn.Dropout(dropout)
        self.fc = nn.Linear(hidden_size, num_classes)

    def forward(self, chars, seq_len):  # argument names must match the DataSet's fields: our DataSet has chars, so this must be chars
        # chars: [batch_size, max_len]
        # seq_len: [batch_size, ]
        chars = self.embed(chars)
        outputs, _ = self.lstm(chars, seq_len)
        outputs = self.dropout_layer(outputs)
        outputs, _ = torch.max(outputs, dim=1)
        outputs = self.fc(outputs)

        return {'pred': outputs}  # [batch_size, num_classes]; the return value must be a dict, and the prediction key is conventionally 'pred'

# instantiate the model
model = BiLSTMMaxPoolCls(word2vec_embed, len(data_bundle.get_vocab('target')))
```
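Before wiring the model into a Trainer, it can help to push one fake batch through it and confirm the contract described in the comments: forward() takes the chars and seq_len fields and returns {'pred': ...} of shape [batch_size, num_classes]. A small illustrative check (not part of the original tutorial):

```python
import torch

fake_chars = torch.LongTensor([[338, 464, 1400, 784], [50, 133, 20, 135]])  # indices borrowed from the samples above
fake_seq_len = torch.LongTensor([4, 4])
out = model(fake_chars, fake_seq_len)
print(out['pred'].shape)  # expected: torch.Size([2, 2]) -- batch of 2, two target classes
```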
### (5) Train the model
fastNLP provides a Trainer object to organize the training process: it computes the loss (so a loss type must be given when initializing the Trainer), performs gradient updates (so an optimizer must be provided), and validates performance on the dev set (so a Metric is needed at initialization).

```python
from fastNLP import Trainer
from fastNLP import CrossEntropyLoss
from torch.optim import Adam
from fastNLP import AccuracyMetric

loss = CrossEntropyLoss()
optimizer = Adam(model.parameters(), lr=0.001)
metric = AccuracyMetric()
device = 0 if torch.cuda.is_available() else 'cpu'  # run on GPU if one is available; training is much faster

trainer = Trainer(train_data=data_bundle.get_dataset('train'), model=model, loss=loss,
                  optimizer=optimizer, batch_size=32, dev_data=data_bundle.get_dataset('dev'),
                  metrics=metric, device=device)
trainer.train()  # start training; when it finishes, the model that performed best on dev is loaded by default

# evaluate the model on the test set
from fastNLP import Tester
print("Performance on test is:")
tester = Tester(data=data_bundle.get_dataset('test'), model=model, metrics=metric, batch_size=64, device=device)
tester.test()
```

```
input fields after batch(if batch size is 2):
	target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2])
	chars: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 106])
	seq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2])
target fields after batch(if batch size is 2):
	target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2])
	seq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2])

Evaluate data in 0.01 seconds!
training epochs started 2019-09-03-23-57-10
Evaluate data in 0.43 seconds!
Evaluation on dev at Epoch 1/10. Step:300/3000:
AccuracyMetric: acc=0.81
Evaluate data in 0.44 seconds!
Evaluation on dev at Epoch 2/10. Step:600/3000:
AccuracyMetric: acc=0.8675
Evaluate data in 0.44 seconds!
Evaluation on dev at Epoch 3/10. Step:900/3000:
AccuracyMetric: acc=0.878333
Evaluate data in 0.43 seconds!
Evaluation on dev at Epoch 4/10. Step:1200/3000:
AccuracyMetric: acc=0.873333
Evaluate data in 0.44 seconds!
Evaluation on dev at Epoch 5/10. Step:1500/3000:
AccuracyMetric: acc=0.878333
Evaluate data in 0.42 seconds!
Evaluation on dev at Epoch 6/10. Step:1800/3000:
AccuracyMetric: acc=0.895833
Evaluate data in 0.44 seconds!
Evaluation on dev at Epoch 7/10. Step:2100/3000:
AccuracyMetric: acc=0.8975
Evaluate data in 0.43 seconds!
Evaluation on dev at Epoch 8/10. Step:2400/3000:
AccuracyMetric: acc=0.894167
Evaluate data in 0.48 seconds!
Evaluation on dev at Epoch 9/10. Step:2700/3000:
AccuracyMetric: acc=0.8875
Evaluate data in 0.43 seconds!
Evaluation on dev at Epoch 10/10. Step:3000/3000:
AccuracyMetric: acc=0.895833

In Epoch:7/Step:2100, got best dev performance:
AccuracyMetric: acc=0.8975
Reloaded the best model.
Evaluate data in 0.34 seconds!
[tester] 
AccuracyMetric: acc=0.8975

{'AccuracyMetric': {'acc': 0.8975}}
```
### Text classification with BERT
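The cell below freezes BERT (requires_grad=False) so that only the BiLSTM and classifier on top are trained, which keeps the demonstration affordable. One quick, illustrative way to verify what is actually being trained, after building the model, is to count trainable parameters (plain PyTorch, not part of the original tutorial):

```python
n_trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
n_total = sum(p.numel() for p in model.parameters())
print(f'trainable parameters: {n_trainable:,} / total: {n_total:,}')  # the frozen BERT weights only show up in the total
```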
```python
# we only need to swap the Embedding
from fastNLP.embeddings import BertEmbedding

# to keep the demonstration quick, BERT's weights are not updated here
bert_embed = BertEmbedding(char_vocab, model_dir_or_name='cn', auto_truncate=True, requires_grad=False)
model = BiLSTMMaxPoolCls(bert_embed, len(data_bundle.get_vocab('target')))

import torch
from fastNLP import Trainer
from fastNLP import CrossEntropyLoss
from torch.optim import Adam
from fastNLP import AccuracyMetric

loss = CrossEntropyLoss()
optimizer = Adam(model.parameters(), lr=2e-5)
metric = AccuracyMetric()
device = 0 if torch.cuda.is_available() else 'cpu'  # run on GPU if one is available; training is much faster

trainer = Trainer(train_data=data_bundle.get_dataset('train'), model=model, loss=loss,
                  optimizer=optimizer, batch_size=16, dev_data=data_bundle.get_dataset('test'),
                  metrics=metric, device=device, n_epochs=3)
trainer.train()  # start training; when it finishes, the model that performed best on dev is loaded by default

# evaluate the model on the test set
from fastNLP import Tester
print("Performance on test is:")
tester = Tester(data=data_bundle.get_dataset('test'), model=model, metrics=metric, batch_size=64, device=device)
tester.test()
```

```
loading vocabulary file /home/yh/.fastNLP/embedding/bert-chinese-wwm/vocab.txt
Load pre-trained BERT parameters from file /home/yh/.fastNLP/embedding/bert-chinese-wwm/chinese_wwm_pytorch.bin.
Start to generate word pieces for word.
Found(Or segment into word pieces) 4286 words out of 4409.
input fields after batch(if batch size is 2):
	target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2])
	chars: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 106])
	seq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2])
target fields after batch(if batch size is 2):
	target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2])
	seq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2])

Evaluate data in 0.05 seconds!
training epochs started 2019-09-04-00-02-37
Evaluate data in 15.89 seconds!
Evaluation on dev at Epoch 1/3. Step:1200/3600:
AccuracyMetric: acc=0.9
Evaluate data in 15.92 seconds!
Evaluation on dev at Epoch 2/3. Step:2400/3600:
AccuracyMetric: acc=0.904167
Evaluate data in 15.91 seconds!
Evaluation on dev at Epoch 3/3. Step:3600/3600:
AccuracyMetric: acc=0.918333

In Epoch:3/Step:3600, got best dev performance:
AccuracyMetric: acc=0.918333
Reloaded the best model.
Performance on test is:
Evaluate data in 29.24 seconds!
[tester] 
AccuracyMetric: acc=0.919167

{'AccuracyMetric': {'acc': 0.919167}}
```